reload1.c (new_spill_reg): Improve fixed or forbidden register spill error message.
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
172
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N; nonzero means we may not delete stores
208    into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
261
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
267
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
270
271 int reload_in_progress = 0;
272
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
276
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
279
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
283
284 struct obstack reload_obstack;
285 char *reload_firstobj;
286
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
289
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
292
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
295 \f
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
300
301 static struct elim_table
302 {
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
320
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
324
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
330
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
332
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
337
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
340
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
347
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
350
351 /* Number of labels in the current function. */
352
353 static int num_labels;
354
355 struct hard_reg_n_uses { int regno; int uses; };
356 \f
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static int reload_cse_simplify_set PROTO((rtx, rtx));
403 static int reload_cse_simplify_operands PROTO((rtx));
404 static void reload_cse_check_clobber PROTO((rtx, rtx));
405 static void reload_cse_record_set PROTO((rtx, rtx));
406 static void reload_cse_delete_death_notes PROTO((rtx));
407 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
408 \f
409 /* Initialize the reload pass once per compilation. */
410
411 void
412 init_reload ()
413 {
414 register int i;
415
416 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
417 Set spill_indirect_levels to the number of levels such addressing is
418 permitted, zero if it is not permitted at all. */
419
420 register rtx tem
421 = gen_rtx (MEM, Pmode,
422 gen_rtx (PLUS, Pmode,
423 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
424 GEN_INT (4)));
425 spill_indirect_levels = 0;
426
427 while (memory_address_p (QImode, tem))
428 {
429 spill_indirect_levels++;
430 tem = gen_rtx (MEM, Pmode, tem);
431 }
432
433 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
434
435 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
436 indirect_symref_ok = memory_address_p (QImode, tem);
437
438 /* See if reg+reg is a valid (and offsettable) address. */
439
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
441 {
442 tem = gen_rtx (PLUS, Pmode,
443 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
444 gen_rtx (REG, Pmode, i));
445 /* This way, we make sure that reg+reg is an offsettable address. */
446 tem = plus_constant (tem, 4);
447
448 if (memory_address_p (QImode, tem))
449 {
450 double_reg_address_ok = 1;
451 break;
452 }
453 }
454
455 /* Initialize obstack for our rtl allocation. */
456 gcc_obstack_init (&reload_obstack);
457 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
458
459 /* Decide which register class should be used when reloading
460 addresses. If we are using SMALL_REGISTER_CLASSES, and any
461 parameters are passed in registers, then we do not want to use
462 those registers when reloading an address. Otherwise, if a
463 function argument needs a reload, we may wind up clobbering
464 another argument to the function which was already computed. If
465 we find a subset class which simply avoids those registers, we
466 use it instead. ??? It would be better to only use the
467 restricted class when we actually are loading function arguments,
468 but that is hard to determine. */
469 reload_address_base_reg_class = BASE_REG_CLASS;
470 reload_address_index_reg_class = INDEX_REG_CLASS;
471 if (SMALL_REGISTER_CLASSES)
472 {
473 int regno;
474 HARD_REG_SET base, index;
475 enum reg_class *p;
476
477 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
478 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
479 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
480 {
481 if (FUNCTION_ARG_REGNO_P (regno))
482 {
483 CLEAR_HARD_REG_BIT (base, regno);
484 CLEAR_HARD_REG_BIT (index, regno);
485 }
486 }
487
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
489 baseok);
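	  /* BASE_REG_CLASS contains an argument register; look for a subclass
	     whose contents match the pruned set and prefer it for reloading
	     base registers.  */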
490 for (p = reg_class_subclasses[BASE_REG_CLASS];
491 *p != LIM_REG_CLASSES;
492 p++)
493 {
494 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
495 continue;
496 usebase:
497 reload_address_base_reg_class = *p;
498 break;
499 }
500 baseok:;
501
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
503 indexok);
504 for (p = reg_class_subclasses[INDEX_REG_CLASS];
505 *p != LIM_REG_CLASSES;
506 p++)
507 {
508 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
509 continue;
510 useindex:
511 reload_address_index_reg_class = *p;
512 break;
513 }
514 indexok:;
515 }
516 }
517
518 /* Main entry point for the reload pass.
519
520 FIRST is the first insn of the function being compiled.
521
522 GLOBAL nonzero means we were called from global_alloc
523 and should attempt to reallocate any pseudoregs that we
524 displace from hard regs we will use for reloads.
525 If GLOBAL is zero, we do not have enough information to do that,
526 so any pseudo reg that is spilled must go to the stack.
527
528 DUMPFILE is the global-reg debugging dump file stream, or 0.
529 If it is nonzero, messages are written to it to describe
530 which registers are seized as reload regs, which pseudo regs
531 are spilled from them, and where the pseudo regs are reallocated to.
532
533 Return value is nonzero if reload failed
534 and we must not do any more for this function. */
535
536 int
537 reload (first, global, dumpfile)
538 rtx first;
539 int global;
540 FILE *dumpfile;
541 {
542 register int class;
543 register int i, j, k;
544 register rtx insn;
545 register struct elim_table *ep;
546
547 /* The two pointers used to track the true location of the memory used
548 for label offsets. */
549 char *real_known_ptr = NULL_PTR;
550 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
551
552 int something_changed;
553 int something_needs_reloads;
554 int something_needs_elimination;
555 int new_basic_block_needs;
556 enum reg_class caller_save_spill_class = NO_REGS;
557 int caller_save_group_size = 1;
558
559 /* Nonzero means we couldn't get enough spill regs. */
560 int failure = 0;
561
562 /* The basic block number currently being processed for INSN. */
563 int this_block;
564
565 /* Make sure even insns with volatile mem refs are recognizable. */
566 init_recog ();
567
568 /* Enable find_equiv_reg to distinguish insns made by reload. */
569 reload_first_uid = get_max_uid ();
570
571 for (i = 0; i < N_REG_CLASSES; i++)
572 basic_block_needs[i] = 0;
573
574 #ifdef SECONDARY_MEMORY_NEEDED
575 /* Initialize the secondary memory table. */
576 clear_secondary_mem ();
577 #endif
578
579 /* Remember which hard regs appear explicitly
580 before we merge into `regs_ever_live' the ones in which
581 pseudo regs have been allocated. */
582 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
583
584 /* We don't have a stack slot for any spill reg yet. */
585 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
586 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
587
588 /* Initialize the save area information for caller-save, in case some
589 are needed. */
590 init_save_areas ();
591
592 /* Compute which hard registers are now in use
593 as homes for pseudo registers.
594 This is done here rather than (eg) in global_alloc
595 because this point is reached even if not optimizing. */
596 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
597 mark_home_live (i);
598
599 /* A function that receives a nonlocal goto must save all call-saved
600 registers. */
601 if (current_function_has_nonlocal_label)
602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
603 {
604 if (! call_used_regs[i] && ! fixed_regs[i])
605 regs_ever_live[i] = 1;
606 }
607
608 for (i = 0; i < scratch_list_length; i++)
609 if (scratch_list[i])
610 mark_scratch_live (scratch_list[i]);
611
612 /* Make sure that the last insn in the chain
613 is not something that needs reloading. */
614 emit_note (NULL_PTR, NOTE_INSN_DELETED);
615
616 /* Find all the pseudo registers that didn't get hard regs
617 but do have known equivalent constants or memory slots.
618 These include parameters (known equivalent to parameter slots)
619 and cse'd or loop-moved constant memory addresses.
620
621 Record constant equivalents in reg_equiv_constant
622 so they will be substituted by find_reloads.
623 Record memory equivalents in reg_mem_equiv so they can
624 be substituted eventually by altering the REG-rtx's. */
625
626 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
628 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
630 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
631 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
632 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
633 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
634 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
635 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
636 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
637 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
638 cannot_omit_stores = (char *) alloca (max_regno);
639 bzero (cannot_omit_stores, max_regno);
640
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
643
644 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
645 Also find all paradoxical subregs and find largest such for each pseudo.
646 On machines with small register classes, record hard registers that
647 are used for user variables. These can never be used for spills.
648 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
649 caller-saved registers must be marked live. */
650
651 for (insn = first; insn; insn = NEXT_INSN (insn))
652 {
653 rtx set = single_set (insn);
654
655 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
656 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
658 if (! call_used_regs[i])
659 regs_ever_live[i] = 1;
660
661 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
662 {
663 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
664 if (note
665 #ifdef LEGITIMATE_PIC_OPERAND_P
666 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
667 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
668 #endif
669 )
670 {
671 rtx x = XEXP (note, 0);
672 i = REGNO (SET_DEST (set));
673 if (i > LAST_VIRTUAL_REGISTER)
674 {
675 if (GET_CODE (x) == MEM)
676 reg_equiv_memory_loc[i] = x;
677 else if (CONSTANT_P (x))
678 {
679 if (LEGITIMATE_CONSTANT_P (x))
680 reg_equiv_constant[i] = x;
681 else
682 reg_equiv_memory_loc[i]
683 = force_const_mem (GET_MODE (SET_DEST (set)), x);
684 }
685 else
686 continue;
687
688 /* If this register is being made equivalent to a MEM
689 and the MEM is not SET_SRC, the equivalencing insn
690 is one with the MEM as a SET_DEST and it occurs later.
691 So don't mark this insn now. */
692 if (GET_CODE (x) != MEM
693 || rtx_equal_p (SET_SRC (set), x))
694 reg_equiv_init[i] = insn;
695 }
696 }
697 }
698
699 /* If this insn is setting a MEM from a register equivalent to it,
700 this is the equivalencing insn. */
701 else if (set && GET_CODE (SET_DEST (set)) == MEM
702 && GET_CODE (SET_SRC (set)) == REG
703 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
704 && rtx_equal_p (SET_DEST (set),
705 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
706 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
707
708 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
709 scan_paradoxical_subregs (PATTERN (insn));
710 }
711
712 /* Does this function require a frame pointer? */
713
714 frame_pointer_needed = (! flag_omit_frame_pointer
715 #ifdef EXIT_IGNORE_STACK
716 /* ?? If EXIT_IGNORE_STACK is set, we will not save
717 and restore sp for alloca. So we can't eliminate
718 the frame pointer in that case. At some point,
719 we should improve this by emitting the
720 sp-adjusting insns for this case. */
721 || (current_function_calls_alloca
722 && EXIT_IGNORE_STACK)
723 #endif
724 || FRAME_POINTER_REQUIRED);
725
726 num_eliminable = 0;
727
728 /* Initialize the table of registers to eliminate. The way we do this
729 depends on how the eliminable registers were defined. */
730 #ifdef ELIMINABLE_REGS
731 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 {
733 ep->can_eliminate = ep->can_eliminate_previous
734 = (CAN_ELIMINATE (ep->from, ep->to)
735 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
736 }
737 #else
738 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
739 = ! frame_pointer_needed;
740 #endif
741
742 /* Count the number of eliminable registers and build the FROM and TO
743 REG rtx's. Note that code in gen_rtx will cause, e.g.,
744 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
745 We depend on this. */
746 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
747 {
748 num_eliminable += ep->can_eliminate;
749 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
750 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
751 }
752
753 num_labels = max_label_num () - get_first_label_num ();
754
755 /* Allocate the tables used to store offset information at labels. */
756 /* We used to use alloca here, but the size of what it would try to
757 allocate would occasionally cause it to exceed the stack limit and
758 cause a core dump. */
759 real_known_ptr = xmalloc (num_labels);
760 real_at_ptr
761 = (int (*)[NUM_ELIMINABLE_REGS])
762 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
763
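  /* Bias the pointers by the first label number so the tables can be indexed
     directly by a label's number, which need not start at zero.  */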
764 offsets_known_at = real_known_ptr - get_first_label_num ();
765 offsets_at
766 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
767
768 /* Alter each pseudo-reg rtx to contain its hard reg number.
769 Assign stack slots to the pseudos that lack hard regs or equivalents.
770 Do not touch virtual registers. */
771
772 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
773 alter_reg (i, -1);
774
775 /* If we have some registers we think can be eliminated, scan all insns to
776 see if there is an insn that sets one of these registers to something
777 other than itself plus a constant. If so, the register cannot be
778 eliminated. Doing this scan here eliminates an extra pass through the
779 main reload loop in the most common case where register elimination
780 cannot be done. */
781 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
782 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
783 || GET_CODE (insn) == CALL_INSN)
784 note_stores (PATTERN (insn), mark_not_eliminable);
785
786 #ifndef REGISTER_CONSTRAINTS
787 /* If all the pseudo regs have hard regs,
788 except for those that are never referenced,
789 we know that no reloads are needed. */
790 /* But that is not true if there are register constraints, since
791 in that case some pseudos might be in the wrong kind of hard reg. */
792
793 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
794 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
795 break;
796
797 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
798 {
799 free (real_known_ptr);
800 free (real_at_ptr);
801       return 0;
802 }
803 #endif
804
805 /* Compute the order of preference for hard registers to spill.
806 Store them by decreasing preference in potential_reload_regs. */
807
808 order_regs_for_reload (global);
809
810 /* So far, no hard regs have been spilled. */
811 n_spills = 0;
812 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
813 spill_reg_order[i] = -1;
814
815 /* Initialize to -1, which means take the first spill register. */
816 last_spill_reg = -1;
817
818 /* On most machines, we can't use any register explicitly used in the
819 rtl as a spill register. But on some, we have to. Those will have
820 taken care to keep the life of hard regs as short as possible. */
821
822 if (! SMALL_REGISTER_CLASSES)
823 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
824
825 /* Spill any hard regs that we know we can't eliminate. */
826 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
827 if (! ep->can_eliminate)
828 spill_hard_reg (ep->from, global, dumpfile, 1);
829
830 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
831 if (frame_pointer_needed)
832 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
833 #endif
834
835 if (global)
836 for (i = 0; i < N_REG_CLASSES; i++)
837 {
838 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
839 bzero (basic_block_needs[i], n_basic_blocks);
840 }
841
842 /* From now on, we need to emit any moves without making new pseudos. */
843 reload_in_progress = 1;
844
845 /* This loop scans the entire function each go-round
846 and repeats until one repetition spills no additional hard regs. */
847
848 /* This flag is set when a pseudo reg is spilled,
849 to require another pass. Note that getting an additional reload
850 reg does not necessarily imply any pseudo reg was spilled;
851 sometimes we find a reload reg that no pseudo reg was allocated in. */
852 something_changed = 1;
853 /* This flag is set if there are any insns that require reloading. */
854 something_needs_reloads = 0;
855 /* This flag is set if there are any insns that require register
856 eliminations. */
857 something_needs_elimination = 0;
858 while (something_changed)
859 {
860 rtx after_call = 0;
861
862 /* For each class, number of reload regs needed in that class.
863 This is the maximum over all insns of the needs in that class
864 of the individual insn. */
865 int max_needs[N_REG_CLASSES];
866 /* For each class, size of group of consecutive regs
867 that is needed for the reloads of this class. */
868 int group_size[N_REG_CLASSES];
869 /* For each class, max number of consecutive groups needed.
870 (Each group contains group_size[CLASS] consecutive registers.) */
871 int max_groups[N_REG_CLASSES];
872 /* For each class, max number needed of regs that don't belong
873 to any of the groups. */
874 int max_nongroups[N_REG_CLASSES];
875 /* For each class, the machine mode which requires consecutive
876 groups of regs of that class.
877 If two different modes ever require groups of one class,
878 they must be the same size and equally restrictive for that class,
879 otherwise we can't handle the complexity. */
880 enum machine_mode group_mode[N_REG_CLASSES];
881 /* Record the insn where each maximum need is first found. */
882 rtx max_needs_insn[N_REG_CLASSES];
883 rtx max_groups_insn[N_REG_CLASSES];
884 rtx max_nongroups_insn[N_REG_CLASSES];
885 rtx x;
886 HOST_WIDE_INT starting_frame_size;
887 int previous_frame_pointer_needed = frame_pointer_needed;
888 static char *reg_class_names[] = REG_CLASS_NAMES;
889
890 something_changed = 0;
891 bzero ((char *) max_needs, sizeof max_needs);
892 bzero ((char *) max_groups, sizeof max_groups);
893 bzero ((char *) max_nongroups, sizeof max_nongroups);
894 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
895 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
896 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
897 bzero ((char *) group_size, sizeof group_size);
898 for (i = 0; i < N_REG_CLASSES; i++)
899 group_mode[i] = VOIDmode;
900
901 /* Keep track of which basic blocks are needing the reloads. */
902 this_block = 0;
903
904 /* Remember whether any element of basic_block_needs
905 changes from 0 to 1 in this pass. */
906 new_basic_block_needs = 0;
907
908 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
909 here because the stack size may be a part of the offset computation
910 for register elimination, and there might have been new stack slots
911 created in the last iteration of this loop. */
912 assign_stack_local (BLKmode, 0, 0);
913
914 starting_frame_size = get_frame_size ();
915
916 /* Reset all offsets on eliminable registers to their initial values. */
917 #ifdef ELIMINABLE_REGS
918 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
919 {
920 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
921 ep->previous_offset = ep->offset
922 = ep->max_offset = ep->initial_offset;
923 }
924 #else
925 #ifdef INITIAL_FRAME_POINTER_OFFSET
926 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
927 #else
928 if (!FRAME_POINTER_REQUIRED)
929 abort ();
930 reg_eliminate[0].initial_offset = 0;
931 #endif
932 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
933 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
934 #endif
935
936 num_not_at_initial_offset = 0;
937
938 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
939
940 /* Set a known offset for each forced label to be at the initial offset
941 of each elimination. We do this because we assume that all
942 computed jumps occur from a location where each elimination is
943 at its initial offset. */
944
945 for (x = forced_labels; x; x = XEXP (x, 1))
946 if (XEXP (x, 0))
947 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
948
949 /* For each pseudo register that has an equivalent location defined,
950 try to eliminate any eliminable registers (such as the frame pointer)
951 assuming initial offsets for the replacement register, which
952 is the normal case.
953
954 If the resulting location is directly addressable, substitute
955 the MEM we just got directly for the old REG.
956
957 If it is not addressable but is a constant or the sum of a hard reg
958 and constant, it is probably not addressable because the constant is
959 out of range, in that case record the address; we will generate
960 hairy code to compute the address in a register each time it is
961 needed. Similarly if it is a hard register, but one that is not
962 valid as an address register.
963
964 If the location is not addressable, but does not have one of the
965 above forms, assign a stack slot. We have to do this to avoid the
966 potential of producing lots of reloads if, e.g., a location involves
967 a pseudo that didn't get a hard register and has an equivalent memory
968 location that also involves a pseudo that didn't get a hard register.
969
970 Perhaps at some point we will improve reload_when_needed handling
971 so this problem goes away. But that's very hairy. */
972
973 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
974 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
975 {
976 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
977
978 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
979 XEXP (x, 0)))
980 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
981 else if (CONSTANT_P (XEXP (x, 0))
982 || (GET_CODE (XEXP (x, 0)) == REG
983 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
984 || (GET_CODE (XEXP (x, 0)) == PLUS
985 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
986 && (REGNO (XEXP (XEXP (x, 0), 0))
987 < FIRST_PSEUDO_REGISTER)
988 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
989 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
990 else
991 {
992 /* Make a new stack slot. Then indicate that something
993 changed so we go back and recompute offsets for
994 eliminable registers because the allocation of memory
995 below might change some offset. reg_equiv_{mem,address}
996 will be set up for this pseudo on the next pass around
997 the loop. */
998 reg_equiv_memory_loc[i] = 0;
999 reg_equiv_init[i] = 0;
1000 alter_reg (i, -1);
1001 something_changed = 1;
1002 }
1003 }
1004
1005 /* If we allocated another pseudo to the stack, redo elimination
1006 bookkeeping. */
1007 if (something_changed)
1008 continue;
1009
1010 /* If caller-saves needs a group, initialize the group to include
1011 the size and mode required for caller-saves. */
1012
1013 if (caller_save_group_size > 1)
1014 {
1015 group_mode[(int) caller_save_spill_class] = Pmode;
1016 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1017 }
1018
1019 /* Compute the most additional registers needed by any instruction.
1020 Collect information separately for each class of regs. */
1021
1022 for (insn = first; insn; insn = NEXT_INSN (insn))
1023 {
1024 if (global && this_block + 1 < n_basic_blocks
1025 && insn == basic_block_head[this_block+1])
1026 ++this_block;
1027
1028 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1029 might include REG_LABEL), we need to see what effects this
1030 has on the known offsets at labels. */
1031
1032 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1033 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1034 && REG_NOTES (insn) != 0))
1035 set_label_offsets (insn, insn, 0);
1036
1037 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1038 {
1039 /* Nonzero means don't use a reload reg that overlaps
1040 the place where a function value can be returned. */
1041 rtx avoid_return_reg = 0;
1042
1043 rtx old_body = PATTERN (insn);
1044 int old_code = INSN_CODE (insn);
1045 rtx old_notes = REG_NOTES (insn);
1046 int did_elimination = 0;
1047
1048 /* To compute the number of reload registers of each class
1049 needed for an insn, we must simulate what choose_reload_regs
1050 can do. We do this by splitting an insn into an "input" and
1051 an "output" part. RELOAD_OTHER reloads are used in both.
1052 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1053 which must be live over the entire input section of reloads,
1054 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1055 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1056 inputs.
1057
1058 The registers needed for output are RELOAD_OTHER and
1059 RELOAD_FOR_OUTPUT, which are live for the entire output
1060 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1061 reloads for each operand.
1062
1063 The total number of registers needed is the maximum of the
1064 inputs and outputs. */
1065
1066 struct needs
1067 {
1068 /* [0] is normal, [1] is nongroup. */
1069 int regs[2][N_REG_CLASSES];
1070 int groups[N_REG_CLASSES];
1071 };
1072
1073 /* Each `struct needs' corresponds to one RELOAD_... type. */
1074 struct {
1075 struct needs other;
1076 struct needs input;
1077 struct needs output;
1078 struct needs insn;
1079 struct needs other_addr;
1080 struct needs op_addr;
1081 struct needs op_addr_reload;
1082 struct needs in_addr[MAX_RECOG_OPERANDS];
1083 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1084 struct needs out_addr[MAX_RECOG_OPERANDS];
1085 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1086 } insn_needs;
1087
1088 /* If needed, eliminate any eliminable registers. */
1089 if (num_eliminable)
1090 did_elimination = eliminate_regs_in_insn (insn, 0);
1091
1092 /* Set avoid_return_reg if this is an insn
1093 that might use the value of a function call. */
1094 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1095 {
1096 if (GET_CODE (PATTERN (insn)) == SET)
1097 after_call = SET_DEST (PATTERN (insn));
1098 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1099 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1100 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1101 else
1102 after_call = 0;
1103 }
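	      /* If a later insn (other than a plain stack-pointer adjustment)
		 refers to the register holding the call's value, reloads for
		 that insn must avoid using that register.  */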
1104 else if (SMALL_REGISTER_CLASSES && after_call != 0
1105 && !(GET_CODE (PATTERN (insn)) == SET
1106 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1107 {
1108 if (reg_referenced_p (after_call, PATTERN (insn)))
1109 avoid_return_reg = after_call;
1110 after_call = 0;
1111 }
1112
1113 /* Analyze the instruction. */
1114 find_reloads (insn, 0, spill_indirect_levels, global,
1115 spill_reg_order);
1116
1117 /* Remember for later shortcuts which insns had any reloads or
1118 register eliminations.
1119
1120 One might think that it would be worthwhile to mark insns
1121 that need register replacements but not reloads, but this is
1122 not safe because find_reloads may do some manipulation of
1123 the insn (such as swapping commutative operands), which would
1124 be lost when we restore the old pattern after register
1125 replacement. So the actions of find_reloads must be redone in
1126 subsequent passes or in reload_as_needed.
1127
1128 However, it is safe to mark insns that need reloads
1129 but not register replacement. */
1130
1131 PUT_MODE (insn, (did_elimination ? QImode
1132 : n_reloads ? HImode
1133 : GET_MODE (insn) == DImode ? DImode
1134 : VOIDmode));
1135
1136 /* Discard any register replacements done. */
1137 if (did_elimination)
1138 {
1139 obstack_free (&reload_obstack, reload_firstobj);
1140 PATTERN (insn) = old_body;
1141 INSN_CODE (insn) = old_code;
1142 REG_NOTES (insn) = old_notes;
1143 something_needs_elimination = 1;
1144 }
1145
1146 /* If this insn has no reloads, we need not do anything except
1147 in the case of a CALL_INSN when we have caller-saves and
1148 caller-save needs reloads. */
1149
1150 if (n_reloads == 0
1151 && ! (GET_CODE (insn) == CALL_INSN
1152 && caller_save_spill_class != NO_REGS))
1153 continue;
1154
1155 something_needs_reloads = 1;
1156 bzero ((char *) &insn_needs, sizeof insn_needs);
1157
1158 /* Count each reload once in every class
1159 containing the reload's own class. */
1160
1161 for (i = 0; i < n_reloads; i++)
1162 {
1163 register enum reg_class *p;
1164 enum reg_class class = reload_reg_class[i];
1165 int size;
1166 enum machine_mode mode;
1167 int nongroup_need;
1168 struct needs *this_needs;
1169
1170 /* Don't count the dummy reloads, for which one of the
1171 regs mentioned in the insn can be used for reloading.
1172 Don't count optional reloads.
1173 Don't count reloads that got combined with others. */
1174 if (reload_reg_rtx[i] != 0
1175 || reload_optional[i] != 0
1176 || (reload_out[i] == 0 && reload_in[i] == 0
1177 && ! reload_secondary_p[i]))
1178 continue;
1179
1180 /* Show that a reload register of this class is needed
1181 in this basic block. We do not use insn_needs and
1182 insn_groups because they are overly conservative for
1183 this purpose. */
1184 if (global && ! basic_block_needs[(int) class][this_block])
1185 {
1186 basic_block_needs[(int) class][this_block] = 1;
1187 new_basic_block_needs = 1;
1188 }
1189
1190 mode = reload_inmode[i];
1191 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1192 mode = reload_outmode[i];
1193 size = CLASS_MAX_NREGS (class, mode);
1194
1195 /* If this class doesn't want a group, determine if we have
1196 a nongroup need or a regular need. We have a nongroup
1197 need if this reload conflicts with a group reload whose
1198 class intersects with this reload's class. */
1199
1200 nongroup_need = 0;
1201 if (size == 1)
1202 for (j = 0; j < n_reloads; j++)
1203 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1204 (GET_MODE_SIZE (reload_outmode[j])
1205 > GET_MODE_SIZE (reload_inmode[j]))
1206 ? reload_outmode[j]
1207 : reload_inmode[j])
1208 > 1)
1209 && (!reload_optional[j])
1210 && (reload_in[j] != 0 || reload_out[j] != 0
1211 || reload_secondary_p[j])
1212 && reloads_conflict (i, j)
1213 && reg_classes_intersect_p (class,
1214 reload_reg_class[j]))
1215 {
1216 nongroup_need = 1;
1217 break;
1218 }
1219
1220 /* Decide which time-of-use to count this reload for. */
1221 switch (reload_when_needed[i])
1222 {
1223 case RELOAD_OTHER:
1224 this_needs = &insn_needs.other;
1225 break;
1226 case RELOAD_FOR_INPUT:
1227 this_needs = &insn_needs.input;
1228 break;
1229 case RELOAD_FOR_OUTPUT:
1230 this_needs = &insn_needs.output;
1231 break;
1232 case RELOAD_FOR_INSN:
1233 this_needs = &insn_needs.insn;
1234 break;
1235 case RELOAD_FOR_OTHER_ADDRESS:
1236 this_needs = &insn_needs.other_addr;
1237 break;
1238 case RELOAD_FOR_INPUT_ADDRESS:
1239 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1240 break;
1241 case RELOAD_FOR_INPADDR_ADDRESS:
1242 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1243 break;
1244 case RELOAD_FOR_OUTPUT_ADDRESS:
1245 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1246 break;
1247 case RELOAD_FOR_OUTADDR_ADDRESS:
1248 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1249 break;
1250 case RELOAD_FOR_OPERAND_ADDRESS:
1251 this_needs = &insn_needs.op_addr;
1252 break;
1253 case RELOAD_FOR_OPADDR_ADDR:
1254 this_needs = &insn_needs.op_addr_reload;
1255 break;
1256 }
1257
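		  /* A reload whose value spans more than one register of its
		     class counts as a group need; a single-register reload is
		     counted individually, either as a nongroup need or as a
		     normal one.  */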
1258 if (size > 1)
1259 {
1260 enum machine_mode other_mode, allocate_mode;
1261
1262 /* Count number of groups needed separately from
1263 number of individual regs needed. */
1264 this_needs->groups[(int) class]++;
1265 p = reg_class_superclasses[(int) class];
1266 while (*p != LIM_REG_CLASSES)
1267 this_needs->groups[(int) *p++]++;
1268
1269 /* Record size and mode of a group of this class. */
1270 /* If more than one size group is needed,
1271 make all groups the largest needed size. */
1272 if (group_size[(int) class] < size)
1273 {
1274 other_mode = group_mode[(int) class];
1275 allocate_mode = mode;
1276
1277 group_size[(int) class] = size;
1278 group_mode[(int) class] = mode;
1279 }
1280 else
1281 {
1282 other_mode = mode;
1283 allocate_mode = group_mode[(int) class];
1284 }
1285
1286 /* Crash if two dissimilar machine modes both need
1287 groups of consecutive regs of the same class. */
1288
1289 if (other_mode != VOIDmode && other_mode != allocate_mode
1290 && ! modes_equiv_for_class_p (allocate_mode,
1291 other_mode, class))
1292 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1293 insn);
1294 }
1295 else if (size == 1)
1296 {
1297 this_needs->regs[nongroup_need][(int) class] += 1;
1298 p = reg_class_superclasses[(int) class];
1299 while (*p != LIM_REG_CLASSES)
1300 this_needs->regs[nongroup_need][(int) *p++] += 1;
1301 }
1302 else
1303 abort ();
1304 }
1305
1306 /* All reloads have been counted for this insn;
1307 now merge the various times of use.
1308 This sets insn_needs, etc., to the maximum total number
1309 of registers needed at any point in this insn. */
1310
1311 for (i = 0; i < N_REG_CLASSES; i++)
1312 {
1313 int in_max, out_max;
1314
1315 /* Compute normal and nongroup needs. */
1316 for (j = 0; j <= 1; j++)
1317 {
1318 for (in_max = 0, out_max = 0, k = 0;
1319 k < reload_n_operands; k++)
1320 {
1321 in_max
1322 = MAX (in_max,
1323 (insn_needs.in_addr[k].regs[j][i]
1324 + insn_needs.in_addr_addr[k].regs[j][i]));
1325 out_max
1326 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1327 out_max
1328 = MAX (out_max,
1329 insn_needs.out_addr_addr[k].regs[j][i]);
1330 }
1331
1332 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1333 and operand addresses but not things used to reload
1334 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1335 don't conflict with things needed to reload inputs or
1336 outputs. */
1337
1338 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1339 insn_needs.op_addr_reload.regs[j][i]),
1340 in_max);
1341
1342 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1343
1344 insn_needs.input.regs[j][i]
1345 = MAX (insn_needs.input.regs[j][i]
1346 + insn_needs.op_addr.regs[j][i]
1347 + insn_needs.insn.regs[j][i],
1348 in_max + insn_needs.input.regs[j][i]);
1349
1350 insn_needs.output.regs[j][i] += out_max;
1351 insn_needs.other.regs[j][i]
1352 += MAX (MAX (insn_needs.input.regs[j][i],
1353 insn_needs.output.regs[j][i]),
1354 insn_needs.other_addr.regs[j][i]);
1355
1356 }
1357
1358 /* Now compute group needs. */
1359 for (in_max = 0, out_max = 0, j = 0;
1360 j < reload_n_operands; j++)
1361 {
1362 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1363 in_max = MAX (in_max,
1364 insn_needs.in_addr_addr[j].groups[i]);
1365 out_max
1366 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1367 out_max
1368 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1369 }
1370
1371 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1372 insn_needs.op_addr_reload.groups[i]),
1373 in_max);
1374 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1375
1376 insn_needs.input.groups[i]
1377 = MAX (insn_needs.input.groups[i]
1378 + insn_needs.op_addr.groups[i]
1379 + insn_needs.insn.groups[i],
1380 in_max + insn_needs.input.groups[i]);
1381
1382 insn_needs.output.groups[i] += out_max;
1383 insn_needs.other.groups[i]
1384 += MAX (MAX (insn_needs.input.groups[i],
1385 insn_needs.output.groups[i]),
1386 insn_needs.other_addr.groups[i]);
1387 }
1388
1389 /* If this is a CALL_INSN and caller-saves will need
1390 a spill register, act as if the spill register is
1391 needed for this insn. However, the spill register
1392 can be used by any reload of this insn, so we only
1393 need do something if no need for that class has
1394 been recorded.
1395
1396 The assumption that every CALL_INSN will trigger a
1397		     caller-save is highly conservative; however, the number
1398 of cases where caller-saves will need a spill register but
1399 a block containing a CALL_INSN won't need a spill register
1400 of that class should be quite rare.
1401
1402 If a group is needed, the size and mode of the group will
1403 have been set up at the beginning of this loop. */
1404
1405 if (GET_CODE (insn) == CALL_INSN
1406 && caller_save_spill_class != NO_REGS)
1407 {
1408 /* See if this register would conflict with any reload
1409 that needs a group. */
1410 int nongroup_need = 0;
1411 int *caller_save_needs;
1412
1413 for (j = 0; j < n_reloads; j++)
1414 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1415 (GET_MODE_SIZE (reload_outmode[j])
1416 > GET_MODE_SIZE (reload_inmode[j]))
1417 ? reload_outmode[j]
1418 : reload_inmode[j])
1419 > 1)
1420 && reg_classes_intersect_p (caller_save_spill_class,
1421 reload_reg_class[j]))
1422 {
1423 nongroup_need = 1;
1424 break;
1425 }
1426
1427 caller_save_needs
1428 = (caller_save_group_size > 1
1429 ? insn_needs.other.groups
1430 : insn_needs.other.regs[nongroup_need]);
1431
1432 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1433 {
1434 register enum reg_class *p
1435 = reg_class_superclasses[(int) caller_save_spill_class];
1436
1437 caller_save_needs[(int) caller_save_spill_class]++;
1438
1439 while (*p != LIM_REG_CLASSES)
1440 caller_save_needs[(int) *p++] += 1;
1441 }
1442
1443 /* Show that this basic block will need a register of
1444 this class. */
1445
1446 if (global
1447 && ! (basic_block_needs[(int) caller_save_spill_class]
1448 [this_block]))
1449 {
1450 basic_block_needs[(int) caller_save_spill_class]
1451 [this_block] = 1;
1452 new_basic_block_needs = 1;
1453 }
1454 }
1455
1456 /* If this insn stores the value of a function call,
1457 and that value is in a register that has been spilled,
1458 and if the insn needs a reload in a class
1459 that might use that register as the reload register,
1460		 then add an extra need in that class.
1461 This makes sure we have a register available that does
1462 not overlap the return value. */
1463
1464 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1465 {
1466 int regno = REGNO (avoid_return_reg);
1467 int nregs
1468 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1469 int r;
1470 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1471
1472 /* First compute the "basic needs", which counts a
1473 need only in the smallest class in which it
1474 is required. */
1475
1476 bcopy ((char *) insn_needs.other.regs[0],
1477 (char *) basic_needs, sizeof basic_needs);
1478 bcopy ((char *) insn_needs.other.groups,
1479 (char *) basic_groups, sizeof basic_groups);
1480
1481 for (i = 0; i < N_REG_CLASSES; i++)
1482 {
1483 enum reg_class *p;
1484
1485 if (basic_needs[i] >= 0)
1486 for (p = reg_class_superclasses[i];
1487 *p != LIM_REG_CLASSES; p++)
1488 basic_needs[(int) *p] -= basic_needs[i];
1489
1490 if (basic_groups[i] >= 0)
1491 for (p = reg_class_superclasses[i];
1492 *p != LIM_REG_CLASSES; p++)
1493 basic_groups[(int) *p] -= basic_groups[i];
1494 }
1495
1496 /* Now count extra regs if there might be a conflict with
1497 the return value register. */
1498
1499 for (r = regno; r < regno + nregs; r++)
1500 if (spill_reg_order[r] >= 0)
1501 for (i = 0; i < N_REG_CLASSES; i++)
1502 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1503 {
1504 if (basic_needs[i] > 0)
1505 {
1506 enum reg_class *p;
1507
1508 insn_needs.other.regs[0][i]++;
1509 p = reg_class_superclasses[i];
1510 while (*p != LIM_REG_CLASSES)
1511 insn_needs.other.regs[0][(int) *p++]++;
1512 }
1513 if (basic_groups[i] > 0)
1514 {
1515 enum reg_class *p;
1516
1517 insn_needs.other.groups[i]++;
1518 p = reg_class_superclasses[i];
1519 while (*p != LIM_REG_CLASSES)
1520 insn_needs.other.groups[(int) *p++]++;
1521 }
1522 }
1523 }
1524
1525 /* For each class, collect maximum need of any insn. */
1526
1527 for (i = 0; i < N_REG_CLASSES; i++)
1528 {
1529 if (max_needs[i] < insn_needs.other.regs[0][i])
1530 {
1531 max_needs[i] = insn_needs.other.regs[0][i];
1532 max_needs_insn[i] = insn;
1533 }
1534 if (max_groups[i] < insn_needs.other.groups[i])
1535 {
1536 max_groups[i] = insn_needs.other.groups[i];
1537 max_groups_insn[i] = insn;
1538 }
1539 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1540 {
1541 max_nongroups[i] = insn_needs.other.regs[1][i];
1542 max_nongroups_insn[i] = insn;
1543 }
1544 }
1545 }
1546 /* Note that there is a continue statement above. */
1547 }
1548
1549 /* If we allocated any new memory locations, make another pass
1550 since it might have changed elimination offsets. */
1551 if (starting_frame_size != get_frame_size ())
1552 something_changed = 1;
1553
1554 if (dumpfile)
1555 for (i = 0; i < N_REG_CLASSES; i++)
1556 {
1557 if (max_needs[i] > 0)
1558 fprintf (dumpfile,
1559 ";; Need %d reg%s of class %s (for insn %d).\n",
1560 max_needs[i], max_needs[i] == 1 ? "" : "s",
1561 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1562 if (max_nongroups[i] > 0)
1563 fprintf (dumpfile,
1564 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1565 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1566 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1567 if (max_groups[i] > 0)
1568 fprintf (dumpfile,
1569 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1570 max_groups[i], max_groups[i] == 1 ? "" : "s",
1571 mode_name[(int) group_mode[i]],
1572 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1573 }
1574
1575 /* If we have caller-saves, set up the save areas and see if caller-save
1576 will need a spill register. */
1577
1578 if (caller_save_needed)
1579 {
1580 /* Set the offsets for setup_save_areas. */
1581 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1582 ep++)
1583 ep->previous_offset = ep->max_offset;
1584
1585 if ( ! setup_save_areas (&something_changed)
1586 && caller_save_spill_class == NO_REGS)
1587 {
1588 /* The class we will need depends on whether the machine
1589 supports the sum of two registers for an address; see
1590 find_address_reloads for details. */
1591
1592 caller_save_spill_class
1593 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1594 caller_save_group_size
1595 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1596 something_changed = 1;
1597 }
1598 }
1599
1600 /* See if anything that happened changes which eliminations are valid.
1601 For example, on the Sparc, whether or not the frame pointer can
1602 be eliminated can depend on what registers have been used. We need
1603 not check some conditions again (such as flag_omit_frame_pointer)
1604 since they can't have changed. */
1605
1606 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1607 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1608 #ifdef ELIMINABLE_REGS
1609 || ! CAN_ELIMINATE (ep->from, ep->to)
1610 #endif
1611 )
1612 ep->can_eliminate = 0;
1613
1614 /* Look for the case where we have discovered that we can't replace
1615 register A with register B and that means that we will now be
1616 trying to replace register A with register C. This means we can
1617 no longer replace register C with register B and we need to disable
1618 such an elimination, if it exists. This occurs often with A == ap,
1619 B == sp, and C == fp. */
1620
1621 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1622 {
1623 struct elim_table *op;
1624 register int new_to = -1;
1625
1626 if (! ep->can_eliminate && ep->can_eliminate_previous)
1627 {
1628 /* Find the current elimination for ep->from, if there is a
1629 new one. */
1630 for (op = reg_eliminate;
1631 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1632 if (op->from == ep->from && op->can_eliminate)
1633 {
1634 new_to = op->to;
1635 break;
1636 }
1637
1638 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1639 disable it. */
1640 for (op = reg_eliminate;
1641 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1642 if (op->from == new_to && op->to == ep->to)
1643 op->can_eliminate = 0;
1644 }
1645 }
1646
1647 /* See if any registers that we thought we could eliminate the previous
1648 time are no longer eliminable. If so, something has changed and we
1649 must spill the register. Also, recompute the number of eliminable
1650 registers and see if the frame pointer is needed; it is if there is
1651 no elimination of the frame pointer that we can perform. */
1652
1653 frame_pointer_needed = 1;
1654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1655 {
1656 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1657 && ep->to != HARD_FRAME_POINTER_REGNUM)
1658 frame_pointer_needed = 0;
1659
1660 if (! ep->can_eliminate && ep->can_eliminate_previous)
1661 {
1662 ep->can_eliminate_previous = 0;
1663 spill_hard_reg (ep->from, global, dumpfile, 1);
1664 something_changed = 1;
1665 num_eliminable--;
1666 }
1667 }
1668
1669 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1670 /* If we didn't need a frame pointer last time, but we do now, spill
1671 the hard frame pointer. */
1672 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1673 {
1674 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1675 something_changed = 1;
1676 }
1677 #endif
1678
1679 /* If all needs are met, we win. */
1680
1681 for (i = 0; i < N_REG_CLASSES; i++)
1682 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1683 break;
1684 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1685 break;
1686
1687 /* Not all needs are met; must spill some hard regs. */
1688
1689 /* Put all registers spilled so far back in potential_reload_regs, but
1690 put them at the front, since we've already spilled most of the
1691 pseudos in them (we might have left some pseudos unspilled if they
1692 were in a block that didn't need any spill registers of a conflicting
1693                  class).  We used to try to mark off the need for those registers,
1694 but doing so properly is very complex and reallocating them is the
1695 simpler approach. First, "pack" potential_reload_regs by pushing
1696 any nonnegative entries towards the end. That will leave room
1697 for the registers we already spilled.
1698
1699 Also, undo the marking of the spill registers from the last time
1700                  around in FORBIDDEN_REGS since we will probably be allocating
1701 them again below.
1702
1703               ??? It is theoretically possible that we might end up not using some
1704               of our previously-spilled registers in this allocation, even though
1705 they are at the head of the list. It's not clear what to do about
1706 this, but it was no better before, when we marked off the needs met
1707 by the previously-spilled registers. With the current code, globals
1708 can be allocated into these registers, but locals cannot. */
1709
1710 if (n_spills)
1711 {
1712 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1713 if (potential_reload_regs[i] != -1)
1714 potential_reload_regs[j--] = potential_reload_regs[i];
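              /* For example (values illustrative only): with eight hard regs
                 and potential_reload_regs == { 3, -1, 5, -1, 0, 7, -1, 2 },
                 the loop above leaves the last five slots holding 3, 5, 0,
                 7, 2 in their original order, freeing the first three slots
                 for the regs copied from spill_regs below.  */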
1715
1716 for (i = 0; i < n_spills; i++)
1717 {
1718 potential_reload_regs[i] = spill_regs[i];
1719 spill_reg_order[spill_regs[i]] = -1;
1720 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1721 }
1722
1723 n_spills = 0;
1724 }
1725
1726           /* Now find more reload regs to satisfy the remaining need.
1727 Do it by ascending class number, since otherwise a reg
1728 might be spilled for a big class and might fail to count
1729 for a smaller class even though it belongs to that class.
1730
1731 Count spilled regs in `spills', and add entries to
1732 `spill_regs' and `spill_reg_order'.
1733
1734 ??? Note there is a problem here.
1735 When there is a need for a group in a high-numbered class,
1736 and also need for non-group regs that come from a lower class,
1737 the non-group regs are chosen first. If there aren't many regs,
1738 they might leave no room for a group.
1739
1740 This was happening on the 386. To fix it, we added the code
1741 that calls possible_group_p, so that the lower class won't
1742 break up the last possible group.
1743
1744 Really fixing the problem would require changes above
1745 in counting the regs already spilled, and in choose_reload_regs.
1746 It might be hard to avoid introducing bugs there. */
1747
1748 CLEAR_HARD_REG_SET (counted_for_groups);
1749 CLEAR_HARD_REG_SET (counted_for_nongroups);
1750
1751 for (class = 0; class < N_REG_CLASSES; class++)
1752 {
1753 /* First get the groups of registers.
1754 If we got single registers first, we might fragment
1755 possible groups. */
1756 while (max_groups[class] > 0)
1757 {
1758 /* If any single spilled regs happen to form groups,
1759 count them now. Maybe we don't really need
1760 to spill another group. */
1761 count_possible_groups (group_size, group_mode, max_groups,
1762 class);
1763
1764 if (max_groups[class] <= 0)
1765 break;
1766
1767 /* Groups of size 2 (the only groups used on most machines)
1768 are treated specially. */
1769 if (group_size[class] == 2)
1770 {
1771 /* First, look for a register that will complete a group. */
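                      /* For instance (register numbers illustrative): if reg 5
                         is already a spill reg in CLASS, an unreserved
                         candidate reg 4 or reg 6 that is also in CLASS
                         completes a (4,5) or (5,6) pair, provided the
                         lower-numbered reg of the pair is valid for
                         group_mode and reg 5 has not already been claimed
                         for a nongroup or for another group.  */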
1772 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1773 {
1774 int other;
1775
1776 j = potential_reload_regs[i];
1777 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1778 &&
1779 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1780 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1781 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1782 && HARD_REGNO_MODE_OK (other, group_mode[class])
1783 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1784 other)
1785 /* We don't want one part of another group.
1786 We could get "two groups" that overlap! */
1787 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1788 ||
1789 (j < FIRST_PSEUDO_REGISTER - 1
1790 && (other = j + 1, spill_reg_order[other] >= 0)
1791 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1792 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1793 && HARD_REGNO_MODE_OK (j, group_mode[class])
1794 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1795 other)
1796 && ! TEST_HARD_REG_BIT (counted_for_groups,
1797 other))))
1798 {
1799 register enum reg_class *p;
1800
1801 /* We have found one that will complete a group,
1802 so count off one group as provided. */
1803 max_groups[class]--;
1804 p = reg_class_superclasses[class];
1805 while (*p != LIM_REG_CLASSES)
1806 {
1807 if (group_size [(int) *p] <= group_size [class])
1808 max_groups[(int) *p]--;
1809 p++;
1810 }
1811
1812 /* Indicate both these regs are part of a group. */
1813 SET_HARD_REG_BIT (counted_for_groups, j);
1814 SET_HARD_REG_BIT (counted_for_groups, other);
1815 break;
1816 }
1817 }
1818 /* We can't complete a group, so start one. */
1819 /* Look for a pair neither of which is explicitly used. */
1820 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1822 {
1823 int k;
1824 j = potential_reload_regs[i];
1825 /* Verify that J+1 is a potential reload reg. */
1826 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1827 if (potential_reload_regs[k] == j + 1)
1828 break;
1829 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1830 && k < FIRST_PSEUDO_REGISTER
1831 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1832 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1833 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1834 && HARD_REGNO_MODE_OK (j, group_mode[class])
1835 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1836 j + 1)
1837 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1838 /* Reject J at this stage
1839 if J+1 was explicitly used. */
1840 && ! regs_explicitly_used[j + 1])
1841 break;
1842 }
1843 /* Now try any group at all
1844 whose registers are not in bad_spill_regs. */
1845 if (i == FIRST_PSEUDO_REGISTER)
1846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1847 {
1848 int k;
1849 j = potential_reload_regs[i];
1850 /* Verify that J+1 is a potential reload reg. */
1851 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1852 if (potential_reload_regs[k] == j + 1)
1853 break;
1854 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1855 && k < FIRST_PSEUDO_REGISTER
1856 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1857 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1858 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1859 && HARD_REGNO_MODE_OK (j, group_mode[class])
1860 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1861 j + 1)
1862 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1863 break;
1864 }
1865
1866 /* I should be the index in potential_reload_regs
1867 of the new reload reg we have found. */
1868
1869 if (i >= FIRST_PSEUDO_REGISTER)
1870 {
1871 /* There are no groups left to spill. */
1872 spill_failure (max_groups_insn[class]);
1873 failure = 1;
1874 goto failed;
1875 }
1876 else
1877 something_changed
1878 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1879 global, dumpfile);
1880 }
1881 else
1882 {
1883 /* For groups of more than 2 registers,
1884 look for a sufficient sequence of unspilled registers,
1885 and spill them all at once. */
1886 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1887 {
1888 int k;
1889
1890 j = potential_reload_regs[i];
1891 if (j >= 0
1892 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1893 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1894 {
1895 /* Check each reg in the sequence. */
1896 for (k = 0; k < group_size[class]; k++)
1897 if (! (spill_reg_order[j + k] < 0
1898 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1899 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1900 break;
1901 /* We got a full sequence, so spill them all. */
1902 if (k == group_size[class])
1903 {
1904 register enum reg_class *p;
1905 for (k = 0; k < group_size[class]; k++)
1906 {
1907 int idx;
1908 SET_HARD_REG_BIT (counted_for_groups, j + k);
1909 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1910 if (potential_reload_regs[idx] == j + k)
1911 break;
1912 something_changed
1913 |= new_spill_reg (idx, class,
1914 max_needs, NULL_PTR,
1915 global, dumpfile);
1916 }
1917
1918 /* We have found one that will complete a group,
1919 so count off one group as provided. */
1920 max_groups[class]--;
1921 p = reg_class_superclasses[class];
1922 while (*p != LIM_REG_CLASSES)
1923 {
1924 if (group_size [(int) *p]
1925 <= group_size [class])
1926 max_groups[(int) *p]--;
1927 p++;
1928 }
1929 break;
1930 }
1931 }
1932 }
1933 /* We couldn't find any registers for this reload.
1934 Avoid going into an infinite loop. */
1935 if (i >= FIRST_PSEUDO_REGISTER)
1936 {
1937 /* There are no groups left. */
1938 spill_failure (max_groups_insn[class]);
1939 failure = 1;
1940 goto failed;
1941 }
1942 }
1943 }
1944
1945 /* Now similarly satisfy all need for single registers. */
1946
1947 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1948 {
1949 /* If we spilled enough regs, but they weren't counted
1950 against the non-group need, see if we can count them now.
1951 If so, we can avoid some actual spilling. */
1952 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1953 for (i = 0; i < n_spills; i++)
1954 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1955 spill_regs[i])
1956 && !TEST_HARD_REG_BIT (counted_for_groups,
1957 spill_regs[i])
1958 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1959 spill_regs[i])
1960 && max_nongroups[class] > 0)
1961 {
1962 register enum reg_class *p;
1963
1964 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1965 max_nongroups[class]--;
1966 p = reg_class_superclasses[class];
1967 while (*p != LIM_REG_CLASSES)
1968 max_nongroups[(int) *p++]--;
1969 }
1970 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1971 break;
1972
1973 /* Consider the potential reload regs that aren't
1974 yet in use as reload regs, in order of preference.
1975 Find the most preferred one that's in this class. */
1976
1977 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1978 if (potential_reload_regs[i] >= 0
1979 && TEST_HARD_REG_BIT (reg_class_contents[class],
1980 potential_reload_regs[i])
1981 /* If this reg will not be available for groups,
1982 pick one that does not foreclose possible groups.
1983 This is a kludge, and not very general,
1984 but it should be sufficient to make the 386 work,
1985 and the problem should not occur on machines with
1986 more registers. */
1987 && (max_nongroups[class] == 0
1988 || possible_group_p (potential_reload_regs[i], max_groups)))
1989 break;
1990
1991 /* If we couldn't get a register, try to get one even if we
1992 might foreclose possible groups. This may cause problems
1993 later, but that's better than aborting now, since it is
1994 possible that we will, in fact, be able to form the needed
1995 group even with this allocation. */
1996
1997 if (i >= FIRST_PSEUDO_REGISTER
1998 && (asm_noperands (max_needs[class] > 0
1999 ? max_needs_insn[class]
2000 : max_nongroups_insn[class])
2001 < 0))
2002 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2003 if (potential_reload_regs[i] >= 0
2004 && TEST_HARD_REG_BIT (reg_class_contents[class],
2005 potential_reload_regs[i]))
2006 break;
2007
2008 /* I should be the index in potential_reload_regs
2009 of the new reload reg we have found. */
2010
2011 if (i >= FIRST_PSEUDO_REGISTER)
2012 {
2013 /* There are no possible registers left to spill. */
2014 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2015 : max_nongroups_insn[class]);
2016 failure = 1;
2017 goto failed;
2018 }
2019 else
2020 something_changed
2021 |= new_spill_reg (i, class, max_needs, max_nongroups,
2022 global, dumpfile);
2023 }
2024 }
2025 }
2026
2027 /* If global-alloc was run, notify it of any register eliminations we have
2028 done. */
2029 if (global)
2030 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2031 if (ep->can_eliminate)
2032 mark_elimination (ep->from, ep->to);
2033
2034 /* Insert code to save and restore call-clobbered hard regs
2035      around calls.  Tell it what mode to use so that we will process
2036 those insns in reload_as_needed if we have to. */
2037
2038 if (caller_save_needed)
2039 save_call_clobbered_regs (num_eliminable ? QImode
2040 : caller_save_spill_class != NO_REGS ? HImode
2041 : VOIDmode);
2042
2043 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2044 If that insn didn't set the register (i.e., it copied the register to
2045 memory), just delete that insn instead of the equivalencing insn plus
2046 anything now dead. If we call delete_dead_insn on that insn, we may
2047      delete the insn that actually sets the register if the register dies
2048 there and that is incorrect. */
2049
2050 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2051 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2052 && GET_CODE (reg_equiv_init[i]) != NOTE)
2053 {
2054 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2055 delete_dead_insn (reg_equiv_init[i]);
2056 else
2057 {
2058 PUT_CODE (reg_equiv_init[i], NOTE);
2059 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2060 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2061 }
2062 }
2063
2064 /* Use the reload registers where necessary
2065 by generating move instructions to move the must-be-register
2066 values into or out of the reload registers. */
2067
2068 if (something_needs_reloads || something_needs_elimination
2069 || (caller_save_needed && num_eliminable)
2070 || caller_save_spill_class != NO_REGS)
2071 reload_as_needed (first, global);
2072
2073 /* If we were able to eliminate the frame pointer, show that it is no
2074      longer live at the start of any basic block.  If it is live by
2075 virtue of being in a pseudo, that pseudo will be marked live
2076 and hence the frame pointer will be known to be live via that
2077 pseudo. */
2078
2079 if (! frame_pointer_needed)
2080 for (i = 0; i < n_basic_blocks; i++)
2081 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2082 HARD_FRAME_POINTER_REGNUM);
2083
2084 /* Come here (with failure set nonzero) if we can't get enough spill regs
2085      and we decide not to abort.  */
2086 failed:
2087
2088 reload_in_progress = 0;
2089
2090 /* Now eliminate all pseudo regs by modifying them into
2091 their equivalent memory references.
2092 The REG-rtx's for the pseudos are modified in place,
2093 so all insns that used to refer to them now refer to memory.
2094
2095 For a reg that has a reg_equiv_address, all those insns
2096 were changed by reloading so that no insns refer to it any longer;
2097 but the DECL_RTL of a variable decl may refer to it,
2098 and if so this causes the debugging info to mention the variable. */
2099
2100 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2101 {
2102 rtx addr = 0;
2103 int in_struct = 0;
2104 if (reg_equiv_mem[i])
2105 {
2106 addr = XEXP (reg_equiv_mem[i], 0);
2107 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2108 }
2109 if (reg_equiv_address[i])
2110 addr = reg_equiv_address[i];
2111 if (addr)
2112 {
2113 if (reg_renumber[i] < 0)
2114 {
2115 rtx reg = regno_reg_rtx[i];
2116 XEXP (reg, 0) = addr;
2117 REG_USERVAR_P (reg) = 0;
2118 MEM_IN_STRUCT_P (reg) = in_struct;
2119 PUT_CODE (reg, MEM);
2120 }
2121 else if (reg_equiv_mem[i])
2122 XEXP (reg_equiv_mem[i], 0) = addr;
2123 }
2124 }
2125
2126 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2127 /* Make a pass over all the insns and remove death notes for things that
2128 are no longer registers or no longer die in the insn (e.g., an input
2129 and output pseudo being tied). */
2130
2131 for (insn = first; insn; insn = NEXT_INSN (insn))
2132 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2133 {
2134 rtx note, next;
2135
2136 for (note = REG_NOTES (insn); note; note = next)
2137 {
2138 next = XEXP (note, 1);
2139 if (REG_NOTE_KIND (note) == REG_DEAD
2140 && (GET_CODE (XEXP (note, 0)) != REG
2141 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2142 remove_note (insn, note);
2143 }
2144 }
2145 #endif
2146
2147 /* If we are doing stack checking, give a warning if this function's
2148 frame size is larger than we expect. */
2149 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2150 {
2151 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2152
2153 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2154 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2155 size += UNITS_PER_WORD;
2156
2157 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2158 warning ("frame size too large for reliable stack checking");
2159 }
2160
2161 /* Indicate that we no longer have known memory locations or constants. */
2162 reg_equiv_constant = 0;
2163 reg_equiv_memory_loc = 0;
2164
2165 if (real_known_ptr)
2166 free (real_known_ptr);
2167 if (real_at_ptr)
2168 free (real_at_ptr);
2169
2170 if (scratch_list)
2171 free (scratch_list);
2172 scratch_list = 0;
2173 if (scratch_block)
2174 free (scratch_block);
2175 scratch_block = 0;
2176
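  /* Summarize, as a hard reg set, which registers ended up as spill regs.  */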
2177 CLEAR_HARD_REG_SET (used_spill_regs);
2178 for (i = 0; i < n_spills; i++)
2179 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2180
2181 return failure;
2182 }
2183 \f
2184 /* Nonzero if, after spilling reg REGNO for non-groups,
2185 it will still be possible to find a group if we still need one. */
2186
2187 static int
2188 possible_group_p (regno, max_groups)
2189 int regno;
2190 int *max_groups;
2191 {
2192 int i;
2193 int class = (int) NO_REGS;
2194
2195 for (i = 0; i < (int) N_REG_CLASSES; i++)
2196 if (max_groups[i] > 0)
2197 {
2198 class = i;
2199 break;
2200 }
2201
2202 if (class == (int) NO_REGS)
2203 return 1;
2204
2205 /* Consider each pair of consecutive registers. */
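  /* A pair (R, R+1) can still yield a group if both regs can still be
     spilled, or if one is already a spill reg that has not been reserved
     for a nongroup or for another group.  */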
2206 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2207 {
2208 /* Ignore pairs that include reg REGNO. */
2209 if (i == regno || i + 1 == regno)
2210 continue;
2211
2212 /* Ignore pairs that are outside the class that needs the group.
2213 ??? Here we fail to handle the case where two different classes
2214 independently need groups. But this never happens with our
2215 current machine descriptions. */
2216 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2217 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2218 continue;
2219
2220 /* A pair of consecutive regs we can still spill does the trick. */
2221 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2222 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2223 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2224 return 1;
2225
2226 /* A pair of one already spilled and one we can spill does it
2227 provided the one already spilled is not otherwise reserved. */
2228 if (spill_reg_order[i] < 0
2229 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2230 && spill_reg_order[i + 1] >= 0
2231 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2232 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2233 return 1;
2234 if (spill_reg_order[i + 1] < 0
2235 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2236 && spill_reg_order[i] >= 0
2237 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2238 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2239 return 1;
2240 }
2241
2242 return 0;
2243 }
2244 \f
2245 /* Count any groups of CLASS that can be formed from the registers recently
2246 spilled. */
2247
2248 static void
2249 count_possible_groups (group_size, group_mode, max_groups, class)
2250 int *group_size;
2251 enum machine_mode *group_mode;
2252 int *max_groups;
2253 int class;
2254 {
2255 HARD_REG_SET new;
2256 int i, j;
2257
2258 /* Now find all consecutive groups of spilled registers
2259 and mark each group off against the need for such groups.
2260 But don't count them against ordinary need, yet. */
2261
2262 if (group_size[class] == 0)
2263 return;
2264
2265 CLEAR_HARD_REG_SET (new);
2266
2267   /* Make a mask of all the spill regs in CLASS that are not yet counted.  */
2268 for (i = 0; i < n_spills; i++)
2269 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2270 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2271 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2272 SET_HARD_REG_BIT (new, spill_regs[i]);
2273
2274 /* Find each consecutive group of them. */
2275 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2276 if (TEST_HARD_REG_BIT (new, i)
2277 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2278 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2279 {
2280 for (j = 1; j < group_size[class]; j++)
2281 if (! TEST_HARD_REG_BIT (new, i + j))
2282 break;
2283
2284 if (j == group_size[class])
2285 {
2286 /* We found a group. Mark it off against this class's need for
2287 groups, and against each superclass too. */
2288 register enum reg_class *p;
2289
2290 max_groups[class]--;
2291 p = reg_class_superclasses[class];
2292 while (*p != LIM_REG_CLASSES)
2293 {
2294 if (group_size [(int) *p] <= group_size [class])
2295 max_groups[(int) *p]--;
2296 p++;
2297 }
2298
2299 /* Don't count these registers again. */
2300 for (j = 0; j < group_size[class]; j++)
2301 SET_HARD_REG_BIT (counted_for_groups, i + j);
2302 }
2303
2304 /* Skip to the last reg in this group. When i is incremented above,
2305 it will then point to the first reg of the next possible group. */
2306 i += j - 1;
2307 }
2308 }
2309 \f
2310 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2311 another mode that needs to be reloaded for the same register class CLASS.
2312 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2313 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2314
2315 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2316 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2317 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2318 causes unnecessary failures on machines requiring alignment of register
2319 groups when the two modes are different sizes, because the larger mode has
2320 more strict alignment rules than the smaller mode. */
2321
2322 static int
2323 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2324 enum machine_mode allocate_mode, other_mode;
2325 enum reg_class class;
2326 {
2327 register int regno;
2328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2329 {
2330 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2331 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2332 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2333 return 0;
2334 }
2335 return 1;
2336 }
2337
2338 /* Handle the failure to find a register to spill.
2339 INSN should be one of the insns which needed this particular spill reg. */
2340
2341 static void
2342 spill_failure (insn)
2343 rtx insn;
2344 {
2345 if (asm_noperands (PATTERN (insn)) >= 0)
2346 error_for_asm (insn, "`asm' needs too many reloads");
2347 else
2348 fatal_insn ("Unable to find a register to spill.", insn);
2349 }
2350
2351 /* Add a new register to the tables of available spill-registers
2352 (as well as spilling all pseudos allocated to the register).
2353 I is the index of this register in potential_reload_regs.
2354 CLASS is the regclass whose need is being satisfied.
2355 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2356 so that this register can count off against them.
2357 MAX_NONGROUPS is 0 if this register is part of a group.
2358 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2359
2360 static int
2361 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2362 int i;
2363 int class;
2364 int *max_needs;
2365 int *max_nongroups;
2366 int global;
2367 FILE *dumpfile;
2368 {
2369 register enum reg_class *p;
2370 int val;
2371 int regno = potential_reload_regs[i];
2372
2373 if (i >= FIRST_PSEUDO_REGISTER)
2374 abort (); /* Caller failed to find any register. */
2375
2376 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2377 {
2378 static char *reg_class_names[] = REG_CLASS_NAMES;
2379 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
2380 This may be due to a compiler bug or to impossible asm\n\
2381 statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2382 }
2383
2384 /* Make reg REGNO an additional reload reg. */
2385
2386 potential_reload_regs[i] = -1;
2387 spill_regs[n_spills] = regno;
2388 spill_reg_order[regno] = n_spills;
2389 if (dumpfile)
2390 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2391
2392 /* Clear off the needs we just satisfied. */
2393
2394 max_needs[class]--;
2395 p = reg_class_superclasses[class];
2396 while (*p != LIM_REG_CLASSES)
2397 max_needs[(int) *p++]--;
2398
2399 if (max_nongroups && max_nongroups[class] > 0)
2400 {
2401 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2402 max_nongroups[class]--;
2403 p = reg_class_superclasses[class];
2404 while (*p != LIM_REG_CLASSES)
2405 max_nongroups[(int) *p++]--;
2406 }
2407
2408 /* Spill every pseudo reg that was allocated to this reg
2409 or to something that overlaps this reg. */
2410
2411 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2412
2413 /* If there are some registers still to eliminate and this register
2414 wasn't ever used before, additional stack space may have to be
2415 allocated to store this register. Thus, we may have changed the offset
2416 between the stack and frame pointers, so mark that something has changed.
2417 (If new pseudos were spilled, thus requiring more space, VAL would have
2418 been set non-zero by the call to spill_hard_reg above since additional
2419 reloads may be needed in that case.
2420      reloads may be needed in that case.)
2421 One might think that we need only set VAL to 1 if this is a call-used
2422 register. However, the set of registers that must be saved by the
2423 prologue is not identical to the call-used set. For example, the
2424 register used by the call insn for the return PC is a call-used register,
2425 but must be saved by the prologue. */
2426 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2427 val = 1;
2428
2429 regs_ever_live[spill_regs[n_spills]] = 1;
2430 n_spills++;
2431
2432 return val;
2433 }
2434 \f
2435 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2436 data that is dead in INSN. */
2437
2438 static void
2439 delete_dead_insn (insn)
2440 rtx insn;
2441 {
2442 rtx prev = prev_real_insn (insn);
2443 rtx prev_dest;
2444
2445 /* If the previous insn sets a register that dies in our insn, delete it
2446 too. */
2447 if (prev && GET_CODE (PATTERN (prev)) == SET
2448 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2449 && reg_mentioned_p (prev_dest, PATTERN (insn))
2450 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2451 delete_dead_insn (prev);
2452
2453 PUT_CODE (insn, NOTE);
2454 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2455 NOTE_SOURCE_FILE (insn) = 0;
2456 }
2457
2458 /* Modify the home of pseudo-reg I.
2459 The new home is present in reg_renumber[I].
2460
2461 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2462 or it may be -1, meaning there is none or it is not relevant.
2463 This is used so that all pseudos spilled from a given hard reg
2464 can share one stack slot. */
2465
2466 static void
2467 alter_reg (i, from_reg)
2468 register int i;
2469 int from_reg;
2470 {
2471 /* When outputting an inline function, this can happen
2472 for a reg that isn't actually used. */
2473 if (regno_reg_rtx[i] == 0)
2474 return;
2475
2476 /* If the reg got changed to a MEM at rtl-generation time,
2477 ignore it. */
2478 if (GET_CODE (regno_reg_rtx[i]) != REG)
2479 return;
2480
2481 /* Modify the reg-rtx to contain the new hard reg
2482 number or else to contain its pseudo reg number. */
2483 REGNO (regno_reg_rtx[i])
2484 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2485
2486 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2487 allocate a stack slot for it. */
2488
2489 if (reg_renumber[i] < 0
2490 && REG_N_REFS (i) > 0
2491 && reg_equiv_constant[i] == 0
2492 && reg_equiv_memory_loc[i] == 0)
2493 {
2494 register rtx x;
2495 int inherent_size = PSEUDO_REGNO_BYTES (i);
2496 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2497 int adjust = 0;
2498
2499 /* Each pseudo reg has an inherent size which comes from its own mode,
2500 and a total size which provides room for paradoxical subregs
2501 which refer to the pseudo reg in wider modes.
2502
2503 We can use a slot already allocated if it provides both
2504 enough inherent space and enough total space.
2505 Otherwise, we allocate a new slot, making sure that it has no less
2506          inherent space, and no less total space, than the previous slot.  */
2507 if (from_reg == -1)
2508 {
2509 /* No known place to spill from => no slot to reuse. */
2510 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2511 inherent_size == total_size ? 0 : -1);
2512 if (BYTES_BIG_ENDIAN)
2513 /* Cancel the big-endian correction done in assign_stack_local.
2514 Get the address of the beginning of the slot.
2515 This is so we can do a big-endian correction unconditionally
2516 below. */
2517 adjust = inherent_size - total_size;
2518
2519 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2520 }
2521 /* Reuse a stack slot if possible. */
2522 else if (spill_stack_slot[from_reg] != 0
2523 && spill_stack_slot_width[from_reg] >= total_size
2524 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2525 >= inherent_size))
2526 x = spill_stack_slot[from_reg];
2527 /* Allocate a bigger slot. */
2528 else
2529 {
2530 /* Compute maximum size needed, both for inherent size
2531 and for total size. */
2532 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2533 rtx stack_slot;
2534 if (spill_stack_slot[from_reg])
2535 {
2536 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2537 > inherent_size)
2538 mode = GET_MODE (spill_stack_slot[from_reg]);
2539 if (spill_stack_slot_width[from_reg] > total_size)
2540 total_size = spill_stack_slot_width[from_reg];
2541 }
2542 /* Make a slot with that size. */
2543 x = assign_stack_local (mode, total_size,
2544 inherent_size == total_size ? 0 : -1);
2545 stack_slot = x;
2546 if (BYTES_BIG_ENDIAN)
2547 {
2548 /* Cancel the big-endian correction done in assign_stack_local.
2549 Get the address of the beginning of the slot.
2550 This is so we can do a big-endian correction unconditionally
2551 below. */
2552 adjust = GET_MODE_SIZE (mode) - total_size;
2553 if (adjust)
2554 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2555 * BITS_PER_UNIT,
2556 MODE_INT, 1),
2557 plus_constant (XEXP (x, 0), adjust));
2558 }
2559 spill_stack_slot[from_reg] = stack_slot;
2560 spill_stack_slot_width[from_reg] = total_size;
2561 }
2562
2563 /* On a big endian machine, the "address" of the slot
2564 is the address of the low part that fits its inherent mode. */
2565 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2566 adjust += (total_size - inherent_size);
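       /* For instance (sizes illustrative): an SImode pseudo whose widest
          reference is a paradoxical DImode subreg, reusing an 8-byte slot
          on a big-endian target, ends up with adjust == 4, so the MEM
          built below addresses the four low-order bytes at the high end
          of the slot.  */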
2567
2568 /* If we have any adjustment to make, or if the stack slot is the
2569 wrong mode, make a new stack slot. */
2570 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2571 {
2572 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2573 plus_constant (XEXP (x, 0), adjust));
2574 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2575 }
2576
2577 /* Save the stack slot for later. */
2578 reg_equiv_memory_loc[i] = x;
2579 }
2580 }
2581
2582 /* Mark the slots in regs_ever_live for the hard regs
2583 used by pseudo-reg number REGNO. */
2584
2585 void
2586 mark_home_live (regno)
2587 int regno;
2588 {
2589 register int i, lim;
2590 i = reg_renumber[regno];
2591 if (i < 0)
2592 return;
2593 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2594 while (i < lim)
2595 regs_ever_live[i++] = 1;
2596 }
2597
2598 /* Mark the registers used in SCRATCH as being live. */
2599
2600 static void
2601 mark_scratch_live (scratch)
2602 rtx scratch;
2603 {
2604 register int i;
2605 int regno = REGNO (scratch);
2606 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2607
2608 for (i = regno; i < lim; i++)
2609 regs_ever_live[i] = 1;
2610 }
2611 \f
2612 /* This function handles the tracking of elimination offsets around branches.
2613
2614 X is a piece of RTL being scanned.
2615
2616 INSN is the insn that it came from, if any.
2617
2618 INITIAL_P is non-zero if we are to set the offset to be the initial
2619 offset and zero if we are setting the offset of the label to be the
2620 current offset. */
2621
2622 static void
2623 set_label_offsets (x, insn, initial_p)
2624 rtx x;
2625 rtx insn;
2626 int initial_p;
2627 {
2628 enum rtx_code code = GET_CODE (x);
2629 rtx tem;
2630 int i;
2631 struct elim_table *p;
2632
2633 switch (code)
2634 {
2635 case LABEL_REF:
2636 if (LABEL_REF_NONLOCAL_P (x))
2637 return;
2638
2639 x = XEXP (x, 0);
2640
2641 /* ... fall through ... */
2642
2643 case CODE_LABEL:
2644 /* If we know nothing about this label, set the desired offsets. Note
2645 that this sets the offset at a label to be the offset before a label
2646 if we don't know anything about the label. This is not correct for
2647 the label after a BARRIER, but is the best guess we can make. If
2648 we guessed wrong, we will suppress an elimination that might have
2649 been possible had we been able to guess correctly. */
2650
2651 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2652 {
2653 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2654 offsets_at[CODE_LABEL_NUMBER (x)][i]
2655 = (initial_p ? reg_eliminate[i].initial_offset
2656 : reg_eliminate[i].offset);
2657 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2658 }
2659
2660 /* Otherwise, if this is the definition of a label and it is
2661 preceded by a BARRIER, set our offsets to the known offset of
2662 that label. */
2663
2664 else if (x == insn
2665 && (tem = prev_nonnote_insn (insn)) != 0
2666 && GET_CODE (tem) == BARRIER)
2667 {
2668 num_not_at_initial_offset = 0;
2669 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2670 {
2671 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2672 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2673 if (reg_eliminate[i].can_eliminate
2674 && (reg_eliminate[i].offset
2675 != reg_eliminate[i].initial_offset))
2676 num_not_at_initial_offset++;
2677 }
2678 }
2679
2680 else
2681 /* If neither of the above cases is true, compare each offset
2682 with those previously recorded and suppress any eliminations
2683 where the offsets disagree. */
2684
2685 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2686 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2687 != (initial_p ? reg_eliminate[i].initial_offset
2688 : reg_eliminate[i].offset))
2689 reg_eliminate[i].can_eliminate = 0;
2690
2691 return;
2692
2693 case JUMP_INSN:
2694 set_label_offsets (PATTERN (insn), insn, initial_p);
2695
2696 /* ... fall through ... */
2697
2698 case INSN:
2699 case CALL_INSN:
2700 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2701 and hence must have all eliminations at their initial offsets. */
2702 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2703 if (REG_NOTE_KIND (tem) == REG_LABEL)
2704 set_label_offsets (XEXP (tem, 0), insn, 1);
2705 return;
2706
2707 case ADDR_VEC:
2708 case ADDR_DIFF_VEC:
2709 /* Each of the labels in the address vector must be at their initial
2710          offsets.  We want the first field for ADDR_VEC and the second
2711 field for ADDR_DIFF_VEC. */
2712
2713 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2714 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2715 insn, initial_p);
2716 return;
2717
2718 case SET:
2719 /* We only care about setting PC. If the source is not RETURN,
2720 IF_THEN_ELSE, or a label, disable any eliminations not at
2721 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2722 isn't one of those possibilities. For branches to a label,
2723 call ourselves recursively.
2724
2725 Note that this can disable elimination unnecessarily when we have
2726 a non-local goto since it will look like a non-constant jump to
2727 someplace in the current function. This isn't a significant
2728 problem since such jumps will normally be when all elimination
2729 pairs are back to their initial offsets. */
2730
2731 if (SET_DEST (x) != pc_rtx)
2732 return;
2733
2734 switch (GET_CODE (SET_SRC (x)))
2735 {
2736 case PC:
2737 case RETURN:
2738 return;
2739
2740 case LABEL_REF:
2741 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2742 return;
2743
2744 case IF_THEN_ELSE:
2745 tem = XEXP (SET_SRC (x), 1);
2746 if (GET_CODE (tem) == LABEL_REF)
2747 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2748 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2749 break;
2750
2751 tem = XEXP (SET_SRC (x), 2);
2752 if (GET_CODE (tem) == LABEL_REF)
2753 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2754 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2755 break;
2756 return;
2757
2758 default:
2759 break;
2760 }
2761
2762 /* If we reach here, all eliminations must be at their initial
2763 offset because we are doing a jump to a variable address. */
2764 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2765 if (p->offset != p->initial_offset)
2766 p->can_eliminate = 0;
2767 break;
2768
2769 default:
2770 break;
2771 }
2772 }
2773 \f
2774 /* Used for communication between the next two functions to properly share
2775 the vector for an ASM_OPERANDS. */
2776
2777 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2778
2779 /* Scan X and replace any eliminable registers (such as fp) with a
2780 replacement (such as sp), plus an offset.
2781
2782 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2783 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2784 MEM, we are allowed to replace a sum of a register and the constant zero
2785 with the register, which we cannot do outside a MEM. In addition, we need
2786 to record the fact that a register is referenced outside a MEM.
2787
2788 If INSN is an insn, it is the insn containing X. If we replace a REG
2789 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2790 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2791    the REG is being modified.
2792
2793 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2794 That's used when we eliminate in expressions stored in notes.
2795 This means, do not set ref_outside_mem even if the reference
2796 is outside of MEMs.
2797
2798 If we see a modification to a register we know about, take the
2799 appropriate action (see case SET, below).
2800
2801    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2802 replacements done assuming all offsets are at their initial values. If
2803 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2804 encounter, return the actual location so that find_reloads will do
2805 the proper thing. */
2806
2807 rtx
2808 eliminate_regs (x, mem_mode, insn, storing)
2809 rtx x;
2810 enum machine_mode mem_mode;
2811 rtx insn;
2812 int storing;
2813 {
2814 enum rtx_code code = GET_CODE (x);
2815 struct elim_table *ep;
2816 int regno;
2817 rtx new;
2818 int i, j;
2819 char *fmt;
2820 int copied = 0;
2821
2822 switch (code)
2823 {
2824 case CONST_INT:
2825 case CONST_DOUBLE:
2826 case CONST:
2827 case SYMBOL_REF:
2828 case CODE_LABEL:
2829 case PC:
2830 case CC0:
2831 case ASM_INPUT:
2832 case ADDR_VEC:
2833 case ADDR_DIFF_VEC:
2834 case RETURN:
2835 return x;
2836
2837 case ADDRESSOF:
2838 /* This is only for the benefit of the debugging backends, which call
2839 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2840 removed after CSE. */
2841 new = eliminate_regs (XEXP (x, 0), 0, insn, 0);
2842 if (GET_CODE (new) == MEM)
2843 return XEXP (new, 0);
2844 return x;
2845
2846 case REG:
2847 regno = REGNO (x);
2848
2849 /* First handle the case where we encounter a bare register that
2850 is eliminable. Replace it with a PLUS. */
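       /* For example (offset illustrative): once the frame pointer
          eliminates to the stack pointer at a previous offset of 16, a
          bare (reg fp) becomes (plus (reg sp) (const_int 16)).  */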
2851 if (regno < FIRST_PSEUDO_REGISTER)
2852 {
2853 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2854 ep++)
2855 if (ep->from_rtx == x && ep->can_eliminate)
2856 {
2857 if (! mem_mode
2858 /* Refs inside notes don't count for this purpose. */
2859 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2860 || GET_CODE (insn) == INSN_LIST)))
2861 ep->ref_outside_mem = 1;
2862 return plus_constant (ep->to_rtx, ep->previous_offset);
2863 }
2864
2865 }
2866 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2867 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2868 {
2869 /* In this case, find_reloads would attempt to either use an
2870 incorrect address (if something is not at its initial offset)
2871              incorrect address (if something is not at its initial offset)
2872              or substitute a replaced address into an insn (which loses
2872 if the offset is changed by some later action). So we simply
2873 return the replaced stack slot (assuming it is changed by
2874 elimination) and ignore the fact that this is actually a
2875 reference to the pseudo. Ensure we make a copy of the
2876 address in case it is shared. */
2877 new = eliminate_regs (reg_equiv_memory_loc[regno],
2878 mem_mode, insn, 0);
2879 if (new != reg_equiv_memory_loc[regno])
2880 {
2881 cannot_omit_stores[regno] = 1;
2882 return copy_rtx (new);
2883 }
2884 }
2885 return x;
2886
2887 case PLUS:
2888 /* If this is the sum of an eliminable register and a constant, rework
2889 the sum. */
2890 if (GET_CODE (XEXP (x, 0)) == REG
2891 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2892 && CONSTANT_P (XEXP (x, 1)))
2893 {
2894 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2895 ep++)
2896 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2897 {
2898 if (! mem_mode
2899 /* Refs inside notes don't count for this purpose. */
2900 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2901 || GET_CODE (insn) == INSN_LIST)))
2902 ep->ref_outside_mem = 1;
2903
2904 /* The only time we want to replace a PLUS with a REG (this
2905 occurs when the constant operand of the PLUS is the negative
2906 of the offset) is when we are inside a MEM. We won't want
2907 to do so at other times because that would change the
2908 structure of the insn in a way that reload can't handle.
2909 We special-case the commonest situation in
2910 eliminate_regs_in_insn, so just replace a PLUS with a
2911 PLUS here, unless inside a MEM. */
2912 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2913 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2914 return ep->to_rtx;
2915 else
2916 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2917 plus_constant (XEXP (x, 1),
2918 ep->previous_offset));
2919 }
2920
2921 /* If the register is not eliminable, we are done since the other
2922 operand is a constant. */
2923 return x;
2924 }
2925
2926 /* If this is part of an address, we want to bring any constant to the
2927 outermost PLUS. We will do this by doing register replacement in
2928 our operands and seeing if a constant shows up in one of them.
2929
2930 We assume here this is part of an address (or a "load address" insn)
2931 since an eliminable register is not likely to appear in any other
2932 context.
2933
2934 If we have (plus (eliminable) (reg)), we want to produce
2935          (plus (plus (replacement) (reg)) (const)).  If this was part of a
2936 normal add insn, (plus (replacement) (reg)) will be pushed as a
2937 reload. This is the desired action. */
2938
2939 {
2940 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2941 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2942
2943 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2944 {
2945 /* If one side is a PLUS and the other side is a pseudo that
2946 didn't get a hard register but has a reg_equiv_constant,
2947 we must replace the constant here since it may no longer
2948 be in the position of any operand. */
2949 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2950 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2951 && reg_renumber[REGNO (new1)] < 0
2952 && reg_equiv_constant != 0
2953 && reg_equiv_constant[REGNO (new1)] != 0)
2954 new1 = reg_equiv_constant[REGNO (new1)];
2955 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2956 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2957 && reg_renumber[REGNO (new0)] < 0
2958 && reg_equiv_constant[REGNO (new0)] != 0)
2959 new0 = reg_equiv_constant[REGNO (new0)];
2960
2961 new = form_sum (new0, new1);
2962
2963 /* As above, if we are not inside a MEM we do not want to
2964 turn a PLUS into something else. We might try to do so here
2965 for an addition of 0 if we aren't optimizing. */
2966 if (! mem_mode && GET_CODE (new) != PLUS)
2967 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2968 else
2969 return new;
2970 }
2971 }
2972 return x;
2973
2974 case MULT:
2975 /* If this is the product of an eliminable register and a
2976          constant, apply the distributive law and move the constant out
2977 so that we have (plus (mult ..) ..). This is needed in order
2978 to keep load-address insns valid. This case is pathological.
2979 We ignore the possibility of overflow here. */
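       /* For example (offset illustrative): with fp eliminating to sp at a
          previous offset of 16, (mult (reg fp) (const_int 4)) becomes
          (plus (mult (reg sp) (const_int 4)) (const_int 64)).  */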
2980 if (GET_CODE (XEXP (x, 0)) == REG
2981 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2982 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2983 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2984 ep++)
2985 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2986 {
2987 if (! mem_mode
2988 /* Refs inside notes don't count for this purpose. */
2989 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2990 || GET_CODE (insn) == INSN_LIST)))
2991 ep->ref_outside_mem = 1;
2992
2993 return
2994 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2995 ep->previous_offset * INTVAL (XEXP (x, 1)));
2996 }
2997
2998 /* ... fall through ... */
2999
3000 case CALL:
3001 case COMPARE:
3002 case MINUS:
3003 case DIV: case UDIV:
3004 case MOD: case UMOD:
3005 case AND: case IOR: case XOR:
3006 case ROTATERT: case ROTATE:
3007 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3008 case NE: case EQ:
3009 case GE: case GT: case GEU: case GTU:
3010 case LE: case LT: case LEU: case LTU:
3011 {
3012 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3013 rtx new1
3014 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
3015
3016 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3017 return gen_rtx (code, GET_MODE (x), new0, new1);
3018 }
3019 return x;
3020
3021 case EXPR_LIST:
3022 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3023 if (XEXP (x, 0))
3024 {
3025 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3026 if (new != XEXP (x, 0))
3027 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3028 }
3029
3030 /* ... fall through ... */
3031
3032 case INSN_LIST:
3033 /* Now do eliminations in the rest of the chain. If this was
3034 an EXPR_LIST, this might result in allocating more memory than is
3035 strictly needed, but it simplifies the code. */
3036 if (XEXP (x, 1))
3037 {
3038 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
3039 if (new != XEXP (x, 1))
3040 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3041 }
3042 return x;
3043
3044 case PRE_INC:
3045 case POST_INC:
3046 case PRE_DEC:
3047 case POST_DEC:
3048 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3049 if (ep->to_rtx == XEXP (x, 0))
3050 {
3051 int size = GET_MODE_SIZE (mem_mode);
3052
3053 /* If more bytes than MEM_MODE are pushed, account for them. */
3054 #ifdef PUSH_ROUNDING
3055 if (ep->to_rtx == stack_pointer_rtx)
3056 size = PUSH_ROUNDING (size);
3057 #endif
3058 if (code == PRE_DEC || code == POST_DEC)
3059 ep->offset += size;
3060 else
3061 ep->offset -= size;
3062 }
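       /* For instance (sizes illustrative): a push through
          (mem:SI (pre_dec (reg sp))) lowers sp by 4 bytes (or by
          PUSH_ROUNDING (4)), so every elimination whose replacement is sp
          sees its offset grow by that amount; PRE_INC and POST_INC shrink
          it instead.  */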
3063
3064 /* Fall through to generic unary operation case. */
3065 case STRICT_LOW_PART:
3066 case NEG: case NOT:
3067 case SIGN_EXTEND: case ZERO_EXTEND:
3068 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3069 case FLOAT: case FIX:
3070 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3071 case ABS:
3072 case SQRT:
3073 case FFS:
3074 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3075 if (new != XEXP (x, 0))
3076 return gen_rtx (code, GET_MODE (x), new);
3077 return x;
3078
3079 case SUBREG:
3080 /* Similar to above processing, but preserve SUBREG_WORD.
3081 Convert (subreg (mem)) to (mem) if not paradoxical.
3082 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3083 pseudo didn't get a hard reg, we must replace this with the
3084 eliminated version of the memory location because push_reloads
3085 may do the replacement in certain circumstances. */
3086 if (GET_CODE (SUBREG_REG (x)) == REG
3087 && (GET_MODE_SIZE (GET_MODE (x))
3088 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3089 && reg_equiv_memory_loc != 0
3090 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3091 {
3092 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3093 mem_mode, insn, 0);
3094
3095 /* If we didn't change anything, we must retain the pseudo. */
3096 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3097 new = SUBREG_REG (x);
3098 else
3099 {
3100 /* Otherwise, ensure NEW isn't shared in case we have to reload
3101 it. */
3102 new = copy_rtx (new);
3103
3104 /* In this case, we must show that the pseudo is used in this
3105 insn so that delete_output_reload will do the right thing. */
3106 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3107 && GET_CODE (insn) != INSN_LIST)
3108 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3109 insn);
3110 }
3111 }
3112 else
3113 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3114
3115 if (new != XEXP (x, 0))
3116 {
3117 int x_size = GET_MODE_SIZE (GET_MODE (x));
3118 int new_size = GET_MODE_SIZE (GET_MODE (new));
3119
3120 /* When asked to spill a partial word subreg, we need to go
3121 ahead and spill the whole thing against the possibility
3122 that we reload the whole reg and find garbage at the top. */
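           /* For instance (assuming 4-byte words): storing through
              (subreg:HI (reg:SI pseudo)) when the pseudo's slot is the
              4-byte SImode MEM: HImode and SImode occupy the same single
              word, so the whole SImode MEM is returned below rather than
              a 2-byte piece of it.  */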
3123 if (storing
3124 && GET_CODE (new) == MEM
3125 && x_size < new_size
3126 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3127 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3128 return new;
3129 else if (GET_CODE (new) == MEM
3130 && x_size <= new_size
3131 #ifdef LOAD_EXTEND_OP
3132 /* On these machines we will be reloading what is
3133 inside the SUBREG if it originally was a pseudo and
3134 the inner and outer modes are both a word or
3135 smaller. So leave the SUBREG then. */
3136 && ! (GET_CODE (SUBREG_REG (x)) == REG
3137 && x_size <= UNITS_PER_WORD
3138 && new_size <= UNITS_PER_WORD
3139 && x_size > new_size
3140 && INTEGRAL_MODE_P (GET_MODE (new))
3141 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3142 #endif
3143 )
3144 {
3145 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3146 enum machine_mode mode = GET_MODE (x);
3147
3148 if (BYTES_BIG_ENDIAN)
3149 offset += (MIN (UNITS_PER_WORD,
3150 GET_MODE_SIZE (GET_MODE (new)))
3151 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3152
3153 PUT_MODE (new, mode);
3154 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3155 return new;
3156 }
3157 else
3158 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3159 }
3160
3161 return x;
3162
3163 case USE:
3164       /* If using a register that is the source of an elimination we still
3165          think can be performed, note that it cannot be performed since we
3166          don't know how this register is used.  */
3167 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3168 if (ep->from_rtx == XEXP (x, 0))
3169 ep->can_eliminate = 0;
3170
3171 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3172 if (new != XEXP (x, 0))
3173 return gen_rtx (code, GET_MODE (x), new);
3174 return x;
3175
3176 case CLOBBER:
3177 /* If clobbering a register that is the replacement register for an
3178 elimination we still think can be performed, note that it cannot
3179 be performed. Otherwise, we need not be concerned about it. */
3180 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3181 if (ep->to_rtx == XEXP (x, 0))
3182 ep->can_eliminate = 0;
3183
3184 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3185 if (new != XEXP (x, 0))
3186 return gen_rtx (code, GET_MODE (x), new);
3187 return x;
3188
3189 case ASM_OPERANDS:
3190 {
3191 rtx *temp_vec;
3192 /* Properly handle sharing input and constraint vectors. */
3193 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3194 {
3195 /* When we come to a new vector not seen before,
3196 scan all its elements; keep the old vector if none
3197 of them changes; otherwise, make a copy. */
3198 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3199 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3200 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3201 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3202 mem_mode, insn, 0);
3203
3204 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3205 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3206 break;
3207
3208 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3209 new_asm_operands_vec = old_asm_operands_vec;
3210 else
3211 new_asm_operands_vec
3212 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3213 }
3214
3215 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3216 if (new_asm_operands_vec == old_asm_operands_vec)
3217 return x;
3218
3219 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3220 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3221 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3222 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3223 ASM_OPERANDS_SOURCE_FILE (x),
3224 ASM_OPERANDS_SOURCE_LINE (x));
3225 new->volatil = x->volatil;
3226 return new;
3227 }
3228
3229 case SET:
3230 /* Check for setting a register that we know about. */
3231 if (GET_CODE (SET_DEST (x)) == REG)
3232 {
3233 /* See if this is setting the replacement register for an
3234 elimination.
3235
3236 If DEST is the hard frame pointer, we do nothing because we
3237 assume that all assignments to the frame pointer are for
3238 non-local gotos and are being done at a time when they are valid
3239 and do not disturb anything else. Some machines want to
3240 eliminate a fake argument pointer (or even a fake frame pointer)
3241 with either the real frame or the stack pointer. Assignments to
3242 the hard frame pointer must not prevent this elimination. */
3243
3244 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3245 ep++)
3246 if (ep->to_rtx == SET_DEST (x)
3247 && SET_DEST (x) != hard_frame_pointer_rtx)
3248 {
3249 /* If it is being incremented, adjust the offset. Otherwise,
3250 this elimination can't be done. */
3251 rtx src = SET_SRC (x);
3252
3253 if (GET_CODE (src) == PLUS
3254 && XEXP (src, 0) == SET_DEST (x)
3255 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3256 ep->offset -= INTVAL (XEXP (src, 1));
3257 else
3258 ep->can_eliminate = 0;
3259 }
3260
3261           /* Now check to see if we are assigning to a register that can be
3262 eliminated. If so, it must be as part of a PARALLEL, since we
3263 will not have been called if this is a single SET. So indicate
3264 that we can no longer eliminate this reg. */
3265 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3266 ep++)
3267 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3268 ep->can_eliminate = 0;
3269 }
3270
3271 /* Now avoid the loop below in this common case. */
3272 {
3273 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3274 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3275
3276 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3277 write a CLOBBER insn. */
3278 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3279 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3280 && GET_CODE (insn) != INSN_LIST)
3281 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3282
3283 /* If SET_DEST was a partial-word subreg, NEW0 may have been widened
3284 to spill the entire register (see SUBREG case above). If the
3285 widths of SET_DEST and NEW0 no longer match, adjust NEW1. */
3286 if (GET_MODE (SET_DEST (x)) != GET_MODE (new0))
3287 new1 = gen_rtx (SUBREG, GET_MODE (new0), new1, 0);
3288
3289 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3290 return gen_rtx (SET, VOIDmode, new0, new1);
3291 }
3292
3293 return x;
3294
3295 case MEM:
3296 /* This is only for the benefit of the debugging backends, which call
3297 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3298 removed after CSE. */
3299 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3300 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn, 0);
3301
3302 /* Our only special processing is to pass the mode of the MEM to our
3303 recursive call and copy the flags. While we are here, handle this
3304 case more efficiently. */
3305 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3306 if (new != XEXP (x, 0))
3307 {
3308 new = gen_rtx (MEM, GET_MODE (x), new);
3309 new->volatil = x->volatil;
3310 new->unchanging = x->unchanging;
3311 new->in_struct = x->in_struct;
3312 return new;
3313 }
3314 else
3315 return x;
3316
3317 default:
3318 break;
3319 }
3320
3321 /* Process each of our operands recursively. If any have changed, make a
3322 copy of the rtx. */
3323 fmt = GET_RTX_FORMAT (code);
3324 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3325 {
3326 if (*fmt == 'e')
3327 {
3328 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3329 if (new != XEXP (x, i) && ! copied)
3330 {
3331 rtx new_x = rtx_alloc (code);
3332 bcopy ((char *) x, (char *) new_x,
3333 (sizeof (*new_x) - sizeof (new_x->fld)
3334 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3335 x = new_x;
3336 copied = 1;
3337 }
3338 XEXP (x, i) = new;
3339 }
3340 else if (*fmt == 'E')
3341 {
3342 int copied_vec = 0;
3343 for (j = 0; j < XVECLEN (x, i); j++)
3344 {
3345 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3346 if (new != XVECEXP (x, i, j) && ! copied_vec)
3347 {
3348 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3349 XVEC (x, i)->elem);
3350 if (! copied)
3351 {
3352 rtx new_x = rtx_alloc (code);
3353 bcopy ((char *) x, (char *) new_x,
3354 (sizeof (*new_x) - sizeof (new_x->fld)
3355 + (sizeof (new_x->fld[0])
3356 * GET_RTX_LENGTH (code))));
3357 x = new_x;
3358 copied = 1;
3359 }
3360 XVEC (x, i) = new_v;
3361 copied_vec = 1;
3362 }
3363 XVECEXP (x, i, j) = new;
3364 }
3365 }
3366 }
3367
3368 return x;
3369 }
3370 \f
3371 /* Scan INSN and eliminate all eliminable registers in it.
3372
3373 If REPLACE is nonzero, do the replacement destructively. Also
3374    delete the insn as dead if it is setting an eliminable register.
3375
3376 If REPLACE is zero, do all our allocations in reload_obstack.
3377
3378 If no eliminations were done and this insn doesn't require any elimination
3379 processing (these are not identical conditions: it might be updating sp,
3380 but not referencing fp; this needs to be seen during reload_as_needed so
3381 that the offset between fp and sp can be taken into consideration), zero
3382 is returned. Otherwise, 1 is returned. */
3383
3384 static int
3385 eliminate_regs_in_insn (insn, replace)
3386 rtx insn;
3387 int replace;
3388 {
3389 rtx old_body = PATTERN (insn);
3390 rtx old_set = single_set (insn);
3391 rtx new_body;
3392 int val = 0;
3393 struct elim_table *ep;
3394
3395 if (! replace)
3396 push_obstacks (&reload_obstack, &reload_obstack);
3397
3398 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3399 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3400 {
3401 /* Check for setting an eliminable register. */
3402 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3403 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3404 {
3405 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3406 /* If this is setting the frame pointer register to the
3407 hardware frame pointer register and this is an elimination
3408 that will be done (tested above), this insn is really
3409 adjusting the frame pointer downward to compensate for
3410 the adjustment done before a nonlocal goto. */
3411 if (ep->from == FRAME_POINTER_REGNUM
3412 && ep->to == HARD_FRAME_POINTER_REGNUM)
3413 {
3414 rtx src = SET_SRC (old_set);
3415 int offset, ok = 0;
3416 rtx prev_insn, prev_set;
3417
3418 if (src == ep->to_rtx)
3419 offset = 0, ok = 1;
3420 else if (GET_CODE (src) == PLUS
3421 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3422 offset = INTVAL (XEXP (src, 0)), ok = 1;
3423 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3424 && (prev_set = single_set (prev_insn)) != 0
3425 && rtx_equal_p (SET_DEST (prev_set), src))
3426 {
3427 src = SET_SRC (prev_set);
3428 if (src == ep->to_rtx)
3429 offset = 0, ok = 1;
3430 else if (GET_CODE (src) == PLUS
3431 && GET_CODE (XEXP (src, 0)) == CONST_INT
3432 && XEXP (src, 1) == ep->to_rtx)
3433 offset = INTVAL (XEXP (src, 0)), ok = 1;
3434 else if (GET_CODE (src) == PLUS
3435 && GET_CODE (XEXP (src, 1)) == CONST_INT
3436 && XEXP (src, 0) == ep->to_rtx)
3437 offset = INTVAL (XEXP (src, 1)), ok = 1;
3438 }
3439
3440 if (ok)
3441 {
3442 if (replace)
3443 {
3444 rtx src
3445 = plus_constant (ep->to_rtx, offset - ep->offset);
3446
3447 /* First see if this insn remains valid when we
3448 make the change. If not, keep the INSN_CODE
3449                          the same and let reload fix it up.  */
3450 validate_change (insn, &SET_SRC (old_set), src, 1);
3451 validate_change (insn, &SET_DEST (old_set),
3452 ep->to_rtx, 1);
3453 if (! apply_change_group ())
3454 {
3455 SET_SRC (old_set) = src;
3456 SET_DEST (old_set) = ep->to_rtx;
3457 }
3458 }
3459
3460 val = 1;
3461 goto done;
3462 }
3463 }
3464 #endif
3465
3466 /* In this case this insn isn't serving a useful purpose. We
3467 will delete it in reload_as_needed once we know that this
3468 elimination is, in fact, being done.
3469
3470 If REPLACE isn't set, we can't delete this insn, but needn't
3471 process it since it won't be used unless something changes. */
3472 if (replace)
3473 delete_dead_insn (insn);
3474 val = 1;
3475 goto done;
3476 }
3477
3478 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3479 in the insn is the negative of the offset in FROM. Substitute
3480 (set (reg) (reg to)) for the insn and change its code.
3481
3482    We have to do this here, rather than in eliminate_regs, so that we can
3483 change the insn code. */
3484
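      /* Illustrative sketch (assumed numbers, not from the original source):
         if the fp-to-sp elimination currently has ep->offset == 16, then
            (set (reg 100) (plus (reg fp) (const_int -16)))
         is rewritten right here into the plain move
            (set (reg 100) (reg sp))
         and INSN_CODE is reset to -1 so the insn is re-recognized later.  */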
3485 if (GET_CODE (SET_SRC (old_set)) == PLUS
3486 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3487 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3488 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3489 ep++)
3490 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3491 && ep->can_eliminate)
3492 {
3493 /* We must stop at the first elimination that will be used.
3494 If this one would replace the PLUS with a REG, do it
3495 now. Otherwise, quit the loop and let eliminate_regs
3496 do its normal replacement. */
3497 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3498 {
3499 /* We assume here that we don't need a PARALLEL of
3500 any CLOBBERs for this assignment. There's not
3501 much we can do if we do need it. */
3502 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3503 SET_DEST (old_set), ep->to_rtx);
3504 INSN_CODE (insn) = -1;
3505 val = 1;
3506 goto done;
3507 }
3508
3509 break;
3510 }
3511 }
3512
3513 old_asm_operands_vec = 0;
3514
3515 /* Replace the body of this insn with a substituted form. If we changed
3516 something, return non-zero.
3517
3518 If we are replacing a body that was a (set X (plus Y Z)), try to
3519 re-recognize the insn. We do this in case we had a simple addition
3520 but now can do this as a load-address. This saves an insn in this
3521 common case. */
3522
3523 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3524 if (new_body != old_body)
3525 {
3526 /* If we aren't replacing things permanently and we changed something,
3527 make another copy to ensure that all the RTL is new. Otherwise
3528          things can go wrong if find_reloads swaps commutative operands
3529 and one is inside RTL that has been copied while the other is not. */
3530
3531 /* Don't copy an asm_operands because (1) there's no need and (2)
3532 copy_rtx can't do it properly when there are multiple outputs. */
3533 if (! replace && asm_noperands (old_body) < 0)
3534 new_body = copy_rtx (new_body);
3535
3536 /* If we had a move insn but now we don't, rerecognize it. This will
3537 cause spurious re-recognition if the old move had a PARALLEL since
3538 the new one still will, but we can't call single_set without
3539 having put NEW_BODY into the insn and the re-recognition won't
3540 hurt in this rare case. */
3541 if (old_set != 0
3542 && ((GET_CODE (SET_SRC (old_set)) == REG
3543 && (GET_CODE (new_body) != SET
3544 || GET_CODE (SET_SRC (new_body)) != REG))
3545 /* If this was a load from or store to memory, compare
3546 the MEM in recog_operand to the one in the insn. If they
3547 are not equal, then rerecognize the insn. */
3548 || (old_set != 0
3549 && ((GET_CODE (SET_SRC (old_set)) == MEM
3550 && SET_SRC (old_set) != recog_operand[1])
3551 || (GET_CODE (SET_DEST (old_set)) == MEM
3552 && SET_DEST (old_set) != recog_operand[0])))
3553 /* If this was an add insn before, rerecognize. */
3554 || GET_CODE (SET_SRC (old_set)) == PLUS))
3555 {
3556 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3557 /* If recognition fails, store the new body anyway.
3558 It's normal to have recognition failures here
3559 due to bizarre memory addresses; reloading will fix them. */
3560 PATTERN (insn) = new_body;
3561 }
3562 else
3563 PATTERN (insn) = new_body;
3564
3565 val = 1;
3566 }
3567
3568 /* Loop through all elimination pairs. See if any have changed and
3569 recalculate the number not at initial offset.
3570
3571 Compute the maximum offset (minimum offset if the stack does not
3572 grow downward) for each elimination pair.
3573
3574    We also detect cases where register elimination cannot be done,
3575 namely, if a register would be both changed and referenced outside a MEM
3576 in the resulting insn since such an insn is often undefined and, even if
3577 not, we cannot know what meaning will be given to it. Note that it is
3578 valid to have a register used in an address in an insn that changes it
3579 (presumably with a pre- or post-increment or decrement).
3580
3581 If anything changes, return nonzero. */
3582
3583 num_not_at_initial_offset = 0;
3584 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3585 {
3586 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3587 ep->can_eliminate = 0;
3588
3589 ep->ref_outside_mem = 0;
3590
3591 if (ep->previous_offset != ep->offset)
3592 val = 1;
3593
3594 ep->previous_offset = ep->offset;
3595 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3596 num_not_at_initial_offset++;
3597
3598 #ifdef STACK_GROWS_DOWNWARD
3599 ep->max_offset = MAX (ep->max_offset, ep->offset);
3600 #else
3601 ep->max_offset = MIN (ep->max_offset, ep->offset);
3602 #endif
3603 }
3604
3605 done:
3606 /* If we changed something, perform elimination in REG_NOTES. This is
3607 needed even when REPLACE is zero because a REG_DEAD note might refer
3608 to a register that we eliminate and could cause a different number
3609 of spill registers to be needed in the final reload pass than in
3610 the pre-passes. */
3611 if (val && REG_NOTES (insn) != 0)
3612 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3613
3614 if (! replace)
3615 pop_obstacks ();
3616
3617 return val;
3618 }
3619
3620 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3621 replacement we currently believe is valid, mark it as not eliminable if X
3622 modifies DEST in any way other than by adding a constant integer to it.
3623
3624    If DEST is the hard frame pointer, we do nothing because we assume that
3625    all assignments to it are for nonlocal gotos and are being
3626 done at a time when they are valid and do not disturb anything else.
3627 Some machines want to eliminate a fake argument pointer with either the
3628 frame or stack pointer. Assignments to the hard frame pointer must not
3629 prevent this elimination.
3630
3631 Called via note_stores from reload before starting its passes to scan
3632 the insns of the function. */
3633
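     /* Illustrative sketch (assumed elimination, not from the original
        source): with an fp-to-sp elimination still believed possible,
           (set (reg sp) (plus (reg sp) (const_int -4)))
        leaves it alone (sp only moves by a known constant), whereas
           (set (reg sp) (reg 100))
        makes mark_not_eliminable clear can_eliminate, since sp would then
        change by an unknown amount.  */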
3634 static void
3635 mark_not_eliminable (dest, x)
3636 rtx dest;
3637 rtx x;
3638 {
3639 register int i;
3640
3641 /* A SUBREG of a hard register here is just changing its mode. We should
3642 not see a SUBREG of an eliminable hard register, but check just in
3643 case. */
3644 if (GET_CODE (dest) == SUBREG)
3645 dest = SUBREG_REG (dest);
3646
3647 if (dest == hard_frame_pointer_rtx)
3648 return;
3649
3650 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3651 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3652 && (GET_CODE (x) != SET
3653 || GET_CODE (SET_SRC (x)) != PLUS
3654 || XEXP (SET_SRC (x), 0) != dest
3655 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3656 {
3657 reg_eliminate[i].can_eliminate_previous
3658 = reg_eliminate[i].can_eliminate = 0;
3659 num_eliminable--;
3660 }
3661 }
3662 \f
3663 /* Kick all pseudos out of hard register REGNO.
3664 If GLOBAL is nonzero, try to find someplace else to put them.
3665 If DUMPFILE is nonzero, log actions taken on that file.
3666
3667 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3668    because we found we can't eliminate some register.  In this case, no pseudos
3669 are allowed to be in the register, even if they are only in a block that
3670 doesn't require spill registers, unlike the case when we are spilling this
3671 hard reg to produce another spill register.
3672
3673 Return nonzero if any pseudos needed to be kicked out. */
3674
3675 static int
3676 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3677 register int regno;
3678 int global;
3679 FILE *dumpfile;
3680 int cant_eliminate;
3681 {
3682 enum reg_class class = REGNO_REG_CLASS (regno);
3683 int something_changed = 0;
3684 register int i;
3685
3686 SET_HARD_REG_BIT (forbidden_regs, regno);
3687
3688 if (cant_eliminate)
3689 regs_ever_live[regno] = 1;
3690
3691 /* Spill every pseudo reg that was allocated to this reg
3692 or to something that overlaps this reg. */
3693
3694 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3695 if (reg_renumber[i] >= 0
3696 && reg_renumber[i] <= regno
3697 && (reg_renumber[i]
3698 + HARD_REGNO_NREGS (reg_renumber[i],
3699 PSEUDO_REGNO_MODE (i))
3700 > regno))
3701 {
3702 /* If this register belongs solely to a basic block which needed no
3703 spilling of any class that this register is contained in,
3704 leave it be, unless we are spilling this register because
3705 it was a hard register that can't be eliminated. */
3706
3707 if (! cant_eliminate
3708 && basic_block_needs[0]
3709 && REG_BASIC_BLOCK (i) >= 0
3710 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3711 {
3712 enum reg_class *p;
3713
3714 for (p = reg_class_superclasses[(int) class];
3715 *p != LIM_REG_CLASSES; p++)
3716 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3717 break;
3718
3719 if (*p == LIM_REG_CLASSES)
3720 continue;
3721 }
3722
3723 /* Mark it as no longer having a hard register home. */
3724 reg_renumber[i] = -1;
3725 /* We will need to scan everything again. */
3726 something_changed = 1;
3727 if (global)
3728 retry_global_alloc (i, forbidden_regs);
3729
3730 alter_reg (i, regno);
3731 if (dumpfile)
3732 {
3733 if (reg_renumber[i] == -1)
3734 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3735 else
3736 fprintf (dumpfile, " Register %d now in %d.\n\n",
3737 i, reg_renumber[i]);
3738 }
3739 }
3740 for (i = 0; i < scratch_list_length; i++)
3741 {
3742 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3743 {
3744 if (! cant_eliminate && basic_block_needs[0]
3745 && ! basic_block_needs[(int) class][scratch_block[i]])
3746 {
3747 enum reg_class *p;
3748
3749 for (p = reg_class_superclasses[(int) class];
3750 *p != LIM_REG_CLASSES; p++)
3751 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3752 break;
3753
3754 if (*p == LIM_REG_CLASSES)
3755 continue;
3756 }
3757 PUT_CODE (scratch_list[i], SCRATCH);
3758 scratch_list[i] = 0;
3759 something_changed = 1;
3760 continue;
3761 }
3762 }
3763
3764 return something_changed;
3765 }
3766 \f
3767 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3768 Also mark any hard registers used to store user variables as
3769 forbidden from being used for spill registers. */
3770
3771 static void
3772 scan_paradoxical_subregs (x)
3773 register rtx x;
3774 {
3775 register int i;
3776 register char *fmt;
3777 register enum rtx_code code = GET_CODE (x);
3778
3779 switch (code)
3780 {
3781 case REG:
3782 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3783 && REG_USERVAR_P (x))
3784 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3785 return;
3786
3787 case CONST_INT:
3788 case CONST:
3789 case SYMBOL_REF:
3790 case LABEL_REF:
3791 case CONST_DOUBLE:
3792 case CC0:
3793 case PC:
3794 case USE:
3795 case CLOBBER:
3796 return;
3797
3798 case SUBREG:
3799 if (GET_CODE (SUBREG_REG (x)) == REG
3800 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3801 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3802 = GET_MODE_SIZE (GET_MODE (x));
3803 return;
3804
3805 default:
3806 break;
3807 }
3808
3809 fmt = GET_RTX_FORMAT (code);
3810 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3811 {
3812 if (fmt[i] == 'e')
3813 scan_paradoxical_subregs (XEXP (x, i));
3814 else if (fmt[i] == 'E')
3815 {
3816 register int j;
3817 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3818 scan_paradoxical_subregs (XVECEXP (x, i, j));
3819 }
3820 }
3821 }
3822 \f
3823 static int
3824 hard_reg_use_compare (p1p, p2p)
3825 const GENERIC_PTR p1p;
3826 const GENERIC_PTR p2p;
3827 {
3828 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3829 *p2 = (struct hard_reg_n_uses *)p2p;
3830 int tem = p1->uses - p2->uses;
3831 if (tem != 0) return tem;
3832 /* If regs are equally good, sort by regno,
3833 so that the results of qsort leave nothing to chance. */
3834 return p1->regno - p2->regno;
3835 }
3836
3837 /* Choose the order to consider regs for use as reload registers
3838 based on how much trouble would be caused by spilling one.
3839 Store them in order of decreasing preference in potential_reload_regs. */
3840
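     /* Worked example (illustrative numbers only, not from the original
        source): if the function has large == 1000 total pseudo references,
        a fixed register's use count is bumped below by 2*1000 + 2 = 2002 and
        an explicitly used register's by 1000 + 1 = 1001, so both sort after
        every ordinarily used register and end up at the back of
        potential_reload_regs.  */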
3841 static void
3842 order_regs_for_reload (global)
3843 int global;
3844 {
3845 register int i;
3846 register int o = 0;
3847 int large = 0;
3848
3849 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3850
3851 CLEAR_HARD_REG_SET (bad_spill_regs);
3852
3853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3854 potential_reload_regs[i] = -1;
3855
3856 /* Count number of uses of each hard reg by pseudo regs allocated to it
3857 and then order them by decreasing use. */
3858
3859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3860 {
3861 hard_reg_n_uses[i].uses = 0;
3862 hard_reg_n_uses[i].regno = i;
3863 }
3864
3865 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3866 {
3867 int regno = reg_renumber[i];
3868 if (regno >= 0)
3869 {
3870 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3871 while (regno < lim)
3872 {
3873 /* If allocated by local-alloc, show more uses since
3874 we're not going to be able to reallocate it, but
3875 we might if allocated by global alloc. */
3876 if (global && reg_allocno[i] < 0)
3877 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3878
3879 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
3880 }
3881 }
3882 large += REG_N_REFS (i);
3883 }
3884
3885 /* Now fixed registers (which cannot safely be used for reloading)
3886 get a very high use count so they will be considered least desirable.
3887 Registers used explicitly in the rtl code are almost as bad. */
3888
3889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3890 {
3891 if (fixed_regs[i])
3892 {
3893 hard_reg_n_uses[i].uses += 2 * large + 2;
3894 SET_HARD_REG_BIT (bad_spill_regs, i);
3895 }
3896 else if (regs_explicitly_used[i])
3897 {
3898 hard_reg_n_uses[i].uses += large + 1;
3899 if (! SMALL_REGISTER_CLASSES)
3900 /* ??? We are doing this here because of the potential
3901 that bad code may be generated if a register explicitly
3902 used in an insn was used as a spill register for that
3903                insn.  But not using these as spill registers may lose
3904 on some machine. We'll have to see how this works out. */
3905 SET_HARD_REG_BIT (bad_spill_regs, i);
3906 }
3907 }
3908 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3909 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3910
3911 #ifdef ELIMINABLE_REGS
3912 /* If registers other than the frame pointer are eliminable, mark them as
3913 poor choices. */
3914 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3915 {
3916 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3917 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3918 }
3919 #endif
3920
3921 /* Prefer registers not so far used, for use in temporary loading.
3922 Among them, if REG_ALLOC_ORDER is defined, use that order.
3923 Otherwise, prefer registers not preserved by calls. */
3924
3925 #ifdef REG_ALLOC_ORDER
3926 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3927 {
3928 int regno = reg_alloc_order[i];
3929
3930 if (hard_reg_n_uses[regno].uses == 0)
3931 potential_reload_regs[o++] = regno;
3932 }
3933 #else
3934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3935 {
3936 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3937 potential_reload_regs[o++] = i;
3938 }
3939 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3940 {
3941 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3942 potential_reload_regs[o++] = i;
3943 }
3944 #endif
3945
3946 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3947 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3948
3949 /* Now add the regs that are already used,
3950 preferring those used less often. The fixed and otherwise forbidden
3951 registers will be at the end of this list. */
3952
3953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3954 if (hard_reg_n_uses[i].uses != 0)
3955 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3956 }
3957 \f
3958 /* Used in reload_as_needed to sort the spilled regs. */
3959
3960 static int
3961 compare_spill_regs (r1p, r2p)
3962 const GENERIC_PTR r1p;
3963 const GENERIC_PTR r2p;
3964 {
3965 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3966 return r1 - r2;
3967 }
3968
3969 /* Reload pseudo-registers into hard regs around each insn as needed.
3970 Additional register load insns are output before the insn that needs it
3971 and perhaps store insns after insns that modify the reloaded pseudo reg.
3972
3973 reg_last_reload_reg and reg_reloaded_contents keep track of
3974 which registers are already available in reload registers.
3975 We update these for the reloads that we perform,
3976 as the insns are scanned. */
3977
3978 static void
3979 reload_as_needed (first, live_known)
3980 rtx first;
3981 int live_known;
3982 {
3983 register rtx insn;
3984 register int i;
3985 int this_block = 0;
3986 rtx x;
3987 rtx after_call = 0;
3988
3989 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3990 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3991 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3992 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3993 reg_has_output_reload = (char *) alloca (max_regno);
3994 for (i = 0; i < n_spills; i++)
3995 {
3996 reg_reloaded_contents[i] = -1;
3997 reg_reloaded_insn[i] = 0;
3998 }
3999
4000 /* Reset all offsets on eliminable registers to their initial values. */
4001 #ifdef ELIMINABLE_REGS
4002 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4003 {
4004 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
4005 reg_eliminate[i].initial_offset);
4006 reg_eliminate[i].previous_offset
4007 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4008 }
4009 #else
4010 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4011 reg_eliminate[0].previous_offset
4012 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4013 #endif
4014
4015 num_not_at_initial_offset = 0;
4016
4017 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4018 pack registers with group needs. */
4019 if (n_spills > 1)
4020 {
4021 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4022 for (i = 0; i < n_spills; i++)
4023 spill_reg_order[spill_regs[i]] = i;
4024 }
4025
4026 for (insn = first; insn;)
4027 {
4028 register rtx next = NEXT_INSN (insn);
4029
4030 /* Notice when we move to a new basic block. */
4031 if (live_known && this_block + 1 < n_basic_blocks
4032 && insn == basic_block_head[this_block+1])
4033 ++this_block;
4034
4035 /* If we pass a label, copy the offsets from the label information
4036 into the current offsets of each elimination. */
4037 if (GET_CODE (insn) == CODE_LABEL)
4038 {
4039 num_not_at_initial_offset = 0;
4040 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4041 {
4042 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4043 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4044 if (reg_eliminate[i].can_eliminate
4045 && (reg_eliminate[i].offset
4046 != reg_eliminate[i].initial_offset))
4047 num_not_at_initial_offset++;
4048 }
4049 }
4050
4051 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4052 {
4053 rtx avoid_return_reg = 0;
4054 rtx oldpat = PATTERN (insn);
4055
4056 /* Set avoid_return_reg if this is an insn
4057 that might use the value of a function call. */
4058 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4059 {
4060 if (GET_CODE (PATTERN (insn)) == SET)
4061 after_call = SET_DEST (PATTERN (insn));
4062 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4063 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4064 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4065 else
4066 after_call = 0;
4067 }
4068 else if (SMALL_REGISTER_CLASSES && after_call != 0
4069 && !(GET_CODE (PATTERN (insn)) == SET
4070 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4071 {
4072 if (reg_referenced_p (after_call, PATTERN (insn)))
4073 avoid_return_reg = after_call;
4074 after_call = 0;
4075 }
4076
4077           /* If this is a USE or CLOBBER of a MEM, ensure that any
4078 references to eliminable registers have been removed. */
4079
4080 if ((GET_CODE (PATTERN (insn)) == USE
4081 || GET_CODE (PATTERN (insn)) == CLOBBER)
4082 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4083 XEXP (XEXP (PATTERN (insn), 0), 0)
4084 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4085 GET_MODE (XEXP (PATTERN (insn), 0)),
4086 NULL_RTX, 0);
4087
4088 /* If we need to do register elimination processing, do so.
4089 This might delete the insn, in which case we are done. */
4090 if (num_eliminable && GET_MODE (insn) == QImode)
4091 {
4092 eliminate_regs_in_insn (insn, 1);
4093 if (GET_CODE (insn) == NOTE)
4094 {
4095 insn = next;
4096 continue;
4097 }
4098 }
4099
4100 if (GET_MODE (insn) == VOIDmode)
4101 n_reloads = 0;
4102 /* First find the pseudo regs that must be reloaded for this insn.
4103 This info is returned in the tables reload_... (see reload.h).
4104 Also modify the body of INSN by substituting RELOAD
4105 rtx's for those pseudo regs. */
4106 else
4107 {
4108 bzero (reg_has_output_reload, max_regno);
4109 CLEAR_HARD_REG_SET (reg_is_output_reload);
4110
4111 find_reloads (insn, 1, spill_indirect_levels, live_known,
4112 spill_reg_order);
4113 }
4114
4115 if (n_reloads > 0)
4116 {
4117 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4118 rtx p;
4119 int class;
4120
4121 /* If this block has not had spilling done for a
4122                particular class and we have any non-optionals that need a
4123 spill reg in that class, abort. */
4124
4125 for (class = 0; class < N_REG_CLASSES; class++)
4126 if (basic_block_needs[class] != 0
4127 && basic_block_needs[class][this_block] == 0)
4128 for (i = 0; i < n_reloads; i++)
4129 if (class == (int) reload_reg_class[i]
4130 && reload_reg_rtx[i] == 0
4131 && ! reload_optional[i]
4132 && (reload_in[i] != 0 || reload_out[i] != 0
4133 || reload_secondary_p[i] != 0))
4134 fatal_insn ("Non-optional registers need a spill register", insn);
4135
4136 /* Now compute which reload regs to reload them into. Perhaps
4137 reusing reload regs from previous insns, or else output
4138 load insns to reload them. Maybe output store insns too.
4139 Record the choices of reload reg in reload_reg_rtx. */
4140 choose_reload_regs (insn, avoid_return_reg);
4141
4142 /* Merge any reloads that we didn't combine for fear of
4143 increasing the number of spill registers needed but now
4144 discover can be safely merged. */
4145 if (SMALL_REGISTER_CLASSES)
4146 merge_assigned_reloads (insn);
4147
4148 /* Generate the insns to reload operands into or out of
4149 their reload regs. */
4150 emit_reload_insns (insn);
4151
4152 /* Substitute the chosen reload regs from reload_reg_rtx
4153 into the insn's body (or perhaps into the bodies of other
4154                load and store insns that we just made for reloading
4155 and that we moved the structure into). */
4156 subst_reloads ();
4157
4158 /* If this was an ASM, make sure that all the reload insns
4159 we have generated are valid. If not, give an error
4160 and delete them. */
4161
4162 if (asm_noperands (PATTERN (insn)) >= 0)
4163 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4164 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4165 && (recog_memoized (p) < 0
4166 || (insn_extract (p),
4167 ! constrain_operands (INSN_CODE (p), 1))))
4168 {
4169 error_for_asm (insn,
4170 "`asm' operand requires impossible reload");
4171 PUT_CODE (p, NOTE);
4172 NOTE_SOURCE_FILE (p) = 0;
4173 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4174 }
4175 }
4176 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4177 is no longer validly lying around to save a future reload.
4178 Note that this does not detect pseudos that were reloaded
4179 for this insn in order to be stored in
4180 (obeying register constraints). That is correct; such reload
4181 registers ARE still valid. */
4182 note_stores (oldpat, forget_old_reloads_1);
4183
4184 /* There may have been CLOBBER insns placed after INSN. So scan
4185 between INSN and NEXT and use them to forget old reloads. */
4186 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4187 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4188 note_stores (PATTERN (x), forget_old_reloads_1);
4189
4190 #ifdef AUTO_INC_DEC
4191 /* Likewise for regs altered by auto-increment in this insn.
4192 But note that the reg-notes are not changed by reloading:
4193 they still contain the pseudo-regs, not the spill regs. */
4194 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4195 if (REG_NOTE_KIND (x) == REG_INC)
4196 {
4197 /* See if this pseudo reg was reloaded in this insn.
4198 If so, its last-reload info is still valid
4199 because it is based on this insn's reload. */
4200 for (i = 0; i < n_reloads; i++)
4201 if (reload_out[i] == XEXP (x, 0))
4202 break;
4203
4204 if (i == n_reloads)
4205 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4206 }
4207 #endif
4208 }
4209 /* A reload reg's contents are unknown after a label. */
4210 if (GET_CODE (insn) == CODE_LABEL)
4211 for (i = 0; i < n_spills; i++)
4212 {
4213 reg_reloaded_contents[i] = -1;
4214 reg_reloaded_insn[i] = 0;
4215 }
4216
4217 /* Don't assume a reload reg is still good after a call insn
4218 if it is a call-used reg. */
4219 else if (GET_CODE (insn) == CALL_INSN)
4220 for (i = 0; i < n_spills; i++)
4221 if (call_used_regs[spill_regs[i]])
4222 {
4223 reg_reloaded_contents[i] = -1;
4224 reg_reloaded_insn[i] = 0;
4225 }
4226
4227 /* In case registers overlap, allow certain insns to invalidate
4228 particular hard registers. */
4229
4230 #ifdef INSN_CLOBBERS_REGNO_P
4231 for (i = 0 ; i < n_spills ; i++)
4232 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4233 {
4234 reg_reloaded_contents[i] = -1;
4235 reg_reloaded_insn[i] = 0;
4236 }
4237 #endif
4238
4239 insn = next;
4240
4241 #ifdef USE_C_ALLOCA
4242 alloca (0);
4243 #endif
4244 }
4245 }
4246
4247 /* Discard all record of any value reloaded from X,
4248 or reloaded in X from someplace else;
4249 unless X is an output reload reg of the current insn.
4250
4251 X may be a hard reg (the reload reg)
4252 or it may be a pseudo reg that was reloaded from. */
4253
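     /* Illustrative example (assumed register numbers, not from the original
        source): if hard reg 3 is a spill reg and the insn being processed
        stores into (reg:SI 3), the value recorded as living in that spill
        reg is forgotten here, unless reg 3 is acting as an output reload
        register of this very insn.  */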
4254 static void
4255 forget_old_reloads_1 (x, ignored)
4256 rtx x;
4257 rtx ignored;
4258 {
4259 register int regno;
4260 int nr;
4261 int offset = 0;
4262
4263 /* note_stores does give us subregs of hard regs. */
4264 while (GET_CODE (x) == SUBREG)
4265 {
4266 offset += SUBREG_WORD (x);
4267 x = SUBREG_REG (x);
4268 }
4269
4270 if (GET_CODE (x) != REG)
4271 return;
4272
4273 regno = REGNO (x) + offset;
4274
4275 if (regno >= FIRST_PSEUDO_REGISTER)
4276 nr = 1;
4277 else
4278 {
4279 int i;
4280 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4281 /* Storing into a spilled-reg invalidates its contents.
4282 This can happen if a block-local pseudo is allocated to that reg
4283 and it wasn't spilled because this block's total need is 0.
4284 Then some insn might have an optional reload and use this reg. */
4285 for (i = 0; i < nr; i++)
4286 if (spill_reg_order[regno + i] >= 0
4287 /* But don't do this if the reg actually serves as an output
4288 reload reg in the current instruction. */
4289 && (n_reloads == 0
4290 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4291 {
4292 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4293 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4294 }
4295 }
4296
4297 /* Since value of X has changed,
4298 forget any value previously copied from it. */
4299
4300 while (nr-- > 0)
4301 /* But don't forget a copy if this is the output reload
4302 that establishes the copy's validity. */
4303 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4304 reg_last_reload_reg[regno + nr] = 0;
4305 }
4306 \f
4307 /* For each reload, the mode of the reload register. */
4308 static enum machine_mode reload_mode[MAX_RELOADS];
4309
4310 /* For each reload, the largest number of registers it will require. */
4311 static int reload_nregs[MAX_RELOADS];
4312
4313 /* Comparison function for qsort to decide which of two reloads
4314 should be handled first. *P1 and *P2 are the reload numbers. */
4315
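     /* Informal ordering example (not from the original source): a required
        reload whose class holds a single hard register (say a dedicated
        shift-count register class) sorts before a required multi-register
        group reload, which sorts before other required reloads, and every
        optional reload sorts after all required ones; ties fall back to the
        reload number so the qsort result is deterministic.  */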
4316 static int
4317 reload_reg_class_lower (r1p, r2p)
4318 const GENERIC_PTR r1p;
4319 const GENERIC_PTR r2p;
4320 {
4321 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4322 register int t;
4323
4324 /* Consider required reloads before optional ones. */
4325 t = reload_optional[r1] - reload_optional[r2];
4326 if (t != 0)
4327 return t;
4328
4329 /* Count all solitary classes before non-solitary ones. */
4330 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4331 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4332 if (t != 0)
4333 return t;
4334
4335 /* Aside from solitaires, consider all multi-reg groups first. */
4336 t = reload_nregs[r2] - reload_nregs[r1];
4337 if (t != 0)
4338 return t;
4339
4340 /* Consider reloads in order of increasing reg-class number. */
4341 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4342 if (t != 0)
4343 return t;
4344
4345 /* If reloads are equally urgent, sort by reload number,
4346 so that the results of qsort leave nothing to chance. */
4347 return r1 - r2;
4348 }
4349 \f
4350 /* The following HARD_REG_SETs indicate when each hard register is
4351 used for a reload of various parts of the current insn. */
4352
4353 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4354 static HARD_REG_SET reload_reg_used;
4355 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4356 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4357 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4358 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4359 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4360 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4361 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4362 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4363 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4364 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4365 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4366 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4367 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4368 static HARD_REG_SET reload_reg_used_in_op_addr;
4369 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4370 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4371 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4372 static HARD_REG_SET reload_reg_used_in_insn;
4373 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4374 static HARD_REG_SET reload_reg_used_in_other_addr;
4375
4376 /* If reg is in use as a reload reg for any sort of reload. */
4377 static HARD_REG_SET reload_reg_used_at_all;
4378
4379 /* If reg is in use as an inherited reload.  We just mark the first register
4380 in the group. */
4381 static HARD_REG_SET reload_reg_used_for_inherit;
4382
4383 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4384 TYPE. MODE is used to indicate how many consecutive regs are
4385 actually used. */
4386
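     /* Illustrative example (assumed 32-bit target, not from the original
        source): marking regno 4 for a RELOAD_FOR_INPUT of operand 0 in
        DImode sets hard regs 4 and 5 both in reload_reg_used_in_input[0]
        and in reload_reg_used_at_all, since HARD_REGNO_NREGS (4, DImode)
        == 2 on such a target.  */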
4387 static void
4388 mark_reload_reg_in_use (regno, opnum, type, mode)
4389 int regno;
4390 int opnum;
4391 enum reload_type type;
4392 enum machine_mode mode;
4393 {
4394 int nregs = HARD_REGNO_NREGS (regno, mode);
4395 int i;
4396
4397 for (i = regno; i < nregs + regno; i++)
4398 {
4399 switch (type)
4400 {
4401 case RELOAD_OTHER:
4402 SET_HARD_REG_BIT (reload_reg_used, i);
4403 break;
4404
4405 case RELOAD_FOR_INPUT_ADDRESS:
4406 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4407 break;
4408
4409 case RELOAD_FOR_INPADDR_ADDRESS:
4410 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4411 break;
4412
4413 case RELOAD_FOR_OUTPUT_ADDRESS:
4414 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4415 break;
4416
4417 case RELOAD_FOR_OUTADDR_ADDRESS:
4418 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4419 break;
4420
4421 case RELOAD_FOR_OPERAND_ADDRESS:
4422 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4423 break;
4424
4425 case RELOAD_FOR_OPADDR_ADDR:
4426 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4427 break;
4428
4429 case RELOAD_FOR_OTHER_ADDRESS:
4430 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4431 break;
4432
4433 case RELOAD_FOR_INPUT:
4434 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4435 break;
4436
4437 case RELOAD_FOR_OUTPUT:
4438 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4439 break;
4440
4441 case RELOAD_FOR_INSN:
4442 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4443 break;
4444 }
4445
4446 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4447 }
4448 }
4449
4450 /* Similarly, but show REGNO is no longer in use for a reload. */
4451
4452 static void
4453 clear_reload_reg_in_use (regno, opnum, type, mode)
4454 int regno;
4455 int opnum;
4456 enum reload_type type;
4457 enum machine_mode mode;
4458 {
4459 int nregs = HARD_REGNO_NREGS (regno, mode);
4460 int i;
4461
4462 for (i = regno; i < nregs + regno; i++)
4463 {
4464 switch (type)
4465 {
4466 case RELOAD_OTHER:
4467 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4468 break;
4469
4470 case RELOAD_FOR_INPUT_ADDRESS:
4471 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4472 break;
4473
4474 case RELOAD_FOR_INPADDR_ADDRESS:
4475 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4476 break;
4477
4478 case RELOAD_FOR_OUTPUT_ADDRESS:
4479 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4480 break;
4481
4482 case RELOAD_FOR_OUTADDR_ADDRESS:
4483 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4484 break;
4485
4486 case RELOAD_FOR_OPERAND_ADDRESS:
4487 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4488 break;
4489
4490 case RELOAD_FOR_OPADDR_ADDR:
4491 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4492 break;
4493
4494 case RELOAD_FOR_OTHER_ADDRESS:
4495 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4496 break;
4497
4498 case RELOAD_FOR_INPUT:
4499 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4500 break;
4501
4502 case RELOAD_FOR_OUTPUT:
4503 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4504 break;
4505
4506 case RELOAD_FOR_INSN:
4507 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4508 break;
4509 }
4510 }
4511 }
4512
4513 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4514 specified by OPNUM and TYPE. */
4515
4516 static int
4517 reload_reg_free_p (regno, opnum, type)
4518 int regno;
4519 int opnum;
4520 enum reload_type type;
4521 {
4522 int i;
4523
4524 /* In use for a RELOAD_OTHER means it's not available for anything. */
4525 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4526 return 0;
4527
4528 switch (type)
4529 {
4530 case RELOAD_OTHER:
4531 /* In use for anything means we can't use it for RELOAD_OTHER. */
4532 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4533 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4534 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4535 return 0;
4536
4537 for (i = 0; i < reload_n_operands; i++)
4538 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4541 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4542 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4543 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4544 return 0;
4545
4546 return 1;
4547
4548 case RELOAD_FOR_INPUT:
4549 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4550 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4551 return 0;
4552
4553 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4554 return 0;
4555
4556 /* If it is used for some other input, can't use it. */
4557 for (i = 0; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4559 return 0;
4560
4561 /* If it is used in a later operand's address, can't use it. */
4562 for (i = opnum + 1; i < reload_n_operands; i++)
4563 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4564 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4565 return 0;
4566
4567 return 1;
4568
4569 case RELOAD_FOR_INPUT_ADDRESS:
4570 /* Can't use a register if it is used for an input address for this
4571 operand or used as an input in an earlier one. */
4572 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4573 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4574 return 0;
4575
4576 for (i = 0; i < opnum; i++)
4577 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4578 return 0;
4579
4580 return 1;
4581
4582 case RELOAD_FOR_INPADDR_ADDRESS:
4583 /* Can't use a register if it is used for an input address
4584 address for this operand or used as an input in an earlier
4585 one. */
4586 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4587 return 0;
4588
4589 for (i = 0; i < opnum; i++)
4590 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4591 return 0;
4592
4593 return 1;
4594
4595 case RELOAD_FOR_OUTPUT_ADDRESS:
4596 /* Can't use a register if it is used for an output address for this
4597 operand or used as an output in this or a later operand. */
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4599 return 0;
4600
4601 for (i = opnum; i < reload_n_operands; i++)
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4603 return 0;
4604
4605 return 1;
4606
4607 case RELOAD_FOR_OUTADDR_ADDRESS:
4608 /* Can't use a register if it is used for an output address
4609 address for this operand or used as an output in this or a
4610 later operand. */
4611 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4612 return 0;
4613
4614 for (i = opnum; i < reload_n_operands; i++)
4615 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4616 return 0;
4617
4618 return 1;
4619
4620 case RELOAD_FOR_OPERAND_ADDRESS:
4621 for (i = 0; i < reload_n_operands; i++)
4622 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4623 return 0;
4624
4625 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4626 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4627
4628 case RELOAD_FOR_OPADDR_ADDR:
4629 for (i = 0; i < reload_n_operands; i++)
4630 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4631 return 0;
4632
4633 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4634
4635 case RELOAD_FOR_OUTPUT:
4636 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4637 outputs, or an operand address for this or an earlier output. */
4638 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4639 return 0;
4640
4641 for (i = 0; i < reload_n_operands; i++)
4642 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4643 return 0;
4644
4645 for (i = 0; i <= opnum; i++)
4646 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4647 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4648 return 0;
4649
4650 return 1;
4651
4652 case RELOAD_FOR_INSN:
4653 for (i = 0; i < reload_n_operands; i++)
4654 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4655 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4656 return 0;
4657
4658 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4659 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4660
4661 case RELOAD_FOR_OTHER_ADDRESS:
4662 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4663 }
4664 abort ();
4665 }
4666
4667 /* Return 1 if the value in reload reg REGNO, as used by a reload
4668 needed for the part of the insn specified by OPNUM and TYPE,
4669 is not in use for a reload in any prior part of the insn.
4670
4671 We can assume that the reload reg was already tested for availability
4672 at the time it is needed, and we should not check this again,
4673 in case the reg has already been marked in use. */
4674
4675 static int
4676 reload_reg_free_before_p (regno, opnum, type)
4677 int regno;
4678 int opnum;
4679 enum reload_type type;
4680 {
4681 int i;
4682
4683 switch (type)
4684 {
4685 case RELOAD_FOR_OTHER_ADDRESS:
4686 /* These always come first. */
4687 return 1;
4688
4689 case RELOAD_OTHER:
4690 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4691
4692 /* If this use is for part of the insn,
4693 check the reg is not in use for any prior part. It is tempting
4694      to try to do this by falling through from objects that occur
4695 later in the insn to ones that occur earlier, but that will not
4696 correctly take into account the fact that here we MUST ignore
4697 things that would prevent the register from being allocated in
4698 the first place, since we know that it was allocated. */
4699
4700 case RELOAD_FOR_OUTPUT_ADDRESS:
4701 case RELOAD_FOR_OUTADDR_ADDRESS:
4702 /* Earlier reloads are for earlier outputs or their addresses,
4703 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4704 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4705      RELOAD_OTHER).  */
4706 for (i = 0; i < opnum; i++)
4707 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4708 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4709 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4710 return 0;
4711
4712 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4713 return 0;
4714
4715 for (i = 0; i < reload_n_operands; i++)
4716 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4717 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4718 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4719 return 0;
4720
4721 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4722 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4723 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4724
4725 case RELOAD_FOR_OUTPUT:
4726 /* This can't be used in the output address for this operand and
4727 anything that can't be used for it, except that we've already
4728 tested for RELOAD_FOR_INSN objects. */
4729
4730 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4731 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4732 return 0;
4733
4734 for (i = 0; i < opnum; i++)
4735 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4736 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4737 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4738 return 0;
4739
4740 for (i = 0; i < reload_n_operands; i++)
4741 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4742 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4743 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4744 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4745 return 0;
4746
4747 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4748
4749 case RELOAD_FOR_OPERAND_ADDRESS:
4750 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4751 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4752 return 0;
4753
4754 /* ... fall through ... */
4755
4756 case RELOAD_FOR_OPADDR_ADDR:
4757 case RELOAD_FOR_INSN:
4758 /* These can't conflict with inputs, or each other, so all we have to
4759 test is input addresses and the addresses of OTHER items. */
4760
4761 for (i = 0; i < reload_n_operands; i++)
4762 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4763 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4764 return 0;
4765
4766 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4767
4768 case RELOAD_FOR_INPUT:
4769 /* The only things earlier are the address for this and
4770 earlier inputs, other inputs (which we know we don't conflict
4771 with), and addresses of RELOAD_OTHER objects. */
4772
4773 for (i = 0; i <= opnum; i++)
4774 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4775 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4776 return 0;
4777
4778 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4779
4780 case RELOAD_FOR_INPUT_ADDRESS:
4781 case RELOAD_FOR_INPADDR_ADDRESS:
4782 /* Similarly, all we have to check is for use in earlier inputs'
4783 addresses. */
4784 for (i = 0; i < opnum; i++)
4785 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4786 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4787 return 0;
4788
4789 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4790 }
4791 abort ();
4792 }
4793
4794 /* Return 1 if the value in reload reg REGNO, as used by a reload
4795 needed for the part of the insn specified by OPNUM and TYPE,
4796 is still available in REGNO at the end of the insn.
4797
4798 We can assume that the reload reg was already tested for availability
4799 at the time it is needed, and we should not check this again,
4800 in case the reg has already been marked in use. */
4801
4802 static int
4803 reload_reg_reaches_end_p (regno, opnum, type)
4804 int regno;
4805 int opnum;
4806 enum reload_type type;
4807 {
4808 int i;
4809
4810 switch (type)
4811 {
4812 case RELOAD_OTHER:
4813 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4814 its value must reach the end. */
4815 return 1;
4816
4817 /* If this use is for part of the insn,
4818 its value reaches the end if no subsequent part uses the same register.
4819 Just as in the function above, we don't try to handle this with lots
4820 of fallthroughs. */
4821
4822 case RELOAD_FOR_OTHER_ADDRESS:
4823 /* Here we check for everything else, since these don't conflict
4824 with anything else and everything comes later. */
4825
4826 for (i = 0; i < reload_n_operands; i++)
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4829 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4830 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4831 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4832 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4833 return 0;
4834
4835 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4836 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4837 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4838
4839 case RELOAD_FOR_INPUT_ADDRESS:
4840 case RELOAD_FOR_INPADDR_ADDRESS:
4841 /* Similar, except that we check only for this and subsequent inputs,
4842 and only for the addresses of subsequent inputs; we do not need
4843 to check for RELOAD_OTHER objects since they are known not to
4844 conflict. */
4845
4846 for (i = opnum; i < reload_n_operands; i++)
4847 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4848 return 0;
4849
4850 for (i = opnum + 1; i < reload_n_operands; i++)
4851 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4852 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4853 return 0;
4854
4855 for (i = 0; i < reload_n_operands; i++)
4856 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4857 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4858 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4859 return 0;
4860
4861 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4862 return 0;
4863
4864 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4865 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4866
4867 case RELOAD_FOR_INPUT:
4868 /* Similar to input address, except we start at the next operand for
4869 both input and input address and we do not check for
4870 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4871 would conflict. */
4872
4873 for (i = opnum + 1; i < reload_n_operands; i++)
4874 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4875 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4876 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4877 return 0;
4878
4879 /* ... fall through ... */
4880
4881 case RELOAD_FOR_OPERAND_ADDRESS:
4882 /* Check outputs and their addresses. */
4883
4884 for (i = 0; i < reload_n_operands; i++)
4885 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4886 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4887 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4888 return 0;
4889
4890 return 1;
4891
4892 case RELOAD_FOR_OPADDR_ADDR:
4893 for (i = 0; i < reload_n_operands; i++)
4894 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4895 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4896 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4897 return 0;
4898
4899 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4900 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4901
4902 case RELOAD_FOR_INSN:
4903 /* These conflict with outputs and with RELOAD_OTHER reloads, so those
4904 could never share this register; we need only check output addresses. */
4905
4906 opnum = -1;
4907
4908 /* ... fall through ... */
4909
4910 case RELOAD_FOR_OUTPUT:
4911 case RELOAD_FOR_OUTPUT_ADDRESS:
4912 case RELOAD_FOR_OUTADDR_ADDRESS:
4913 /* We already know these can't conflict with a later output. So the
4914 only things to check are later output addresses. */
4915 for (i = opnum + 1; i < reload_n_operands; i++)
4916 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4917 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4918 return 0;
4919
4920 return 1;
4921 }
4922
4923 abort ();
4924 }
4925 \f
4926 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4927 Return 0 otherwise.
4928
4929 This function uses the same algorithm as reload_reg_free_p above. */
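/* For example (illustrative only): a RELOAD_FOR_INPUT reload for operand 1
   conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for operand 2, since that
   address is still needed after operand 1's input has been loaded, but it
   does not conflict with an input-address reload for operand 0 or 1.  */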
4930
4931 static int
4932 reloads_conflict (r1, r2)
4933 int r1, r2;
4934 {
4935 enum reload_type r1_type = reload_when_needed[r1];
4936 enum reload_type r2_type = reload_when_needed[r2];
4937 int r1_opnum = reload_opnum[r1];
4938 int r2_opnum = reload_opnum[r2];
4939
4940 /* RELOAD_OTHER conflicts with everything. */
4941 if (r2_type == RELOAD_OTHER)
4942 return 1;
4943
4944 /* Otherwise, check conflicts differently for each type. */
4945
4946 switch (r1_type)
4947 {
4948 case RELOAD_FOR_INPUT:
4949 return (r2_type == RELOAD_FOR_INSN
4950 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4951 || r2_type == RELOAD_FOR_OPADDR_ADDR
4952 || r2_type == RELOAD_FOR_INPUT
4953 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4954 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4955 && r2_opnum > r1_opnum));
4956
4957 case RELOAD_FOR_INPUT_ADDRESS:
4958 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4959 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4960
4961 case RELOAD_FOR_INPADDR_ADDRESS:
4962 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4963 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4964
4965 case RELOAD_FOR_OUTPUT_ADDRESS:
4966 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4967 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4968
4969 case RELOAD_FOR_OUTADDR_ADDRESS:
4970 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4971 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4972
4973 case RELOAD_FOR_OPERAND_ADDRESS:
4974 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4975 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4976
4977 case RELOAD_FOR_OPADDR_ADDR:
4978 return (r2_type == RELOAD_FOR_INPUT
4979 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4980
4981 case RELOAD_FOR_OUTPUT:
4982 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4983 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4984 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4985 && r2_opnum >= r1_opnum));
4986
4987 case RELOAD_FOR_INSN:
4988 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4989 || r2_type == RELOAD_FOR_INSN
4990 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4991
4992 case RELOAD_FOR_OTHER_ADDRESS:
4993 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4994
4995 case RELOAD_OTHER:
4996 return 1;
4997
4998 default:
4999 abort ();
5000 }
5001 }
5002 \f
5003 /* Vector of reload-numbers showing the order in which the reloads should
5004 be processed. */
5005 short reload_order[MAX_RELOADS];
5006
5007 /* Indexed by reload number, 1 if incoming value
5008 inherited from previous insns. */
5009 char reload_inherited[MAX_RELOADS];
5010
5011 /* For an inherited reload, this is the insn the reload was inherited from,
5012 if we know it. Otherwise, this is 0. */
5013 rtx reload_inheritance_insn[MAX_RELOADS];
5014
5015 /* If non-zero, this is a place to get the value of the reload,
5016 rather than using reload_in. */
5017 rtx reload_override_in[MAX_RELOADS];
5018
5019 /* For each reload, the index in spill_regs of the spill register used,
5020 or -1 if we did not need one of the spill registers for this reload. */
5021 int reload_spill_index[MAX_RELOADS];
5022
5023 /* Find a spill register to use as a reload register for reload R.
5024 LAST_RELOAD is non-zero if this is the last reload for the insn being
5025 processed.
5026
5027 Set reload_reg_rtx[R] to the register allocated.
5028
5029 If NOERROR is nonzero, we return 1 if successful,
5030 or 0 if we couldn't find a spill reg and we didn't change anything. */
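/* Note (descriptive): choose_reload_regs below passes its INHERITANCE flag
   as NOERROR, so a failure during the pass that tries inheritance merely
   returns 0 and the whole allocation is retried without inheritance; only
   the final attempt reports an error through the failure path.  */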
5031
5032 static int
5033 allocate_reload_reg (r, insn, last_reload, noerror)
5034 int r;
5035 rtx insn;
5036 int last_reload;
5037 int noerror;
5038 {
5039 int i;
5040 int pass;
5041 int count;
5042 rtx new;
5043 int regno;
5044
5045 /* If we put this reload ahead, thinking it is a group,
5046 then insist on finding a group. Otherwise we can grab a
5047 reg that some other reload needs.
5048 (That can happen when we have a 68000 DATA_OR_FP_REG
5049 which is a group of data regs or one fp reg.)
5050 We need not be so restrictive if there are no more reloads
5051 for this insn.
5052
5053 ??? Really it would be nicer to have smarter handling
5054 for that kind of reg class, where a problem like this is normal.
5055 Perhaps those classes should be avoided for reloading
5056 by use of more alternatives. */
5057
5058 int force_group = reload_nregs[r] > 1 && ! last_reload;
5059
5060 /* If we want a single register and haven't yet found one,
5061 take any reg in the right class and not in use.
5062 If we want a consecutive group, here is where we look for it.
5063
5064 We use two passes so we can first look for reload regs to
5065 reuse, which are already in use for other reloads in this insn,
5066 and only then use additional registers.
5067 I think that maximizing reuse is needed to make sure we don't
5068 run out of reload regs. Suppose we have three reloads, and
5069 reloads A and B can share regs. These need two regs.
5070 Suppose A and B are given different regs.
5071 That leaves none for C. */
5072 for (pass = 0; pass < 2; pass++)
5073 {
5074 /* I is the index in spill_regs.
5075 We advance it round-robin between insns to use all spill regs
5076 equally, so that inherited reloads have a chance
5077 of leapfrogging each other. Don't do this, however, when we have
5078 group needs and failure would be fatal; if we only have a relatively
5079 small number of spill registers, and more than one of them has
5080 group needs, then by starting in the middle, we may end up
5081 allocating the first one in such a way that we are not left with
5082 sufficient groups to handle the rest. */
5083
5084 if (noerror || ! force_group)
5085 i = last_spill_reg;
5086 else
5087 i = -1;
5088
5089 for (count = 0; count < n_spills; count++)
5090 {
5091 int class = (int) reload_reg_class[r];
5092
5093 i = (i + 1) % n_spills;
5094
5095 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5096 reload_when_needed[r])
5097 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5098 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5099 /* Look first for regs to share, then for unshared. But
5100 don't share regs used for inherited reloads; they are
5101 the ones we want to preserve. */
5102 && (pass
5103 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5104 spill_regs[i])
5105 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5106 spill_regs[i]))))
5107 {
5108 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5109 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5110 (on 68000) got us two FP regs. If NR is 1,
5111 we would reject both of them. */
5112 if (force_group)
5113 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5114 /* If we need only one reg, we have already won. */
5115 if (nr == 1)
5116 {
5117 /* But reject a single reg if we demand a group. */
5118 if (force_group)
5119 continue;
5120 break;
5121 }
5122 /* Otherwise check that as many consecutive regs as we need
5123 are available here.
5124 Also, don't use for a group registers that are
5125 needed for nongroups. */
5126 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5127 while (nr > 1)
5128 {
5129 regno = spill_regs[i] + nr - 1;
5130 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5131 && spill_reg_order[regno] >= 0
5132 && reload_reg_free_p (regno, reload_opnum[r],
5133 reload_when_needed[r])
5134 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5135 regno)))
5136 break;
5137 nr--;
5138 }
5139 if (nr == 1)
5140 break;
5141 }
5142 }
5143
5144 /* If we found something on pass 1, omit pass 2. */
5145 if (count < n_spills)
5146 break;
5147 }
5148
5149 /* We should have found a spill register by now. */
5150 if (count == n_spills)
5151 {
5152 if (noerror)
5153 return 0;
5154 goto failure;
5155 }
5156
5157 /* I is the index in SPILL_REG_RTX of the reload register we are to
5158 allocate. Get an rtx for it and find its register number. */
5159
5160 new = spill_reg_rtx[i];
5161
5162 if (new == 0 || GET_MODE (new) != reload_mode[r])
5163 spill_reg_rtx[i] = new
5164 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5165
5166 regno = true_regnum (new);
5167
5168 /* Detect when the reload reg can't hold the reload mode.
5169 This used to be one `if', but the Sequent compiler can't handle that. */
5170 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5171 {
5172 enum machine_mode test_mode = VOIDmode;
5173 if (reload_in[r])
5174 test_mode = GET_MODE (reload_in[r]);
5175 /* If reload_in[r] has VOIDmode, it means we will load it
5176 in whatever mode the reload reg has: to wit, reload_mode[r].
5177 We have already tested that for validity. */
5178 /* Aside from that, we need to test that the expressions
5179 to reload from or into have modes which are valid for this
5180 reload register. Otherwise the reload insns would be invalid. */
5181 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5182 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5183 if (! (reload_out[r] != 0
5184 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5185 {
5186 /* The reg is OK. */
5187 last_spill_reg = i;
5188
5189 /* Mark as in use for this insn the reload regs we use
5190 for this. */
5191 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5192 reload_when_needed[r], reload_mode[r]);
5193
5194 reload_reg_rtx[r] = new;
5195 reload_spill_index[r] = i;
5196 return 1;
5197 }
5198 }
5199
5200 /* The reg is not OK. */
5201 if (noerror)
5202 return 0;
5203
5204 failure:
5205 if (asm_noperands (PATTERN (insn)) < 0)
5206 /* It's the compiler's fault. */
5207 fatal_insn ("Could not find a spill register", insn);
5208
5209 /* It's the user's fault; the operand's mode and constraint
5210 don't match. Disable this reload so we don't crash in final. */
5211 error_for_asm (insn,
5212 "`asm' operand constraint incompatible with operand size");
5213 reload_in[r] = 0;
5214 reload_out[r] = 0;
5215 reload_reg_rtx[r] = 0;
5216 reload_optional[r] = 1;
5217 reload_secondary_p[r] = 1;
5218
5219 return 1;
5220 }
5221 \f
5222 /* Assign hard reg targets for the pseudo-registers we must reload
5223 into hard regs for this insn.
5224 Also output the instructions to copy them in and out of the hard regs.
5225
5226 For machines with register classes, we are responsible for
5227 finding a reload reg in the proper class. */
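/* Rough outline of this function (descriptive only): optionally mark the
   function-return register as unavailable, sort the reloads by register
   class, then assign reload registers, first trying inheritance from
   previous insns (when optimizing) and, if that fails, once more without
   inheritance; finally validate any inherited or overridden inputs and
   record which pseudos and spill regs have output reloads.  */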
5228
5229 static void
5230 choose_reload_regs (insn, avoid_return_reg)
5231 rtx insn;
5232 rtx avoid_return_reg;
5233 {
5234 register int i, j;
5235 int max_group_size = 1;
5236 enum reg_class group_class = NO_REGS;
5237 int inheritance;
5238
5239 rtx save_reload_reg_rtx[MAX_RELOADS];
5240 char save_reload_inherited[MAX_RELOADS];
5241 rtx save_reload_inheritance_insn[MAX_RELOADS];
5242 rtx save_reload_override_in[MAX_RELOADS];
5243 int save_reload_spill_index[MAX_RELOADS];
5244 HARD_REG_SET save_reload_reg_used;
5245 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5246 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5247 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5248 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5249 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5250 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5251 HARD_REG_SET save_reload_reg_used_in_op_addr;
5252 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5253 HARD_REG_SET save_reload_reg_used_in_insn;
5254 HARD_REG_SET save_reload_reg_used_in_other_addr;
5255 HARD_REG_SET save_reload_reg_used_at_all;
5256
5257 bzero (reload_inherited, MAX_RELOADS);
5258 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5259 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5260
5261 CLEAR_HARD_REG_SET (reload_reg_used);
5262 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5263 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5264 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5265 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5266 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5267
5268 for (i = 0; i < reload_n_operands; i++)
5269 {
5270 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5271 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5272 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5273 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5274 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5275 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5276 }
5277
5278 /* Don't bother with avoiding the return reg
5279 if we have no mandatory reload that could use it. */
5280 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5281 {
5282 int do_avoid = 0;
5283 int regno = REGNO (avoid_return_reg);
5284 int nregs
5285 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5286 int r;
5287
5288 for (r = regno; r < regno + nregs; r++)
5289 if (spill_reg_order[r] >= 0)
5290 for (j = 0; j < n_reloads; j++)
5291 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5292 && (reload_in[j] != 0 || reload_out[j] != 0
5293 || reload_secondary_p[j])
5294 &&
5295 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5296 do_avoid = 1;
5297 if (!do_avoid)
5298 avoid_return_reg = 0;
5299 }
5300
5301 #if 0 /* Not needed, now that we can always retry without inheritance. */
5302 /* See if we have more mandatory reloads than spill regs.
5303 If so, then we cannot risk optimizations that could prevent
5304 reloads from sharing one spill register.
5305
5306 Since we will try finding a better register than reload_reg_rtx
5307 unless it is equal to reload_in or reload_out, count such reloads. */
5308
5309 {
5310 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5311 for (j = 0; j < n_reloads; j++)
5312 if (! reload_optional[j]
5313 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5314 && (reload_reg_rtx[j] == 0
5315 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5316 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5317 tem++;
5318 if (tem > n_spills)
5319 must_reuse = 1;
5320 }
5321 #endif
5322
5323 /* Don't use the subroutine call return reg for a reload
5324 if we are supposed to avoid it. */
5325 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5326 {
5327 int regno = REGNO (avoid_return_reg);
5328 int nregs
5329 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5330 int r;
5331
5332 for (r = regno; r < regno + nregs; r++)
5333 if (spill_reg_order[r] >= 0)
5334 SET_HARD_REG_BIT (reload_reg_used, r);
5335 }
5336
5337 /* In order to be certain of getting the registers we need,
5338 we must sort the reloads into order of increasing register class.
5339 Then our grabbing of reload registers will parallel the process
5340 that provided the reload registers.
5341
5342 Also note whether any of the reloads wants a consecutive group of regs.
5343 If so, record the maximum size of the group desired and what
5344 register class contains all the groups needed by this insn. */
5345
5346 for (j = 0; j < n_reloads; j++)
5347 {
5348 reload_order[j] = j;
5349 reload_spill_index[j] = -1;
5350
5351 reload_mode[j]
5352 = (reload_inmode[j] == VOIDmode
5353 || (GET_MODE_SIZE (reload_outmode[j])
5354 > GET_MODE_SIZE (reload_inmode[j])))
5355 ? reload_outmode[j] : reload_inmode[j];
5356
5357 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5358
5359 if (reload_nregs[j] > 1)
5360 {
5361 max_group_size = MAX (reload_nregs[j], max_group_size);
5362 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5363 }
5364
5365 /* If we have already decided to use a certain register,
5366 don't use it in another way. */
5367 if (reload_reg_rtx[j])
5368 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5369 reload_when_needed[j], reload_mode[j]);
5370 }
5371
5372 if (n_reloads > 1)
5373 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5374
5375 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5376 sizeof reload_reg_rtx);
5377 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5378 bcopy ((char *) reload_inheritance_insn,
5379 (char *) save_reload_inheritance_insn,
5380 sizeof reload_inheritance_insn);
5381 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5382 sizeof reload_override_in);
5383 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5384 sizeof reload_spill_index);
5385 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5386 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5387 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5388 reload_reg_used_in_op_addr);
5389
5390 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5391 reload_reg_used_in_op_addr_reload);
5392
5393 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5394 reload_reg_used_in_insn);
5395 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5396 reload_reg_used_in_other_addr);
5397
5398 for (i = 0; i < reload_n_operands; i++)
5399 {
5400 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5401 reload_reg_used_in_output[i]);
5402 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5403 reload_reg_used_in_input[i]);
5404 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5405 reload_reg_used_in_input_addr[i]);
5406 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5407 reload_reg_used_in_inpaddr_addr[i]);
5408 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5409 reload_reg_used_in_output_addr[i]);
5410 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5411 reload_reg_used_in_outaddr_addr[i]);
5412 }
5413
5414 /* If -O, try first with inheritance, then turning it off.
5415 If not -O, don't do inheritance.
5416 Using inheritance when not optimizing leads to paradoxes
5417 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5418 because one side of the comparison might be inherited. */
5419
5420 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5421 {
5422 /* Process the reloads in order of preference just found.
5423 Beyond this point, subregs can be found in reload_reg_rtx.
5424
5425 This used to look for an existing reloaded home for all
5426 of the reloads, and only then perform any new reloads.
5427 But that could lose if the reloads were done out of reg-class order
5428 because a later reload with a looser constraint might have an old
5429 home in a register needed by an earlier reload with a tighter constraint.
5430
5431 To solve this, we make two passes over the reloads, in the order
5432 described above. In the first pass we try to inherit a reload
5433 from a previous insn. If there is a later reload that needs a
5434 class that is a proper subset of the class being processed, we must
5435 also allocate a spill register during the first pass.
5436
5437 Then make a second pass over the reloads to allocate any reloads
5438 that haven't been given registers yet. */
5439
5440 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5441
5442 for (j = 0; j < n_reloads; j++)
5443 {
5444 register int r = reload_order[j];
5445
5446 /* Ignore reloads that got marked inoperative. */
5447 if (reload_out[r] == 0 && reload_in[r] == 0
5448 && ! reload_secondary_p[r])
5449 continue;
5450
5451 /* If find_reloads chose to use reload_in or reload_out as a reload
5452 register, we don't need to choose one. Otherwise, try even if it
5453 found one, since we might save an insn if we find the value lying
5454 around. */
5455 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5456 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5457 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5458 continue;
5459
5460 #if 0 /* No longer needed for correct operation.
5461 It might give better code, or might not; worth an experiment? */
5462 /* If this is an optional reload, we can't inherit from earlier insns
5463 until we are sure that any non-optional reloads have been allocated.
5464 The following code takes advantage of the fact that optional reloads
5465 are at the end of reload_order. */
5466 if (reload_optional[r] != 0)
5467 for (i = 0; i < j; i++)
5468 if ((reload_out[reload_order[i]] != 0
5469 || reload_in[reload_order[i]] != 0
5470 || reload_secondary_p[reload_order[i]])
5471 && ! reload_optional[reload_order[i]]
5472 && reload_reg_rtx[reload_order[i]] == 0)
5473 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5474 #endif
5475
5476 /* First see if this pseudo is already available as reloaded
5477 for a previous insn. We cannot try to inherit for reloads
5478 that are smaller than the maximum number of registers needed
5479 for groups unless the register we would allocate cannot be used
5480 for the groups.
5481
5482 We could check here to see if this is a secondary reload for
5483 an object that is already in a register of the desired class.
5484 This would avoid the need for the secondary reload register.
5485 But this is complex because we can't easily determine what
5486 objects might want to be loaded via this reload. So let a
5487 register be allocated here. In `emit_reload_insns' we suppress
5488 one of the loads in the case described above. */
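/* Concretely (illustrative): if reg_last_reload_reg shows that the pseudo
   we are about to load was reloaded into a spill register by an earlier
   insn, and reg_reloaded_contents confirms that register still holds the
   pseudo's value, we may simply reuse that register here instead of
   emitting a new load.  */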
5489
5490 if (inheritance)
5491 {
5492 register int regno = -1;
5493 enum machine_mode mode;
5494
5495 if (reload_in[r] == 0)
5496 ;
5497 else if (GET_CODE (reload_in[r]) == REG)
5498 {
5499 regno = REGNO (reload_in[r]);
5500 mode = GET_MODE (reload_in[r]);
5501 }
5502 else if (GET_CODE (reload_in_reg[r]) == REG)
5503 {
5504 regno = REGNO (reload_in_reg[r]);
5505 mode = GET_MODE (reload_in_reg[r]);
5506 }
5507 #if 0
5508 /* This won't work, since REGNO can be a pseudo reg number.
5509 Also, it takes much more hair to keep track of all the things
5510 that can invalidate an inherited reload of part of a pseudoreg. */
5511 else if (GET_CODE (reload_in[r]) == SUBREG
5512 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5513 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5514 #endif
5515
5516 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5517 {
5518 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5519
5520 if (reg_reloaded_contents[i] == regno
5521 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5522 >= GET_MODE_SIZE (mode))
5523 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5524 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5525 spill_regs[i])
5526 && (reload_nregs[r] == max_group_size
5527 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5528 spill_regs[i]))
5529 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5530 reload_when_needed[r])
5531 && reload_reg_free_before_p (spill_regs[i],
5532 reload_opnum[r],
5533 reload_when_needed[r]))
5534 {
5535 /* If a group is needed, verify that all the subsequent
5536 registers still have their values intact. */
5537 int nr
5538 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5539 int k;
5540
5541 for (k = 1; k < nr; k++)
5542 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5543 != regno)
5544 break;
5545
5546 if (k == nr)
5547 {
5548 int i1;
5549
5550 /* We found a register that contains the
5551 value we need. If this register is the
5552 same as an `earlyclobber' operand of the
5553 current insn, just mark it as a place to
5554 reload from since we can't use it as the
5555 reload register itself. */
5556
5557 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5558 if (reg_overlap_mentioned_for_reload_p
5559 (reg_last_reload_reg[regno],
5560 reload_earlyclobbers[i1]))
5561 break;
5562
5563 if (i1 != n_earlyclobbers
5564 /* Don't really use the inherited spill reg
5565 if we need it wider than we've got it. */
5566 || (GET_MODE_SIZE (reload_mode[r])
5567 > GET_MODE_SIZE (mode)))
5568 reload_override_in[r] = reg_last_reload_reg[regno];
5569 else
5570 {
5571 int k;
5572 /* We can use this as a reload reg. */
5573 /* Mark the register as in use for this part of
5574 the insn. */
5575 mark_reload_reg_in_use (spill_regs[i],
5576 reload_opnum[r],
5577 reload_when_needed[r],
5578 reload_mode[r]);
5579 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5580 reload_inherited[r] = 1;
5581 reload_inheritance_insn[r]
5582 = reg_reloaded_insn[i];
5583 reload_spill_index[r] = i;
5584 for (k = 0; k < nr; k++)
5585 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5586 spill_regs[i + k]);
5587 }
5588 }
5589 }
5590 }
5591 }
5592
5593 /* Here's another way to see if the value is already lying around. */
5594 if (inheritance
5595 && reload_in[r] != 0
5596 && ! reload_inherited[r]
5597 && reload_out[r] == 0
5598 && (CONSTANT_P (reload_in[r])
5599 || GET_CODE (reload_in[r]) == PLUS
5600 || GET_CODE (reload_in[r]) == REG
5601 || GET_CODE (reload_in[r]) == MEM)
5602 && (reload_nregs[r] == max_group_size
5603 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5604 {
5605 register rtx equiv
5606 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5607 -1, NULL_PTR, 0, reload_mode[r]);
5608 int regno;
5609
5610 if (equiv != 0)
5611 {
5612 if (GET_CODE (equiv) == REG)
5613 regno = REGNO (equiv);
5614 else if (GET_CODE (equiv) == SUBREG)
5615 {
5616 /* This must be a SUBREG of a hard register.
5617 Make a new REG since this might be used in an
5618 address and not all machines support SUBREGs
5619 there. */
5620 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5621 equiv = gen_rtx (REG, reload_mode[r], regno);
5622 }
5623 else
5624 abort ();
5625 }
5626
5627 /* If we found a spill reg, reject it unless it is free
5628 and of the desired class. */
5629 if (equiv != 0
5630 && ((spill_reg_order[regno] >= 0
5631 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5632 reload_when_needed[r]))
5633 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5634 regno)))
5635 equiv = 0;
5636
5637 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5638 equiv = 0;
5639
5640 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5641 equiv = 0;
5642
5643 /* We found a register that contains the value we need.
5644 If this register is the same as an `earlyclobber' operand
5645 of the current insn, just mark it as a place to reload from
5646 since we can't use it as the reload register itself. */
5647
5648 if (equiv != 0)
5649 for (i = 0; i < n_earlyclobbers; i++)
5650 if (reg_overlap_mentioned_for_reload_p (equiv,
5651 reload_earlyclobbers[i]))
5652 {
5653 reload_override_in[r] = equiv;
5654 equiv = 0;
5655 break;
5656 }
5657
5658 /* JRV: If the equiv register we have found is
5659 explicitly clobbered in the current insn, mark but
5660 don't use, as above. */
5661
5662 if (equiv != 0 && regno_clobbered_p (regno, insn))
5663 {
5664 reload_override_in[r] = equiv;
5665 equiv = 0;
5666 }
5667
5668 /* If we found an equivalent reg, say no code need be generated
5669 to load it, and use it as our reload reg. */
5670 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5671 {
5672 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5673 int k;
5674 reload_reg_rtx[r] = equiv;
5675 reload_inherited[r] = 1;
5676
5677 /* If any of the hard registers in EQUIV are spill
5678 registers, mark them as in use for this insn. */
5679 for (k = 0; k < nr; k++)
5680 {
5681 i = spill_reg_order[regno + k];
5682 if (i >= 0)
5683 {
5684 mark_reload_reg_in_use (regno, reload_opnum[r],
5685 reload_when_needed[r],
5686 reload_mode[r]);
5687 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5688 regno + k);
5689 }
5690 }
5691 }
5692 }
5693
5694 /* If we found a register to use already, or if this is an optional
5695 reload, we are done. */
5696 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5697 continue;
5698
5699 #if 0 /* No longer needed for correct operation. Might or might not
5700 give better code on the average. Want to experiment? */
5701
5702 /* See if there is a later reload that has a class different from our
5703 class that intersects our class or that requires fewer registers
5704 than our reload. If so, we must allocate a register to this
5705 reload now, since that reload might inherit a previous reload
5706 and take the only available register in our class. Don't do this
5707 for optional reloads since they will force all previous reloads
5708 to be allocated. Also don't do this for reloads that have been
5709 turned off. */
5710
5711 for (i = j + 1; i < n_reloads; i++)
5712 {
5713 int s = reload_order[i];
5714
5715 if ((reload_in[s] == 0 && reload_out[s] == 0
5716 && ! reload_secondary_p[s])
5717 || reload_optional[s])
5718 continue;
5719
5720 if ((reload_reg_class[s] != reload_reg_class[r]
5721 && reg_classes_intersect_p (reload_reg_class[r],
5722 reload_reg_class[s]))
5723 || reload_nregs[s] < reload_nregs[r])
5724 break;
5725 }
5726
5727 if (i == n_reloads)
5728 continue;
5729
5730 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5731 #endif
5732 }
5733
5734 /* Now allocate reload registers for anything non-optional that
5735 didn't get one yet. */
5736 for (j = 0; j < n_reloads; j++)
5737 {
5738 register int r = reload_order[j];
5739
5740 /* Ignore reloads that got marked inoperative. */
5741 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5742 continue;
5743
5744 /* Skip reloads that already have a register allocated or are
5745 optional. */
5746 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5747 continue;
5748
5749 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5750 break;
5751 }
5752
5753 /* If that loop got all the way, we have won. */
5754 if (j == n_reloads)
5755 break;
5756
5757 fail:
5758 /* Loop around and try without any inheritance. */
5759 /* First undo everything done by the failed attempt
5760 to allocate with inheritance. */
5761 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5762 sizeof reload_reg_rtx);
5763 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5764 sizeof reload_inherited);
5765 bcopy ((char *) save_reload_inheritance_insn,
5766 (char *) reload_inheritance_insn,
5767 sizeof reload_inheritance_insn);
5768 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5769 sizeof reload_override_in);
5770 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5771 sizeof reload_spill_index);
5772 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5773 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5774 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5775 save_reload_reg_used_in_op_addr);
5776 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5777 save_reload_reg_used_in_op_addr_reload);
5778 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5779 save_reload_reg_used_in_insn);
5780 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5781 save_reload_reg_used_in_other_addr);
5782
5783 for (i = 0; i < reload_n_operands; i++)
5784 {
5785 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5786 save_reload_reg_used_in_input[i]);
5787 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5788 save_reload_reg_used_in_output[i]);
5789 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5790 save_reload_reg_used_in_input_addr[i]);
5791 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5792 save_reload_reg_used_in_inpaddr_addr[i]);
5793 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5794 save_reload_reg_used_in_output_addr[i]);
5795 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5796 save_reload_reg_used_in_outaddr_addr[i]);
5797 }
5798 }
5799
5800 /* If we thought we could inherit a reload, because it seemed that
5801 nothing else wanted the same reload register earlier in the insn,
5802 verify that assumption, now that all reloads have been assigned. */
5803
5804 for (j = 0; j < n_reloads; j++)
5805 {
5806 register int r = reload_order[j];
5807
5808 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5809 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5810 reload_opnum[r],
5811 reload_when_needed[r]))
5812 reload_inherited[r] = 0;
5813
5814 /* If we found a better place to reload from,
5815 validate it in the same fashion, if it is a reload reg. */
5816 if (reload_override_in[r]
5817 && (GET_CODE (reload_override_in[r]) == REG
5818 || GET_CODE (reload_override_in[r]) == SUBREG))
5819 {
5820 int regno = true_regnum (reload_override_in[r]);
5821 if (spill_reg_order[regno] >= 0
5822 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5823 reload_when_needed[r]))
5824 reload_override_in[r] = 0;
5825 }
5826 }
5827
5828 /* Now that reload_override_in is known valid,
5829 actually override reload_in. */
5830 for (j = 0; j < n_reloads; j++)
5831 if (reload_override_in[j])
5832 reload_in[j] = reload_override_in[j];
5833
5834 /* If this reload won't be done because it has been cancelled or is
5835 optional and not inherited, clear reload_reg_rtx so other
5836 routines (such as subst_reloads) don't get confused. */
5837 for (j = 0; j < n_reloads; j++)
5838 if (reload_reg_rtx[j] != 0
5839 && ((reload_optional[j] && ! reload_inherited[j])
5840 || (reload_in[j] == 0 && reload_out[j] == 0
5841 && ! reload_secondary_p[j])))
5842 {
5843 int regno = true_regnum (reload_reg_rtx[j]);
5844
5845 if (spill_reg_order[regno] >= 0)
5846 clear_reload_reg_in_use (regno, reload_opnum[j],
5847 reload_when_needed[j], reload_mode[j]);
5848 reload_reg_rtx[j] = 0;
5849 }
5850
5851 /* Record which pseudos and which spill regs have output reloads. */
5852 for (j = 0; j < n_reloads; j++)
5853 {
5854 register int r = reload_order[j];
5855
5856 i = reload_spill_index[r];
5857
5858 /* I is nonneg if this reload used one of the spill regs.
5859 If reload_reg_rtx[r] is 0, this is an optional reload
5860 that we opted to ignore. */
5861 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5862 && reload_reg_rtx[r] != 0)
5863 {
5864 register int nregno = REGNO (reload_out[r]);
5865 int nr = 1;
5866
5867 if (nregno < FIRST_PSEUDO_REGISTER)
5868 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5869
5870 while (--nr >= 0)
5871 reg_has_output_reload[nregno + nr] = 1;
5872
5873 if (i >= 0)
5874 {
5875 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5876 while (--nr >= 0)
5877 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5878 }
5879
5880 if (reload_when_needed[r] != RELOAD_OTHER
5881 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5882 && reload_when_needed[r] != RELOAD_FOR_INSN)
5883 abort ();
5884 }
5885 }
5886 }
5887 \f
5888 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5889 reloads of the same item for fear that we might not have enough reload
5890 registers. However, normally they will get the same reload register
5891 and hence actually need not be loaded twice.
5892
5893 Here we check for the most common case of this phenomenon: when we have
5894 a number of reloads for the same object, each of which were allocated
5895 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5896 reload, and is not modified in the insn itself. If we find such,
5897 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5898 This will not increase the number of spill registers needed and will
5899 prevent redundant code. */
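/* A typical case (illustrative): two operands of the insn each required an
   input reload of the same value, and both reloads were assigned the same
   reload register; merging them into a single RELOAD_OTHER reload avoids
   emitting the identical load twice.  */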
5900
5901 static void
5902 merge_assigned_reloads (insn)
5903 rtx insn;
5904 {
5905 int i, j;
5906
5907 /* Scan all the reloads looking for ones that only load values and
5908 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5909 assigned and not modified by INSN. */
5910
5911 for (i = 0; i < n_reloads; i++)
5912 {
5913 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5914 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5915 || reg_set_p (reload_reg_rtx[i], insn))
5916 continue;
5917
5918 /* Look at all other reloads. Ensure that the only use of this
5919 reload_reg_rtx is in a reload that just loads the same value
5920 as we do. Note that any secondary reloads must be of the identical
5921 class since the values, modes, and result registers are the
5922 same, so we need not do anything with any secondary reloads. */
5923
5924 for (j = 0; j < n_reloads; j++)
5925 {
5926 if (i == j || reload_reg_rtx[j] == 0
5927 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5928 reload_reg_rtx[i]))
5929 continue;
5930
5931 /* If the reload regs aren't exactly the same (e.g., different modes)
5932 or if the values are different, we can't merge anything with this
5933 reload register. */
5934
5935 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5936 || reload_out[j] != 0 || reload_in[j] == 0
5937 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5938 break;
5939 }
5940
5941 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5942 we, in fact, found any matching reloads. */
5943
5944 if (j == n_reloads)
5945 {
5946 for (j = 0; j < n_reloads; j++)
5947 if (i != j && reload_reg_rtx[j] != 0
5948 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5949 {
5950 reload_when_needed[i] = RELOAD_OTHER;
5951 reload_in[j] = 0;
5952 transfer_replacements (i, j);
5953 }
5954
5955 /* If this is now RELOAD_OTHER, look for any reloads that load
5956 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5957 if they were for inputs, RELOAD_OTHER for outputs. Note that
5958 this test is equivalent to looking for reloads for this operand
5959 number. */
5960
5961 if (reload_when_needed[i] == RELOAD_OTHER)
5962 for (j = 0; j < n_reloads; j++)
5963 if (reload_in[j] != 0
5964 && reload_when_needed[j] != RELOAD_OTHER
5965 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5966 reload_in[i]))
5967 reload_when_needed[j]
5968 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5969 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
5970 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5971 }
5972 }
5973 }
5974
5975 \f
5976 /* Output insns to reload values in and out of the chosen reload regs. */
5977
5978 static void
5979 emit_reload_insns (insn)
5980 rtx insn;
5981 {
5982 register int j;
5983 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5984 rtx other_input_address_reload_insns = 0;
5985 rtx other_input_reload_insns = 0;
5986 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5988 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5989 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5990 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5991 rtx operand_reload_insns = 0;
5992 rtx other_operand_reload_insns = 0;
5993 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5994 rtx following_insn = NEXT_INSN (insn);
5995 rtx before_insn = insn;
5996 int special;
5997 /* Values to be put in spill_reg_store are put here first. */
5998 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5999
6000 for (j = 0; j < reload_n_operands; j++)
6001 input_reload_insns[j] = input_address_reload_insns[j]
6002 = inpaddr_address_reload_insns[j]
6003 = output_reload_insns[j] = output_address_reload_insns[j]
6004 = outaddr_address_reload_insns[j]
6005 = other_output_reload_insns[j] = 0;
6006
6007 /* Now output the instructions to copy the data into and out of the
6008 reload registers. Do these in the order that the reloads were reported,
6009 since reloads of base and index registers precede reloads of operands
6010 and the operands may need the base and index registers reloaded. */
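/* Each category of reload insns is accumulated on its own sequence
   (input_reload_insns[], operand_reload_insns, and so on) via
   push_to_sequence below, so that it can later be placed in the proper
   position relative to INSN.  */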
6011
6012 for (j = 0; j < n_reloads; j++)
6013 {
6014 register rtx old;
6015 rtx oldequiv_reg = 0;
6016 rtx this_reload_insn = 0;
6017
6018 if (reload_spill_index[j] >= 0)
6019 new_spill_reg_store[reload_spill_index[j]] = 0;
6020
6021 old = reload_in[j];
6022 if (old != 0 && ! reload_inherited[j]
6023 && ! rtx_equal_p (reload_reg_rtx[j], old)
6024 && reload_reg_rtx[j] != 0)
6025 {
6026 register rtx reloadreg = reload_reg_rtx[j];
6027 rtx oldequiv = 0;
6028 enum machine_mode mode;
6029 rtx *where;
6030
6031 /* Determine the mode to reload in.
6032 This is very tricky because we have three to choose from.
6033 There is the mode the insn operand wants (reload_inmode[J]).
6034 There is the mode of the reload register RELOADREG.
6035 There is the intrinsic mode of the operand, which we could find
6036 by stripping some SUBREGs.
6037 It turns out that RELOADREG's mode is irrelevant:
6038 we can change that arbitrarily.
6039
6040 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6041 then the reload reg may not support QImode moves, so use SImode.
6042 If foo is in memory due to spilling a pseudo reg, this is safe,
6043 because the QImode value is in the least significant part of a
6044 slot big enough for a SImode. If foo is some other sort of
6045 memory reference, then it is impossible to reload this case,
6046 so previous passes had better make sure this never happens.
6047
6048 Then consider a one-word union which has SImode and one of its
6049 members is a float, being fetched as (SUBREG:SF union:SI).
6050 We must fetch that as SFmode because we could be loading into
6051 a float-only register. In this case OLD's mode is correct.
6052
6053 Consider an immediate integer: it has VOIDmode. Here we need
6054 to get a mode from something else.
6055
6056 In some cases, there is a fourth mode, the operand's
6057 containing mode. If the insn specifies a containing mode for
6058 this operand, it overrides all others.
6059
6060 I am not sure whether the algorithm here is always right,
6061 but it does the right things in those cases. */
6062
6063 mode = GET_MODE (old);
6064 if (mode == VOIDmode)
6065 mode = reload_inmode[j];
6066
6067 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6068 /* If we need a secondary register for this operation, see if
6069 the value is already in a register in that class. Don't
6070 do this if the secondary register will be used as a scratch
6071 register. */
6072
6073 if (reload_secondary_in_reload[j] >= 0
6074 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6075 && optimize)
6076 oldequiv
6077 = find_equiv_reg (old, insn,
6078 reload_reg_class[reload_secondary_in_reload[j]],
6079 -1, NULL_PTR, 0, mode);
6080 #endif
6081
6082 /* If reloading from memory, see if there is a register
6083 that already holds the same value. If so, reload from there.
6084 We can pass 0 as the reload_reg_p argument because
6085 any other reload has either already been emitted,
6086 in which case find_equiv_reg will see the reload-insn,
6087 or has yet to be emitted, in which case it doesn't matter
6088 because we will use this equiv reg right away. */
6089
6090 if (oldequiv == 0 && optimize
6091 && (GET_CODE (old) == MEM
6092 || (GET_CODE (old) == REG
6093 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6094 && reg_renumber[REGNO (old)] < 0)))
6095 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6096 -1, NULL_PTR, 0, mode);
6097
6098 if (oldequiv)
6099 {
6100 int regno = true_regnum (oldequiv);
6101
6102 /* If OLDEQUIV is a spill register, don't use it for this
6103 if any other reload needs it at an earlier stage of this insn
6104 or at this stage. */
6105 if (spill_reg_order[regno] >= 0
6106 && (! reload_reg_free_p (regno, reload_opnum[j],
6107 reload_when_needed[j])
6108 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6109 reload_when_needed[j])))
6110 oldequiv = 0;
6111
6112 /* If OLDEQUIV is not a spill register,
6113 don't use it if any other reload wants it. */
6114 if (spill_reg_order[regno] < 0)
6115 {
6116 int k;
6117 for (k = 0; k < n_reloads; k++)
6118 if (reload_reg_rtx[k] != 0 && k != j
6119 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6120 oldequiv))
6121 {
6122 oldequiv = 0;
6123 break;
6124 }
6125 }
6126
6127 /* If it is no cheaper to copy from OLDEQUIV into the
6128 reload register than it would be to move from memory,
6129 don't use it. Likewise, if we need a secondary register
6130 or memory. */
6131
6132 if (oldequiv != 0
6133 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6134 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6135 reload_reg_class[j])
6136 >= MEMORY_MOVE_COST (mode)))
6137 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6138 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6139 mode, oldequiv)
6140 != NO_REGS)
6141 #endif
6142 #ifdef SECONDARY_MEMORY_NEEDED
6143 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6144 REGNO_REG_CLASS (regno),
6145 mode)
6146 #endif
6147 ))
6148 oldequiv = 0;
6149 }
6150
6151 if (oldequiv == 0)
6152 oldequiv = old;
6153 else if (GET_CODE (oldequiv) == REG)
6154 oldequiv_reg = oldequiv;
6155 else if (GET_CODE (oldequiv) == SUBREG)
6156 oldequiv_reg = SUBREG_REG (oldequiv);
6157
6158 /* If we are reloading from a register that was recently stored in
6159 with an output-reload, see if we can prove there was
6160 actually no need to store the old value in it. */
6161
6162 if (optimize && GET_CODE (oldequiv) == REG
6163 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6164 && spill_reg_order[REGNO (oldequiv)] >= 0
6165 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6166 && find_reg_note (insn, REG_DEAD, reload_in[j])
6167 /* This is unsafe if operand occurs more than once in current
6168 insn. Perhaps some occurrences weren't reloaded. */
6169 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6170 delete_output_reload
6171 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6172
6173 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6174 then load RELOADREG from OLDEQUIV. Note that we cannot use
6175 gen_lowpart_common since it can do the wrong thing when
6176 RELOADREG has a multi-word mode. Note that RELOADREG
6177 must always be a REG here. */
6178
6179 if (GET_MODE (reloadreg) != mode)
6180 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6181 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6182 oldequiv = SUBREG_REG (oldequiv);
6183 if (GET_MODE (oldequiv) != VOIDmode
6184 && mode != GET_MODE (oldequiv))
6185 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6186
6187 /* Switch to the right place to emit the reload insns. */
6188 switch (reload_when_needed[j])
6189 {
6190 case RELOAD_OTHER:
6191 where = &other_input_reload_insns;
6192 break;
6193 case RELOAD_FOR_INPUT:
6194 where = &input_reload_insns[reload_opnum[j]];
6195 break;
6196 case RELOAD_FOR_INPUT_ADDRESS:
6197 where = &input_address_reload_insns[reload_opnum[j]];
6198 break;
6199 case RELOAD_FOR_INPADDR_ADDRESS:
6200 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6201 break;
6202 case RELOAD_FOR_OUTPUT_ADDRESS:
6203 where = &output_address_reload_insns[reload_opnum[j]];
6204 break;
6205 case RELOAD_FOR_OUTADDR_ADDRESS:
6206 where = &outaddr_address_reload_insns[reload_opnum[j]];
6207 break;
6208 case RELOAD_FOR_OPERAND_ADDRESS:
6209 where = &operand_reload_insns;
6210 break;
6211 case RELOAD_FOR_OPADDR_ADDR:
6212 where = &other_operand_reload_insns;
6213 break;
6214 case RELOAD_FOR_OTHER_ADDRESS:
6215 where = &other_input_address_reload_insns;
6216 break;
6217 default:
6218 abort ();
6219 }
6220
6221 push_to_sequence (*where);
6222 special = 0;
6223
6224 /* Auto-increment addresses must be reloaded in a special way. */
6225 if (GET_CODE (oldequiv) == POST_INC
6226 || GET_CODE (oldequiv) == POST_DEC
6227 || GET_CODE (oldequiv) == PRE_INC
6228 || GET_CODE (oldequiv) == PRE_DEC)
6229 {
6230 /* We are not going to bother supporting the case where an
6231 incremented register can't be copied directly from
6232 OLDEQUIV since this seems highly unlikely. */
6233 if (reload_secondary_in_reload[j] >= 0)
6234 abort ();
6235 /* Prevent normal processing of this reload. */
6236 special = 1;
6237 /* Output a special code sequence for this case. */
6238 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6239 }
6240
6241 /* If we are reloading a pseudo-register that was set by the previous
6242 insn, see if we can get rid of that pseudo-register entirely
6243 by redirecting the previous insn into our reload register. */
6244
6245 else if (optimize && GET_CODE (old) == REG
6246 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6247 && dead_or_set_p (insn, old)
6248 /* This is unsafe if some other reload
6249 uses the same reg first. */
6250 && reload_reg_free_before_p (REGNO (reloadreg),
6251 reload_opnum[j],
6252 reload_when_needed[j]))
6253 {
6254 rtx temp = PREV_INSN (insn);
6255 while (temp && GET_CODE (temp) == NOTE)
6256 temp = PREV_INSN (temp);
6257 if (temp
6258 && GET_CODE (temp) == INSN
6259 && GET_CODE (PATTERN (temp)) == SET
6260 && SET_DEST (PATTERN (temp)) == old
6261 /* Make sure we can access insn_operand_constraint. */
6262 && asm_noperands (PATTERN (temp)) < 0
6263 /* This is unsafe if prev insn rejects our reload reg. */
6264 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6265 reloadreg)
6266 /* This is unsafe if operand occurs more than once in current
6267 insn. Perhaps some occurrences aren't reloaded. */
6268 && count_occurrences (PATTERN (insn), old) == 1
6269 /* Don't risk splitting a matching pair of operands. */
6270 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6271 {
6272 /* Store into the reload register instead of the pseudo. */
6273 SET_DEST (PATTERN (temp)) = reloadreg;
6274 /* If these are the only uses of the pseudo reg,
6275 pretend for GDB it lives in the reload reg we used. */
6276 if (REG_N_DEATHS (REGNO (old)) == 1
6277 && REG_N_SETS (REGNO (old)) == 1)
6278 {
6279 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6280 alter_reg (REGNO (old), -1);
6281 }
6282 special = 1;
6283 }
6284 }
6285
6286 /* We can't do that, so output an insn to load RELOADREG. */
6287
6288 if (! special)
6289 {
6290 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6291 rtx second_reload_reg = 0;
6292 enum insn_code icode;
6293
6294 /* If we have a secondary reload, pick up the secondary register
6295 and icode, if any. If OLDEQUIV and OLD are different or
6296 if this is an in-out reload, recompute whether or not we
6297 still need a secondary register and what the icode should
6298 be. If we still need a secondary register and the class or
6299 icode is different, go back to reloading from OLD if using
6300 OLDEQUIV means that we got the wrong type of register. We
6301 cannot have different class or icode due to an in-out reload
6302 because we don't make such reloads when both the input and
6303 output need secondary reload registers. */
6304
6305 if (reload_secondary_in_reload[j] >= 0)
6306 {
6307 int secondary_reload = reload_secondary_in_reload[j];
6308 rtx real_oldequiv = oldequiv;
6309 rtx real_old = old;
6310
6311 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6312 and similarly for OLD.
6313 See comments in get_secondary_reload in reload.c. */
6314 if (GET_CODE (oldequiv) == REG
6315 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6316 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6317 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6318
6319 if (GET_CODE (old) == REG
6320 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6321 && reg_equiv_mem[REGNO (old)] != 0)
6322 real_old = reg_equiv_mem[REGNO (old)];
6323
6324 second_reload_reg = reload_reg_rtx[secondary_reload];
6325 icode = reload_secondary_in_icode[j];
6326
6327 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6328 || (reload_in[j] != 0 && reload_out[j] != 0))
6329 {
6330 enum reg_class new_class
6331 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6332 mode, real_oldequiv);
6333
6334 if (new_class == NO_REGS)
6335 second_reload_reg = 0;
6336 else
6337 {
6338 enum insn_code new_icode;
6339 enum machine_mode new_mode;
6340
6341 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6342 REGNO (second_reload_reg)))
6343 oldequiv = old, real_oldequiv = real_old;
6344 else
6345 {
6346 new_icode = reload_in_optab[(int) mode];
6347 if (new_icode != CODE_FOR_nothing
6348 && ((insn_operand_predicate[(int) new_icode][0]
6349 && ! ((*insn_operand_predicate[(int) new_icode][0])
6350 (reloadreg, mode)))
6351 || (insn_operand_predicate[(int) new_icode][1]
6352 && ! ((*insn_operand_predicate[(int) new_icode][1])
6353 (real_oldequiv, mode)))))
6354 new_icode = CODE_FOR_nothing;
6355
6356 if (new_icode == CODE_FOR_nothing)
6357 new_mode = mode;
6358 else
6359 new_mode = insn_operand_mode[(int) new_icode][2];
6360
6361 if (GET_MODE (second_reload_reg) != new_mode)
6362 {
6363 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6364 new_mode))
6365 oldequiv = old, real_oldequiv = real_old;
6366 else
6367 second_reload_reg
6368 = gen_rtx (REG, new_mode,
6369 REGNO (second_reload_reg));
6370 }
6371 }
6372 }
6373 }
6374
6375 /* If we still need a secondary reload register, check
6376 to see if it is being used as a scratch or intermediate
6377 register and generate code appropriately. If we need
6378 a scratch register, use REAL_OLDEQUIV since the form of
6379 the insn may depend on the actual address if it is
6380 a MEM. */
6381
6382 if (second_reload_reg)
6383 {
6384 if (icode != CODE_FOR_nothing)
6385 {
6386 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6387 second_reload_reg));
6388 special = 1;
6389 }
6390 else
6391 {
6392 /* See if we need a scratch register to load the
6393 intermediate register (a tertiary reload). */
6394 enum insn_code tertiary_icode
6395 = reload_secondary_in_icode[secondary_reload];
6396
6397 if (tertiary_icode != CODE_FOR_nothing)
6398 {
6399 rtx third_reload_reg
6400 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6401
6402 emit_insn ((GEN_FCN (tertiary_icode)
6403 (second_reload_reg, real_oldequiv,
6404 third_reload_reg)));
6405 }
6406 else
6407 gen_reload (second_reload_reg, oldequiv,
6408 reload_opnum[j],
6409 reload_when_needed[j]);
6410
6411 oldequiv = second_reload_reg;
6412 }
6413 }
6414 }
6415 #endif
6416
6417 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6418 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6419 reload_when_needed[j]);
6420
6421 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6422 /* We may have to make a REG_DEAD note for the secondary reload
6423 register in the insns we just made. Find the last insn that
6424 mentioned the register. */
6425 if (! special && second_reload_reg
6426 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6427 {
6428 rtx prev;
6429
6430 for (prev = get_last_insn (); prev;
6431 prev = PREV_INSN (prev))
6432 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6433 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6434 PATTERN (prev)))
6435 {
6436 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6437 second_reload_reg,
6438 REG_NOTES (prev));
6439 break;
6440 }
6441 }
6442 #endif
6443 }
6444
6445 this_reload_insn = get_last_insn ();
6446 /* End this sequence. */
6447 *where = get_insns ();
6448 end_sequence ();
6449 }
6450
6451 /* Add a note saying the input reload reg
6452 dies in this insn, if anyone cares. */
6453 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6454 if (old != 0
6455 && reload_reg_rtx[j] != old
6456 && reload_reg_rtx[j] != 0
6457 && reload_out[j] == 0
6458 && ! reload_inherited[j]
6459 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6460 {
6461 register rtx reloadreg = reload_reg_rtx[j];
6462
6463 #if 0
6464 /* We can't abort here because we need to support this for sched.c.
6465 It's not terrible to miss a REG_DEAD note, but we should try
6466 to figure out how to do this correctly. */
6467 /* The code below is incorrect for address-only reloads. */
6468 if (reload_when_needed[j] != RELOAD_OTHER
6469 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6470 abort ();
6471 #endif
6472
6473 /* Add a death note to this insn, for an input reload. */
6474
6475 if ((reload_when_needed[j] == RELOAD_OTHER
6476 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6477 && ! dead_or_set_p (insn, reloadreg))
6478 REG_NOTES (insn)
6479 = gen_rtx (EXPR_LIST, REG_DEAD,
6480 reloadreg, REG_NOTES (insn));
6481 }
6482
6483 /* When we inherit a reload, the last marked death of the reload reg
6484 may no longer really be a death. */
6485 if (reload_reg_rtx[j] != 0
6486 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6487 && reload_inherited[j])
6488 {
6489 /* Handle inheriting an output reload.
6490 Remove the death note from the output reload insn. */
6491 if (reload_spill_index[j] >= 0
6492 && GET_CODE (reload_in[j]) == REG
6493 && spill_reg_store[reload_spill_index[j]] != 0
6494 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6495 REG_DEAD, REGNO (reload_reg_rtx[j])))
6496 remove_death (REGNO (reload_reg_rtx[j]),
6497 spill_reg_store[reload_spill_index[j]]);
6498 /* Likewise for input reloads that were inherited. */
6499 else if (reload_spill_index[j] >= 0
6500 && GET_CODE (reload_in[j]) == REG
6501 && spill_reg_store[reload_spill_index[j]] == 0
6502 && reload_inheritance_insn[j] != 0
6503 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6504 REGNO (reload_reg_rtx[j])))
6505 remove_death (REGNO (reload_reg_rtx[j]),
6506 reload_inheritance_insn[j]);
6507 else
6508 {
6509 rtx prev;
6510
6511 /* We got this register from find_equiv_reg.
6512 Search back for its last death note and get rid of it.
6513 But don't search back too far.
6514 Don't go past a place where this reg is set,
6515 since a death note before that remains valid. */
6516 for (prev = PREV_INSN (insn);
6517 prev && GET_CODE (prev) != CODE_LABEL;
6518 prev = PREV_INSN (prev))
6519 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6520 && dead_or_set_p (prev, reload_reg_rtx[j]))
6521 {
6522 if (find_regno_note (prev, REG_DEAD,
6523 REGNO (reload_reg_rtx[j])))
6524 remove_death (REGNO (reload_reg_rtx[j]), prev);
6525 break;
6526 }
6527 }
6528 }
6529
6530 /* We might have used find_equiv_reg above to choose an alternate
6531 place from which to reload. If so, and it died, we need to remove
6532 that death and move it to one of the insns we just made. */
6533
6534 if (oldequiv_reg != 0
6535 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6536 {
6537 rtx prev, prev1;
6538
6539 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6540 prev = PREV_INSN (prev))
6541 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6542 && dead_or_set_p (prev, oldequiv_reg))
6543 {
6544 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6545 {
6546 for (prev1 = this_reload_insn;
6547 prev1; prev1 = PREV_INSN (prev1))
6548 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6549 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6550 PATTERN (prev1)))
6551 {
6552 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6553 oldequiv_reg,
6554 REG_NOTES (prev1));
6555 break;
6556 }
6557 remove_death (REGNO (oldequiv_reg), prev);
6558 }
6559 break;
6560 }
6561 }
6562 #endif
6563
6564 /* If we are reloading a register that was recently stored in with an
6565 output-reload, see if we can prove there was
6566 actually no need to store the old value in it. */
6567
6568 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6569 && reload_in[j] != 0
6570 && GET_CODE (reload_in[j]) == REG
6571 #if 0
6572 /* There doesn't seem to be any reason to restrict this to pseudos
6573 and doing so loses in the case where we are copying from a
6574 register of the wrong class. */
6575 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6576 #endif
6577 && spill_reg_store[reload_spill_index[j]] != 0
6578 /* This is unsafe if some other reload uses the same reg first. */
6579 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6580 reload_opnum[j], reload_when_needed[j])
6581 && dead_or_set_p (insn, reload_in[j])
6582 /* This is unsafe if operand occurs more than once in current
6583 insn. Perhaps some occurrences weren't reloaded. */
6584 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6585 delete_output_reload (insn, j,
6586 spill_reg_store[reload_spill_index[j]]);
6587
6588 /* Input-reloading is done. Now do output-reloading,
6589 storing the value from the reload-register after the main insn
6590 if reload_out[j] is nonzero.
6591
6592 ??? At some point we need to support handling output reloads of
6593 JUMP_INSNs or insns that set cc0. */
6594 old = reload_out[j];
6595 if (old != 0
6596 && reload_reg_rtx[j] != old
6597 && reload_reg_rtx[j] != 0)
6598 {
6599 register rtx reloadreg = reload_reg_rtx[j];
6600 register rtx second_reloadreg = 0;
6601 rtx note, p;
6602 enum machine_mode mode;
6603 int special = 0;
6604
6605 /* An output operand that dies right away does need a reload,
6606 but need not be copied from it. Show the new location in the
6607 REG_UNUSED note. */
6608 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6609 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6610 {
6611 XEXP (note, 0) = reload_reg_rtx[j];
6612 continue;
6613 }
6614 /* Likewise for a SUBREG of an operand that dies. */
6615 else if (GET_CODE (old) == SUBREG
6616 && GET_CODE (SUBREG_REG (old)) == REG
6617 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6618 SUBREG_REG (old))))
6619 {
6620 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6621 reload_reg_rtx[j]);
6622 continue;
6623 }
6624 else if (GET_CODE (old) == SCRATCH)
6625 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6626 but we don't want to make an output reload. */
6627 continue;
6628
6629 #if 0
6630 /* Strip off of OLD any size-increasing SUBREGs such as
6631 (SUBREG:SI foo:QI 0). */
6632
6633 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6634 && (GET_MODE_SIZE (GET_MODE (old))
6635 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6636 old = SUBREG_REG (old);
6637 #endif
6638
6639 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6640 if (GET_CODE (insn) == JUMP_INSN)
6641 abort ();
6642
6643 if (reload_when_needed[j] == RELOAD_OTHER)
6644 start_sequence ();
6645 else
6646 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6647
6648 /* Determine the mode to reload in.
6649 See comments above (for input reloading). */
6650
6651 mode = GET_MODE (old);
6652 if (mode == VOIDmode)
6653 {
6654 /* VOIDmode should never happen for an output. */
6655 if (asm_noperands (PATTERN (insn)) < 0)
6656 /* It's the compiler's fault. */
6657 fatal_insn ("VOIDmode on an output", insn);
6658 error_for_asm (insn, "output operand is constant in `asm'");
6659 /* Prevent crash--use something we know is valid. */
6660 mode = word_mode;
6661 old = gen_rtx (REG, mode, REGNO (reloadreg));
6662 }
6663
6664 if (GET_MODE (reloadreg) != mode)
6665 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6666
6667 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6668
6669 /* If we need two reload regs, set RELOADREG to the intermediate
6670 one, since it will be stored into OLD. We might need a secondary
6671 register only for an input reload, so check again here. */
6672
6673 if (reload_secondary_out_reload[j] >= 0)
6674 {
6675 rtx real_old = old;
6676
6677 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6678 && reg_equiv_mem[REGNO (old)] != 0)
6679 real_old = reg_equiv_mem[REGNO (old)];
6680
6681 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6682 mode, real_old)
6683 != NO_REGS))
6684 {
6685 second_reloadreg = reloadreg;
6686 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6687
6688 /* See if RELOADREG is to be used as a scratch register
6689 or as an intermediate register. */
6690 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6691 {
6692 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6693 (real_old, second_reloadreg, reloadreg)));
6694 special = 1;
6695 }
6696 else
6697 {
6698 /* See if we need both a scratch and intermediate reload
6699 register. */
6700
6701 int secondary_reload = reload_secondary_out_reload[j];
6702 enum insn_code tertiary_icode
6703 = reload_secondary_out_icode[secondary_reload];
6704
6705 if (GET_MODE (reloadreg) != mode)
6706 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6707
6708 if (tertiary_icode != CODE_FOR_nothing)
6709 {
6710 rtx third_reloadreg
6711 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6712 rtx tem;
6713
6714 /* Copy the primary reload reg to the secondary reload reg
6715 (note that these have been swapped above), then copy the
6716 secondary reload reg to OLD using our insn. */
6717
6718 /* If REAL_OLD is a paradoxical SUBREG, remove it
6719 and try to put the opposite SUBREG on
6720 RELOADREG. */
6721 if (GET_CODE (real_old) == SUBREG
6722 && (GET_MODE_SIZE (GET_MODE (real_old))
6723 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6724 && 0 != (tem = gen_lowpart_common
6725 (GET_MODE (SUBREG_REG (real_old)),
6726 reloadreg)))
6727 real_old = SUBREG_REG (real_old), reloadreg = tem;
6728
6729 gen_reload (reloadreg, second_reloadreg,
6730 reload_opnum[j], reload_when_needed[j]);
6731 emit_insn ((GEN_FCN (tertiary_icode)
6732 (real_old, reloadreg, third_reloadreg)));
6733 special = 1;
6734 }
6735
6736 else
6737 /* Copy between the reload regs here and then to
6738 OUT later. */
6739
6740 gen_reload (reloadreg, second_reloadreg,
6741 reload_opnum[j], reload_when_needed[j]);
6742 }
6743 }
6744 }
6745 #endif
6746
6747 /* Output the last reload insn. */
6748 if (! special)
6749 gen_reload (old, reloadreg, reload_opnum[j],
6750 reload_when_needed[j]);
6751
6752 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6753 /* If final will look at death notes for this reg,
6754 put one on the last output-reload insn to use it. Similarly
6755 for any secondary register. */
6756 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6757 for (p = get_last_insn (); p; p = PREV_INSN (p))
6758 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6759 && reg_overlap_mentioned_for_reload_p (reloadreg,
6760 PATTERN (p)))
6761 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6762 reloadreg, REG_NOTES (p));
6763
6764 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6765 if (! special && second_reloadreg
6766 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6767 for (p = get_last_insn (); p; p = PREV_INSN (p))
6768 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6769 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6770 PATTERN (p)))
6771 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6772 second_reloadreg, REG_NOTES (p));
6773 #endif
6774 #endif
6775 /* Look at all insns we emitted, just to be safe. */
6776 for (p = get_insns (); p; p = NEXT_INSN (p))
6777 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6778 {
6779 /* If this output reload doesn't come from a spill reg,
6780 clear any memory of reloaded copies of the pseudo reg.
6781 If this output reload comes from a spill reg,
6782 reg_has_output_reload will make this do nothing. */
6783 note_stores (PATTERN (p), forget_old_reloads_1);
6784
6785 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6786 && reload_spill_index[j] >= 0)
6787 new_spill_reg_store[reload_spill_index[j]] = p;
6788 }
6789
6790 if (reload_when_needed[j] == RELOAD_OTHER)
6791 {
6792 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6793 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6794 }
6795 else
6796 output_reload_insns[reload_opnum[j]] = get_insns ();
6797
6798 end_sequence ();
6799 }
6800 }
6801
6802 /* Now write all the insns we made for reloads in the order expected by
6803 the allocation functions. Prior to the insn being reloaded, we write
6804 the following reloads:
6805
6806 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6807
6808 RELOAD_OTHER reloads.
6809
6810 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6811 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6812 RELOAD_FOR_INPUT reload for the operand.
6813
6814 RELOAD_FOR_OPADDR_ADDRS reloads.
6815
6816 RELOAD_FOR_OPERAND_ADDRESS reloads.
6817
6818 After the insn being reloaded, we write the following:
6819
6820 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6821 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6822 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6823 reloads for the operand. The RELOAD_OTHER output reloads are
6824 output in descending order by reload number. */
6825
6826 emit_insns_before (other_input_address_reload_insns, before_insn);
6827 emit_insns_before (other_input_reload_insns, before_insn);
6828
6829 for (j = 0; j < reload_n_operands; j++)
6830 {
6831 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6832 emit_insns_before (input_address_reload_insns[j], before_insn);
6833 emit_insns_before (input_reload_insns[j], before_insn);
6834 }
6835
6836 emit_insns_before (other_operand_reload_insns, before_insn);
6837 emit_insns_before (operand_reload_insns, before_insn);
6838
6839 for (j = 0; j < reload_n_operands; j++)
6840 {
6841 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6842 emit_insns_before (output_address_reload_insns[j], following_insn);
6843 emit_insns_before (output_reload_insns[j], following_insn);
6844 emit_insns_before (other_output_reload_insns[j], following_insn);
6845 }
6846
6847 /* Move death notes from INSN
6848 to output-operand-address and output reload insns. */
6849 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6850 {
6851 rtx insn1;
6852 /* Loop over those insns, last ones first. */
6853 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6854 insn1 = PREV_INSN (insn1))
6855 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6856 {
6857 rtx source = SET_SRC (PATTERN (insn1));
6858 rtx dest = SET_DEST (PATTERN (insn1));
6859
6860 /* The note we will examine next. */
6861 rtx reg_notes = REG_NOTES (insn);
6862 /* The place that pointed to this note. */
6863 rtx *prev_reg_note = &REG_NOTES (insn);
6864
6865 /* If the note is for something used in the source of this
6866 reload insn, or in the output address, move the note. */
6867 while (reg_notes)
6868 {
6869 rtx next_reg_notes = XEXP (reg_notes, 1);
6870 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6871 && GET_CODE (XEXP (reg_notes, 0)) == REG
6872 && ((GET_CODE (dest) != REG
6873 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6874 dest))
6875 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6876 source)))
6877 {
6878 *prev_reg_note = next_reg_notes;
6879 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6880 REG_NOTES (insn1) = reg_notes;
6881 }
6882 else
6883 prev_reg_note = &XEXP (reg_notes, 1);
6884
6885 reg_notes = next_reg_notes;
6886 }
6887 }
6888 }
6889 #endif
6890
6891 /* For all the spill regs newly reloaded in this instruction,
6892 record what they were reloaded from, so subsequent instructions
6893 can inherit the reloads.
6894
6895 Update spill_reg_store for the reloads of this insn.
6896 Copy the elements that were updated in the loop above. */
6897
6898 for (j = 0; j < n_reloads; j++)
6899 {
6900 register int r = reload_order[j];
6901 register int i = reload_spill_index[r];
6902
6903 /* I is nonneg if this reload used one of the spill regs.
6904 If reload_reg_rtx[r] is 0, this is an optional reload
6905 that we opted to ignore. */
6906
6907 if (i >= 0 && reload_reg_rtx[r] != 0)
6908 {
6909 int nr
6910 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6911 int k;
6912 int part_reaches_end = 0;
6913 int all_reaches_end = 1;
6914
6915 /* For a multi register reload, we need to check if all or part
6916 of the value lives to the end. */
6917 for (k = 0; k < nr; k++)
6918 {
6919 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6920 reload_when_needed[r]))
6921 part_reaches_end = 1;
6922 else
6923 all_reaches_end = 0;
6924 }
6925
6926 /* Ignore reloads that don't reach the end of the insn in their
6927 entirety. */
6928 if (all_reaches_end)
6929 {
6930 /* First, clear out memory of what used to be in this spill reg.
6931 If consecutive registers are used, clear them all. */
6932
6933 for (k = 0; k < nr; k++)
6934 {
6935 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6936 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6937 }
6938
6939 /* Maybe the spill reg contains a copy of reload_out. */
6940 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6941 {
6942 register int nregno = REGNO (reload_out[r]);
6943 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6944 : HARD_REGNO_NREGS (nregno,
6945 GET_MODE (reload_reg_rtx[r])));
6946
6947 spill_reg_store[i] = new_spill_reg_store[i];
6948 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6949
6950 /* If NREGNO is a hard register, it may occupy more than
6951 one register. If it does, say what is in the
6952 rest of the registers assuming that both registers
6953 agree on how many words the object takes. If not,
6954 invalidate the subsequent registers. */
6955
6956 if (nregno < FIRST_PSEUDO_REGISTER)
6957 for (k = 1; k < nnr; k++)
6958 reg_last_reload_reg[nregno + k]
6959 = (nr == nnr
6960 ? gen_rtx (REG,
6961 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6962 REGNO (reload_reg_rtx[r]) + k)
6963 : 0);
6964
6965 /* Now do the inverse operation. */
6966 for (k = 0; k < nr; k++)
6967 {
6968 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6969 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6970 ? nregno
6971 : nregno + k);
6972 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6973 }
6974 }
6975
6976 /* Maybe the spill reg contains a copy of reload_in. Only do
6977 something if there will not be an output reload for
6978 the register being reloaded. */
6979 else if (reload_out[r] == 0
6980 && reload_in[r] != 0
6981 && ((GET_CODE (reload_in[r]) == REG
6982 && ! reg_has_output_reload[REGNO (reload_in[r])])
6983 || (GET_CODE (reload_in_reg[r]) == REG
6984 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6985 {
6986 register int nregno;
6987 int nnr;
6988
6989 if (GET_CODE (reload_in[r]) == REG)
6990 nregno = REGNO (reload_in[r]);
6991 else
6992 nregno = REGNO (reload_in_reg[r]);
6993
6994 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6995 : HARD_REGNO_NREGS (nregno,
6996 GET_MODE (reload_reg_rtx[r])));
6997
6998 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6999
7000 if (nregno < FIRST_PSEUDO_REGISTER)
7001 for (k = 1; k < nnr; k++)
7002 reg_last_reload_reg[nregno + k]
7003 = (nr == nnr
7004 ? gen_rtx (REG,
7005 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7006 REGNO (reload_reg_rtx[r]) + k)
7007 : 0);
7008
7009 /* Unless we inherited this reload, show we haven't
7010 recently done a store. */
7011 if (! reload_inherited[r])
7012 spill_reg_store[i] = 0;
7013
7014 for (k = 0; k < nr; k++)
7015 {
7016 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7017 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7018 ? nregno
7019 : nregno + k);
7020 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7021 = insn;
7022 }
7023 }
7024 }
7025
7026 /* However, if part of the reload reaches the end, then we must
7027 invalidate the old info for the part that survives to the end. */
7028 else if (part_reaches_end)
7029 {
7030 for (k = 0; k < nr; k++)
7031 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7032 reload_opnum[r],
7033 reload_when_needed[r]))
7034 {
7035 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7036 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7037 }
7038 }
7039 }
7040
7041 /* The following if-statement was #if 0'd in 1.34 (or before...).
7042 It's reenabled in 1.35 because supposedly nothing else
7043 deals with this problem. */
7044
7045 /* If a register gets output-reloaded from a non-spill register,
7046 that invalidates any previous reloaded copy of it.
7047 But forget_old_reloads_1 won't get to see it, because
7048 it thinks only about the original insn. So invalidate it here. */
7049 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7050 {
7051 register int nregno = REGNO (reload_out[r]);
7052 if (nregno >= FIRST_PSEUDO_REGISTER)
7053 reg_last_reload_reg[nregno] = 0;
7054 else
7055 {
7056 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7057
7058 while (num_regs-- > 0)
7059 reg_last_reload_reg[nregno + num_regs] = 0;
7060 }
7061 }
7062 }
7063 }
7064 \f
7065 /* Emit code to perform a reload from IN (which may be a reload register) to
7066 OUT (which may also be a reload register). IN or OUT is from operand
7067 OPNUM with reload type TYPE.
7068
7069 Returns first insn emitted. */
7070
7071 rtx
7072 gen_reload (out, in, opnum, type)
7073 rtx out;
7074 rtx in;
7075 int opnum;
7076 enum reload_type type;
7077 {
7078 rtx last = get_last_insn ();
7079 rtx tem;
7080
7081 /* If IN is a paradoxical SUBREG, remove it and try to put the
7082 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7083 if (GET_CODE (in) == SUBREG
7084 && (GET_MODE_SIZE (GET_MODE (in))
7085 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7086 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7087 in = SUBREG_REG (in), out = tem;
7088 else if (GET_CODE (out) == SUBREG
7089 && (GET_MODE_SIZE (GET_MODE (out))
7090 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7091 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7092 out = SUBREG_REG (out), in = tem;
7093
7094 /* How to do this reload can get quite tricky. Normally, we are being
7095 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7096 register that didn't get a hard register. In that case we can just
7097 call emit_move_insn.
7098
7099 We can also be asked to reload a PLUS that adds a register or a MEM to
7100 another register, constant or MEM. This can occur during frame pointer
7101 elimination and while reloading addresses. This case is handled by
7102 trying to emit a single insn to perform the add. If it is not valid,
7103 we use a two insn sequence.
7104
7105 Finally, we could be called to handle an 'o' constraint by putting
7106 an address into a register. In that case, we first try to do this
7107 with a named pattern of "reload_load_address". If no such pattern
7108 exists, we just emit a SET insn and hope for the best (it will normally
7109 be valid on machines that use 'o').
7110
7111 This entire process is made complex by two facts: reload will never
7112 process the insns we generate here, so we must ensure that they will
7113 fit their constraints; and parts of IN might be being reloaded
7114 separately and replaced with spill registers.
7115 Because of this, we are, in some sense, just guessing the right approach
7116 here. The one listed above seems to work.
7117
7118 ??? At some point, this whole thing needs to be rethought. */
7119
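/* An illustrative sketch of the PLUS case below: to reload
   IN = (plus (reg fp) (const_int 8)) into OUT = (reg 3), we first try
   the single insn

	(set (reg 3) (plus (reg fp) (const_int 8)))

   and, if it fails recognition or its constraints, delete it and fall
   back to the two-insn sequence

	(set (reg 3) (const_int 8))
	(set (reg 3) (plus (reg 3) (reg fp)))

   assuming the target's add2 pattern accepts those operands; a further
   fallback below handles the remaining cases.  */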
7120 if (GET_CODE (in) == PLUS
7121 && (GET_CODE (XEXP (in, 0)) == REG
7122 || GET_CODE (XEXP (in, 0)) == SUBREG
7123 || GET_CODE (XEXP (in, 0)) == MEM)
7124 && (GET_CODE (XEXP (in, 1)) == REG
7125 || GET_CODE (XEXP (in, 1)) == SUBREG
7126 || CONSTANT_P (XEXP (in, 1))
7127 || GET_CODE (XEXP (in, 1)) == MEM))
7128 {
7129 /* We need to compute the sum of a register or a MEM and another
7130 register, constant, or MEM, and put it into the reload
7131 register. The best possible way of doing this is if the machine
7132 has a three-operand ADD insn that accepts the required operands.
7133
7134 The simplest approach is to try to generate such an insn and see if it
7135 is recognized and matches its constraints. If so, it can be used.
7136
7137 It might be better not to actually emit the insn unless it is valid,
7138 but we need to pass the insn as an operand to `recog' and
7139 `insn_extract' and it is simpler to emit and then delete the insn if
7140 not valid than to dummy things up. */
7141
7142 rtx op0, op1, tem, insn;
7143 int code;
7144
7145 op0 = find_replacement (&XEXP (in, 0));
7146 op1 = find_replacement (&XEXP (in, 1));
7147
7148 /* Since constraint checking is strict, commutativity won't be
7149 checked, so we need to do that here to avoid spurious failure
7150 if the add instruction is two-address and the second operand
7151 of the add is the same as the reload reg, which is frequently
7152 the case. If the insn would be A = B + A, rearrange it so
7153 it will be A = A + B as constrain_operands expects. */
7154
7155 if (GET_CODE (XEXP (in, 1)) == REG
7156 && REGNO (out) == REGNO (XEXP (in, 1)))
7157 tem = op0, op0 = op1, op1 = tem;
7158
7159 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7160 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7161
7162 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7163 code = recog_memoized (insn);
7164
7165 if (code >= 0)
7166 {
7167 insn_extract (insn);
7168 /* We want constrain operands to treat this insn strictly in
7169 its validity determination, i.e., the way it would after reload
7170 has completed. */
7171 if (constrain_operands (code, 1))
7172 return insn;
7173 }
7174
7175 delete_insns_since (last);
7176
7177 /* If that failed, we must use a conservative two-insn sequence.
7178 Use a move to copy the constant, MEM, or pseudo register to the reload
7179 register, since "move" will be able to handle an arbitrary operand,
7180 unlike add, which can't in general. Then add the registers.
7181
7182 If there is another way to do this for a specific machine, a
7183 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7184 we emit below. */
7185
7186 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7187 || (GET_CODE (op1) == REG
7188 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7189 tem = op0, op0 = op1, op1 = tem;
7190
7191 gen_reload (out, op0, opnum, type);
7192
7193 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7194 This fixes a problem on the 32K where the stack pointer cannot
7195 be used as an operand of an add insn. */
7196
7197 if (rtx_equal_p (op0, op1))
7198 op1 = out;
7199
7200 insn = emit_insn (gen_add2_insn (out, op1));
7201
7202 /* If that failed, copy the address register to the reload register.
7203 Then add the constant to the reload register. */
7204
7205 code = recog_memoized (insn);
7206
7207 if (code >= 0)
7208 {
7209 insn_extract (insn);
7210 /* We want constrain operands to treat this insn strictly in
7211 its validity determination, i.e., the way it would after reload
7212 has completed. */
7213 if (constrain_operands (code, 1))
7214 return insn;
7215 }
7216
7217 delete_insns_since (last);
7218
7219 gen_reload (out, op1, opnum, type);
7220 emit_insn (gen_add2_insn (out, op0));
7221 }
7222
7223 #ifdef SECONDARY_MEMORY_NEEDED
7224 /* If we need a memory location to do the move, do it that way. */
7225 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7226 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7227 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7228 REGNO_REG_CLASS (REGNO (out)),
7229 GET_MODE (out)))
7230 {
7231 /* Get the memory to use and rewrite both registers to its mode. */
7232 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7233
7234 if (GET_MODE (loc) != GET_MODE (out))
7235 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7236
7237 if (GET_MODE (loc) != GET_MODE (in))
7238 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7239
7240 gen_reload (loc, in, opnum, type);
7241 gen_reload (out, loc, opnum, type);
7242 }
7243 #endif
7244
7245 /* If IN is a simple operand, use gen_move_insn. */
7246 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7247 emit_insn (gen_move_insn (out, in));
7248
7249 #ifdef HAVE_reload_load_address
7250 else if (HAVE_reload_load_address)
7251 emit_insn (gen_reload_load_address (out, in));
7252 #endif
7253
7254 /* Otherwise, just write (set OUT IN) and hope for the best. */
7255 else
7256 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7257
7258 /* Return the first insn emitted.
7259 We can not just return get_last_insn, because there may have
7260 been multiple instructions emitted. Also note that gen_move_insn may
7261 emit more than one insn itself, so we can not assume that there is one
7262 insn emitted per emit_insn_before call. */
7263
7264 return last ? NEXT_INSN (last) : get_insns ();
7265 }
7266 \f
7267 /* Delete a previously made output-reload
7268 whose result we now believe is not needed.
7269 First we double-check.
7270
7271 INSN is the insn now being processed.
7272 OUTPUT_RELOAD_INSN is the insn of the output reload.
7273 J is the reload-number for this insn. */
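/* An illustrative case: a previous output reload stored pseudo 65 back
   to its home (a stack slot or another hard reg), but between that store
   and INSN the pseudo is never referenced again, no jumps or labels
   intervene, and it dies here.  The value can then reach INSN through
   the reload register alone, so the store may be deleted.  */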
7274
7275 static void
7276 delete_output_reload (insn, j, output_reload_insn)
7277 rtx insn;
7278 int j;
7279 rtx output_reload_insn;
7280 {
7281 register rtx i1;
7282
7283 /* Get the raw pseudo-register referred to. */
7284
7285 rtx reg = reload_in[j];
7286 while (GET_CODE (reg) == SUBREG)
7287 reg = SUBREG_REG (reg);
7288
7289 /* If the pseudo-reg we are reloading is no longer referenced
7290 anywhere between the store into it and here,
7291 and no jumps or labels intervene, then the value can get
7292 here through the reload reg alone.
7293 Otherwise, give up--return. */
7294 for (i1 = NEXT_INSN (output_reload_insn);
7295 i1 != insn; i1 = NEXT_INSN (i1))
7296 {
7297 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7298 return;
7299 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7300 && reg_mentioned_p (reg, PATTERN (i1)))
7301 return;
7302 }
7303
7304 if (cannot_omit_stores[REGNO (reg)])
7305 return;
7306
7307 /* If this insn will store in the pseudo again,
7308 the previous store can be removed. */
7309 if (reload_out[j] == reload_in[j])
7310 delete_insn (output_reload_insn);
7311
7312 /* See if the pseudo reg has been completely replaced
7313 with reload regs. If so, delete the store insn
7314 and forget we had a stack slot for the pseudo. */
7315 else if (REG_N_DEATHS (REGNO (reg)) == 1
7316 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7317 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7318 {
7319 rtx i2;
7320
7321 /* We know that it was used only between here
7322 and the beginning of the current basic block.
7323 (We also know that the last use before INSN was
7324 the output reload we are thinking of deleting, but never mind that.)
7325 Search that range; see if any ref remains. */
7326 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7327 {
7328 rtx set = single_set (i2);
7329
7330 /* Uses which just store in the pseudo don't count,
7331 since if they are the only uses, they are dead. */
7332 if (set != 0 && SET_DEST (set) == reg)
7333 continue;
7334 if (GET_CODE (i2) == CODE_LABEL
7335 || GET_CODE (i2) == JUMP_INSN)
7336 break;
7337 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7338 && reg_mentioned_p (reg, PATTERN (i2)))
7339 /* Some other ref remains;
7340 we can't do anything. */
7341 return;
7342 }
7343
7344 /* Delete the now-dead stores into this pseudo. */
7345 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7346 {
7347 rtx set = single_set (i2);
7348
7349 if (set != 0 && SET_DEST (set) == reg)
7350 {
7351 /* This might be a basic block head,
7352 thus don't use delete_insn. */
7353 PUT_CODE (i2, NOTE);
7354 NOTE_SOURCE_FILE (i2) = 0;
7355 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7356 }
7357 if (GET_CODE (i2) == CODE_LABEL
7358 || GET_CODE (i2) == JUMP_INSN)
7359 break;
7360 }
7361
7362 /* For the debugging info,
7363 say the pseudo lives in this reload reg. */
7364 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7365 alter_reg (REGNO (reg), -1);
7366 }
7367 }
7368 \f
7369 /* Output reload-insns to reload VALUE into RELOADREG.
7370 VALUE is an autoincrement or autodecrement RTX whose operand
7371 is a register or memory location;
7372 so reloading involves incrementing that location.
7373
7374 INC_AMOUNT is the number to increment or decrement by (always positive).
7375 This cannot be deduced from VALUE. */
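/* An illustrative sketch: for VALUE = (post_inc (mem:SI (reg 14))) with
   INC_AMOUNT = 4, on a machine whose add pattern cannot operate directly
   on that memory location, the code below emits roughly

	(set (reg reload) (mem:SI (reg 14)))
	(set (reg reload) (plus (reg reload) (const_int 4)))
	(set (mem:SI (reg 14)) (reg reload))
	(set (reg reload) (plus (reg reload) (const_int -4)))

   so the incremented value is stored back while RELOADREG ends up with
   the original (pre-increment) value for use as an address.  */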
7376
7377 static void
7378 inc_for_reload (reloadreg, value, inc_amount)
7379 rtx reloadreg;
7380 rtx value;
7381 int inc_amount;
7382 {
7383 /* REG or MEM to be copied and incremented. */
7384 rtx incloc = XEXP (value, 0);
7385 /* Nonzero if increment after copying. */
7386 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7387 rtx last;
7388 rtx inc;
7389 rtx add_insn;
7390 int code;
7391
7392 /* No hard register is equivalent to this register after
7393 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7394 we could inc/dec that register as well (maybe even using it for
7395 the source), but I'm not sure it's worth worrying about. */
7396 if (GET_CODE (incloc) == REG)
7397 reg_last_reload_reg[REGNO (incloc)] = 0;
7398
7399 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7400 inc_amount = - inc_amount;
7401
7402 inc = GEN_INT (inc_amount);
7403
7404 /* If this is post-increment, first copy the location to the reload reg. */
7405 if (post)
7406 emit_insn (gen_move_insn (reloadreg, incloc));
7407
7408 /* See if we can directly increment INCLOC. Use a method similar to that
7409 in gen_reload. */
7410
7411 last = get_last_insn ();
7412 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7413 gen_rtx (PLUS, GET_MODE (incloc),
7414 incloc, inc)));
7415
7416 code = recog_memoized (add_insn);
7417 if (code >= 0)
7418 {
7419 insn_extract (add_insn);
7420 if (constrain_operands (code, 1))
7421 {
7422 /* If this is a pre-increment and we have incremented the value
7423 where it lives, copy the incremented value to RELOADREG to
7424 be used as an address. */
7425
7426 if (! post)
7427 emit_insn (gen_move_insn (reloadreg, incloc));
7428
7429 return;
7430 }
7431 }
7432
7433 delete_insns_since (last);
7434
7435 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7436 The way we do this depends on whether this is pre- or post-increment.
7437 For pre-increment, copy INCLOC to the reload register, increment it
7438 there, then save back. */
7439
7440 if (! post)
7441 {
7442 emit_insn (gen_move_insn (reloadreg, incloc));
7443 emit_insn (gen_add2_insn (reloadreg, inc));
7444 emit_insn (gen_move_insn (incloc, reloadreg));
7445 }
7446 else
7447 {
7448 /* Postincrement.
7449 Because this might be a jump insn or a compare, and because RELOADREG
7450 may not be available after the insn in an input reload, we must do
7451 the incrementation before the insn being reloaded for.
7452
7453 We have already copied INCLOC to RELOADREG. Increment the copy in
7454 RELOADREG, save that back, then decrement RELOADREG so it has
7455 the original value. */
7456
7457 emit_insn (gen_add2_insn (reloadreg, inc));
7458 emit_insn (gen_move_insn (incloc, reloadreg));
7459 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7460 }
7461
7462 return;
7463 }
7464 \f
7465 /* Return 1 if we are certain that the constraint-string STRING allows
7466 the hard register REG. Return 0 if we can't be sure of this. */
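/* For example, with REG a general register: STRING "g,r" yields 1,
   since every alternative accepts a general register, while "r,m"
   yields 0, because the second alternative only allows memory and we
   therefore cannot be sure the register is acceptable.  */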
7467
7468 static int
7469 constraint_accepts_reg_p (string, reg)
7470 char *string;
7471 rtx reg;
7472 {
7473 int value = 0;
7474 int regno = true_regnum (reg);
7475 int c;
7476
7477 /* Initialize for first alternative. */
7478 value = 0;
7479 /* Check that each alternative contains `g' or `r'. */
7480 while (1)
7481 switch (c = *string++)
7482 {
7483 case 0:
7484 /* If an alternative lacks `g' or `r', we lose. */
7485 return value;
7486 case ',':
7487 /* If an alternative lacks `g' or `r', we lose. */
7488 if (value == 0)
7489 return 0;
7490 /* Initialize for next alternative. */
7491 value = 0;
7492 break;
7493 case 'g':
7494 case 'r':
7495 /* Any general reg wins for this alternative. */
7496 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7497 value = 1;
7498 break;
7499 default:
7500 /* Any reg in specified class wins for this alternative. */
7501 {
7502 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7503
7504 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7505 value = 1;
7506 }
7507 }
7508 }
7509 \f
7510 /* Return the number of places FIND appears within X, but don't count
7511 an occurrence if some SET_DEST is FIND. */
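/* For example, if FIND is (reg 65) and X is the pattern
   (set (reg 65) (plus (reg 65) (reg 66))), the SET_DEST occurrence is
   skipped and only the use inside the PLUS is counted, giving 1.  */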
7512
7513 static int
7514 count_occurrences (x, find)
7515 register rtx x, find;
7516 {
7517 register int i, j;
7518 register enum rtx_code code;
7519 register char *format_ptr;
7520 int count;
7521
7522 if (x == find)
7523 return 1;
7524 if (x == 0)
7525 return 0;
7526
7527 code = GET_CODE (x);
7528
7529 switch (code)
7530 {
7531 case REG:
7532 case QUEUED:
7533 case CONST_INT:
7534 case CONST_DOUBLE:
7535 case SYMBOL_REF:
7536 case CODE_LABEL:
7537 case PC:
7538 case CC0:
7539 return 0;
7540
7541 case SET:
7542 if (SET_DEST (x) == find)
7543 return count_occurrences (SET_SRC (x), find);
7544 break;
7545
7546 default:
7547 break;
7548 }
7549
7550 format_ptr = GET_RTX_FORMAT (code);
7551 count = 0;
7552
7553 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7554 {
7555 switch (*format_ptr++)
7556 {
7557 case 'e':
7558 count += count_occurrences (XEXP (x, i), find);
7559 break;
7560
7561 case 'E':
7562 if (XVEC (x, i) != NULL)
7563 {
7564 for (j = 0; j < XVECLEN (x, i); j++)
7565 count += count_occurrences (XVECEXP (x, i, j), find);
7566 }
7567 break;
7568 }
7569 }
7570 return count;
7571 }
7572 \f
7573 /* This array holds values which are equivalent to a hard register
7574 during reload_cse_regs. Each array element is an EXPR_LIST of
7575 values. Each time a hard register is set, we set the corresponding
7576 array element to the value. Each time a hard register is copied
7577 into memory, we add the memory location to the corresponding array
7578 element. We don't store values or memory addresses with side
7579 effects in this array.
7580
7581 If the value is a CONST_INT, then the mode of the containing
7582 EXPR_LIST is the mode in which that CONST_INT was referenced.
7583
7584 We sometimes clobber a specific entry in a list. In that case, we
7585 just set XEXP (list-entry, 0) to 0. */
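/* A rough example: after

	(set (reg:SI 3) (const_int 10))
	(set (mem:SI (reg 14)) (reg:SI 3))

   reg_values[3] holds both (const_int 10) -- recorded in an SImode list
   entry -- and the (mem:SI ...) location.  */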
7586
7587 static rtx *reg_values;
7588
7589 /* This is a preallocated REG rtx which we use as a temporary in
7590 reload_cse_invalidate_regno, so that we don't need to allocate a
7591 new one each time through a loop in that function. */
7592
7593 static rtx invalidate_regno_rtx;
7594
7595 /* This is a set of registers for which we must remove REG_DEAD notes in
7596 previous insns, because our modifications made them invalid. That can
7597 happen if we introduced the register into the current insn, or we deleted
7598 the current insn which used to set the register. */
7599
7600 static HARD_REG_SET no_longer_dead_regs;
7601
7602 /* Invalidate any entries in reg_values which depend on REGNO,
7603 including those for REGNO itself. This is called if REGNO is
7604 changing. If CLOBBER is true, then always forget anything we
7605 currently know about REGNO. MODE is the mode of the assignment to
7606 REGNO, which is used to determine how many hard registers are being
7607 changed. If MODE is VOIDmode, then only REGNO is being changed;
7608 this is used when invalidating call clobbered registers across a
7609 call. */
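/* For example, invalidating hard register 3 with MODE == DImode, on a
   machine where DImode occupies two word registers, covers registers 3
   and 4: any reg_values entries that mention either register are
   cleared, and any earlier register holding a multi-word value that
   overlaps them is invalidated as well.  */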
7610
7611 static void
7612 reload_cse_invalidate_regno (regno, mode, clobber)
7613 int regno;
7614 enum machine_mode mode;
7615 int clobber;
7616 {
7617 int endregno;
7618 register int i;
7619
7620 /* Our callers don't always go through true_regnum; we may see a
7621 pseudo-register here from a CLOBBER or the like. We probably
7622 won't ever see a pseudo-register that has a real register number,
7623 but we check anyhow for safety. */
7624 if (regno >= FIRST_PSEUDO_REGISTER)
7625 regno = reg_renumber[regno];
7626 if (regno < 0)
7627 return;
7628
7629 if (mode == VOIDmode)
7630 endregno = regno + 1;
7631 else
7632 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7633
7634 if (clobber)
7635 for (i = regno; i < endregno; i++)
7636 reg_values[i] = 0;
7637
7638 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7639 {
7640 rtx x;
7641
7642 for (x = reg_values[i]; x; x = XEXP (x, 1))
7643 {
7644 if (XEXP (x, 0) != 0
7645 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7646 {
7647 /* If this is the only entry on the list, clear
7648 reg_values[i]. Otherwise, just clear this entry on
7649 the list. */
7650 if (XEXP (x, 1) == 0 && x == reg_values[i])
7651 {
7652 reg_values[i] = 0;
7653 break;
7654 }
7655 XEXP (x, 0) = 0;
7656 }
7657 }
7658 }
7659
7660 /* We must look at earlier registers, in case REGNO is part of a
7661 multi word value but is not the first register. If an earlier
7662 register has a value in a mode which overlaps REGNO, then we must
7663 invalidate that earlier register. Note that we do not need to
7664 check REGNO or later registers (we must not check REGNO itself,
7665 because we would incorrectly conclude that there was a conflict). */
7666
7667 for (i = 0; i < regno; i++)
7668 {
7669 rtx x;
7670
7671 for (x = reg_values[i]; x; x = XEXP (x, 1))
7672 {
7673 if (XEXP (x, 0) != 0)
7674 {
7675 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7676 REGNO (invalidate_regno_rtx) = i;
7677 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7678 NULL_PTR))
7679 {
7680 reload_cse_invalidate_regno (i, VOIDmode, 1);
7681 break;
7682 }
7683 }
7684 }
7685 }
7686 }
7687
7688 /* The memory at address MEM_BASE is being changed.
7689 Return whether this change will invalidate VAL. */
7690
7691 static int
7692 reload_cse_mem_conflict_p (mem_base, val)
7693 rtx mem_base;
7694 rtx val;
7695 {
7696 enum rtx_code code;
7697 char *fmt;
7698 int i;
7699
7700 code = GET_CODE (val);
7701 switch (code)
7702 {
7703 /* Get rid of a few simple cases quickly. */
7704 case REG:
7705 case PC:
7706 case CC0:
7707 case SCRATCH:
7708 case CONST:
7709 case CONST_INT:
7710 case CONST_DOUBLE:
7711 case SYMBOL_REF:
7712 case LABEL_REF:
7713 return 0;
7714
7715 case MEM:
7716 if (GET_MODE (mem_base) == BLKmode
7717 || GET_MODE (val) == BLKmode)
7718 return 1;
7719 if (anti_dependence (val, mem_base))
7720 return 1;
7721 /* The address may contain nested MEMs. */
7722 break;
7723
7724 default:
7725 break;
7726 }
7727
7728 fmt = GET_RTX_FORMAT (code);
7729
7730 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7731 {
7732 if (fmt[i] == 'e')
7733 {
7734 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7735 return 1;
7736 }
7737 else if (fmt[i] == 'E')
7738 {
7739 int j;
7740
7741 for (j = 0; j < XVECLEN (val, i); j++)
7742 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7743 return 1;
7744 }
7745 }
7746
7747 return 0;
7748 }
7749
7750 /* Invalidate any entries in reg_values which are changed because of a
7751 store to MEM_RTX. If this is called because of a non-const call
7752 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7753
7754 static void
7755 reload_cse_invalidate_mem (mem_rtx)
7756 rtx mem_rtx;
7757 {
7758 register int i;
7759
7760 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7761 {
7762 rtx x;
7763
7764 for (x = reg_values[i]; x; x = XEXP (x, 1))
7765 {
7766 if (XEXP (x, 0) != 0
7767 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7768 {
7769 /* If this is the only entry on the list, clear
7770 reg_values[i]. Otherwise, just clear this entry on
7771 the list. */
7772 if (XEXP (x, 1) == 0 && x == reg_values[i])
7773 {
7774 reg_values[i] = 0;
7775 break;
7776 }
7777 XEXP (x, 0) = 0;
7778 }
7779 }
7780 }
7781 }
7782
7783 /* Invalidate DEST, which is being assigned to or clobbered. The
7784 second parameter exists so that this function can be passed to
7785 note_stores; it is ignored. */
7786
7787 static void
7788 reload_cse_invalidate_rtx (dest, ignore)
7789 rtx dest;
7790 rtx ignore;
7791 {
7792 while (GET_CODE (dest) == STRICT_LOW_PART
7793 || GET_CODE (dest) == SIGN_EXTRACT
7794 || GET_CODE (dest) == ZERO_EXTRACT
7795 || GET_CODE (dest) == SUBREG)
7796 dest = XEXP (dest, 0);
7797
7798 if (GET_CODE (dest) == REG)
7799 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7800 else if (GET_CODE (dest) == MEM)
7801 reload_cse_invalidate_mem (dest);
7802 }
7803
7804 /* Possibly delete death notes on the insns before INSN if modifying INSN
7805 extended the lifespan of the registers. */
7806
7807 static void
7808 reload_cse_delete_death_notes (insn)
7809 rtx insn;
7810 {
7811 int dreg;
7812
7813 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
7814 {
7815 rtx trial;
7816
7817 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
7818 continue;
7819
7820 for (trial = prev_nonnote_insn (insn);
7821 (trial
7822 && GET_CODE (trial) != CODE_LABEL
7823 && GET_CODE (trial) != BARRIER);
7824 trial = prev_nonnote_insn (trial))
7825 {
7826 if (find_regno_note (trial, REG_DEAD, dreg))
7827 {
7828 remove_death (dreg, trial);
7829 break;
7830 }
7831 }
7832 }
7833 }
7834
7835 /* Record that the current insn uses hard reg REGNO in mode MODE. This
7836 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
7837 notes for this register. */
7838
7839 static void
7840 reload_cse_no_longer_dead (regno, mode)
7841 int regno;
7842 enum machine_mode mode;
7843 {
7844 int nregs = HARD_REGNO_NREGS (regno, mode);
7845 while (nregs-- > 0)
7846 {
7847 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
7848 regno++;
7849 }
7850 }
7851
7852
7853 /* Do a very simple CSE pass over the hard registers.
7854
7855 This function detects no-op moves where we happened to assign two
7856 different pseudo-registers to the same hard register, and then
7857 copied one to the other. Reload will generate a useless
7858 instruction copying a register to itself.
7859
7860 This function also detects cases where we load a value from memory
7861 into two different registers, and (if memory is more expensive than
7862 registers) changes it to simply copy the first register into the
7863 second register.
7864
7865 Another optimization is performed that scans the operands of each
7866 instruction to see whether the value is already available in a
7867 hard register. It then replaces the operand with the hard register
7868 if possible, much like an optional reload would. */
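/* A small example of the first case: if pseudos 65 and 66 both ended up
   in hard register 3, a copy between them becomes

	(set (reg:SI 3) (reg:SI 3))

   which reload_cse_noop_set_p recognizes, so the insn is turned into a
   deleted-insn note below.  */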
7869
7870 void
7871 reload_cse_regs (first)
7872 rtx first;
7873 {
7874 char *firstobj;
7875 rtx callmem;
7876 register int i;
7877 rtx insn;
7878
7879 init_alias_analysis ();
7880
7881 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7882 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7883 reg_values[i] = 0;
7884
7885 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7886 free them when we are done. */
7887 push_obstacks (&reload_obstack, &reload_obstack);
7888 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7889
7890 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7891 memory for a non-const call instruction. */
7892 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7893
7894 /* This is used in reload_cse_invalidate_regno to avoid consing a
7895 new REG in a loop in that function. */
7896 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7897
7898 for (insn = first; insn; insn = NEXT_INSN (insn))
7899 {
7900 rtx body;
7901
7902 if (GET_CODE (insn) == CODE_LABEL)
7903 {
7904 /* Forget all the register values at a code label. We don't
7905 try to do anything clever around jumps. */
7906 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7907 reg_values[i] = 0;
7908
7909 continue;
7910 }
7911
7912 #ifdef NON_SAVING_SETJMP
7913 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7914 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7915 {
7916 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7917 reg_values[i] = 0;
7918
7919 continue;
7920 }
7921 #endif
7922
7923 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7924 continue;
7925
7926 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7927
7928 /* If this is a call instruction, forget anything stored in a
7929 call clobbered register, or, if this is not a const call, in
7930 memory. */
7931 if (GET_CODE (insn) == CALL_INSN)
7932 {
7933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7934 if (call_used_regs[i])
7935 reload_cse_invalidate_regno (i, VOIDmode, 1);
7936
7937 if (! CONST_CALL_P (insn))
7938 reload_cse_invalidate_mem (callmem);
7939 }
7940
7941 body = PATTERN (insn);
7942 if (GET_CODE (body) == SET)
7943 {
7944 int count = 0;
7945 if (reload_cse_noop_set_p (body, insn))
7946 {
7947 PUT_CODE (insn, NOTE);
7948 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7949 NOTE_SOURCE_FILE (insn) = 0;
7950 reload_cse_delete_death_notes (insn);
7951
7952 /* We're done with this insn. */
7953 continue;
7954 }
7955
7956 /* It's not a no-op, but we can try to simplify it. */
7957 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7958 count += reload_cse_simplify_set (body, insn);
7959
7960 if (count > 0 && apply_change_group ())
7961 reload_cse_delete_death_notes (insn);
7962 else if (reload_cse_simplify_operands (insn))
7963 reload_cse_delete_death_notes (insn);
7964
7965 reload_cse_record_set (body, body);
7966 }
7967 else if (GET_CODE (body) == PARALLEL)
7968 {
7969 int count = 0;
7970
7971 /* If every action in a PARALLEL is a noop, we can delete
7972 the entire PARALLEL. */
7973 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7974 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7975 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7976 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7977 break;
7978 if (i < 0)
7979 {
7980 PUT_CODE (insn, NOTE);
7981 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7982 NOTE_SOURCE_FILE (insn) = 0;
7983 reload_cse_delete_death_notes (insn);
7984
7985 /* We're done with this insn. */
7986 continue;
7987 }
7988
7989 /* It's not a no-op, but we can try to simplify it. */
7990 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7991 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7992 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
7993 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
7994
7995 if (count > 0 && apply_change_group ())
7996 reload_cse_delete_death_notes (insn);
7997 else if (reload_cse_simplify_operands (insn))
7998 reload_cse_delete_death_notes (insn);
7999
8000 /* Look through the PARALLEL and record the values being
8001 set, if possible. Also handle any CLOBBERs. */
8002 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8003 {
8004 rtx x = XVECEXP (body, 0, i);
8005
8006 if (GET_CODE (x) == SET)
8007 reload_cse_record_set (x, body);
8008 else
8009 note_stores (x, reload_cse_invalidate_rtx);
8010 }
8011 }
8012 else
8013 note_stores (body, reload_cse_invalidate_rtx);
8014
8015 #ifdef AUTO_INC_DEC
8016 /* Clobber any registers which appear in REG_INC notes. We
8017 could keep track of the changes to their values, but it is
8018 unlikely to help. */
8019 {
8020 rtx x;
8021
8022 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8023 if (REG_NOTE_KIND (x) == REG_INC)
8024 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8025 }
8026 #endif
8027
8028 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8029 after we have processed the insn. */
8030 if (GET_CODE (insn) == CALL_INSN)
8031 {
8032 rtx x;
8033
8034 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8035 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8036 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8037 }
8038 }
8039
8040 /* Free all the temporary structures we created, and go back to the
8041 regular obstacks. */
8042 obstack_free (&reload_obstack, firstobj);
8043 pop_obstacks ();
8044 }
8045
8046 /* Return whether the values known for REGNO are equal to VAL. MODE
8047 is the mode of the object that VAL is being copied to; this matters
8048 if VAL is a CONST_INT. */
8049
8050 static int
8051 reload_cse_regno_equal_p (regno, val, mode)
8052 int regno;
8053 rtx val;
8054 enum machine_mode mode;
8055 {
8056 rtx x;
8057
8058 if (val == 0)
8059 return 0;
8060
8061 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8062 if (XEXP (x, 0) != 0
8063 && rtx_equal_p (XEXP (x, 0), val)
8064 && (GET_CODE (val) != CONST_INT
8065 || mode == GET_MODE (x)
8066 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8067 /* On a big endian machine if the value spans more than
8068 one register then this register holds the high part of
8069 it and we can't use it.
8070
8071 ??? We should also compare with the high part of the
8072 value. */
8073 && !(WORDS_BIG_ENDIAN
8074 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8075 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8076 GET_MODE_BITSIZE (GET_MODE (x))))))
8077 return 1;
8078
8079 return 0;
8080 }
8081
8082 /* See whether a single set is a noop. SET is the set instruction we
8083 should check, and INSN is the instruction from which it came. */
8084
8085 static int
8086 reload_cse_noop_set_p (set, insn)
8087 rtx set;
8088 rtx insn;
8089 {
8090 rtx src, dest;
8091 enum machine_mode dest_mode;
8092 int dreg, sreg;
8093 int ret;
8094
8095 src = SET_SRC (set);
8096 dest = SET_DEST (set);
8097 dest_mode = GET_MODE (dest);
8098
8099 if (side_effects_p (src))
8100 return 0;
8101
8102 dreg = true_regnum (dest);
8103 sreg = true_regnum (src);
8104
8105 /* Check for setting a register to itself. In this case, we don't
8106 have to worry about REG_DEAD notes. */
8107 if (dreg >= 0 && dreg == sreg)
8108 return 1;
8109
8110 ret = 0;
8111 if (dreg >= 0)
8112 {
8113 /* Check for setting a register to itself. */
8114 if (dreg == sreg)
8115 ret = 1;
8116
8117 /* Check for setting a register to a value which we already know
8118 is in the register. */
8119 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8120 ret = 1;
8121
8122 /* Check for setting a register DREG to another register SREG
8123 where SREG is equal to a value which is already in DREG. */
8124 else if (sreg >= 0)
8125 {
8126 rtx x;
8127
8128 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8129 {
8130 rtx tmp;
8131
8132 if (XEXP (x, 0) == 0)
8133 continue;
8134
8135 if (dest_mode == GET_MODE (x))
8136 tmp = XEXP (x, 0);
8137 else if (GET_MODE_BITSIZE (dest_mode)
8138 < GET_MODE_BITSIZE (GET_MODE (x)))
8139 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8140 else
8141 continue;
8142
8143 if (tmp
8144 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8145 {
8146 ret = 1;
8147 break;
8148 }
8149 }
8150 }
8151 }
8152 else if (GET_CODE (dest) == MEM)
8153 {
8154 /* Check for storing a register to memory when we know that the
8155 register is equivalent to the memory location. */
8156 if (sreg >= 0
8157 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8158 && ! side_effects_p (dest))
8159 ret = 1;
8160 }
8161
8162 /* If we can delete this SET, then we need to look for an earlier
8163 REG_DEAD note on DREG, and remove it if it exists. */
8164 if (ret && dreg >= 0)
8165 {
8166 if (! find_regno_note (insn, REG_UNUSED, dreg))
8167 reload_cse_no_longer_dead (dreg, dest_mode);
8168 }
8169
8170 return ret;
8171 }
8172
8173 /* Try to simplify a single SET instruction. SET is the set pattern.
8174 INSN is the instruction it came from.
8175 This function only handles one case: if we set a register to a value
8176 which is not a register, we try to find that value in some other register
8177 and change the set into a register copy. */
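/* For example, if the insn loads (const_int 0) into (reg:SI 2) and some
hard register is already known to hold zero, the constant load can be
turned into a copy from that register, provided a register-register move
is no more expensive than the original source. */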
8178
8179 static int
8180 reload_cse_simplify_set (set, insn)
8181 rtx set;
8182 rtx insn;
8183 {
8184 int dreg;
8185 rtx src;
8186 enum machine_mode dest_mode;
8187 enum reg_class dclass;
8188 register int i;
8189
8190 dreg = true_regnum (SET_DEST (set));
8191 if (dreg < 0)
8192 return 0;
8193
8194 src = SET_SRC (set);
8195 if (side_effects_p (src) || true_regnum (src) >= 0)
8196 return 0;
8197
8198 /* If memory loads are cheaper than register copies, don't change
8199 them. */
8200 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8201 return 0;
8202
8203 dest_mode = GET_MODE (SET_DEST (set));
8204 dclass = REGNO_REG_CLASS (dreg);
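/* Scan the hard registers for one that is already known to hold the value
of SRC and from which a move into DREG's class has the default
register-register move cost of 2. */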
8205 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8206 {
8207 if (i != dreg
8208 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8209 && reload_cse_regno_equal_p (i, src, dest_mode))
8210 {
8211 int validated;
8212
8213 /* Pop back to the real obstacks while changing the insn. */
8214 pop_obstacks ();
8215
8216 validated = validate_change (insn, &SET_SRC (set),
8217 gen_rtx (REG, dest_mode, i), 1);
8218
8219 /* Go back to the obstack we are using for temporary
8220 storage. */
8221 push_obstacks (&reload_obstack, &reload_obstack);
8222
8223 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8224 {
8225 reload_cse_no_longer_dead (i, dest_mode);
8226 return 1;
8227 }
8228 }
8229 }
8230 return 0;
8231 }
8232
8233 /* Try to replace operands in INSN with equivalent values that are already
8234 in registers. This can be viewed as optional reloading.
8235
8236 For each non-register operand in the insn, see if any hard regs are
8237 known to be equivalent to that operand. Record the alternatives which
8238 can accept these hard registers. Among all alternatives, select the
8239 ones which are better or equal to the one currently matching, where
8240 "better" is in terms of '?' and '!' constraints. Among the remaining
8241 alternatives, select the one which replaces most operands with
8242 hard registers. */
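/* For example, if an operand is currently (const_int 0), its constraints
allow a general register, and some hard register is already known to
hold zero, that register can be substituted for the constant. Among the
alternatives that are no worse than the current one, the one permitting
the most such substitutions is chosen. */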
8243
8244 static int
8245 reload_cse_simplify_operands (insn)
8246 rtx insn;
8247 {
8248 #ifdef REGISTER_CONSTRAINTS
8249 int insn_code_number, n_operands, n_alternatives;
8250 int i, j;
8251
8252 char *constraints[MAX_RECOG_OPERANDS];
8253
8254 /* Vector recording how bad an alternative is. */
8255 int *alternative_reject;
8256 /* Vector recording how many registers can be introduced by choosing
8257 this alternative. */
8258 int *alternative_nregs;
8259 /* Array of vectors recording, for each operand and each alternative,
8260 which hard register to substitute, or -1 if the operand should be
8261 left as it is. */
8262 int *op_alt_regno[MAX_RECOG_OPERANDS];
8263 /* Array of alternatives, sorted in order of decreasing desirability. */
8264 int *alternative_order;
8265
8266 /* Find out some information about this insn. */
8267 insn_code_number = recog_memoized (insn);
8268 /* We don't modify asm instructions. */
8269 if (insn_code_number < 0)
8270 return 0;
8271
8272 n_operands = insn_n_operands[insn_code_number];
8273 n_alternatives = insn_n_alternatives[insn_code_number];
8274
8275 if (n_alternatives == 0 || n_operands == 0)
8276 return 0;
8277 insn_extract (insn);
8278
8279 /* Figure out which alternative currently matches. */
8280 if (! constrain_operands (insn_code_number, 1))
8281 abort ();
8282
8283 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8284 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8285 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8286 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8287 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8288
8289 for (i = 0; i < n_operands; i++)
8290 {
8291 enum machine_mode mode;
8292 int regno;
8293 char *p;
8294
8295 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8296 for (j = 0; j < n_alternatives; j++)
8297 op_alt_regno[i][j] = -1;
8298
8299 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8300 mode = insn_operand_mode[insn_code_number][i];
8301
8302 /* Add the reject values for each alternative given by the constraints
8303 for this operand. */
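/* A '?' disparages its alternative mildly (reject += 3) and a '!'
disparages it severely (reject += 300); a '!' alternative therefore only
survives the later comparison when the currently matching alternative is
at least as heavily disparaged. */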
8304 j = 0;
8305 while (*p != '\0')
8306 {
8307 char c = *p++;
8308 if (c == ',')
8309 j++;
8310 else if (c == '?')
8311 alternative_reject[j] += 3;
8312 else if (c == '!')
8313 alternative_reject[j] += 300;
8314 }
8315
8316 /* We won't change operands which are already registers. We
8317 also don't want to modify output operands. */
8318 regno = true_regnum (recog_operand[i]);
8319 if (regno >= 0
8320 || constraints[i][0] == '='
8321 || constraints[i][0] == '+')
8322 continue;
8323
8324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8325 {
8326 int class = (int) NO_REGS;
8327
8328 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8329 continue;
8330
8331 /* We found a register equal to this operand. Now look for all
8332 alternatives that can accept this register and have not been
8333 assigned a register they can use yet. */
8334 j = 0;
8335 p = constraints[i];
8336 for (;;)
8337 {
8338 char c = *p++;
8339
8340 switch (c)
8341 {
8342 case '=': case '+': case '?':
8343 case '#': case '&': case '!':
8344 case '*': case '%':
8345 case '0': case '1': case '2': case '3': case '4':
8346 case 'm': case '<': case '>': case 'V': case 'o':
8347 case 'E': case 'F': case 'G': case 'H':
8348 case 's': case 'i': case 'n':
8349 case 'I': case 'J': case 'K': case 'L':
8350 case 'M': case 'N': case 'O': case 'P':
8351 #ifdef EXTRA_CONSTRAINT
8352 case 'Q': case 'R': case 'S': case 'T': case 'U':
8353 #endif
8354 case 'p': case 'X':
8355 /* These don't say anything we care about. */
8356 break;
8357
8358 case 'g': case 'r':
8359 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8360 break;
8361
8362 default:
8363 class
8364 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8365 break;
8366
8367 case ',': case '\0':
8368 /* See if REGNO fits this alternative, and set it up as the
8369 replacement register if we don't have one for this
8370 alternative yet. */
8371 if (op_alt_regno[i][j] == -1
8372 && reg_fits_class_p (gen_rtx (REG, mode, regno), class,
8373 0, mode))
8374 {
8375 alternative_nregs[j]++;
8376 op_alt_regno[i][j] = regno;
8377 }
8378 j++;
8379 break;
8380 }
8381
8382 if (c == '\0')
8383 break;
8384 }
8385 }
8386 }
8387
8388 /* Record all alternatives which are better or equal to the currently
8389 matching one in the alternative_order array. */
8390 for (i = j = 0; i < n_alternatives; i++)
8391 if (alternative_reject[i] <= alternative_reject[which_alternative])
8392 alternative_order[j++] = i;
8393 n_alternatives = j;
8394
8395 /* Sort it. Given a small number of alternatives, a dumb algorithm
8396 won't hurt too much. */
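/* This is a selection sort: on each pass the most desirable remaining
alternative (lowest reject score, ties broken in favor of the one that
substitutes the most hard registers) is moved to the front. */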
8397 for (i = 0; i < n_alternatives - 1; i++)
8398 {
8399 int best = i;
8400 int best_reject = alternative_reject[alternative_order[i]];
8401 int best_nregs = alternative_nregs[alternative_order[i]];
8402 int tmp;
8403
8404 for (j = i + 1; j < n_alternatives; j++)
8405 {
8406 int this_reject = alternative_reject[alternative_order[j]];
8407 int this_nregs = alternative_nregs[alternative_order[j]];
8408
8409 if (this_reject < best_reject
8410 || (this_reject == best_reject && this_nregs > best_nregs))
8411 {
8412 best = j;
8413 best_reject = this_reject;
8414 best_nregs = this_nregs;
8415 }
8416 }
8417
8418 tmp = alternative_order[best];
8419 alternative_order[best] = alternative_order[i];
8420 alternative_order[i] = tmp;
8421 }
8422
8423 /* Substitute the operands as determined by op_alt_regno for the best
8424 alternative. */
8425 j = alternative_order[0];
8426 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8427
8428 /* Pop back to the real obstacks while changing the insn. */
8429 pop_obstacks ();
8430
8431 for (i = 0; i < n_operands; i++)
8432 {
8433 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8434 if (op_alt_regno[i][j] == -1)
8435 continue;
8436
8437 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8438 validate_change (insn, recog_operand_loc[i],
8439 gen_rtx (REG, mode, op_alt_regno[i][j]), 1);
8440 }
8441
8442 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8443 {
8444 int op = recog_dup_num[i];
8445 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8446
8447 if (op_alt_regno[op][j] == -1)
8448 continue;
8449
8450 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8451 validate_change (insn, recog_dup_loc[i],
8452 gen_rtx (REG, mode, op_alt_regno[op][j]), 1);
8453 }
8454
8455 /* Go back to the obstack we are using for temporary
8456 storage. */
8457 push_obstacks (&reload_obstack, &reload_obstack);
8458
8459 return apply_change_group ();
8460 #else
8461 return 0;
8462 #endif
8463 }
8464
8465 /* These two variables are used to pass information from
8466 reload_cse_record_set to reload_cse_check_clobber. */
8467
8468 static int reload_cse_check_clobbered;
8469 static rtx reload_cse_check_src;
8470
8471 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8472 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8473 second argument, which is passed by note_stores, is ignored. */
8474
8475 static void
8476 reload_cse_check_clobber (dest, ignore)
8477 rtx dest;
8478 rtx ignore;
8479 {
8480 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8481 reload_cse_check_clobbered = 1;
8482 }
8483
8484 /* Record the result of a SET instruction. SET is the set pattern.
8485 BODY is the pattern of the insn that it came from. */
8486
8487 static void
8488 reload_cse_record_set (set, body)
8489 rtx set;
8490 rtx body;
8491 {
8492 rtx dest, src, x;
8493 int dreg, sreg;
8494 enum machine_mode dest_mode;
8495
8496 dest = SET_DEST (set);
8497 src = SET_SRC (set);
8498 dreg = true_regnum (dest);
8499 sreg = true_regnum (src);
8500 dest_mode = GET_MODE (dest);
8501
8502 /* Some machines don't define AUTO_INC_DEC, but they still use push
8503 instructions. We need to catch that case here in order to
8504 invalidate the stack pointer correctly. Note that invalidating
8505 the stack pointer is different from invalidating DEST. */
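/* For example, (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0)) is a
push; it decrements the stack pointer even though the stack pointer is
not the SET_DEST itself. */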
8506 x = dest;
8507 while (GET_CODE (x) == SUBREG
8508 || GET_CODE (x) == ZERO_EXTRACT
8509 || GET_CODE (x) == SIGN_EXTRACT
8510 || GET_CODE (x) == STRICT_LOW_PART)
8511 x = XEXP (x, 0);
8512 if (push_operand (x, GET_MODE (x)))
8513 {
8514 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8515 reload_cse_invalidate_rtx (dest, NULL_RTX);
8516 return;
8517 }
8518
8519 /* We can only handle an assignment to a register, or a store of a
8520 register to a memory location. For other cases, we just clobber
8521 the destination. We also have to just clobber if there are side
8522 effects in SRC or DEST. */
8523 if ((dreg < 0 && GET_CODE (dest) != MEM)
8524 || side_effects_p (src)
8525 || side_effects_p (dest))
8526 {
8527 reload_cse_invalidate_rtx (dest, NULL_RTX);
8528 return;
8529 }
8530
8531 #ifdef HAVE_cc0
8532 /* We don't try to handle values involving CC, because it's a pain
8533 to keep track of when they have to be invalidated. */
8534 if (reg_mentioned_p (cc0_rtx, src)
8535 || reg_mentioned_p (cc0_rtx, dest))
8536 {
8537 reload_cse_invalidate_rtx (dest, NULL_RTX);
8538 return;
8539 }
8540 #endif
8541
8542 /* If BODY is a PARALLEL, then we need to see whether the source of
8543 SET is clobbered by some other instruction in the PARALLEL. */
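/* A register swap illustrates the problem:
(parallel [(set (reg 0) (reg 1)) (set (reg 1) (reg 0))])
The value of the first SET cannot be recorded, because its source
(reg 1) is changed by the second SET in the same insn. */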
8544 if (GET_CODE (body) == PARALLEL)
8545 {
8546 int i;
8547
8548 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8549 {
8550 rtx x;
8551
8552 x = XVECEXP (body, 0, i);
8553 if (x == set)
8554 continue;
8555
8556 reload_cse_check_clobbered = 0;
8557 reload_cse_check_src = src;
8558 note_stores (x, reload_cse_check_clobber);
8559 if (reload_cse_check_clobbered)
8560 {
8561 reload_cse_invalidate_rtx (dest, NULL_RTX);
8562 return;
8563 }
8564 }
8565 }
8566
8567 if (dreg >= 0)
8568 {
8569 int i;
8570
8571 /* This is an assignment to a register. Update the value we
8572 have stored for the register. */
8573 if (sreg >= 0)
8574 {
8575 rtx x;
8576
8577 /* This is a copy from one register to another. Any values
8578 which were valid for SREG are now valid for DREG. If the
8579 mode changes, we use gen_lowpart_common to extract only
8580 the part of the value that is copied. */
8581 reg_values[dreg] = 0;
8582 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8583 {
8584 rtx tmp;
8585
8586 if (XEXP (x, 0) == 0)
8587 continue;
8588 if (dest_mode == GET_MODE (XEXP (x, 0)))
8589 tmp = XEXP (x, 0);
8590 else if (GET_MODE_BITSIZE (dest_mode)
8591 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8592 continue;
8593 else
8594 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8595 if (tmp)
8596 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
8597 reg_values[dreg]);
8598 }
8599 }
8600 else
8601 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8602
8603 /* We've changed DREG, so invalidate any values held by other
8604 registers that depend upon it. */
8605 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8606
8607 /* If this assignment changes more than one hard register,
8608 forget anything we know about the others. */
8609 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8610 reg_values[dreg + i] = 0;
8611 }
8612 else if (GET_CODE (dest) == MEM)
8613 {
8614 /* Invalidate conflicting memory locations. */
8615 reload_cse_invalidate_mem (dest);
8616
8617 /* If we're storing a register to memory, add DEST to the list
8618 in REG_VALUES. */
8619 if (sreg >= 0 && ! side_effects_p (dest))
8620 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8621 reg_values[sreg]);
8622 }
8623 else
8624 {
8625 /* We should have bailed out earlier. */
8626 abort ();
8627 }
8628 }