1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, creating additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
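/* In rough outline, reload () below proceeds in three steps: record the
   REG_EQUIV equivalences and spill any registers that cannot be
   eliminated; repeatedly scan every insn with find_reloads, accumulating
   the per-class register needs and seizing additional spill registers,
   until one pass spills nothing new; and finally assign reload registers
   insn by insn and emit the copies (reload_as_needed).  */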
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of the stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `n_spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
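/* spill_regs, spill_reg_order and n_spills are kept consistent with one
   another: when a hard reg R is seized as a new spill reg, conceptually

	spill_regs[n_spills] = R;
	spill_reg_order[R] = n_spills++;

   so spill_reg_order is the inverse mapping of spill_regs.  */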
172
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroups instead.
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N,
208 says we may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
259 registers. */
260 enum reg_class reload_address_base_reg_class;
261
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
265 registers. */
266 enum reg_class reload_address_index_reg_class;
267
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
270
271 int reload_in_progress = 0;
272
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
276
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
279
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
282 insn. */
283
284 struct obstack reload_obstack;
285 char *reload_firstobj;
286
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
289
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
292
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
295 \f
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
300
301 static struct elim_table
302 {
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
319 } reg_eliminate[] =
320
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
324
325 #ifdef ELIMINABLE_REGS
326 ELIMINABLE_REGS;
327 #else
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
329 #endif
330
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
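/* For illustration only (hypothetical values, not taken from any real
   machine description): a target that wants to replace both its argument
   pointer and its frame pointer would typically define something like

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,	 STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,	 FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   Each pair initializes the `from' and `to' fields of one reg_eliminate
   entry, with the most preferred elimination for a given register listed
   first.  */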
332
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
337
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
340
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
347
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
350
351 /* Number of labels in the current function. */
352
353 static int num_labels;
354
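/* Element type used when ranking hard registers by how heavily they are
   used, so that lightly used registers are preferred as spill regs
   (see order_regs_for_reload and hard_reg_use_compare).  */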
355 struct hard_reg_n_uses { int regno; int uses; };
356 \f
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
359 int *, int));
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
361 enum machine_mode,
362 enum reg_class));
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
365 FILE *));
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
381 enum machine_mode));
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
383 enum machine_mode));
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static int reload_cse_simplify_set PROTO((rtx, rtx));
403 static int reload_cse_simplify_operands PROTO((rtx));
404 static void reload_cse_check_clobber PROTO((rtx, rtx));
405 static void reload_cse_record_set PROTO((rtx, rtx));
406 static void reload_cse_delete_death_notes PROTO((rtx));
407 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
408 \f
409 /* Initialize the reload pass once per compilation. */
410
411 void
412 init_reload ()
413 {
414 register int i;
415
416 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
417 Set spill_indirect_levels to the number of levels such addressing is
418 permitted, zero if it is not permitted at all. */
419
420 register rtx tem
421 = gen_rtx (MEM, Pmode,
422 gen_rtx (PLUS, Pmode,
423 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
424 GEN_INT (4)));
425 spill_indirect_levels = 0;
426
427 while (memory_address_p (QImode, tem))
428 {
429 spill_indirect_levels++;
430 tem = gen_rtx (MEM, Pmode, tem);
431 }
432
433 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
434
435 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
436 indirect_symref_ok = memory_address_p (QImode, tem);
437
438 /* See if reg+reg is a valid (and offsettable) address. */
439
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
441 {
442 tem = gen_rtx (PLUS, Pmode,
443 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
444 gen_rtx (REG, Pmode, i));
445 /* This way, we make sure that reg+reg is an offsettable address. */
446 tem = plus_constant (tem, 4);
447
448 if (memory_address_p (QImode, tem))
449 {
450 double_reg_address_ok = 1;
451 break;
452 }
453 }
454
455 /* Initialize obstack for our rtl allocation. */
456 gcc_obstack_init (&reload_obstack);
457 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
458
459 /* Decide which register class should be used when reloading
460 addresses. If we are using SMALL_REGISTER_CLASSES, and any
461 parameters are passed in registers, then we do not want to use
462 those registers when reloading an address. Otherwise, if a
463 function argument needs a reload, we may wind up clobbering
464 another argument to the function which was already computed. If
465 we find a subset class which simply avoids those registers, we
466 use it instead. ??? It would be better to only use the
467 restricted class when we actually are loading function arguments,
468 but that is hard to determine. */
469 reload_address_base_reg_class = BASE_REG_CLASS;
470 reload_address_index_reg_class = INDEX_REG_CLASS;
471 if (SMALL_REGISTER_CLASSES)
472 {
473 int regno;
474 HARD_REG_SET base, index;
475 enum reg_class *p;
476
477 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
478 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
479 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
480 {
481 if (FUNCTION_ARG_REGNO_P (regno))
482 {
483 CLEAR_HARD_REG_BIT (base, regno);
484 CLEAR_HARD_REG_BIT (index, regno);
485 }
486 }
487
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
489 baseok);
490 for (p = reg_class_subclasses[BASE_REG_CLASS];
491 *p != LIM_REG_CLASSES;
492 p++)
493 {
494 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
495 continue;
496 usebase:
497 reload_address_base_reg_class = *p;
498 break;
499 }
500 baseok:;
501
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
503 indexok);
504 for (p = reg_class_subclasses[INDEX_REG_CLASS];
505 *p != LIM_REG_CLASSES;
506 p++)
507 {
508 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
509 continue;
510 useindex:
511 reload_address_index_reg_class = *p;
512 break;
513 }
514 indexok:;
515 }
516 }
517
518 /* Main entry point for the reload pass.
519
520 FIRST is the first insn of the function being compiled.
521
522 GLOBAL nonzero means we were called from global_alloc
523 and should attempt to reallocate any pseudoregs that we
524 displace from hard regs we will use for reloads.
525 If GLOBAL is zero, we do not have enough information to do that,
526 so any pseudo reg that is spilled must go to the stack.
527
528 DUMPFILE is the global-reg debugging dump file stream, or 0.
529 If it is nonzero, messages are written to it to describe
530 which registers are seized as reload regs, which pseudo regs
531 are spilled from them, and where the pseudo regs are reallocated to.
532
533 Return value is nonzero if reload failed
534 and we must not do any more for this function. */
535
536 int
537 reload (first, global, dumpfile)
538 rtx first;
539 int global;
540 FILE *dumpfile;
541 {
542 register int class;
543 register int i, j, k;
544 register rtx insn;
545 register struct elim_table *ep;
546
547 /* The two pointers used to track the true location of the memory used
548 for label offsets. */
549 char *real_known_ptr = NULL_PTR;
550 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
551
552 int something_changed;
553 int something_needs_reloads;
554 int something_needs_elimination;
555 int new_basic_block_needs;
556 enum reg_class caller_save_spill_class = NO_REGS;
557 int caller_save_group_size = 1;
558
559 /* Nonzero means we couldn't get enough spill regs. */
560 int failure = 0;
561
562 /* The basic block number currently being processed for INSN. */
563 int this_block;
564
565 /* Make sure even insns with volatile mem refs are recognizable. */
566 init_recog ();
567
568 /* Enable find_equiv_reg to distinguish insns made by reload. */
569 reload_first_uid = get_max_uid ();
570
571 for (i = 0; i < N_REG_CLASSES; i++)
572 basic_block_needs[i] = 0;
573
574 #ifdef SECONDARY_MEMORY_NEEDED
575 /* Initialize the secondary memory table. */
576 clear_secondary_mem ();
577 #endif
578
579 /* Remember which hard regs appear explicitly
580 before we merge into `regs_ever_live' the ones in which
581 pseudo regs have been allocated. */
582 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
583
584 /* We don't have a stack slot for any spill reg yet. */
585 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
586 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
587
588 /* Initialize the save area information for caller-save, in case some
589 are needed. */
590 init_save_areas ();
591
592 /* Compute which hard registers are now in use
593 as homes for pseudo registers.
594 This is done here rather than (eg) in global_alloc
595 because this point is reached even if not optimizing. */
596 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
597 mark_home_live (i);
598
599 /* A function that receives a nonlocal goto must save all call-saved
600 registers. */
601 if (current_function_has_nonlocal_label)
602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
603 {
604 if (! call_used_regs[i] && ! fixed_regs[i])
605 regs_ever_live[i] = 1;
606 }
607
608 for (i = 0; i < scratch_list_length; i++)
609 if (scratch_list[i])
610 mark_scratch_live (scratch_list[i]);
611
612 /* Make sure that the last insn in the chain
613 is not something that needs reloading. */
614 emit_note (NULL_PTR, NOTE_INSN_DELETED);
615
616 /* Find all the pseudo registers that didn't get hard regs
617 but do have known equivalent constants or memory slots.
618 These include parameters (known equivalent to parameter slots)
619 and cse'd or loop-moved constant memory addresses.
620
621 Record constant equivalents in reg_equiv_constant
622 so they will be substituted by find_reloads.
623 Record memory equivalents in reg_equiv_memory_loc so they can
624 be substituted eventually by altering the REG-rtx's. */
625
626 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
628 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
630 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
631 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
632 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
633 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
634 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
635 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
636 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
637 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
638 cannot_omit_stores = (char *) alloca (max_regno);
639 bzero (cannot_omit_stores, max_regno);
640
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
643
644 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
645 Also find all paradoxical subregs and find largest such for each pseudo.
646 On machines with small register classes, record hard registers that
647 are used for user variables. These can never be used for spills.
648 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
649 caller-saved registers must be marked live. */
650
651 for (insn = first; insn; insn = NEXT_INSN (insn))
652 {
653 rtx set = single_set (insn);
654
655 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
656 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
658 if (! call_used_regs[i])
659 regs_ever_live[i] = 1;
660
661 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
662 {
663 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
664 if (note
665 #ifdef LEGITIMATE_PIC_OPERAND_P
666 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
667 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
668 #endif
669 )
670 {
671 rtx x = XEXP (note, 0);
672 i = REGNO (SET_DEST (set));
673 if (i > LAST_VIRTUAL_REGISTER)
674 {
675 if (GET_CODE (x) == MEM)
676 {
677 /* If the operand is a PLUS, the MEM may be shared,
678 so make sure we have an unshared copy here. */
679 if (GET_CODE (XEXP (x, 0)) == PLUS)
680 x = copy_rtx (x);
681
682 reg_equiv_memory_loc[i] = x;
683 }
684 else if (CONSTANT_P (x))
685 {
686 if (LEGITIMATE_CONSTANT_P (x))
687 reg_equiv_constant[i] = x;
688 else
689 reg_equiv_memory_loc[i]
690 = force_const_mem (GET_MODE (SET_DEST (set)), x);
691 }
692 else
693 continue;
694
695 /* If this register is being made equivalent to a MEM
696 and the MEM is not SET_SRC, the equivalencing insn
697 is one with the MEM as a SET_DEST and it occurs later.
698 So don't mark this insn now. */
699 if (GET_CODE (x) != MEM
700 || rtx_equal_p (SET_SRC (set), x))
701 reg_equiv_init[i] = insn;
702 }
703 }
704 }
705
706 /* If this insn is setting a MEM from a register equivalent to it,
707 this is the equivalencing insn. */
708 else if (set && GET_CODE (SET_DEST (set)) == MEM
709 && GET_CODE (SET_SRC (set)) == REG
710 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
711 && rtx_equal_p (SET_DEST (set),
712 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
713 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
714
715 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
716 scan_paradoxical_subregs (PATTERN (insn));
717 }
718
719 /* Does this function require a frame pointer? */
720
721 frame_pointer_needed = (! flag_omit_frame_pointer
722 #ifdef EXIT_IGNORE_STACK
723 /* ?? If EXIT_IGNORE_STACK is set, we will not save
724 and restore sp for alloca. So we can't eliminate
725 the frame pointer in that case. At some point,
726 we should improve this by emitting the
727 sp-adjusting insns for this case. */
728 || (current_function_calls_alloca
729 && EXIT_IGNORE_STACK)
730 #endif
731 || FRAME_POINTER_REQUIRED);
732
733 num_eliminable = 0;
734
735 /* Initialize the table of registers to eliminate. The way we do this
736 depends on how the eliminable registers were defined. */
737 #ifdef ELIMINABLE_REGS
738 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
739 {
740 ep->can_eliminate = ep->can_eliminate_previous
741 = (CAN_ELIMINATE (ep->from, ep->to)
742 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
743 }
744 #else
745 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
746 = ! frame_pointer_needed;
747 #endif
748
749 /* Count the number of eliminable registers and build the FROM and TO
750 REG rtx's. Note that code in gen_rtx will cause, e.g.,
751 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
752 We depend on this. */
753 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
754 {
755 num_eliminable += ep->can_eliminate;
756 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
757 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
758 }
759
760 num_labels = max_label_num () - get_first_label_num ();
761
762 /* Allocate the tables used to store offset information at labels. */
763 /* We used to use alloca here, but the size of what it would try to
764 allocate would occasionally cause it to exceed the stack limit and
765 cause a core dump. */
766 real_known_ptr = xmalloc (num_labels);
767 real_at_ptr
768 = (int (*)[NUM_ELIMINABLE_REGS])
769 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
770
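/* Bias the pointers by the first label number, so that both tables can be
   indexed directly by a label's number; real_known_ptr and real_at_ptr
   keep pointing at the start of the allocated blocks and are what must be
   freed.  */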
771 offsets_known_at = real_known_ptr - get_first_label_num ();
772 offsets_at
773 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
774
775 /* Alter each pseudo-reg rtx to contain its hard reg number.
776 Assign stack slots to the pseudos that lack hard regs or equivalents.
777 Do not touch virtual registers. */
778
779 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
780 alter_reg (i, -1);
781
782 /* If we have some registers we think can be eliminated, scan all insns to
783 see if there is an insn that sets one of these registers to something
784 other than itself plus a constant. If so, the register cannot be
785 eliminated. Doing this scan here eliminates an extra pass through the
786 main reload loop in the most common case where register elimination
787 cannot be done. */
788 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
789 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
790 || GET_CODE (insn) == CALL_INSN)
791 note_stores (PATTERN (insn), mark_not_eliminable);
792
793 #ifndef REGISTER_CONSTRAINTS
794 /* If all the pseudo regs have hard regs,
795 except for those that are never referenced,
796 we know that no reloads are needed. */
797 /* But that is not true if there are register constraints, since
798 in that case some pseudos might be in the wrong kind of hard reg. */
799
800 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
801 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
802 break;
803
804 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
805 {
806 free (real_known_ptr);
807 free (real_at_ptr);
808 return 0;
809 }
810 #endif
811
812 /* Compute the order of preference for hard registers to spill.
813 Store them by decreasing preference in potential_reload_regs. */
814
815 order_regs_for_reload (global);
816
817 /* So far, no hard regs have been spilled. */
818 n_spills = 0;
819 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
820 spill_reg_order[i] = -1;
821
822 /* Initialize to -1, which means take the first spill register. */
823 last_spill_reg = -1;
824
825 /* On most machines, we can't use any register explicitly used in the
826 rtl as a spill register. But on some, we have to. Those will have
827 taken care to keep the life of hard regs as short as possible. */
828
829 if (! SMALL_REGISTER_CLASSES)
830 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
831
832 /* Spill any hard regs that we know we can't eliminate. */
833 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
834 if (! ep->can_eliminate)
835 spill_hard_reg (ep->from, global, dumpfile, 1);
836
837 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
838 if (frame_pointer_needed)
839 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
840 #endif
841
842 if (global)
843 for (i = 0; i < N_REG_CLASSES; i++)
844 {
845 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
846 bzero (basic_block_needs[i], n_basic_blocks);
847 }
848
849 /* From now on, we need to emit any moves without making new pseudos. */
850 reload_in_progress = 1;
851
852 /* This loop scans the entire function each go-round
853 and repeats until one repetition spills no additional hard regs. */
854
855 /* This flag is set when a pseudo reg is spilled,
856 to require another pass. Note that getting an additional reload
857 reg does not necessarily imply any pseudo reg was spilled;
858 sometimes we find a reload reg that no pseudo reg was allocated in. */
859 something_changed = 1;
860 /* This flag is set if there are any insns that require reloading. */
861 something_needs_reloads = 0;
862 /* This flag is set if there are any insns that require register
863 eliminations. */
864 something_needs_elimination = 0;
865 while (something_changed)
866 {
867 rtx after_call = 0;
868
869 /* For each class, number of reload regs needed in that class.
870 This is the maximum over all insns of the needs in that class
871 of the individual insn. */
872 int max_needs[N_REG_CLASSES];
873 /* For each class, size of group of consecutive regs
874 that is needed for the reloads of this class. */
875 int group_size[N_REG_CLASSES];
876 /* For each class, max number of consecutive groups needed.
877 (Each group contains group_size[CLASS] consecutive registers.) */
878 int max_groups[N_REG_CLASSES];
879 /* For each class, max number needed of regs that don't belong
880 to any of the groups. */
881 int max_nongroups[N_REG_CLASSES];
882 /* For each class, the machine mode which requires consecutive
883 groups of regs of that class.
884 If two different modes ever require groups of one class,
885 they must be the same size and equally restrictive for that class,
886 otherwise we can't handle the complexity. */
887 enum machine_mode group_mode[N_REG_CLASSES];
888 /* Record the insn where each maximum need is first found. */
889 rtx max_needs_insn[N_REG_CLASSES];
890 rtx max_groups_insn[N_REG_CLASSES];
891 rtx max_nongroups_insn[N_REG_CLASSES];
892 rtx x;
893 HOST_WIDE_INT starting_frame_size;
894 int previous_frame_pointer_needed = frame_pointer_needed;
895 static char *reg_class_names[] = REG_CLASS_NAMES;
896
897 something_changed = 0;
898 bzero ((char *) max_needs, sizeof max_needs);
899 bzero ((char *) max_groups, sizeof max_groups);
900 bzero ((char *) max_nongroups, sizeof max_nongroups);
901 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
902 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
903 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
904 bzero ((char *) group_size, sizeof group_size);
905 for (i = 0; i < N_REG_CLASSES; i++)
906 group_mode[i] = VOIDmode;
907
908 /* Keep track of which basic blocks need the reloads. */
909 this_block = 0;
910
911 /* Remember whether any element of basic_block_needs
912 changes from 0 to 1 in this pass. */
913 new_basic_block_needs = 0;
914
915 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
916 here because the stack size may be a part of the offset computation
917 for register elimination, and there might have been new stack slots
918 created in the last iteration of this loop. */
919 assign_stack_local (BLKmode, 0, 0);
920
921 starting_frame_size = get_frame_size ();
922
923 /* Reset all offsets on eliminable registers to their initial values. */
924 #ifdef ELIMINABLE_REGS
925 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
926 {
927 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
928 ep->previous_offset = ep->offset
929 = ep->max_offset = ep->initial_offset;
930 }
931 #else
932 #ifdef INITIAL_FRAME_POINTER_OFFSET
933 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
934 #else
935 if (!FRAME_POINTER_REQUIRED)
936 abort ();
937 reg_eliminate[0].initial_offset = 0;
938 #endif
939 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
940 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
941 #endif
942
943 num_not_at_initial_offset = 0;
944
945 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
946
947 /* Set a known offset for each forced label to be at the initial offset
948 of each elimination. We do this because we assume that all
949 computed jumps occur from a location where each elimination is
950 at its initial offset. */
951
952 for (x = forced_labels; x; x = XEXP (x, 1))
953 if (XEXP (x, 0))
954 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
955
956 /* For each pseudo register that has an equivalent location defined,
957 try to eliminate any eliminable registers (such as the frame pointer)
958 assuming initial offsets for the replacement register, which
959 is the normal case.
960
961 If the resulting location is directly addressable, substitute
962 the MEM we just got directly for the old REG.
963
964 If it is not addressable but is a constant or the sum of a hard reg
965 and constant, it is probably not addressable because the constant is
966 out of range; in that case, record the address; we will generate
967 hairy code to compute the address in a register each time it is
968 needed. Similarly if it is a hard register, but one that is not
969 valid as an address register.
970
971 If the location is not addressable, but does not have one of the
972 above forms, assign a stack slot. We have to do this to avoid the
973 potential of producing lots of reloads if, e.g., a location involves
974 a pseudo that didn't get a hard register and has an equivalent memory
975 location that also involves a pseudo that didn't get a hard register.
976
977 Perhaps at some point we will improve reload_when_needed handling
978 so this problem goes away. But that's very hairy. */
979
980 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
981 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
982 {
983 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
984
985 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
986 XEXP (x, 0)))
987 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
988 else if (CONSTANT_P (XEXP (x, 0))
989 || (GET_CODE (XEXP (x, 0)) == REG
990 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
991 || (GET_CODE (XEXP (x, 0)) == PLUS
992 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
993 && (REGNO (XEXP (XEXP (x, 0), 0))
994 < FIRST_PSEUDO_REGISTER)
995 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
996 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
997 else
998 {
999 /* Make a new stack slot. Then indicate that something
1000 changed so we go back and recompute offsets for
1001 eliminable registers because the allocation of memory
1002 below might change some offset. reg_equiv_{mem,address}
1003 will be set up for this pseudo on the next pass around
1004 the loop. */
1005 reg_equiv_memory_loc[i] = 0;
1006 reg_equiv_init[i] = 0;
1007 alter_reg (i, -1);
1008 something_changed = 1;
1009 }
1010 }
1011
1012 /* If we allocated another pseudo to the stack, redo elimination
1013 bookkeeping. */
1014 if (something_changed)
1015 continue;
1016
1017 /* If caller-saves needs a group, initialize the group to include
1018 the size and mode required for caller-saves. */
1019
1020 if (caller_save_group_size > 1)
1021 {
1022 group_mode[(int) caller_save_spill_class] = Pmode;
1023 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1024 }
1025
1026 /* Compute the maximum number of additional registers needed by any instruction.
1027 Collect information separately for each class of regs. */
1028
1029 for (insn = first; insn; insn = NEXT_INSN (insn))
1030 {
1031 if (global && this_block + 1 < n_basic_blocks
1032 && insn == basic_block_head[this_block+1])
1033 ++this_block;
1034
1035 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1036 might include REG_LABEL), we need to see what effects this
1037 has on the known offsets at labels. */
1038
1039 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1040 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1041 && REG_NOTES (insn) != 0))
1042 set_label_offsets (insn, insn, 0);
1043
1044 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1045 {
1046 /* Nonzero means don't use a reload reg that overlaps
1047 the place where a function value can be returned. */
1048 rtx avoid_return_reg = 0;
1049
1050 rtx old_body = PATTERN (insn);
1051 int old_code = INSN_CODE (insn);
1052 rtx old_notes = REG_NOTES (insn);
1053 int did_elimination = 0;
1054
1055 /* To compute the number of reload registers of each class
1056 needed for an insn, we must simulate what choose_reload_regs
1057 can do. We do this by splitting an insn into an "input" and
1058 an "output" part. RELOAD_OTHER reloads are used in both.
1059 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1060 which must be live over the entire input section of reloads,
1061 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1062 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1063 inputs.
1064
1065 The registers needed for output are RELOAD_OTHER and
1066 RELOAD_FOR_OUTPUT, which are live for the entire output
1067 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1068 reloads for each operand.
1069
1070 The total number of registers needed is the maximum of the
1071 inputs and outputs. */
1072
1073 struct needs
1074 {
1075 /* [0] is normal, [1] is nongroup. */
1076 int regs[2][N_REG_CLASSES];
1077 int groups[N_REG_CLASSES];
1078 };
1079
1080 /* Each `struct needs' corresponds to one RELOAD_... type. */
1081 struct {
1082 struct needs other;
1083 struct needs input;
1084 struct needs output;
1085 struct needs insn;
1086 struct needs other_addr;
1087 struct needs op_addr;
1088 struct needs op_addr_reload;
1089 struct needs in_addr[MAX_RECOG_OPERANDS];
1090 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1091 struct needs out_addr[MAX_RECOG_OPERANDS];
1092 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1093 } insn_needs;
1094
1095 /* If needed, eliminate any eliminable registers. */
1096 if (num_eliminable)
1097 did_elimination = eliminate_regs_in_insn (insn, 0);
1098
1099 /* Set avoid_return_reg if this is an insn
1100 that might use the value of a function call. */
1101 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1102 {
1103 if (GET_CODE (PATTERN (insn)) == SET)
1104 after_call = SET_DEST (PATTERN (insn));
1105 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1106 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1107 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1108 else
1109 after_call = 0;
1110 }
1111 else if (SMALL_REGISTER_CLASSES && after_call != 0
1112 && !(GET_CODE (PATTERN (insn)) == SET
1113 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1114 {
1115 if (reg_referenced_p (after_call, PATTERN (insn)))
1116 avoid_return_reg = after_call;
1117 after_call = 0;
1118 }
1119
1120 /* Analyze the instruction. */
1121 find_reloads (insn, 0, spill_indirect_levels, global,
1122 spill_reg_order);
1123
1124 /* Remember for later shortcuts which insns had any reloads or
1125 register eliminations.
1126
1127 One might think that it would be worthwhile to mark insns
1128 that need register replacements but not reloads, but this is
1129 not safe because find_reloads may do some manipulation of
1130 the insn (such as swapping commutative operands), which would
1131 be lost when we restore the old pattern after register
1132 replacement. So the actions of find_reloads must be redone in
1133 subsequent passes or in reload_as_needed.
1134
1135 However, it is safe to mark insns that need reloads
1136 but not register replacement. */
1137
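/* The mark is encoded in the insn's mode field: QImode for an insn that
   needed register elimination, HImode for one that needs reloads, and
   VOIDmode for one that needs neither; an insn already marked DImode
   keeps that marking.  */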
1138 PUT_MODE (insn, (did_elimination ? QImode
1139 : n_reloads ? HImode
1140 : GET_MODE (insn) == DImode ? DImode
1141 : VOIDmode));
1142
1143 /* Discard any register replacements done. */
1144 if (did_elimination)
1145 {
1146 obstack_free (&reload_obstack, reload_firstobj);
1147 PATTERN (insn) = old_body;
1148 INSN_CODE (insn) = old_code;
1149 REG_NOTES (insn) = old_notes;
1150 something_needs_elimination = 1;
1151 }
1152
1153 /* If this insn has no reloads, we need not do anything except
1154 in the case of a CALL_INSN when we have caller-saves and
1155 caller-save needs reloads. */
1156
1157 if (n_reloads == 0
1158 && ! (GET_CODE (insn) == CALL_INSN
1159 && caller_save_spill_class != NO_REGS))
1160 continue;
1161
1162 something_needs_reloads = 1;
1163 bzero ((char *) &insn_needs, sizeof insn_needs);
1164
1165 /* Count each reload once in every class
1166 containing the reload's own class. */
1167
1168 for (i = 0; i < n_reloads; i++)
1169 {
1170 register enum reg_class *p;
1171 enum reg_class class = reload_reg_class[i];
1172 int size;
1173 enum machine_mode mode;
1174 int nongroup_need;
1175 struct needs *this_needs;
1176
1177 /* Don't count the dummy reloads, for which one of the
1178 regs mentioned in the insn can be used for reloading.
1179 Don't count optional reloads.
1180 Don't count reloads that got combined with others. */
1181 if (reload_reg_rtx[i] != 0
1182 || reload_optional[i] != 0
1183 || (reload_out[i] == 0 && reload_in[i] == 0
1184 && ! reload_secondary_p[i]))
1185 continue;
1186
1187 /* Show that a reload register of this class is needed
1188 in this basic block. We do not use insn_needs and
1189 insn_groups because they are overly conservative for
1190 this purpose. */
1191 if (global && ! basic_block_needs[(int) class][this_block])
1192 {
1193 basic_block_needs[(int) class][this_block] = 1;
1194 new_basic_block_needs = 1;
1195 }
1196
1197 mode = reload_inmode[i];
1198 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1199 mode = reload_outmode[i];
1200 size = CLASS_MAX_NREGS (class, mode);
1201
1202 /* If this class doesn't want a group, determine if we have
1203 a nongroup need or a regular need. We have a nongroup
1204 need if this reload conflicts with a group reload whose
1205 class intersects with this reload's class. */
1206
1207 nongroup_need = 0;
1208 if (size == 1)
1209 for (j = 0; j < n_reloads; j++)
1210 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1211 (GET_MODE_SIZE (reload_outmode[j])
1212 > GET_MODE_SIZE (reload_inmode[j]))
1213 ? reload_outmode[j]
1214 : reload_inmode[j])
1215 > 1)
1216 && (!reload_optional[j])
1217 && (reload_in[j] != 0 || reload_out[j] != 0
1218 || reload_secondary_p[j])
1219 && reloads_conflict (i, j)
1220 && reg_classes_intersect_p (class,
1221 reload_reg_class[j]))
1222 {
1223 nongroup_need = 1;
1224 break;
1225 }
1226
1227 /* Decide which time-of-use to count this reload for. */
1228 switch (reload_when_needed[i])
1229 {
1230 case RELOAD_OTHER:
1231 this_needs = &insn_needs.other;
1232 break;
1233 case RELOAD_FOR_INPUT:
1234 this_needs = &insn_needs.input;
1235 break;
1236 case RELOAD_FOR_OUTPUT:
1237 this_needs = &insn_needs.output;
1238 break;
1239 case RELOAD_FOR_INSN:
1240 this_needs = &insn_needs.insn;
1241 break;
1242 case RELOAD_FOR_OTHER_ADDRESS:
1243 this_needs = &insn_needs.other_addr;
1244 break;
1245 case RELOAD_FOR_INPUT_ADDRESS:
1246 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1247 break;
1248 case RELOAD_FOR_INPADDR_ADDRESS:
1249 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1250 break;
1251 case RELOAD_FOR_OUTPUT_ADDRESS:
1252 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1253 break;
1254 case RELOAD_FOR_OUTADDR_ADDRESS:
1255 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1256 break;
1257 case RELOAD_FOR_OPERAND_ADDRESS:
1258 this_needs = &insn_needs.op_addr;
1259 break;
1260 case RELOAD_FOR_OPADDR_ADDR:
1261 this_needs = &insn_needs.op_addr_reload;
1262 break;
1263 }
1264
1265 if (size > 1)
1266 {
1267 enum machine_mode other_mode, allocate_mode;
1268
1269 /* Count number of groups needed separately from
1270 number of individual regs needed. */
1271 this_needs->groups[(int) class]++;
1272 p = reg_class_superclasses[(int) class];
1273 while (*p != LIM_REG_CLASSES)
1274 this_needs->groups[(int) *p++]++;
1275
1276 /* Record size and mode of a group of this class. */
1277 /* If more than one size group is needed,
1278 make all groups the largest needed size. */
1279 if (group_size[(int) class] < size)
1280 {
1281 other_mode = group_mode[(int) class];
1282 allocate_mode = mode;
1283
1284 group_size[(int) class] = size;
1285 group_mode[(int) class] = mode;
1286 }
1287 else
1288 {
1289 other_mode = mode;
1290 allocate_mode = group_mode[(int) class];
1291 }
1292
1293 /* Crash if two dissimilar machine modes both need
1294 groups of consecutive regs of the same class. */
1295
1296 if (other_mode != VOIDmode && other_mode != allocate_mode
1297 && ! modes_equiv_for_class_p (allocate_mode,
1298 other_mode, class))
1299 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1300 insn);
1301 }
1302 else if (size == 1)
1303 {
1304 this_needs->regs[nongroup_need][(int) class] += 1;
1305 p = reg_class_superclasses[(int) class];
1306 while (*p != LIM_REG_CLASSES)
1307 this_needs->regs[nongroup_need][(int) *p++] += 1;
1308 }
1309 else
1310 abort ();
1311 }
1312
1313 /* All reloads have been counted for this insn;
1314 now merge the various times of use.
1315 This sets insn_needs, etc., to the maximum total number
1316 of registers needed at any point in this insn. */
1317
1318 for (i = 0; i < N_REG_CLASSES; i++)
1319 {
1320 int in_max, out_max;
1321
1322 /* Compute normal and nongroup needs. */
1323 for (j = 0; j <= 1; j++)
1324 {
1325 for (in_max = 0, out_max = 0, k = 0;
1326 k < reload_n_operands; k++)
1327 {
1328 in_max
1329 = MAX (in_max,
1330 (insn_needs.in_addr[k].regs[j][i]
1331 + insn_needs.in_addr_addr[k].regs[j][i]));
1332 out_max
1333 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1334 out_max
1335 = MAX (out_max,
1336 insn_needs.out_addr_addr[k].regs[j][i]);
1337 }
1338
1339 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1340 and operand addresses but not things used to reload
1341 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1342 don't conflict with things needed to reload inputs or
1343 outputs. */
1344
1345 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1346 insn_needs.op_addr_reload.regs[j][i]),
1347 in_max);
1348
1349 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1350
1351 insn_needs.input.regs[j][i]
1352 = MAX (insn_needs.input.regs[j][i]
1353 + insn_needs.op_addr.regs[j][i]
1354 + insn_needs.insn.regs[j][i],
1355 in_max + insn_needs.input.regs[j][i]);
1356
1357 insn_needs.output.regs[j][i] += out_max;
1358 insn_needs.other.regs[j][i]
1359 += MAX (MAX (insn_needs.input.regs[j][i],
1360 insn_needs.output.regs[j][i]),
1361 insn_needs.other_addr.regs[j][i]);
1362
1363 }
1364
1365 /* Now compute group needs. */
1366 for (in_max = 0, out_max = 0, j = 0;
1367 j < reload_n_operands; j++)
1368 {
1369 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1370 in_max = MAX (in_max,
1371 insn_needs.in_addr_addr[j].groups[i]);
1372 out_max
1373 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1374 out_max
1375 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1376 }
1377
1378 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1379 insn_needs.op_addr_reload.groups[i]),
1380 in_max);
1381 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1382
1383 insn_needs.input.groups[i]
1384 = MAX (insn_needs.input.groups[i]
1385 + insn_needs.op_addr.groups[i]
1386 + insn_needs.insn.groups[i],
1387 in_max + insn_needs.input.groups[i]);
1388
1389 insn_needs.output.groups[i] += out_max;
1390 insn_needs.other.groups[i]
1391 += MAX (MAX (insn_needs.input.groups[i],
1392 insn_needs.output.groups[i]),
1393 insn_needs.other_addr.groups[i]);
1394 }
1395
1396 /* If this is a CALL_INSN and caller-saves will need
1397 a spill register, act as if the spill register is
1398 needed for this insn. However, the spill register
1399 can be used by any reload of this insn, so we only
1400 need do something if no need for that class has
1401 been recorded.
1402
1403 The assumption that every CALL_INSN will trigger a
1404 caller-save is highly conservative; however, the number
1405 of cases where caller-saves will need a spill register but
1406 a block containing a CALL_INSN won't need a spill register
1407 of that class should be quite rare.
1408
1409 If a group is needed, the size and mode of the group will
1410 have been set up at the beginning of this loop. */
1411
1412 if (GET_CODE (insn) == CALL_INSN
1413 && caller_save_spill_class != NO_REGS)
1414 {
1415 /* See if this register would conflict with any reload
1416 that needs a group. */
1417 int nongroup_need = 0;
1418 int *caller_save_needs;
1419
1420 for (j = 0; j < n_reloads; j++)
1421 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1422 (GET_MODE_SIZE (reload_outmode[j])
1423 > GET_MODE_SIZE (reload_inmode[j]))
1424 ? reload_outmode[j]
1425 : reload_inmode[j])
1426 > 1)
1427 && reg_classes_intersect_p (caller_save_spill_class,
1428 reload_reg_class[j]))
1429 {
1430 nongroup_need = 1;
1431 break;
1432 }
1433
1434 caller_save_needs
1435 = (caller_save_group_size > 1
1436 ? insn_needs.other.groups
1437 : insn_needs.other.regs[nongroup_need]);
1438
1439 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1440 {
1441 register enum reg_class *p
1442 = reg_class_superclasses[(int) caller_save_spill_class];
1443
1444 caller_save_needs[(int) caller_save_spill_class]++;
1445
1446 while (*p != LIM_REG_CLASSES)
1447 caller_save_needs[(int) *p++] += 1;
1448 }
1449
1450 /* Show that this basic block will need a register of
1451 this class. */
1452
1453 if (global
1454 && ! (basic_block_needs[(int) caller_save_spill_class]
1455 [this_block]))
1456 {
1457 basic_block_needs[(int) caller_save_spill_class]
1458 [this_block] = 1;
1459 new_basic_block_needs = 1;
1460 }
1461 }
1462
1463 /* If this insn stores the value of a function call,
1464 and that value is in a register that has been spilled,
1465 and if the insn needs a reload in a class
1466 that might use that register as the reload register,
1467 then add an extra need in that class.
1468 This makes sure we have a register available that does
1469 not overlap the return value. */
1470
1471 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1472 {
1473 int regno = REGNO (avoid_return_reg);
1474 int nregs
1475 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1476 int r;
1477 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1478
1479 /* First compute the "basic needs", which counts a
1480 need only in the smallest class in which it
1481 is required. */
1482
1483 bcopy ((char *) insn_needs.other.regs[0],
1484 (char *) basic_needs, sizeof basic_needs);
1485 bcopy ((char *) insn_needs.other.groups,
1486 (char *) basic_groups, sizeof basic_groups);
1487
1488 for (i = 0; i < N_REG_CLASSES; i++)
1489 {
1490 enum reg_class *p;
1491
1492 if (basic_needs[i] >= 0)
1493 for (p = reg_class_superclasses[i];
1494 *p != LIM_REG_CLASSES; p++)
1495 basic_needs[(int) *p] -= basic_needs[i];
1496
1497 if (basic_groups[i] >= 0)
1498 for (p = reg_class_superclasses[i];
1499 *p != LIM_REG_CLASSES; p++)
1500 basic_groups[(int) *p] -= basic_groups[i];
1501 }
1502
1503 /* Now count extra regs if there might be a conflict with
1504 the return value register. */
1505
1506 for (r = regno; r < regno + nregs; r++)
1507 if (spill_reg_order[r] >= 0)
1508 for (i = 0; i < N_REG_CLASSES; i++)
1509 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1510 {
1511 if (basic_needs[i] > 0)
1512 {
1513 enum reg_class *p;
1514
1515 insn_needs.other.regs[0][i]++;
1516 p = reg_class_superclasses[i];
1517 while (*p != LIM_REG_CLASSES)
1518 insn_needs.other.regs[0][(int) *p++]++;
1519 }
1520 if (basic_groups[i] > 0)
1521 {
1522 enum reg_class *p;
1523
1524 insn_needs.other.groups[i]++;
1525 p = reg_class_superclasses[i];
1526 while (*p != LIM_REG_CLASSES)
1527 insn_needs.other.groups[(int) *p++]++;
1528 }
1529 }
1530 }
1531
1532 	  /* For each class, collect the maximum need of any insn.  */
1533
1534 for (i = 0; i < N_REG_CLASSES; i++)
1535 {
1536 if (max_needs[i] < insn_needs.other.regs[0][i])
1537 {
1538 max_needs[i] = insn_needs.other.regs[0][i];
1539 max_needs_insn[i] = insn;
1540 }
1541 if (max_groups[i] < insn_needs.other.groups[i])
1542 {
1543 max_groups[i] = insn_needs.other.groups[i];
1544 max_groups_insn[i] = insn;
1545 }
1546 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1547 {
1548 max_nongroups[i] = insn_needs.other.regs[1][i];
1549 max_nongroups_insn[i] = insn;
1550 }
1551 }
1552 }
1553 /* Note that there is a continue statement above. */
1554 }
1555
1556 /* If we allocated any new memory locations, make another pass
1557 since it might have changed elimination offsets. */
1558 if (starting_frame_size != get_frame_size ())
1559 something_changed = 1;
1560
1561 if (dumpfile)
1562 for (i = 0; i < N_REG_CLASSES; i++)
1563 {
1564 if (max_needs[i] > 0)
1565 fprintf (dumpfile,
1566 ";; Need %d reg%s of class %s (for insn %d).\n",
1567 max_needs[i], max_needs[i] == 1 ? "" : "s",
1568 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1569 if (max_nongroups[i] > 0)
1570 fprintf (dumpfile,
1571 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1572 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1573 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1574 if (max_groups[i] > 0)
1575 fprintf (dumpfile,
1576 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1577 max_groups[i], max_groups[i] == 1 ? "" : "s",
1578 mode_name[(int) group_mode[i]],
1579 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1580 }
1581
1582 /* If we have caller-saves, set up the save areas and see if caller-save
1583 will need a spill register. */
1584
1585 if (caller_save_needed)
1586 {
1587 /* Set the offsets for setup_save_areas. */
1588 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1589 ep++)
1590 ep->previous_offset = ep->max_offset;
1591
1592 if ( ! setup_save_areas (&something_changed)
1593 && caller_save_spill_class == NO_REGS)
1594 {
1595 /* The class we will need depends on whether the machine
1596 supports the sum of two registers for an address; see
1597 find_address_reloads for details. */
1598
1599 caller_save_spill_class
1600 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1601 caller_save_group_size
1602 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1603 something_changed = 1;
1604 }
1605 }
1606
1607 /* See if anything that happened changes which eliminations are valid.
1608 For example, on the Sparc, whether or not the frame pointer can
1609 be eliminated can depend on what registers have been used. We need
1610 not check some conditions again (such as flag_omit_frame_pointer)
1611 since they can't have changed. */
1612
1613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1614 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1615 #ifdef ELIMINABLE_REGS
1616 || ! CAN_ELIMINATE (ep->from, ep->to)
1617 #endif
1618 )
1619 ep->can_eliminate = 0;
1620
1621 /* Look for the case where we have discovered that we can't replace
1622 register A with register B and that means that we will now be
1623 trying to replace register A with register C. This means we can
1624 no longer replace register C with register B and we need to disable
1625 such an elimination, if it exists. This occurs often with A == ap,
1626 B == sp, and C == fp. */
1627
1628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1629 {
1630 struct elim_table *op;
1631 register int new_to = -1;
1632
1633 if (! ep->can_eliminate && ep->can_eliminate_previous)
1634 {
1635 /* Find the current elimination for ep->from, if there is a
1636 new one. */
1637 for (op = reg_eliminate;
1638 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1639 if (op->from == ep->from && op->can_eliminate)
1640 {
1641 new_to = op->to;
1642 break;
1643 }
1644
1645 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1646 disable it. */
1647 for (op = reg_eliminate;
1648 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1649 if (op->from == new_to && op->to == ep->to)
1650 op->can_eliminate = 0;
1651 }
1652 }
1653
1654 /* See if any registers that we thought we could eliminate the previous
1655 time are no longer eliminable. If so, something has changed and we
1656 must spill the register. Also, recompute the number of eliminable
1657 registers and see if the frame pointer is needed; it is if there is
1658 no elimination of the frame pointer that we can perform. */
1659
1660 frame_pointer_needed = 1;
1661 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1662 {
1663 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1664 && ep->to != HARD_FRAME_POINTER_REGNUM)
1665 frame_pointer_needed = 0;
1666
1667 if (! ep->can_eliminate && ep->can_eliminate_previous)
1668 {
1669 ep->can_eliminate_previous = 0;
1670 spill_hard_reg (ep->from, global, dumpfile, 1);
1671 something_changed = 1;
1672 num_eliminable--;
1673 }
1674 }
1675
1676 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1677 /* If we didn't need a frame pointer last time, but we do now, spill
1678 the hard frame pointer. */
1679 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1680 {
1681 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1682 something_changed = 1;
1683 }
1684 #endif
1685
1686 /* If all needs are met, we win. */
1687
1688 for (i = 0; i < N_REG_CLASSES; i++)
1689 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1690 break;
1691 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1692 break;
1693
1694 /* Not all needs are met; must spill some hard regs. */
1695
1696 /* Put all registers spilled so far back in potential_reload_regs, but
1697 put them at the front, since we've already spilled most of the
1698 pseudos in them (we might have left some pseudos unspilled if they
1699 were in a block that didn't need any spill registers of a conflicting
1700 	 class).  We used to try to mark off the need for those registers,
1701 but doing so properly is very complex and reallocating them is the
1702 simpler approach. First, "pack" potential_reload_regs by pushing
1703 any nonnegative entries towards the end. That will leave room
1704 for the registers we already spilled.
1705
1706 Also, undo the marking of the spill registers from the last time
1707 	 around in FORBIDDEN_REGS since we will probably be allocating
1708 them again below.
1709
1710 ??? It is theoretically possible that we might end up not using one
1711 of our previously-spilled registers in this allocation, even though
1712 they are at the head of the list. It's not clear what to do about
1713 this, but it was no better before, when we marked off the needs met
1714 by the previously-spilled registers. With the current code, globals
1715 can be allocated into these registers, but locals cannot. */
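      /* An illustrative example (hypothetical contents, not taken from any
	 real compilation): suppose n_spills is 2 with spill_regs = {8, 9};
	 those two entries were set to -1 in potential_reload_regs when they
	 were spilled, so the array might now look like {3, -1, -1, 5, ...}.
	 The packing loop below slides the remaining nonnegative entries 3
	 and 5 toward the end, and the previously spilled regs 8 and 9 are
	 then reinstalled at the front of the array.  */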
1716
1717 if (n_spills)
1718 {
1719 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1720 if (potential_reload_regs[i] != -1)
1721 potential_reload_regs[j--] = potential_reload_regs[i];
1722
1723 for (i = 0; i < n_spills; i++)
1724 {
1725 potential_reload_regs[i] = spill_regs[i];
1726 spill_reg_order[spill_regs[i]] = -1;
1727 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1728 }
1729
1730 n_spills = 0;
1731 }
1732
1733       /* Now find more reload regs to satisfy the remaining need.
1734 Do it by ascending class number, since otherwise a reg
1735 might be spilled for a big class and might fail to count
1736 for a smaller class even though it belongs to that class.
1737
1738 Count spilled regs in `spills', and add entries to
1739 `spill_regs' and `spill_reg_order'.
1740
1741 ??? Note there is a problem here.
1742 When there is a need for a group in a high-numbered class,
1743 and also need for non-group regs that come from a lower class,
1744 the non-group regs are chosen first. If there aren't many regs,
1745 they might leave no room for a group.
1746
1747 This was happening on the 386. To fix it, we added the code
1748 that calls possible_group_p, so that the lower class won't
1749 break up the last possible group.
1750
1751 Really fixing the problem would require changes above
1752 in counting the regs already spilled, and in choose_reload_regs.
1753 It might be hard to avoid introducing bugs there. */
1754
1755 CLEAR_HARD_REG_SET (counted_for_groups);
1756 CLEAR_HARD_REG_SET (counted_for_nongroups);
1757
1758 for (class = 0; class < N_REG_CLASSES; class++)
1759 {
1760 /* First get the groups of registers.
1761 If we got single registers first, we might fragment
1762 possible groups. */
1763 while (max_groups[class] > 0)
1764 {
1765 /* If any single spilled regs happen to form groups,
1766 count them now. Maybe we don't really need
1767 to spill another group. */
1768 count_possible_groups (group_size, group_mode, max_groups,
1769 class);
1770
1771 if (max_groups[class] <= 0)
1772 break;
1773
1774 /* Groups of size 2 (the only groups used on most machines)
1775 are treated specially. */
1776 if (group_size[class] == 2)
1777 {
1778 /* First, look for a register that will complete a group. */
1779 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1780 {
1781 int other;
1782
1783 j = potential_reload_regs[i];
1784 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1785 &&
1786 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1787 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1788 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1789 && HARD_REGNO_MODE_OK (other, group_mode[class])
1790 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1791 other)
1792 /* We don't want one part of another group.
1793 We could get "two groups" that overlap! */
1794 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1795 ||
1796 (j < FIRST_PSEUDO_REGISTER - 1
1797 && (other = j + 1, spill_reg_order[other] >= 0)
1798 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1799 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1800 && HARD_REGNO_MODE_OK (j, group_mode[class])
1801 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1802 other)
1803 && ! TEST_HARD_REG_BIT (counted_for_groups,
1804 other))))
1805 {
1806 register enum reg_class *p;
1807
1808 /* We have found one that will complete a group,
1809 so count off one group as provided. */
1810 max_groups[class]--;
1811 p = reg_class_superclasses[class];
1812 while (*p != LIM_REG_CLASSES)
1813 {
1814 if (group_size [(int) *p] <= group_size [class])
1815 max_groups[(int) *p]--;
1816 p++;
1817 }
1818
1819 /* Indicate both these regs are part of a group. */
1820 SET_HARD_REG_BIT (counted_for_groups, j);
1821 SET_HARD_REG_BIT (counted_for_groups, other);
1822 break;
1823 }
1824 }
1825 /* We can't complete a group, so start one. */
1826 /* Look for a pair neither of which is explicitly used. */
1827 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1828 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1829 {
1830 int k;
1831 j = potential_reload_regs[i];
1832 /* Verify that J+1 is a potential reload reg. */
1833 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1834 if (potential_reload_regs[k] == j + 1)
1835 break;
1836 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1837 && k < FIRST_PSEUDO_REGISTER
1838 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1839 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1840 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1841 && HARD_REGNO_MODE_OK (j, group_mode[class])
1842 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1843 j + 1)
1844 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1845 /* Reject J at this stage
1846 if J+1 was explicitly used. */
1847 && ! regs_explicitly_used[j + 1])
1848 break;
1849 }
1850 /* Now try any group at all
1851 whose registers are not in bad_spill_regs. */
1852 if (i == FIRST_PSEUDO_REGISTER)
1853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1854 {
1855 int k;
1856 j = potential_reload_regs[i];
1857 /* Verify that J+1 is a potential reload reg. */
1858 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1859 if (potential_reload_regs[k] == j + 1)
1860 break;
1861 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1862 && k < FIRST_PSEUDO_REGISTER
1863 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1864 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1865 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1866 && HARD_REGNO_MODE_OK (j, group_mode[class])
1867 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1868 j + 1)
1869 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1870 break;
1871 }
1872
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1875
1876 if (i >= FIRST_PSEUDO_REGISTER)
1877 {
1878 /* There are no groups left to spill. */
1879 spill_failure (max_groups_insn[class]);
1880 failure = 1;
1881 goto failed;
1882 }
1883 else
1884 something_changed
1885 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1886 global, dumpfile);
1887 }
1888 else
1889 {
1890 /* For groups of more than 2 registers,
1891 look for a sufficient sequence of unspilled registers,
1892 and spill them all at once. */
1893 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1894 {
1895 int k;
1896
1897 j = potential_reload_regs[i];
1898 if (j >= 0
1899 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1900 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1901 {
1902 /* Check each reg in the sequence. */
1903 for (k = 0; k < group_size[class]; k++)
1904 if (! (spill_reg_order[j + k] < 0
1905 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1906 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1907 break;
1908 /* We got a full sequence, so spill them all. */
1909 if (k == group_size[class])
1910 {
1911 register enum reg_class *p;
1912 for (k = 0; k < group_size[class]; k++)
1913 {
1914 int idx;
1915 SET_HARD_REG_BIT (counted_for_groups, j + k);
1916 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1917 if (potential_reload_regs[idx] == j + k)
1918 break;
1919 something_changed
1920 |= new_spill_reg (idx, class,
1921 max_needs, NULL_PTR,
1922 global, dumpfile);
1923 }
1924
1925 /* We have found one that will complete a group,
1926 so count off one group as provided. */
1927 max_groups[class]--;
1928 p = reg_class_superclasses[class];
1929 while (*p != LIM_REG_CLASSES)
1930 {
1931 if (group_size [(int) *p]
1932 <= group_size [class])
1933 max_groups[(int) *p]--;
1934 p++;
1935 }
1936 break;
1937 }
1938 }
1939 }
1940 /* We couldn't find any registers for this reload.
1941 Avoid going into an infinite loop. */
1942 if (i >= FIRST_PSEUDO_REGISTER)
1943 {
1944 /* There are no groups left. */
1945 spill_failure (max_groups_insn[class]);
1946 failure = 1;
1947 goto failed;
1948 }
1949 }
1950 }
1951
1952 /* Now similarly satisfy all need for single registers. */
1953
1954 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1955 {
1956 /* If we spilled enough regs, but they weren't counted
1957 against the non-group need, see if we can count them now.
1958 If so, we can avoid some actual spilling. */
1959 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1960 for (i = 0; i < n_spills; i++)
1961 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1962 spill_regs[i])
1963 && !TEST_HARD_REG_BIT (counted_for_groups,
1964 spill_regs[i])
1965 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1966 spill_regs[i])
1967 && max_nongroups[class] > 0)
1968 {
1969 register enum reg_class *p;
1970
1971 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1972 max_nongroups[class]--;
1973 p = reg_class_superclasses[class];
1974 while (*p != LIM_REG_CLASSES)
1975 max_nongroups[(int) *p++]--;
1976 }
1977 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1978 break;
1979
1980 /* Consider the potential reload regs that aren't
1981 yet in use as reload regs, in order of preference.
1982 Find the most preferred one that's in this class. */
1983
1984 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1985 if (potential_reload_regs[i] >= 0
1986 && TEST_HARD_REG_BIT (reg_class_contents[class],
1987 potential_reload_regs[i])
1988 /* If this reg will not be available for groups,
1989 pick one that does not foreclose possible groups.
1990 This is a kludge, and not very general,
1991 but it should be sufficient to make the 386 work,
1992 and the problem should not occur on machines with
1993 more registers. */
1994 && (max_nongroups[class] == 0
1995 || possible_group_p (potential_reload_regs[i], max_groups)))
1996 break;
1997
1998 /* If we couldn't get a register, try to get one even if we
1999 might foreclose possible groups. This may cause problems
2000 later, but that's better than aborting now, since it is
2001 possible that we will, in fact, be able to form the needed
2002 group even with this allocation. */
2003
2004 if (i >= FIRST_PSEUDO_REGISTER
2005 && (asm_noperands (max_needs[class] > 0
2006 ? max_needs_insn[class]
2007 : max_nongroups_insn[class])
2008 < 0))
2009 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2010 if (potential_reload_regs[i] >= 0
2011 && TEST_HARD_REG_BIT (reg_class_contents[class],
2012 potential_reload_regs[i]))
2013 break;
2014
2015 /* I should be the index in potential_reload_regs
2016 of the new reload reg we have found. */
2017
2018 if (i >= FIRST_PSEUDO_REGISTER)
2019 {
2020 /* There are no possible registers left to spill. */
2021 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2022 : max_nongroups_insn[class]);
2023 failure = 1;
2024 goto failed;
2025 }
2026 else
2027 something_changed
2028 |= new_spill_reg (i, class, max_needs, max_nongroups,
2029 global, dumpfile);
2030 }
2031 }
2032 }
2033
2034 /* If global-alloc was run, notify it of any register eliminations we have
2035 done. */
2036 if (global)
2037 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2038 if (ep->can_eliminate)
2039 mark_elimination (ep->from, ep->to);
2040
2041 /* Insert code to save and restore call-clobbered hard regs
2042      around calls.  Tell it what mode to use so that we will process
2043 those insns in reload_as_needed if we have to. */
2044
2045 if (caller_save_needed)
2046 save_call_clobbered_regs (num_eliminable ? QImode
2047 : caller_save_spill_class != NO_REGS ? HImode
2048 : VOIDmode);
2049
2050 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2051 If that insn didn't set the register (i.e., it copied the register to
2052 memory), just delete that insn instead of the equivalencing insn plus
2053 anything now dead. If we call delete_dead_insn on that insn, we may
2054      delete the insn that actually sets the register if the register dies
2055      there, and that is incorrect.  */
2056
2057 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2058 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2059 && GET_CODE (reg_equiv_init[i]) != NOTE)
2060 {
2061 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2062 delete_dead_insn (reg_equiv_init[i]);
2063 else
2064 {
2065 PUT_CODE (reg_equiv_init[i], NOTE);
2066 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2067 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2068 }
2069 }
2070
2071 /* Use the reload registers where necessary
2072 by generating move instructions to move the must-be-register
2073 values into or out of the reload registers. */
2074
2075 if (something_needs_reloads || something_needs_elimination
2076 || (caller_save_needed && num_eliminable)
2077 || caller_save_spill_class != NO_REGS)
2078 reload_as_needed (first, global);
2079
2080 /* If we were able to eliminate the frame pointer, show that it is no
2081      longer live at the start of any basic block.  If it is live by
2082 virtue of being in a pseudo, that pseudo will be marked live
2083 and hence the frame pointer will be known to be live via that
2084 pseudo. */
2085
2086 if (! frame_pointer_needed)
2087 for (i = 0; i < n_basic_blocks; i++)
2088 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2089 HARD_FRAME_POINTER_REGNUM);
2090
2091 /* Come here (with failure set nonzero) if we can't get enough spill regs
2092 and we decide not to abort about it. */
2093 failed:
2094
2095 reload_in_progress = 0;
2096
2097 /* Now eliminate all pseudo regs by modifying them into
2098 their equivalent memory references.
2099 The REG-rtx's for the pseudos are modified in place,
2100 so all insns that used to refer to them now refer to memory.
2101
2102 For a reg that has a reg_equiv_address, all those insns
2103 were changed by reloading so that no insns refer to it any longer;
2104 but the DECL_RTL of a variable decl may refer to it,
2105 and if so this causes the debugging info to mention the variable. */
2106
2107 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2108 {
2109 rtx addr = 0;
2110 int in_struct = 0;
2111 if (reg_equiv_mem[i])
2112 {
2113 addr = XEXP (reg_equiv_mem[i], 0);
2114 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2115 }
2116 if (reg_equiv_address[i])
2117 addr = reg_equiv_address[i];
2118 if (addr)
2119 {
2120 if (reg_renumber[i] < 0)
2121 {
2122 rtx reg = regno_reg_rtx[i];
2123 XEXP (reg, 0) = addr;
2124 REG_USERVAR_P (reg) = 0;
2125 MEM_IN_STRUCT_P (reg) = in_struct;
2126 PUT_CODE (reg, MEM);
2127 }
2128 else if (reg_equiv_mem[i])
2129 XEXP (reg_equiv_mem[i], 0) = addr;
2130 }
2131 }
2132
2133 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2134 /* Make a pass over all the insns and remove death notes for things that
2135 are no longer registers or no longer die in the insn (e.g., an input
2136 and output pseudo being tied). */
2137
2138 for (insn = first; insn; insn = NEXT_INSN (insn))
2139 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2140 {
2141 rtx note, next;
2142
2143 for (note = REG_NOTES (insn); note; note = next)
2144 {
2145 next = XEXP (note, 1);
2146 if (REG_NOTE_KIND (note) == REG_DEAD
2147 && (GET_CODE (XEXP (note, 0)) != REG
2148 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2149 remove_note (insn, note);
2150 }
2151 }
2152 #endif
2153
2154 /* If we are doing stack checking, give a warning if this function's
2155 frame size is larger than we expect. */
2156 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2157 {
2158 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2159
2160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2161 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2162 size += UNITS_PER_WORD;
2163
2164 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2165 warning ("frame size too large for reliable stack checking");
2166 }
2167
2168 /* Indicate that we no longer have known memory locations or constants. */
2169 reg_equiv_constant = 0;
2170 reg_equiv_memory_loc = 0;
2171
2172 if (real_known_ptr)
2173 free (real_known_ptr);
2174 if (real_at_ptr)
2175 free (real_at_ptr);
2176
2177 if (scratch_list)
2178 free (scratch_list);
2179 scratch_list = 0;
2180 if (scratch_block)
2181 free (scratch_block);
2182 scratch_block = 0;
2183
2184 CLEAR_HARD_REG_SET (used_spill_regs);
2185 for (i = 0; i < n_spills; i++)
2186 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2187
2188 return failure;
2189 }
2190 \f
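/* A recurring idiom in the spilling loops above: whenever a need is
   charged (or discharged) for a register class, the same adjustment is
   made for every superclass of that class.  The function below is a
   minimal sketch of that idiom only; it is excluded from the build with
   #if 0, and the name charge_class_and_superclasses as well as the
   arguments CLASS and NEEDS are hypothetical.  */

#if 0
static void
charge_class_and_superclasses (class, needs)
     int class;
     int *needs;
{
  register enum reg_class *p;

  needs[class]++;
  for (p = reg_class_superclasses[class]; *p != LIM_REG_CLASSES; p++)
    needs[(int) *p]++;
}
#endif
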
2191 /* Nonzero if, after spilling reg REGNO for non-groups,
2192 it will still be possible to find a group if we still need one. */
2193
2194 static int
2195 possible_group_p (regno, max_groups)
2196 int regno;
2197 int *max_groups;
2198 {
2199 int i;
2200 int class = (int) NO_REGS;
2201
2202 for (i = 0; i < (int) N_REG_CLASSES; i++)
2203 if (max_groups[i] > 0)
2204 {
2205 class = i;
2206 break;
2207 }
2208
2209 if (class == (int) NO_REGS)
2210 return 1;
2211
2212 /* Consider each pair of consecutive registers. */
2213 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2214 {
2215 /* Ignore pairs that include reg REGNO. */
2216 if (i == regno || i + 1 == regno)
2217 continue;
2218
2219 /* Ignore pairs that are outside the class that needs the group.
2220 ??? Here we fail to handle the case where two different classes
2221 independently need groups. But this never happens with our
2222 current machine descriptions. */
2223 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2224 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2225 continue;
2226
2227 /* A pair of consecutive regs we can still spill does the trick. */
2228 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2229 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2230 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2231 return 1;
2232
2233 /* A pair of one already spilled and one we can spill does it
2234 provided the one already spilled is not otherwise reserved. */
2235 if (spill_reg_order[i] < 0
2236 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2237 && spill_reg_order[i + 1] >= 0
2238 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2239 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2240 return 1;
2241 if (spill_reg_order[i + 1] < 0
2242 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2243 && spill_reg_order[i] >= 0
2244 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2245 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2246 return 1;
2247 }
2248
2249 return 0;
2250 }
2251 \f
2252 /* Count any groups of CLASS that can be formed from the registers recently
2253 spilled. */
2254
2255 static void
2256 count_possible_groups (group_size, group_mode, max_groups, class)
2257 int *group_size;
2258 enum machine_mode *group_mode;
2259 int *max_groups;
2260 int class;
2261 {
2262 HARD_REG_SET new;
2263 int i, j;
2264
2265 /* Now find all consecutive groups of spilled registers
2266 and mark each group off against the need for such groups.
2267 But don't count them against ordinary need, yet. */
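  /* Worked example (illustrative only, with hypothetical register numbers
     and assuming the mode constraints are satisfied): if group_size[class]
     is 2 and the spill regs so far are 3, 4, 5 and 8, none of them yet
     counted for groups or nongroups, the mask NEW below is {3, 4, 5, 8}.
     Regs 3 and 4 form one group and are marked in counted_for_groups,
     reg 5 has no spilled neighbor 6, and reg 8 has no partner, so
     max_groups[class] is decremented exactly once.  */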
2268
2269 if (group_size[class] == 0)
2270 return;
2271
2272 CLEAR_HARD_REG_SET (new);
2273
2274 /* Make a mask of all the regs that are spill regs in class I. */
2275 for (i = 0; i < n_spills; i++)
2276 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2277 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2278 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2279 SET_HARD_REG_BIT (new, spill_regs[i]);
2280
2281 /* Find each consecutive group of them. */
2282 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2283 if (TEST_HARD_REG_BIT (new, i)
2284 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2285 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2286 {
2287 for (j = 1; j < group_size[class]; j++)
2288 if (! TEST_HARD_REG_BIT (new, i + j))
2289 break;
2290
2291 if (j == group_size[class])
2292 {
2293 /* We found a group. Mark it off against this class's need for
2294 groups, and against each superclass too. */
2295 register enum reg_class *p;
2296
2297 max_groups[class]--;
2298 p = reg_class_superclasses[class];
2299 while (*p != LIM_REG_CLASSES)
2300 {
2301 if (group_size [(int) *p] <= group_size [class])
2302 max_groups[(int) *p]--;
2303 p++;
2304 }
2305
2306 /* Don't count these registers again. */
2307 for (j = 0; j < group_size[class]; j++)
2308 SET_HARD_REG_BIT (counted_for_groups, i + j);
2309 }
2310
2311 /* Skip to the last reg in this group. When i is incremented above,
2312 it will then point to the first reg of the next possible group. */
2313 i += j - 1;
2314 }
2315 }
2316 \f
2317 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2318 another mode that needs to be reloaded for the same register class CLASS.
2319 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2320 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2321
2322 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2323 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2324 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2325 causes unnecessary failures on machines requiring alignment of register
2326 groups when the two modes are different sizes, because the larger mode has
2327 more strict alignment rules than the smaller mode. */
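/* A concrete illustration (a hypothetical target, not taken from any
   machine description in this tree): if only even-numbered registers in
   CLASS accept DImode while every register in CLASS accepts SImode, then
   checking ALLOCATE_MODE = DImode against OTHER_MODE = SImode succeeds,
   because each register that allows DImode also allows SImode.  The
   reverse test, which this function deliberately omits, would fail on
   the odd-numbered registers.  */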
2328
2329 static int
2330 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2331 enum machine_mode allocate_mode, other_mode;
2332 enum reg_class class;
2333 {
2334 register int regno;
2335 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2336 {
2337 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2338 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2339 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2340 return 0;
2341 }
2342 return 1;
2343 }
2344
2345 /* Handle the failure to find a register to spill.
2346 INSN should be one of the insns which needed this particular spill reg. */
2347
2348 static void
2349 spill_failure (insn)
2350 rtx insn;
2351 {
2352 if (asm_noperands (PATTERN (insn)) >= 0)
2353 error_for_asm (insn, "`asm' needs too many reloads");
2354 else
2355 fatal_insn ("Unable to find a register to spill.", insn);
2356 }
2357
2358 /* Add a new register to the tables of available spill-registers
2359 (as well as spilling all pseudos allocated to the register).
2360 I is the index of this register in potential_reload_regs.
2361 CLASS is the regclass whose need is being satisfied.
2362 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2363 so that this register can count off against them.
2364 MAX_NONGROUPS is 0 if this register is part of a group.
2365 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2366
2367 static int
2368 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2369 int i;
2370 int class;
2371 int *max_needs;
2372 int *max_nongroups;
2373 int global;
2374 FILE *dumpfile;
2375 {
2376 register enum reg_class *p;
2377 int val;
2378 int regno = potential_reload_regs[i];
2379
2380 if (i >= FIRST_PSEUDO_REGISTER)
2381 abort (); /* Caller failed to find any register. */
2382
2383 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2384 {
2385 static char *reg_class_names[] = REG_CLASS_NAMES;
2386 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
2387 This may be due to a compiler bug or to impossible asm\n\
2388 statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2389 }
2390
2391 /* Make reg REGNO an additional reload reg. */
2392
2393 potential_reload_regs[i] = -1;
2394 spill_regs[n_spills] = regno;
2395 spill_reg_order[regno] = n_spills;
2396 if (dumpfile)
2397 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2398
2399 /* Clear off the needs we just satisfied. */
2400
2401 max_needs[class]--;
2402 p = reg_class_superclasses[class];
2403 while (*p != LIM_REG_CLASSES)
2404 max_needs[(int) *p++]--;
2405
2406 if (max_nongroups && max_nongroups[class] > 0)
2407 {
2408 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2409 max_nongroups[class]--;
2410 p = reg_class_superclasses[class];
2411 while (*p != LIM_REG_CLASSES)
2412 max_nongroups[(int) *p++]--;
2413 }
2414
2415 /* Spill every pseudo reg that was allocated to this reg
2416 or to something that overlaps this reg. */
2417
2418 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2419
2420 /* If there are some registers still to eliminate and this register
2421 wasn't ever used before, additional stack space may have to be
2422 allocated to store this register. Thus, we may have changed the offset
2423 between the stack and frame pointers, so mark that something has changed.
2424 (If new pseudos were spilled, thus requiring more space, VAL would have
2425 been set non-zero by the call to spill_hard_reg above since additional
2426      reloads may be needed in that case.)
2427
2428 One might think that we need only set VAL to 1 if this is a call-used
2429 register. However, the set of registers that must be saved by the
2430 prologue is not identical to the call-used set. For example, the
2431 register used by the call insn for the return PC is a call-used register,
2432 but must be saved by the prologue. */
2433 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2434 val = 1;
2435
2436 regs_ever_live[spill_regs[n_spills]] = 1;
2437 n_spills++;
2438
2439 return val;
2440 }
2441 \f
2442 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2443 data that is dead in INSN. */
2444
2445 static void
2446 delete_dead_insn (insn)
2447 rtx insn;
2448 {
2449 rtx prev = prev_real_insn (insn);
2450 rtx prev_dest;
2451
2452 /* If the previous insn sets a register that dies in our insn, delete it
2453 too. */
2454 if (prev && GET_CODE (PATTERN (prev)) == SET
2455 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2456 && reg_mentioned_p (prev_dest, PATTERN (insn))
2457 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2458 delete_dead_insn (prev);
2459
2460 PUT_CODE (insn, NOTE);
2461 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2462 NOTE_SOURCE_FILE (insn) = 0;
2463 }
2464
2465 /* Modify the home of pseudo-reg I.
2466 The new home is present in reg_renumber[I].
2467
2468 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2469 or it may be -1, meaning there is none or it is not relevant.
2470 This is used so that all pseudos spilled from a given hard reg
2471 can share one stack slot. */
2472
2473 static void
2474 alter_reg (i, from_reg)
2475 register int i;
2476 int from_reg;
2477 {
2478 /* When outputting an inline function, this can happen
2479 for a reg that isn't actually used. */
2480 if (regno_reg_rtx[i] == 0)
2481 return;
2482
2483 /* If the reg got changed to a MEM at rtl-generation time,
2484 ignore it. */
2485 if (GET_CODE (regno_reg_rtx[i]) != REG)
2486 return;
2487
2488 /* Modify the reg-rtx to contain the new hard reg
2489 number or else to contain its pseudo reg number. */
2490 REGNO (regno_reg_rtx[i])
2491 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2492
2493 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2494 allocate a stack slot for it. */
2495
2496 if (reg_renumber[i] < 0
2497 && REG_N_REFS (i) > 0
2498 && reg_equiv_constant[i] == 0
2499 && reg_equiv_memory_loc[i] == 0)
2500 {
2501 register rtx x;
2502 int inherent_size = PSEUDO_REGNO_BYTES (i);
2503 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2504 int adjust = 0;
2505
2506 /* Each pseudo reg has an inherent size which comes from its own mode,
2507 and a total size which provides room for paradoxical subregs
2508 which refer to the pseudo reg in wider modes.
2509
2510 We can use a slot already allocated if it provides both
2511 enough inherent space and enough total space.
2512 Otherwise, we allocate a new slot, making sure that it has no less
2513 	 inherent space, and no less total space, than the previous slot.  */
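      /* For instance (illustrative sizes): an SImode pseudo has an inherent
	 size of 4 bytes, but if it is also referenced through a paradoxical
	 DImode subreg its total size is 8 bytes.  An 8-byte slot previously
	 made for FROM_REG can then be reused for it, whereas a plain 4-byte
	 slot cannot.  */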
2514 if (from_reg == -1)
2515 {
2516 /* No known place to spill from => no slot to reuse. */
2517 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2518 inherent_size == total_size ? 0 : -1);
2519 if (BYTES_BIG_ENDIAN)
2520 /* Cancel the big-endian correction done in assign_stack_local.
2521 Get the address of the beginning of the slot.
2522 This is so we can do a big-endian correction unconditionally
2523 below. */
2524 adjust = inherent_size - total_size;
2525
2526 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2527 }
2528 /* Reuse a stack slot if possible. */
2529 else if (spill_stack_slot[from_reg] != 0
2530 && spill_stack_slot_width[from_reg] >= total_size
2531 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2532 >= inherent_size))
2533 x = spill_stack_slot[from_reg];
2534 /* Allocate a bigger slot. */
2535 else
2536 {
2537 /* Compute maximum size needed, both for inherent size
2538 and for total size. */
2539 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2540 rtx stack_slot;
2541 if (spill_stack_slot[from_reg])
2542 {
2543 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2544 > inherent_size)
2545 mode = GET_MODE (spill_stack_slot[from_reg]);
2546 if (spill_stack_slot_width[from_reg] > total_size)
2547 total_size = spill_stack_slot_width[from_reg];
2548 }
2549 /* Make a slot with that size. */
2550 x = assign_stack_local (mode, total_size,
2551 inherent_size == total_size ? 0 : -1);
2552 stack_slot = x;
2553 if (BYTES_BIG_ENDIAN)
2554 {
2555 /* Cancel the big-endian correction done in assign_stack_local.
2556 Get the address of the beginning of the slot.
2557 This is so we can do a big-endian correction unconditionally
2558 below. */
2559 adjust = GET_MODE_SIZE (mode) - total_size;
2560 if (adjust)
2561 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2562 * BITS_PER_UNIT,
2563 MODE_INT, 1),
2564 plus_constant (XEXP (x, 0), adjust));
2565 }
2566 spill_stack_slot[from_reg] = stack_slot;
2567 spill_stack_slot_width[from_reg] = total_size;
2568 }
2569
2570 /* On a big endian machine, the "address" of the slot
2571 is the address of the low part that fits its inherent mode. */
2572 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2573 adjust += (total_size - inherent_size);
2574
2575 /* If we have any adjustment to make, or if the stack slot is the
2576 wrong mode, make a new stack slot. */
2577 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2578 {
2579 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2580 plus_constant (XEXP (x, 0), adjust));
2581 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2582 }
2583
2584 /* Save the stack slot for later. */
2585 reg_equiv_memory_loc[i] = x;
2586 }
2587 }
2588
2589 /* Mark the slots in regs_ever_live for the hard regs
2590 used by pseudo-reg number REGNO. */
2591
2592 void
2593 mark_home_live (regno)
2594 int regno;
2595 {
2596 register int i, lim;
2597 i = reg_renumber[regno];
2598 if (i < 0)
2599 return;
2600 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2601 while (i < lim)
2602 regs_ever_live[i++] = 1;
2603 }
2604
2605 /* Mark the registers used in SCRATCH as being live. */
2606
2607 static void
2608 mark_scratch_live (scratch)
2609 rtx scratch;
2610 {
2611 register int i;
2612 int regno = REGNO (scratch);
2613 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2614
2615 for (i = regno; i < lim; i++)
2616 regs_ever_live[i] = 1;
2617 }
2618 \f
2619 /* This function handles the tracking of elimination offsets around branches.
2620
2621 X is a piece of RTL being scanned.
2622
2623 INSN is the insn that it came from, if any.
2624
2625 INITIAL_P is non-zero if we are to set the offset to be the initial
2626 offset and zero if we are setting the offset of the label to be the
2627 current offset. */
2628
2629 static void
2630 set_label_offsets (x, insn, initial_p)
2631 rtx x;
2632 rtx insn;
2633 int initial_p;
2634 {
2635 enum rtx_code code = GET_CODE (x);
2636 rtx tem;
2637 int i;
2638 struct elim_table *p;
2639
2640 switch (code)
2641 {
2642 case LABEL_REF:
2643 if (LABEL_REF_NONLOCAL_P (x))
2644 return;
2645
2646 x = XEXP (x, 0);
2647
2648 /* ... fall through ... */
2649
2650 case CODE_LABEL:
2651 /* If we know nothing about this label, set the desired offsets. Note
2652 that this sets the offset at a label to be the offset before a label
2653 if we don't know anything about the label. This is not correct for
2654 the label after a BARRIER, but is the best guess we can make. If
2655 we guessed wrong, we will suppress an elimination that might have
2656 been possible had we been able to guess correctly. */
2657
2658 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2659 {
2660 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2661 offsets_at[CODE_LABEL_NUMBER (x)][i]
2662 = (initial_p ? reg_eliminate[i].initial_offset
2663 : reg_eliminate[i].offset);
2664 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2665 }
2666
2667 /* Otherwise, if this is the definition of a label and it is
2668 preceded by a BARRIER, set our offsets to the known offset of
2669 that label. */
2670
2671 else if (x == insn
2672 && (tem = prev_nonnote_insn (insn)) != 0
2673 && GET_CODE (tem) == BARRIER)
2674 {
2675 num_not_at_initial_offset = 0;
2676 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2677 {
2678 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2679 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2680 if (reg_eliminate[i].can_eliminate
2681 && (reg_eliminate[i].offset
2682 != reg_eliminate[i].initial_offset))
2683 num_not_at_initial_offset++;
2684 }
2685 }
2686
2687 else
2688 /* If neither of the above cases is true, compare each offset
2689 with those previously recorded and suppress any eliminations
2690 where the offsets disagree. */
2691
2692 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2693 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2694 != (initial_p ? reg_eliminate[i].initial_offset
2695 : reg_eliminate[i].offset))
2696 reg_eliminate[i].can_eliminate = 0;
2697
2698 return;
2699
2700 case JUMP_INSN:
2701 set_label_offsets (PATTERN (insn), insn, initial_p);
2702
2703 /* ... fall through ... */
2704
2705 case INSN:
2706 case CALL_INSN:
2707 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2708 and hence must have all eliminations at their initial offsets. */
2709 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2710 if (REG_NOTE_KIND (tem) == REG_LABEL)
2711 set_label_offsets (XEXP (tem, 0), insn, 1);
2712 return;
2713
2714 case ADDR_VEC:
2715 case ADDR_DIFF_VEC:
2716 /* Each of the labels in the address vector must be at their initial
2717 	 offsets.  We want the first field for ADDR_VEC and the second
2718 field for ADDR_DIFF_VEC. */
2719
2720 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2721 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2722 insn, initial_p);
2723 return;
2724
2725 case SET:
2726 /* We only care about setting PC. If the source is not RETURN,
2727 IF_THEN_ELSE, or a label, disable any eliminations not at
2728 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2729 isn't one of those possibilities. For branches to a label,
2730 call ourselves recursively.
2731
2732 Note that this can disable elimination unnecessarily when we have
2733 a non-local goto since it will look like a non-constant jump to
2734 someplace in the current function. This isn't a significant
2735 	 problem since such jumps will normally occur when all elimination
2736 pairs are back to their initial offsets. */
2737
2738 if (SET_DEST (x) != pc_rtx)
2739 return;
2740
2741 switch (GET_CODE (SET_SRC (x)))
2742 {
2743 case PC:
2744 case RETURN:
2745 return;
2746
2747 case LABEL_REF:
2748 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2749 return;
2750
2751 case IF_THEN_ELSE:
2752 tem = XEXP (SET_SRC (x), 1);
2753 if (GET_CODE (tem) == LABEL_REF)
2754 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2755 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2756 break;
2757
2758 tem = XEXP (SET_SRC (x), 2);
2759 if (GET_CODE (tem) == LABEL_REF)
2760 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2761 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2762 break;
2763 return;
2764
2765 default:
2766 break;
2767 }
2768
2769 /* If we reach here, all eliminations must be at their initial
2770 offset because we are doing a jump to a variable address. */
2771 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2772 if (p->offset != p->initial_offset)
2773 p->can_eliminate = 0;
2774 break;
2775
2776 default:
2777 break;
2778 }
2779 }
2780 \f
2781 /* Used for communication between the next two functions to properly share
2782 the vector for an ASM_OPERANDS. */
2783
2784 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2785
2786 /* Scan X and replace any eliminable registers (such as fp) with a
2787 replacement (such as sp), plus an offset.
2788
2789 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2790 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2791 MEM, we are allowed to replace a sum of a register and the constant zero
2792 with the register, which we cannot do outside a MEM. In addition, we need
2793 to record the fact that a register is referenced outside a MEM.
2794
2795 If INSN is an insn, it is the insn containing X. If we replace a REG
2796 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2797 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2798    the REG is being modified.
2799
2800 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2801 That's used when we eliminate in expressions stored in notes.
2802 This means, do not set ref_outside_mem even if the reference
2803 is outside of MEMs.
2804
2805 If we see a modification to a register we know about, take the
2806 appropriate action (see case SET, below).
2807
2808    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2809 replacements done assuming all offsets are at their initial values. If
2810 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2811 encounter, return the actual location so that find_reloads will do
2812 the proper thing. */
2813
2814 rtx
2815 eliminate_regs (x, mem_mode, insn)
2816 rtx x;
2817 enum machine_mode mem_mode;
2818 rtx insn;
2819 {
2820 enum rtx_code code = GET_CODE (x);
2821 struct elim_table *ep;
2822 int regno;
2823 rtx new;
2824 int i, j;
2825 char *fmt;
2826 int copied = 0;
2827
2828 switch (code)
2829 {
2830 case CONST_INT:
2831 case CONST_DOUBLE:
2832 case CONST:
2833 case SYMBOL_REF:
2834 case CODE_LABEL:
2835 case PC:
2836 case CC0:
2837 case ASM_INPUT:
2838 case ADDR_VEC:
2839 case ADDR_DIFF_VEC:
2840 case RETURN:
2841 return x;
2842
2843 case ADDRESSOF:
2844 /* This is only for the benefit of the debugging backends, which call
2845 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2846 removed after CSE. */
2847 new = eliminate_regs (XEXP (x, 0), 0, insn);
2848 if (GET_CODE (new) == MEM)
2849 return XEXP (new, 0);
2850 return x;
2851
2852 case REG:
2853 regno = REGNO (x);
2854
2855 /* First handle the case where we encounter a bare register that
2856 is eliminable. Replace it with a PLUS. */
2857 if (regno < FIRST_PSEUDO_REGISTER)
2858 {
2859 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2860 ep++)
2861 if (ep->from_rtx == x && ep->can_eliminate)
2862 {
2863 if (! mem_mode
2864 /* Refs inside notes don't count for this purpose. */
2865 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2866 || GET_CODE (insn) == INSN_LIST)))
2867 ep->ref_outside_mem = 1;
2868 return plus_constant (ep->to_rtx, ep->previous_offset);
2869 }
2870
2871 }
2872 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2873 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2874 {
2875 /* In this case, find_reloads would attempt to either use an
2876 incorrect address (if something is not at its initial offset)
2877 or substitute an replaced address into an insn (which loses
2878 	     or substitute a replaced address into an insn (which loses
2879 return the replaced stack slot (assuming it is changed by
2880 elimination) and ignore the fact that this is actually a
2881 reference to the pseudo. Ensure we make a copy of the
2882 address in case it is shared. */
2883 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
2884 if (new != reg_equiv_memory_loc[regno])
2885 {
2886 cannot_omit_stores[regno] = 1;
2887 return copy_rtx (new);
2888 }
2889 }
2890 return x;
2891
2892 case PLUS:
2893 /* If this is the sum of an eliminable register and a constant, rework
2894 the sum. */
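      /* Illustrative example (hypothetical elimination offset): with the
	 fp-to-sp elimination at a previous offset of 12,
	 (plus (reg fp) (const_int 4)) is rewritten as
	 (plus (reg sp) (const_int 16)); inside a MEM,
	 (plus (reg fp) (const_int -12)) collapses to just (reg sp).  */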
2895 if (GET_CODE (XEXP (x, 0)) == REG
2896 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2897 && CONSTANT_P (XEXP (x, 1)))
2898 {
2899 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2900 ep++)
2901 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2902 {
2903 if (! mem_mode
2904 /* Refs inside notes don't count for this purpose. */
2905 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2906 || GET_CODE (insn) == INSN_LIST)))
2907 ep->ref_outside_mem = 1;
2908
2909 /* The only time we want to replace a PLUS with a REG (this
2910 occurs when the constant operand of the PLUS is the negative
2911 of the offset) is when we are inside a MEM. We won't want
2912 to do so at other times because that would change the
2913 structure of the insn in a way that reload can't handle.
2914 We special-case the commonest situation in
2915 eliminate_regs_in_insn, so just replace a PLUS with a
2916 PLUS here, unless inside a MEM. */
2917 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2918 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2919 return ep->to_rtx;
2920 else
2921 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2922 plus_constant (XEXP (x, 1),
2923 ep->previous_offset));
2924 }
2925
2926 /* If the register is not eliminable, we are done since the other
2927 operand is a constant. */
2928 return x;
2929 }
2930
2931 /* If this is part of an address, we want to bring any constant to the
2932 outermost PLUS. We will do this by doing register replacement in
2933 our operands and seeing if a constant shows up in one of them.
2934
2935 We assume here this is part of an address (or a "load address" insn)
2936 since an eliminable register is not likely to appear in any other
2937 context.
2938
2939 If we have (plus (eliminable) (reg)), we want to produce
2940 	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2941 normal add insn, (plus (replacement) (reg)) will be pushed as a
2942 reload. This is the desired action. */
2943
2944 {
2945 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2946 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2947
2948 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2949 {
2950 /* If one side is a PLUS and the other side is a pseudo that
2951 didn't get a hard register but has a reg_equiv_constant,
2952 we must replace the constant here since it may no longer
2953 be in the position of any operand. */
2954 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2955 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2956 && reg_renumber[REGNO (new1)] < 0
2957 && reg_equiv_constant != 0
2958 && reg_equiv_constant[REGNO (new1)] != 0)
2959 new1 = reg_equiv_constant[REGNO (new1)];
2960 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2961 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2962 && reg_renumber[REGNO (new0)] < 0
2963 && reg_equiv_constant[REGNO (new0)] != 0)
2964 new0 = reg_equiv_constant[REGNO (new0)];
2965
2966 new = form_sum (new0, new1);
2967
2968 /* As above, if we are not inside a MEM we do not want to
2969 turn a PLUS into something else. We might try to do so here
2970 for an addition of 0 if we aren't optimizing. */
2971 if (! mem_mode && GET_CODE (new) != PLUS)
2972 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2973 else
2974 return new;
2975 }
2976 }
2977 return x;
2978
2979 case MULT:
2980 /* If this is the product of an eliminable register and a
2981 	 constant, apply the distributive law and move the constant out
2982 so that we have (plus (mult ..) ..). This is needed in order
2983 to keep load-address insns valid. This case is pathological.
2984 We ignore the possibility of overflow here. */
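      /* Illustrative example (hypothetical elimination offset): with a
	 previous fp-to-sp offset of 12, (mult (reg fp) (const_int 4))
	 becomes (plus (mult (reg sp) (const_int 4)) (const_int 48)),
	 keeping the constant at the outermost level of the address.  */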
2985 if (GET_CODE (XEXP (x, 0)) == REG
2986 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2987 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2988 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2989 ep++)
2990 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2991 {
2992 if (! mem_mode
2993 /* Refs inside notes don't count for this purpose. */
2994 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2995 || GET_CODE (insn) == INSN_LIST)))
2996 ep->ref_outside_mem = 1;
2997
2998 return
2999 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
3000 ep->previous_offset * INTVAL (XEXP (x, 1)));
3001 }
3002
3003 /* ... fall through ... */
3004
3005 case CALL:
3006 case COMPARE:
3007 case MINUS:
3008 case DIV: case UDIV:
3009 case MOD: case UMOD:
3010 case AND: case IOR: case XOR:
3011 case ROTATERT: case ROTATE:
3012 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3013 case NE: case EQ:
3014 case GE: case GT: case GEU: case GTU:
3015 case LE: case LT: case LEU: case LTU:
3016 {
3017 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3018 rtx new1
3019 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
3020
3021 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3022 return gen_rtx (code, GET_MODE (x), new0, new1);
3023 }
3024 return x;
3025
3026 case EXPR_LIST:
3027 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3028 if (XEXP (x, 0))
3029 {
3030 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3031 if (new != XEXP (x, 0))
3032 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3033 }
3034
3035 /* ... fall through ... */
3036
3037 case INSN_LIST:
3038 /* Now do eliminations in the rest of the chain. If this was
3039 an EXPR_LIST, this might result in allocating more memory than is
3040 strictly needed, but it simplifies the code. */
3041 if (XEXP (x, 1))
3042 {
3043 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
3044 if (new != XEXP (x, 1))
3045 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3046 }
3047 return x;
3048
3049 case PRE_INC:
3050 case POST_INC:
3051 case PRE_DEC:
3052 case POST_DEC:
3053 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3054 if (ep->to_rtx == XEXP (x, 0))
3055 {
3056 int size = GET_MODE_SIZE (mem_mode);
3057
3058 /* If more bytes than MEM_MODE are pushed, account for them. */
3059 #ifdef PUSH_ROUNDING
3060 if (ep->to_rtx == stack_pointer_rtx)
3061 size = PUSH_ROUNDING (size);
3062 #endif
3063 if (code == PRE_DEC || code == POST_DEC)
3064 ep->offset += size;
3065 else
3066 ep->offset -= size;
3067 }
3068
3069 /* Fall through to generic unary operation case. */
3070 case STRICT_LOW_PART:
3071 case NEG: case NOT:
3072 case SIGN_EXTEND: case ZERO_EXTEND:
3073 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3074 case FLOAT: case FIX:
3075 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3076 case ABS:
3077 case SQRT:
3078 case FFS:
3079 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3080 if (new != XEXP (x, 0))
3081 return gen_rtx (code, GET_MODE (x), new);
3082 return x;
3083
3084 case SUBREG:
3085 /* Similar to above processing, but preserve SUBREG_WORD.
3086 Convert (subreg (mem)) to (mem) if not paradoxical.
3087 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3088 pseudo didn't get a hard reg, we must replace this with the
3089 eliminated version of the memory location because push_reloads
3090 may do the replacement in certain circumstances. */
3091 if (GET_CODE (SUBREG_REG (x)) == REG
3092 && (GET_MODE_SIZE (GET_MODE (x))
3093 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3094 && reg_equiv_memory_loc != 0
3095 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3096 {
3097 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3098 mem_mode, insn);
3099
3100 /* If we didn't change anything, we must retain the pseudo. */
3101 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3102 new = SUBREG_REG (x);
3103 else
3104 {
3105 /* Otherwise, ensure NEW isn't shared in case we have to reload
3106 it. */
3107 new = copy_rtx (new);
3108
3109 /* In this case, we must show that the pseudo is used in this
3110 insn so that delete_output_reload will do the right thing. */
3111 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3112 && GET_CODE (insn) != INSN_LIST)
3113 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3114 insn);
3115 }
3116 }
3117 else
3118 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
3119
3120 if (new != XEXP (x, 0))
3121 {
3122 int x_size = GET_MODE_SIZE (GET_MODE (x));
3123 int new_size = GET_MODE_SIZE (GET_MODE (new));
3124
3125 if (GET_CODE (new) == MEM
3126 && x_size <= new_size
3127 #ifdef WORD_REGISTER_OPERATIONS
3128 /* On these machines, combine can create rtl of the form
3129 (set (subreg:m1 (reg:m2 R) 0) ...)
3130 where m1 < m2, and expects something interesting to
3131 happen to the entire word. Moreover, it will use the
3132 (reg:m2 R) later, expecting all bits to be preserved.
3133 So if the number of words is the same, preserve the
3134 subreg so that push_reloads can see it. */
3135 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
3136 #endif
3137 #ifdef LOAD_EXTEND_OP
3138 /* On these machines we will be reloading what is
3139 inside the SUBREG if it originally was a pseudo and
3140 the inner and outer modes are both a word or
3141 smaller. So leave the SUBREG then. */
3142 && ! (GET_CODE (SUBREG_REG (x)) == REG
3143 && x_size <= UNITS_PER_WORD
3144 && new_size <= UNITS_PER_WORD
3145 && x_size > new_size
3146 && INTEGRAL_MODE_P (GET_MODE (new))
3147 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3148 #endif
3149 )
3150 {
3151 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3152 enum machine_mode mode = GET_MODE (x);
3153
3154 if (BYTES_BIG_ENDIAN)
3155 offset += (MIN (UNITS_PER_WORD,
3156 GET_MODE_SIZE (GET_MODE (new)))
3157 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3158
3159 PUT_MODE (new, mode);
3160 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3161 return new;
3162 }
3163 else
3164 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3165 }
3166
3167 return x;
3168
3169 case USE:
3170 /* If using a register that is the source of an elimination we still
3171 think can be performed, note that it cannot be performed since we
3172 don't know how this register is used. */
3173 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3174 if (ep->from_rtx == XEXP (x, 0))
3175 ep->can_eliminate = 0;
3176
3177 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3178 if (new != XEXP (x, 0))
3179 return gen_rtx (code, GET_MODE (x), new);
3180 return x;
3181
3182 case CLOBBER:
3183 /* If clobbering a register that is the replacement register for an
3184 elimination we still think can be performed, note that it cannot
3185 be performed. Otherwise, we need not be concerned about it. */
3186 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3187 if (ep->to_rtx == XEXP (x, 0))
3188 ep->can_eliminate = 0;
3189
3190 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3191 if (new != XEXP (x, 0))
3192 return gen_rtx (code, GET_MODE (x), new);
3193 return x;
3194
3195 case ASM_OPERANDS:
3196 {
3197 rtx *temp_vec;
3198 /* Properly handle sharing input and constraint vectors. */
3199 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3200 {
3201 /* When we come to a new vector not seen before,
3202 scan all its elements; keep the old vector if none
3203 of them changes; otherwise, make a copy. */
3204 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3205 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3206 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3207 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3208 mem_mode, insn);
3209
3210 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3211 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3212 break;
3213
3214 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3215 new_asm_operands_vec = old_asm_operands_vec;
3216 else
3217 new_asm_operands_vec
3218 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3219 }
3220
3221 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3222 if (new_asm_operands_vec == old_asm_operands_vec)
3223 return x;
3224
3225 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3226 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3227 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3228 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3229 ASM_OPERANDS_SOURCE_FILE (x),
3230 ASM_OPERANDS_SOURCE_LINE (x));
3231 new->volatil = x->volatil;
3232 return new;
3233 }
3234
3235 case SET:
3236 /* Check for setting a register that we know about. */
3237 if (GET_CODE (SET_DEST (x)) == REG)
3238 {
3239 /* See if this is setting the replacement register for an
3240 elimination.
3241
3242 If DEST is the hard frame pointer, we do nothing because we
3243 assume that all assignments to the frame pointer are for
3244 non-local gotos and are being done at a time when they are valid
3245 and do not disturb anything else. Some machines want to
3246 eliminate a fake argument pointer (or even a fake frame pointer)
3247 with either the real frame or the stack pointer. Assignments to
3248 the hard frame pointer must not prevent this elimination. */
3249
3250 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3251 ep++)
3252 if (ep->to_rtx == SET_DEST (x)
3253 && SET_DEST (x) != hard_frame_pointer_rtx)
3254 {
3255 /* If it is being incremented, adjust the offset. Otherwise,
3256 this elimination can't be done. */
3257 rtx src = SET_SRC (x);
3258
3259 if (GET_CODE (src) == PLUS
3260 && XEXP (src, 0) == SET_DEST (x)
3261 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3262 ep->offset -= INTVAL (XEXP (src, 1));
3263 else
3264 ep->can_eliminate = 0;
3265 }
3266
3267 /* Now check to see if we are assigning to a register that can be
3268 eliminated. If so, it must be as part of a PARALLEL, since we
3269 will not have been called if this is a single SET. So indicate
3270 that we can no longer eliminate this reg. */
3271 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3272 ep++)
3273 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3274 ep->can_eliminate = 0;
3275 }
3276
3277 /* Now avoid the loop below in this common case. */
3278 {
3279 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3280 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3281
3282 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3283 write a CLOBBER insn. */
3284 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3285 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3286 && GET_CODE (insn) != INSN_LIST)
3287 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3288
3289 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3290 return gen_rtx (SET, VOIDmode, new0, new1);
3291 }
3292
3293 return x;
3294
3295 case MEM:
3296 /* This is only for the benefit of the debugging backends, which call
3297 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3298 removed after CSE. */
3299 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3300 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3301
3302 /* Our only special processing is to pass the mode of the MEM to our
3303 recursive call and copy the flags. While we are here, handle this
3304 case more efficiently. */
3305 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3306 if (new != XEXP (x, 0))
3307 {
3308 new = gen_rtx (MEM, GET_MODE (x), new);
3309 new->volatil = x->volatil;
3310 new->unchanging = x->unchanging;
3311 new->in_struct = x->in_struct;
3312 return new;
3313 }
3314 else
3315 return x;
3316
3317 default:
3318 break;
3319 }
3320
3321 /* Process each of our operands recursively. If any have changed, make a
3322 copy of the rtx. */
3323 fmt = GET_RTX_FORMAT (code);
3324 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3325 {
3326 if (*fmt == 'e')
3327 {
3328 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3329 if (new != XEXP (x, i) && ! copied)
3330 {
3331 rtx new_x = rtx_alloc (code);
3332 bcopy ((char *) x, (char *) new_x,
3333 (sizeof (*new_x) - sizeof (new_x->fld)
3334 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3335 x = new_x;
3336 copied = 1;
3337 }
3338 XEXP (x, i) = new;
3339 }
3340 else if (*fmt == 'E')
3341 {
3342 int copied_vec = 0;
3343 for (j = 0; j < XVECLEN (x, i); j++)
3344 {
3345 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3346 if (new != XVECEXP (x, i, j) && ! copied_vec)
3347 {
3348 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3349 XVEC (x, i)->elem);
3350 if (! copied)
3351 {
3352 rtx new_x = rtx_alloc (code);
3353 bcopy ((char *) x, (char *) new_x,
3354 (sizeof (*new_x) - sizeof (new_x->fld)
3355 + (sizeof (new_x->fld[0])
3356 * GET_RTX_LENGTH (code))));
3357 x = new_x;
3358 copied = 1;
3359 }
3360 XVEC (x, i) = new_v;
3361 copied_vec = 1;
3362 }
3363 XVECEXP (x, i, j) = new;
3364 }
3365 }
3366 }
3367
3368 return x;
3369 }
3370 \f
3371 /* Scan INSN and eliminate all eliminable registers in it.
3372
3373 If REPLACE is nonzero, do the replacement destructively. Also
3374 delete the insn as dead if it is setting an eliminable register.
3375
3376 If REPLACE is zero, do all our allocations in reload_obstack.
3377
3378 If no eliminations were done and this insn doesn't require any elimination
3379 processing (these are not identical conditions: it might be updating sp,
3380 but not referencing fp; this needs to be seen during reload_as_needed so
3381 that the offset between fp and sp can be taken into consideration), zero
3382 is returned. Otherwise, 1 is returned. */
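   For illustration (the example insn is hypothetical): a push such as
   (set (mem:SI (pre_dec (reg sp))) ...) mentions no eliminable register,
   yet it changes the offset between fp and sp, so this function returns 1
   for it even though the insn body itself is left unchanged.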
3383
3384 static int
3385 eliminate_regs_in_insn (insn, replace)
3386 rtx insn;
3387 int replace;
3388 {
3389 rtx old_body = PATTERN (insn);
3390 rtx old_set = single_set (insn);
3391 rtx new_body;
3392 int val = 0;
3393 struct elim_table *ep;
3394
3395 if (! replace)
3396 push_obstacks (&reload_obstack, &reload_obstack);
3397
3398 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3399 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3400 {
3401 /* Check for setting an eliminable register. */
3402 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3403 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3404 {
3405 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3406 /* If this is setting the frame pointer register to the
3407 hardware frame pointer register and this is an elimination
3408 that will be done (tested above), this insn is really
3409 adjusting the frame pointer downward to compensate for
3410 the adjustment done before a nonlocal goto. */
3411 if (ep->from == FRAME_POINTER_REGNUM
3412 && ep->to == HARD_FRAME_POINTER_REGNUM)
3413 {
3414 rtx src = SET_SRC (old_set);
3415 int offset, ok = 0;
3416 rtx prev_insn, prev_set;
3417
3418 if (src == ep->to_rtx)
3419 offset = 0, ok = 1;
3420 else if (GET_CODE (src) == PLUS
3421 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3422 offset = INTVAL (XEXP (src, 0)), ok = 1;
3423 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3424 && (prev_set = single_set (prev_insn)) != 0
3425 && rtx_equal_p (SET_DEST (prev_set), src))
3426 {
3427 src = SET_SRC (prev_set);
3428 if (src == ep->to_rtx)
3429 offset = 0, ok = 1;
3430 else if (GET_CODE (src) == PLUS
3431 && GET_CODE (XEXP (src, 0)) == CONST_INT
3432 && XEXP (src, 1) == ep->to_rtx)
3433 offset = INTVAL (XEXP (src, 0)), ok = 1;
3434 else if (GET_CODE (src) == PLUS
3435 && GET_CODE (XEXP (src, 1)) == CONST_INT
3436 && XEXP (src, 0) == ep->to_rtx)
3437 offset = INTVAL (XEXP (src, 1)), ok = 1;
3438 }
3439
3440 if (ok)
3441 {
3442 if (replace)
3443 {
3444 rtx src
3445 = plus_constant (ep->to_rtx, offset - ep->offset);
3446
3447 /* First see if this insn remains valid when we
3448 make the change. If not, keep the INSN_CODE
3449 the same and let reload fix it up. */
3450 validate_change (insn, &SET_SRC (old_set), src, 1);
3451 validate_change (insn, &SET_DEST (old_set),
3452 ep->to_rtx, 1);
3453 if (! apply_change_group ())
3454 {
3455 SET_SRC (old_set) = src;
3456 SET_DEST (old_set) = ep->to_rtx;
3457 }
3458 }
3459
3460 val = 1;
3461 goto done;
3462 }
3463 }
3464 #endif
3465
3466 /* In this case this insn isn't serving a useful purpose. We
3467 will delete it in reload_as_needed once we know that this
3468 elimination is, in fact, being done.
3469
3470 If REPLACE isn't set, we can't delete this insn, but needn't
3471 process it since it won't be used unless something changes. */
3472 if (replace)
3473 delete_dead_insn (insn);
3474 val = 1;
3475 goto done;
3476 }
3477
3478 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3479 in the insn is the negative of the offset in FROM. Substitute
3480 (set (reg) (reg to)) for the insn and change its code.
3481
3482 We have to do this here, rather than in eliminate_regs, so that we can
3483 change the insn code. */
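      /* Purely illustrative example (register and offset hypothetical): if
	 the fp-to-sp elimination is currently at offset 16, then
	     (set (reg R) (plus (reg fp) (const_int -16)))
	 is replaced below by (set (reg R) (reg sp)) and INSN_CODE is reset so
	 the simpler move can be re-recognized.  */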
3484
3485 if (GET_CODE (SET_SRC (old_set)) == PLUS
3486 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3487 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3488 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3489 ep++)
3490 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3491 && ep->can_eliminate)
3492 {
3493 /* We must stop at the first elimination that will be used.
3494 If this one would replace the PLUS with a REG, do it
3495 now. Otherwise, quit the loop and let eliminate_regs
3496 do its normal replacement. */
3497 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3498 {
3499 /* We assume here that we don't need a PARALLEL of
3500 any CLOBBERs for this assignment. There's not
3501 much we can do if we do need it. */
3502 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3503 SET_DEST (old_set), ep->to_rtx);
3504 INSN_CODE (insn) = -1;
3505 val = 1;
3506 goto done;
3507 }
3508
3509 break;
3510 }
3511 }
3512
3513 old_asm_operands_vec = 0;
3514
3515 /* Replace the body of this insn with a substituted form. If we changed
3516 something, return non-zero.
3517
3518 If we are replacing a body that was a (set X (plus Y Z)), try to
3519 re-recognize the insn. We do this in case we had a simple addition
3520 but now can do this as a load-address. This saves an insn in this
3521 common case. */
3522
3523 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3524 if (new_body != old_body)
3525 {
3526 /* If we aren't replacing things permanently and we changed something,
3527 make another copy to ensure that all the RTL is new. Otherwise
3528 things can go wrong if find_reloads swaps commutative operands
3529 and one is inside RTL that has been copied while the other is not. */
3530
3531 /* Don't copy an asm_operands because (1) there's no need and (2)
3532 copy_rtx can't do it properly when there are multiple outputs. */
3533 if (! replace && asm_noperands (old_body) < 0)
3534 new_body = copy_rtx (new_body);
3535
3536 /* If we had a move insn but now we don't, rerecognize it. This will
3537 cause spurious re-recognition if the old move had a PARALLEL since
3538 the new one still will, but we can't call single_set without
3539 having put NEW_BODY into the insn and the re-recognition won't
3540 hurt in this rare case. */
3541 if (old_set != 0
3542 && ((GET_CODE (SET_SRC (old_set)) == REG
3543 && (GET_CODE (new_body) != SET
3544 || GET_CODE (SET_SRC (new_body)) != REG))
3545 /* If this was a load from or store to memory, compare
3546 the MEM in recog_operand to the one in the insn. If they
3547 are not equal, then rerecognize the insn. */
3548 || (old_set != 0
3549 && ((GET_CODE (SET_SRC (old_set)) == MEM
3550 && SET_SRC (old_set) != recog_operand[1])
3551 || (GET_CODE (SET_DEST (old_set)) == MEM
3552 && SET_DEST (old_set) != recog_operand[0])))
3553 /* If this was an add insn before, rerecognize. */
3554 || GET_CODE (SET_SRC (old_set)) == PLUS))
3555 {
3556 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3557 /* If recognition fails, store the new body anyway.
3558 It's normal to have recognition failures here
3559 due to bizarre memory addresses; reloading will fix them. */
3560 PATTERN (insn) = new_body;
3561 }
3562 else
3563 PATTERN (insn) = new_body;
3564
3565 val = 1;
3566 }
3567
3568 /* Loop through all elimination pairs. See if any have changed and
3569 recalculate the number not at initial offset.
3570
3571 Compute the maximum offset (minimum offset if the stack does not
3572 grow downward) for each elimination pair.
3573
3574 We also detect cases where register elimination cannot be done,
3575 namely, if a register would be both changed and referenced outside a MEM
3576 in the resulting insn since such an insn is often undefined and, even if
3577 not, we cannot know what meaning will be given to it. Note that it is
3578 valid to have a register used in an address in an insn that changes it
3579 (presumably with a pre- or post-increment or decrement).
3580
3581 If anything changes, return nonzero. */
3582
3583 num_not_at_initial_offset = 0;
3584 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3585 {
3586 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3587 ep->can_eliminate = 0;
3588
3589 ep->ref_outside_mem = 0;
3590
3591 if (ep->previous_offset != ep->offset)
3592 val = 1;
3593
3594 ep->previous_offset = ep->offset;
3595 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3596 num_not_at_initial_offset++;
3597
3598 #ifdef STACK_GROWS_DOWNWARD
3599 ep->max_offset = MAX (ep->max_offset, ep->offset);
3600 #else
3601 ep->max_offset = MIN (ep->max_offset, ep->offset);
3602 #endif
3603 }
3604
3605 done:
3606 /* If we changed something, perform elimination in REG_NOTES. This is
3607 needed even when REPLACE is zero because a REG_DEAD note might refer
3608 to a register that we eliminate and could cause a different number
3609 of spill registers to be needed in the final reload pass than in
3610 the pre-passes. */
3611 if (val && REG_NOTES (insn) != 0)
3612 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3613
3614 if (! replace)
3615 pop_obstacks ();
3616
3617 return val;
3618 }
3619
3620 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3621 replacement we currently believe is valid, mark it as not eliminable if X
3622 modifies DEST in any way other than by adding a constant integer to it.
3623
3624 If DEST is the hard frame pointer, we do nothing because we assume that
3625 all assignments to the hard frame pointer are for nonlocal gotos and are
3626 being done at a time when they are valid and do not disturb anything else.
3627 Some machines want to eliminate a fake argument pointer with either the
3628 frame or stack pointer. Assignments to the hard frame pointer must not
3629 prevent this elimination.
3630
3631 Called via note_stores from reload before starting its passes to scan
3632 the insns of the function. */
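/* For example (registers illustrative): with the fp-to-sp elimination still
   believed possible, (set (reg sp) (reg r1)) modifies the replacement
   register in a way we cannot track, so the elimination is disabled below,
   while (set (reg sp) (plus (reg sp) (const_int -32))) is a plain constant
   adjustment and is left alone.  */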
3633
3634 static void
3635 mark_not_eliminable (dest, x)
3636 rtx dest;
3637 rtx x;
3638 {
3639 register int i;
3640
3641 /* A SUBREG of a hard register here is just changing its mode. We should
3642 not see a SUBREG of an eliminable hard register, but check just in
3643 case. */
3644 if (GET_CODE (dest) == SUBREG)
3645 dest = SUBREG_REG (dest);
3646
3647 if (dest == hard_frame_pointer_rtx)
3648 return;
3649
3650 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3651 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3652 && (GET_CODE (x) != SET
3653 || GET_CODE (SET_SRC (x)) != PLUS
3654 || XEXP (SET_SRC (x), 0) != dest
3655 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3656 {
3657 reg_eliminate[i].can_eliminate_previous
3658 = reg_eliminate[i].can_eliminate = 0;
3659 num_eliminable--;
3660 }
3661 }
3662 \f
3663 /* Kick all pseudos out of hard register REGNO.
3664 If GLOBAL is nonzero, try to find someplace else to put them.
3665 If DUMPFILE is nonzero, log actions taken on that file.
3666
3667 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3668 because we found we can't eliminate some register. In that case, no pseudos
3669 are allowed to be in the register, even if they are only in a block that
3670 doesn't require spill registers, unlike the case when we are spilling this
3671 hard reg to produce another spill register.
3672
3673 Return nonzero if any pseudos needed to be kicked out. */
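/* For illustration (modes hypothetical): on a target where a DImode value
   occupies two hard registers, a pseudo allocated to hard reg REGNO - 1 also
   covers REGNO, so the overlap test below kicks it out as well.  */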
3674
3675 static int
3676 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3677 register int regno;
3678 int global;
3679 FILE *dumpfile;
3680 int cant_eliminate;
3681 {
3682 enum reg_class class = REGNO_REG_CLASS (regno);
3683 int something_changed = 0;
3684 register int i;
3685
3686 SET_HARD_REG_BIT (forbidden_regs, regno);
3687
3688 if (cant_eliminate)
3689 regs_ever_live[regno] = 1;
3690
3691 /* Spill every pseudo reg that was allocated to this reg
3692 or to something that overlaps this reg. */
3693
3694 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3695 if (reg_renumber[i] >= 0
3696 && reg_renumber[i] <= regno
3697 && (reg_renumber[i]
3698 + HARD_REGNO_NREGS (reg_renumber[i],
3699 PSEUDO_REGNO_MODE (i))
3700 > regno))
3701 {
3702 /* If this register belongs solely to a basic block which needed no
3703 spilling of any class that this register is contained in,
3704 leave it be, unless we are spilling this register because
3705 it was a hard register that can't be eliminated. */
3706
3707 if (! cant_eliminate
3708 && basic_block_needs[0]
3709 && REG_BASIC_BLOCK (i) >= 0
3710 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3711 {
3712 enum reg_class *p;
3713
3714 for (p = reg_class_superclasses[(int) class];
3715 *p != LIM_REG_CLASSES; p++)
3716 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3717 break;
3718
3719 if (*p == LIM_REG_CLASSES)
3720 continue;
3721 }
3722
3723 /* Mark it as no longer having a hard register home. */
3724 reg_renumber[i] = -1;
3725 /* We will need to scan everything again. */
3726 something_changed = 1;
3727 if (global)
3728 retry_global_alloc (i, forbidden_regs);
3729
3730 alter_reg (i, regno);
3731 if (dumpfile)
3732 {
3733 if (reg_renumber[i] == -1)
3734 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3735 else
3736 fprintf (dumpfile, " Register %d now in %d.\n\n",
3737 i, reg_renumber[i]);
3738 }
3739 }
3740 for (i = 0; i < scratch_list_length; i++)
3741 {
3742 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3743 {
3744 if (! cant_eliminate && basic_block_needs[0]
3745 && ! basic_block_needs[(int) class][scratch_block[i]])
3746 {
3747 enum reg_class *p;
3748
3749 for (p = reg_class_superclasses[(int) class];
3750 *p != LIM_REG_CLASSES; p++)
3751 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3752 break;
3753
3754 if (*p == LIM_REG_CLASSES)
3755 continue;
3756 }
3757 PUT_CODE (scratch_list[i], SCRATCH);
3758 scratch_list[i] = 0;
3759 something_changed = 1;
3760 continue;
3761 }
3762 }
3763
3764 return something_changed;
3765 }
3766 \f
3767 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3768 Also mark any hard registers used to store user variables as
3769 forbidden from being used for spill registers. */
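/* A paradoxical subreg is one whose outer mode is wider than the mode of the
   register inside it, e.g. (subreg:DI (reg:SI N) 0) (modes illustrative).
   Recording GET_MODE_SIZE (DImode) in reg_max_ref_width means that if pseudo
   N later ends up in a stack slot, the slot can be made wide enough for the
   wider access.  */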
3770
3771 static void
3772 scan_paradoxical_subregs (x)
3773 register rtx x;
3774 {
3775 register int i;
3776 register char *fmt;
3777 register enum rtx_code code = GET_CODE (x);
3778
3779 switch (code)
3780 {
3781 case REG:
3782 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3783 && REG_USERVAR_P (x))
3784 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3785 return;
3786
3787 case CONST_INT:
3788 case CONST:
3789 case SYMBOL_REF:
3790 case LABEL_REF:
3791 case CONST_DOUBLE:
3792 case CC0:
3793 case PC:
3794 case USE:
3795 case CLOBBER:
3796 return;
3797
3798 case SUBREG:
3799 if (GET_CODE (SUBREG_REG (x)) == REG
3800 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3801 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3802 = GET_MODE_SIZE (GET_MODE (x));
3803 return;
3804
3805 default:
3806 break;
3807 }
3808
3809 fmt = GET_RTX_FORMAT (code);
3810 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3811 {
3812 if (fmt[i] == 'e')
3813 scan_paradoxical_subregs (XEXP (x, i));
3814 else if (fmt[i] == 'E')
3815 {
3816 register int j;
3817 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3818 scan_paradoxical_subregs (XVECEXP (x, i, j));
3819 }
3820 }
3821 }
3822 \f
3823 static int
3824 hard_reg_use_compare (p1p, p2p)
3825 const GENERIC_PTR p1p;
3826 const GENERIC_PTR p2p;
3827 {
3828 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3829 *p2 = (struct hard_reg_n_uses *)p2p;
3830 int tem = p1->uses - p2->uses;
3831 if (tem != 0) return tem;
3832 /* If regs are equally good, sort by regno,
3833 so that the results of qsort leave nothing to chance. */
3834 return p1->regno - p2->regno;
3835 }
3836
3837 /* Choose the order to consider regs for use as reload registers
3838 based on how much trouble would be caused by spilling one.
3839 Store them in order of decreasing preference in potential_reload_regs. */
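/* Illustrative only (register names and use counts hypothetical): absent
   REG_ALLOC_ORDER, an unused call-clobbered register is placed first, an
   unused call-saved register next, registers holding pseudos follow in order
   of increasing use count, and fixed registers and the hard frame pointer end
   up last because of their inflated counts.  */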
3840
3841 static void
3842 order_regs_for_reload (global)
3843 int global;
3844 {
3845 register int i;
3846 register int o = 0;
3847 int large = 0;
3848
3849 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3850
3851 CLEAR_HARD_REG_SET (bad_spill_regs);
3852
3853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3854 potential_reload_regs[i] = -1;
3855
3856 /* Count number of uses of each hard reg by pseudo regs allocated to it
3857 and then order them by decreasing use. */
3858
3859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3860 {
3861 hard_reg_n_uses[i].uses = 0;
3862 hard_reg_n_uses[i].regno = i;
3863 }
3864
3865 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3866 {
3867 int regno = reg_renumber[i];
3868 if (regno >= 0)
3869 {
3870 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3871 while (regno < lim)
3872 {
3873 /* If allocated by local-alloc, show more uses since
3874 we're not going to be able to reallocate it, but
3875 we might if allocated by global alloc. */
3876 if (global && reg_allocno[i] < 0)
3877 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3878
3879 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
3880 }
3881 }
3882 large += REG_N_REFS (i);
3883 }
3884
3885 /* Now fixed registers (which cannot safely be used for reloading)
3886 get a very high use count so they will be considered least desirable.
3887 Registers used explicitly in the rtl code are almost as bad. */
3888
3889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3890 {
3891 if (fixed_regs[i])
3892 {
3893 hard_reg_n_uses[i].uses += 2 * large + 2;
3894 SET_HARD_REG_BIT (bad_spill_regs, i);
3895 }
3896 else if (regs_explicitly_used[i])
3897 {
3898 hard_reg_n_uses[i].uses += large + 1;
3899 if (! SMALL_REGISTER_CLASSES)
3900 /* ??? We are doing this here because of the potential
3901 that bad code may be generated if a register explicitly
3902 used in an insn was used as a spill register for that
3903 insn. But not using these as spill registers may lose
3904 on some machines. We'll have to see how this works out. */
3905 SET_HARD_REG_BIT (bad_spill_regs, i);
3906 }
3907 }
3908 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3909 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3910
3911 #ifdef ELIMINABLE_REGS
3912 /* If registers other than the frame pointer are eliminable, mark them as
3913 poor choices. */
3914 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3915 {
3916 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3917 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3918 }
3919 #endif
3920
3921 /* Prefer registers not so far used, for use in temporary loading.
3922 Among them, if REG_ALLOC_ORDER is defined, use that order.
3923 Otherwise, prefer registers not preserved by calls. */
3924
3925 #ifdef REG_ALLOC_ORDER
3926 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3927 {
3928 int regno = reg_alloc_order[i];
3929
3930 if (hard_reg_n_uses[regno].uses == 0)
3931 potential_reload_regs[o++] = regno;
3932 }
3933 #else
3934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3935 {
3936 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3937 potential_reload_regs[o++] = i;
3938 }
3939 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3940 {
3941 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3942 potential_reload_regs[o++] = i;
3943 }
3944 #endif
3945
3946 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3947 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3948
3949 /* Now add the regs that are already used,
3950 preferring those used less often. The fixed and otherwise forbidden
3951 registers will be at the end of this list. */
3952
3953 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3954 if (hard_reg_n_uses[i].uses != 0)
3955 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3956 }
3957 \f
3958 /* Used in reload_as_needed to sort the spilled regs. */
3959
3960 static int
3961 compare_spill_regs (r1p, r2p)
3962 const GENERIC_PTR r1p;
3963 const GENERIC_PTR r2p;
3964 {
3965 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3966 return r1 - r2;
3967 }
3968
3969 /* Reload pseudo-registers into hard regs around each insn as needed.
3970 Additional register load insns are output before the insn that needs it
3971 and perhaps store insns after insns that modify the reloaded pseudo reg.
3972
3973 reg_last_reload_reg and reg_reloaded_contents keep track of
3974 which registers are already available in reload registers.
3975 We update these for the reloads that we perform,
3976 as the insns are scanned. */
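/* For illustration (numbers hypothetical): if pseudo 70 was reloaded into
   hard reg 3 for one insn, reg_last_reload_reg[70] remembers that fact, so a
   following insn that needs pseudo 70 again may inherit hard reg 3 in
   choose_reload_regs instead of emitting a second load.  */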
3977
3978 static void
3979 reload_as_needed (first, live_known)
3980 rtx first;
3981 int live_known;
3982 {
3983 register rtx insn;
3984 register int i;
3985 int this_block = 0;
3986 rtx x;
3987 rtx after_call = 0;
3988
3989 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3990 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3991 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3992 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3993 reg_has_output_reload = (char *) alloca (max_regno);
3994 for (i = 0; i < n_spills; i++)
3995 {
3996 reg_reloaded_contents[i] = -1;
3997 reg_reloaded_insn[i] = 0;
3998 }
3999
4000 /* Reset all offsets on eliminable registers to their initial values. */
4001 #ifdef ELIMINABLE_REGS
4002 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4003 {
4004 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
4005 reg_eliminate[i].initial_offset);
4006 reg_eliminate[i].previous_offset
4007 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4008 }
4009 #else
4010 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4011 reg_eliminate[0].previous_offset
4012 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4013 #endif
4014
4015 num_not_at_initial_offset = 0;
4016
4017 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4018 pack registers with group needs. */
4019 if (n_spills > 1)
4020 {
4021 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4022 for (i = 0; i < n_spills; i++)
4023 spill_reg_order[spill_regs[i]] = i;
4024 }
4025
4026 for (insn = first; insn;)
4027 {
4028 register rtx next = NEXT_INSN (insn);
4029
4030 /* Notice when we move to a new basic block. */
4031 if (live_known && this_block + 1 < n_basic_blocks
4032 && insn == basic_block_head[this_block+1])
4033 ++this_block;
4034
4035 /* If we pass a label, copy the offsets from the label information
4036 into the current offsets of each elimination. */
4037 if (GET_CODE (insn) == CODE_LABEL)
4038 {
4039 num_not_at_initial_offset = 0;
4040 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4041 {
4042 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4043 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4044 if (reg_eliminate[i].can_eliminate
4045 && (reg_eliminate[i].offset
4046 != reg_eliminate[i].initial_offset))
4047 num_not_at_initial_offset++;
4048 }
4049 }
4050
4051 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4052 {
4053 rtx avoid_return_reg = 0;
4054 rtx oldpat = PATTERN (insn);
4055
4056 /* Set avoid_return_reg if this is an insn
4057 that might use the value of a function call. */
4058 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4059 {
4060 if (GET_CODE (PATTERN (insn)) == SET)
4061 after_call = SET_DEST (PATTERN (insn));
4062 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4063 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4064 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4065 else
4066 after_call = 0;
4067 }
4068 else if (SMALL_REGISTER_CLASSES && after_call != 0
4069 && !(GET_CODE (PATTERN (insn)) == SET
4070 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4071 {
4072 if (reg_referenced_p (after_call, PATTERN (insn)))
4073 avoid_return_reg = after_call;
4074 after_call = 0;
4075 }
4076
4077 /* If this is a USE or CLOBBER of a MEM, ensure that any
4078 references to eliminable registers have been removed. */
4079
4080 if ((GET_CODE (PATTERN (insn)) == USE
4081 || GET_CODE (PATTERN (insn)) == CLOBBER)
4082 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4083 XEXP (XEXP (PATTERN (insn), 0), 0)
4084 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4085 GET_MODE (XEXP (PATTERN (insn), 0)),
4086 NULL_RTX);
4087
4088 /* If we need to do register elimination processing, do so.
4089 This might delete the insn, in which case we are done. */
4090 if (num_eliminable && GET_MODE (insn) == QImode)
4091 {
4092 eliminate_regs_in_insn (insn, 1);
4093 if (GET_CODE (insn) == NOTE)
4094 {
4095 insn = next;
4096 continue;
4097 }
4098 }
4099
4100 if (GET_MODE (insn) == VOIDmode)
4101 n_reloads = 0;
4102 /* First find the pseudo regs that must be reloaded for this insn.
4103 This info is returned in the tables reload_... (see reload.h).
4104 Also modify the body of INSN by substituting RELOAD
4105 rtx's for those pseudo regs. */
4106 else
4107 {
4108 bzero (reg_has_output_reload, max_regno);
4109 CLEAR_HARD_REG_SET (reg_is_output_reload);
4110
4111 find_reloads (insn, 1, spill_indirect_levels, live_known,
4112 spill_reg_order);
4113 }
4114
4115 if (n_reloads > 0)
4116 {
4117 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4118 rtx p;
4119 int class;
4120
4121 /* If this block has not had spilling done for a
4122 particular class and we have any non-optionals that need a
4123 spill reg in that class, abort. */
4124
4125 for (class = 0; class < N_REG_CLASSES; class++)
4126 if (basic_block_needs[class] != 0
4127 && basic_block_needs[class][this_block] == 0)
4128 for (i = 0; i < n_reloads; i++)
4129 if (class == (int) reload_reg_class[i]
4130 && reload_reg_rtx[i] == 0
4131 && ! reload_optional[i]
4132 && (reload_in[i] != 0 || reload_out[i] != 0
4133 || reload_secondary_p[i] != 0))
4134 fatal_insn ("Non-optional registers need a spill register", insn);
4135
4136 /* Now compute which reload regs to reload them into. Perhaps
4137 reusing reload regs from previous insns, or else output
4138 load insns to reload them. Maybe output store insns too.
4139 Record the choices of reload reg in reload_reg_rtx. */
4140 choose_reload_regs (insn, avoid_return_reg);
4141
4142 /* Merge any reloads that we didn't combine for fear of
4143 increasing the number of spill registers needed but now
4144 discover can be safely merged. */
4145 if (SMALL_REGISTER_CLASSES)
4146 merge_assigned_reloads (insn);
4147
4148 /* Generate the insns to reload operands into or out of
4149 their reload regs. */
4150 emit_reload_insns (insn);
4151
4152 /* Substitute the chosen reload regs from reload_reg_rtx
4153 into the insn's body (or perhaps into the bodies of other
4154 load and store insn that we just made for reloading
4155 and that we moved the structure into). */
4156 subst_reloads ();
4157
4158 /* If this was an ASM, make sure that all the reload insns
4159 we have generated are valid. If not, give an error
4160 and delete them. */
4161
4162 if (asm_noperands (PATTERN (insn)) >= 0)
4163 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4164 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4165 && (recog_memoized (p) < 0
4166 || (insn_extract (p),
4167 ! constrain_operands (INSN_CODE (p), 1))))
4168 {
4169 error_for_asm (insn,
4170 "`asm' operand requires impossible reload");
4171 PUT_CODE (p, NOTE);
4172 NOTE_SOURCE_FILE (p) = 0;
4173 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4174 }
4175 }
4176 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4177 is no longer validly lying around to save a future reload.
4178 Note that this does not detect pseudos that were reloaded
4179 for this insn in order to be stored in
4180 (obeying register constraints). That is correct; such reload
4181 registers ARE still valid. */
4182 note_stores (oldpat, forget_old_reloads_1);
4183
4184 /* There may have been CLOBBER insns placed after INSN. So scan
4185 between INSN and NEXT and use them to forget old reloads. */
4186 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4187 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4188 note_stores (PATTERN (x), forget_old_reloads_1);
4189
4190 #ifdef AUTO_INC_DEC
4191 /* Likewise for regs altered by auto-increment in this insn.
4192 But note that the reg-notes are not changed by reloading:
4193 they still contain the pseudo-regs, not the spill regs. */
4194 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4195 if (REG_NOTE_KIND (x) == REG_INC)
4196 {
4197 /* See if this pseudo reg was reloaded in this insn.
4198 If so, its last-reload info is still valid
4199 because it is based on this insn's reload. */
4200 for (i = 0; i < n_reloads; i++)
4201 if (reload_out[i] == XEXP (x, 0))
4202 break;
4203
4204 if (i == n_reloads)
4205 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4206 }
4207 #endif
4208 }
4209 /* A reload reg's contents are unknown after a label. */
4210 if (GET_CODE (insn) == CODE_LABEL)
4211 for (i = 0; i < n_spills; i++)
4212 {
4213 reg_reloaded_contents[i] = -1;
4214 reg_reloaded_insn[i] = 0;
4215 }
4216
4217 /* Don't assume a reload reg is still good after a call insn
4218 if it is a call-used reg. */
4219 else if (GET_CODE (insn) == CALL_INSN)
4220 for (i = 0; i < n_spills; i++)
4221 if (call_used_regs[spill_regs[i]])
4222 {
4223 reg_reloaded_contents[i] = -1;
4224 reg_reloaded_insn[i] = 0;
4225 }
4226
4227 /* In case registers overlap, allow certain insns to invalidate
4228 particular hard registers. */
4229
4230 #ifdef INSN_CLOBBERS_REGNO_P
4231 for (i = 0 ; i < n_spills ; i++)
4232 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4233 {
4234 reg_reloaded_contents[i] = -1;
4235 reg_reloaded_insn[i] = 0;
4236 }
4237 #endif
4238
4239 insn = next;
4240
4241 #ifdef USE_C_ALLOCA
4242 alloca (0);
4243 #endif
4244 }
4245 }
4246
4247 /* Discard all record of any value reloaded from X,
4248 or reloaded in X from someplace else;
4249 unless X is an output reload reg of the current insn.
4250
4251 X may be a hard reg (the reload reg)
4252 or it may be a pseudo reg that was reloaded from. */
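/* For example (numbers hypothetical): if an insn stores directly into pseudo
   70 rather than through an output reload, reg_last_reload_reg[70] is cleared
   below so that a stale copy of its old value sitting in some reload register
   is not inherited by a later insn.  */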
4253
4254 static void
4255 forget_old_reloads_1 (x, ignored)
4256 rtx x;
4257 rtx ignored;
4258 {
4259 register int regno;
4260 int nr;
4261 int offset = 0;
4262
4263 /* note_stores does give us subregs of hard regs. */
4264 while (GET_CODE (x) == SUBREG)
4265 {
4266 offset += SUBREG_WORD (x);
4267 x = SUBREG_REG (x);
4268 }
4269
4270 if (GET_CODE (x) != REG)
4271 return;
4272
4273 regno = REGNO (x) + offset;
4274
4275 if (regno >= FIRST_PSEUDO_REGISTER)
4276 nr = 1;
4277 else
4278 {
4279 int i;
4280 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4281 /* Storing into a spilled-reg invalidates its contents.
4282 This can happen if a block-local pseudo is allocated to that reg
4283 and it wasn't spilled because this block's total need is 0.
4284 Then some insn might have an optional reload and use this reg. */
4285 for (i = 0; i < nr; i++)
4286 if (spill_reg_order[regno + i] >= 0
4287 /* But don't do this if the reg actually serves as an output
4288 reload reg in the current instruction. */
4289 && (n_reloads == 0
4290 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4291 {
4292 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4293 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4294 }
4295 }
4296
4297 /* Since value of X has changed,
4298 forget any value previously copied from it. */
4299
4300 while (nr-- > 0)
4301 /* But don't forget a copy if this is the output reload
4302 that establishes the copy's validity. */
4303 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4304 reg_last_reload_reg[regno + nr] = 0;
4305 }
4306 \f
4307 /* For each reload, the mode of the reload register. */
4308 static enum machine_mode reload_mode[MAX_RELOADS];
4309
4310 /* For each reload, the largest number of registers it will require. */
4311 static int reload_nregs[MAX_RELOADS];
4312
4313 /* Comparison function for qsort to decide which of two reloads
4314 should be handled first. *P1 and *P2 are the reload numbers. */
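/* Illustrative ordering (reload numbers hypothetical): a required reload for
   a single-register class sorts before a required multi-register group
   reload, and every required reload sorts before every optional one; ties are
   finally broken by reload number so the qsort result is deterministic.  */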
4315
4316 static int
4317 reload_reg_class_lower (r1p, r2p)
4318 const GENERIC_PTR r1p;
4319 const GENERIC_PTR r2p;
4320 {
4321 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4322 register int t;
4323
4324 /* Consider required reloads before optional ones. */
4325 t = reload_optional[r1] - reload_optional[r2];
4326 if (t != 0)
4327 return t;
4328
4329 /* Count all solitary classes before non-solitary ones. */
4330 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4331 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4332 if (t != 0)
4333 return t;
4334
4335 /* Aside from solitaires, consider all multi-reg groups first. */
4336 t = reload_nregs[r2] - reload_nregs[r1];
4337 if (t != 0)
4338 return t;
4339
4340 /* Consider reloads in order of increasing reg-class number. */
4341 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4342 if (t != 0)
4343 return t;
4344
4345 /* If reloads are equally urgent, sort by reload number,
4346 so that the results of qsort leave nothing to chance. */
4347 return r1 - r2;
4348 }
4349 \f
4350 /* The following HARD_REG_SETs indicate when each hard register is
4351 used for a reload of various parts of the current insn. */
4352
4353 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4354 static HARD_REG_SET reload_reg_used;
4355 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4356 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4357 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4358 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4359 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4360 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4361 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4362 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4363 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4364 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4365 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4366 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4367 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4368 static HARD_REG_SET reload_reg_used_in_op_addr;
4369 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4370 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4371 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4372 static HARD_REG_SET reload_reg_used_in_insn;
4373 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4374 static HARD_REG_SET reload_reg_used_in_other_addr;
4375
4376 /* If reg is in use as a reload reg for any sort of reload. */
4377 static HARD_REG_SET reload_reg_used_at_all;
4378
4379 /* If reg is in use as an inherited reload. We just mark the first register
4380 in the group. */
4381 static HARD_REG_SET reload_reg_used_for_inherit;
4382
4383 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4384 TYPE. MODE is used to indicate how many consecutive regs are
4385 actually used. */
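/* For example (regno and mode hypothetical): marking regno 4 in DImode for a
   RELOAD_FOR_INPUT reload, on a target where HARD_REGNO_NREGS (4, DImode) is
   2, sets bits 4 and 5 both in reload_reg_used_in_input[OPNUM] and in
   reload_reg_used_at_all.  */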
4386
4387 static void
4388 mark_reload_reg_in_use (regno, opnum, type, mode)
4389 int regno;
4390 int opnum;
4391 enum reload_type type;
4392 enum machine_mode mode;
4393 {
4394 int nregs = HARD_REGNO_NREGS (regno, mode);
4395 int i;
4396
4397 for (i = regno; i < nregs + regno; i++)
4398 {
4399 switch (type)
4400 {
4401 case RELOAD_OTHER:
4402 SET_HARD_REG_BIT (reload_reg_used, i);
4403 break;
4404
4405 case RELOAD_FOR_INPUT_ADDRESS:
4406 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4407 break;
4408
4409 case RELOAD_FOR_INPADDR_ADDRESS:
4410 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4411 break;
4412
4413 case RELOAD_FOR_OUTPUT_ADDRESS:
4414 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4415 break;
4416
4417 case RELOAD_FOR_OUTADDR_ADDRESS:
4418 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4419 break;
4420
4421 case RELOAD_FOR_OPERAND_ADDRESS:
4422 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4423 break;
4424
4425 case RELOAD_FOR_OPADDR_ADDR:
4426 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4427 break;
4428
4429 case RELOAD_FOR_OTHER_ADDRESS:
4430 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4431 break;
4432
4433 case RELOAD_FOR_INPUT:
4434 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4435 break;
4436
4437 case RELOAD_FOR_OUTPUT:
4438 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4439 break;
4440
4441 case RELOAD_FOR_INSN:
4442 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4443 break;
4444 }
4445
4446 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4447 }
4448 }
4449
4450 /* Similarly, but show REGNO is no longer in use for a reload. */
4451
4452 static void
4453 clear_reload_reg_in_use (regno, opnum, type, mode)
4454 int regno;
4455 int opnum;
4456 enum reload_type type;
4457 enum machine_mode mode;
4458 {
4459 int nregs = HARD_REGNO_NREGS (regno, mode);
4460 int i;
4461
4462 for (i = regno; i < nregs + regno; i++)
4463 {
4464 switch (type)
4465 {
4466 case RELOAD_OTHER:
4467 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4468 break;
4469
4470 case RELOAD_FOR_INPUT_ADDRESS:
4471 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4472 break;
4473
4474 case RELOAD_FOR_INPADDR_ADDRESS:
4475 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4476 break;
4477
4478 case RELOAD_FOR_OUTPUT_ADDRESS:
4479 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4480 break;
4481
4482 case RELOAD_FOR_OUTADDR_ADDRESS:
4483 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4484 break;
4485
4486 case RELOAD_FOR_OPERAND_ADDRESS:
4487 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4488 break;
4489
4490 case RELOAD_FOR_OPADDR_ADDR:
4491 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4492 break;
4493
4494 case RELOAD_FOR_OTHER_ADDRESS:
4495 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4496 break;
4497
4498 case RELOAD_FOR_INPUT:
4499 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4500 break;
4501
4502 case RELOAD_FOR_OUTPUT:
4503 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4504 break;
4505
4506 case RELOAD_FOR_INSN:
4507 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4508 break;
4509 }
4510 }
4511 }
4512
4513 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4514 specified by OPNUM and TYPE. */
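/* For illustration: a register already claimed for a RELOAD_FOR_OTHER_ADDRESS
   reload is still reported free for a RELOAD_FOR_INPUT reload, since the
   other-address value is only needed to form the address of a RELOAD_OTHER
   reload emitted before any input is loaded; the same register is not free
   for RELOAD_OTHER itself, which claims it for the whole insn.  */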
4515
4516 static int
4517 reload_reg_free_p (regno, opnum, type)
4518 int regno;
4519 int opnum;
4520 enum reload_type type;
4521 {
4522 int i;
4523
4524 /* In use for a RELOAD_OTHER means it's not available for anything. */
4525 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4526 return 0;
4527
4528 switch (type)
4529 {
4530 case RELOAD_OTHER:
4531 /* In use for anything means we can't use it for RELOAD_OTHER. */
4532 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4533 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4534 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4535 return 0;
4536
4537 for (i = 0; i < reload_n_operands; i++)
4538 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4541 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4542 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4543 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4544 return 0;
4545
4546 return 1;
4547
4548 case RELOAD_FOR_INPUT:
4549 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4550 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4551 return 0;
4552
4553 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4554 return 0;
4555
4556 /* If it is used for some other input, can't use it. */
4557 for (i = 0; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4559 return 0;
4560
4561 /* If it is used in a later operand's address, can't use it. */
4562 for (i = opnum + 1; i < reload_n_operands; i++)
4563 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4564 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4565 return 0;
4566
4567 return 1;
4568
4569 case RELOAD_FOR_INPUT_ADDRESS:
4570 /* Can't use a register if it is used for an input address for this
4571 operand or used as an input in an earlier one. */
4572 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4573 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4574 return 0;
4575
4576 for (i = 0; i < opnum; i++)
4577 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4578 return 0;
4579
4580 return 1;
4581
4582 case RELOAD_FOR_INPADDR_ADDRESS:
4583 /* Can't use a register if it is used for the address of an input
4584 address for this operand or used as an input in an earlier
4585 one. */
4586 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4587 return 0;
4588
4589 for (i = 0; i < opnum; i++)
4590 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4591 return 0;
4592
4593 return 1;
4594
4595 case RELOAD_FOR_OUTPUT_ADDRESS:
4596 /* Can't use a register if it is used for an output address for this
4597 operand or used as an output in this or a later operand. */
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4599 return 0;
4600
4601 for (i = opnum; i < reload_n_operands; i++)
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4603 return 0;
4604
4605 return 1;
4606
4607 case RELOAD_FOR_OUTADDR_ADDRESS:
4608 /* Can't use a register if it is used for the address of an output
4609 address for this operand or used as an output in this or a
4610 later operand. */
4611 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4612 return 0;
4613
4614 for (i = opnum; i < reload_n_operands; i++)
4615 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4616 return 0;
4617
4618 return 1;
4619
4620 case RELOAD_FOR_OPERAND_ADDRESS:
4621 for (i = 0; i < reload_n_operands; i++)
4622 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4623 return 0;
4624
4625 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4626 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4627
4628 case RELOAD_FOR_OPADDR_ADDR:
4629 for (i = 0; i < reload_n_operands; i++)
4630 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4631 return 0;
4632
4633 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4634
4635 case RELOAD_FOR_OUTPUT:
4636 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4637 outputs, or an operand address for this or an earlier output. */
4638 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4639 return 0;
4640
4641 for (i = 0; i < reload_n_operands; i++)
4642 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4643 return 0;
4644
4645 for (i = 0; i <= opnum; i++)
4646 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4647 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4648 return 0;
4649
4650 return 1;
4651
4652 case RELOAD_FOR_INSN:
4653 for (i = 0; i < reload_n_operands; i++)
4654 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4655 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4656 return 0;
4657
4658 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4659 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4660
4661 case RELOAD_FOR_OTHER_ADDRESS:
4662 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4663 }
4664 abort ();
4665 }
4666
4667 /* Return 1 if the value in reload reg REGNO, as used by a reload
4668 needed for the part of the insn specified by OPNUM and TYPE,
4669 is not in use for a reload in any prior part of the insn.
4670
4671 We can assume that the reload reg was already tested for availability
4672 at the time it is needed, and we should not check this again,
4673 in case the reg has already been marked in use. */
4674
4675 static int
4676 reload_reg_free_before_p (regno, opnum, type)
4677 int regno;
4678 int opnum;
4679 enum reload_type type;
4680 {
4681 int i;
4682
4683 switch (type)
4684 {
4685 case RELOAD_FOR_OTHER_ADDRESS:
4686 /* These always come first. */
4687 return 1;
4688
4689 case RELOAD_OTHER:
4690 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4691
4692 /* If this use is for part of the insn,
4693 check that the reg is not in use for any prior part. It is tempting
4694 to try to do this by falling through from objects that occur
4695 later in the insn to ones that occur earlier, but that will not
4696 correctly take into account the fact that here we MUST ignore
4697 things that would prevent the register from being allocated in
4698 the first place, since we know that it was allocated. */
4699
4700 case RELOAD_FOR_OUTPUT_ADDRESS:
4701 case RELOAD_FOR_OUTADDR_ADDRESS:
4702 /* Earlier reloads are for earlier outputs or their addresses,
4703 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4704 RELOAD_FOR_OTHER_ADDRESS reloads (we know they can't conflict with
4705 RELOAD_OTHER). */
4706 for (i = 0; i < opnum; i++)
4707 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4708 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4709 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4710 return 0;
4711
4712 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4713 return 0;
4714
4715 for (i = 0; i < reload_n_operands; i++)
4716 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4717 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4718 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4719 return 0;
4720
4721 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4722 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4723 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4724
4725 case RELOAD_FOR_OUTPUT:
4726 /* This can't be used in the output address for this operand and
4727 anything that can't be used for it, except that we've already
4728 tested for RELOAD_FOR_INSN objects. */
4729
4730 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4731 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4732 return 0;
4733
4734 for (i = 0; i < opnum; i++)
4735 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4736 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4737 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4738 return 0;
4739
4740 for (i = 0; i < reload_n_operands; i++)
4741 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4742 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4743 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4744 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4745 return 0;
4746
4747 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4748
4749 case RELOAD_FOR_OPERAND_ADDRESS:
4750 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4751 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4752 return 0;
4753
4754 /* ... fall through ... */
4755
4756 case RELOAD_FOR_OPADDR_ADDR:
4757 case RELOAD_FOR_INSN:
4758 /* These can't conflict with inputs, or each other, so all we have to
4759 test is input addresses and the addresses of OTHER items. */
4760
4761 for (i = 0; i < reload_n_operands; i++)
4762 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4763 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4764 return 0;
4765
4766 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4767
4768 case RELOAD_FOR_INPUT:
4769 /* The only things earlier are the address for this and
4770 earlier inputs, other inputs (which we know we don't conflict
4771 with), and addresses of RELOAD_OTHER objects. */
4772
4773 for (i = 0; i <= opnum; i++)
4774 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4775 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4776 return 0;
4777
4778 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4779
4780 case RELOAD_FOR_INPUT_ADDRESS:
4781 case RELOAD_FOR_INPADDR_ADDRESS:
4782 /* Similarly, all we have to check is for use in earlier inputs'
4783 addresses. */
4784 for (i = 0; i < opnum; i++)
4785 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4786 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4787 return 0;
4788
4789 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4790 }
4791 abort ();
4792 }
4793
4794 /* Return 1 if the value in reload reg REGNO, as used by a reload
4795 needed for the part of the insn specified by OPNUM and TYPE,
4796 is still available in REGNO at the end of the insn.
4797
4798 We can assume that the reload reg was already tested for availability
4799 at the time it is needed, and we should not check this again,
4800 in case the reg has already been marked in use. */
4801
4802 static int
4803 reload_reg_reaches_end_p (regno, opnum, type)
4804 int regno;
4805 int opnum;
4806 enum reload_type type;
4807 {
4808 int i;
4809
4810 switch (type)
4811 {
4812 case RELOAD_OTHER:
4813 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4814 its value must reach the end. */
4815 return 1;
4816
4817 /* If this use is for part of the insn,
4818 its value reaches the end if no subsequent part uses the same register.
4819 Just like the above function, don't try to do this with lots
4820 of fallthroughs. */
4821
4822 case RELOAD_FOR_OTHER_ADDRESS:
4823 /* Here we check for everything else, since these don't conflict
4824 with anything else and everything comes later. */
4825
4826 for (i = 0; i < reload_n_operands; i++)
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4829 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4830 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4831 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4832 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4833 return 0;
4834
4835 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4836 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4837 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4838
4839 case RELOAD_FOR_INPUT_ADDRESS:
4840 case RELOAD_FOR_INPADDR_ADDRESS:
4841 /* Similar, except that we check only for this and subsequent inputs
4842 and the address of only subsequent inputs and we do not need
4843 to check for RELOAD_OTHER objects since they are known not to
4844 conflict. */
4845
4846 for (i = opnum; i < reload_n_operands; i++)
4847 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4848 return 0;
4849
4850 for (i = opnum + 1; i < reload_n_operands; i++)
4851 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4852 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4853 return 0;
4854
4855 for (i = 0; i < reload_n_operands; i++)
4856 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4857 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4858 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4859 return 0;
4860
4861 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4862 return 0;
4863
4864 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4865 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4866
4867 case RELOAD_FOR_INPUT:
4868 /* Similar to input address, except we start at the next operand for
4869 both input and input address and we do not check for
4870 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4871 would conflict. */
4872
4873 for (i = opnum + 1; i < reload_n_operands; i++)
4874 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4875 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4876 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4877 return 0;
4878
4879 /* ... fall through ... */
4880
4881 case RELOAD_FOR_OPERAND_ADDRESS:
4882 /* Check outputs and their addresses. */
4883
4884 for (i = 0; i < reload_n_operands; i++)
4885 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4886 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4887 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4888 return 0;
4889
4890 return 1;
4891
4892 case RELOAD_FOR_OPADDR_ADDR:
4893 for (i = 0; i < reload_n_operands; i++)
4894 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4895 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4896 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4897 return 0;
4898
4899 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4900 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4901
4902 case RELOAD_FOR_INSN:
4903 /* These conflict with output reloads and with RELOAD_OTHER. So
4904 we need only check for output addresses. */
4905
4906 opnum = -1;
4907
4908 /* ... fall through ... */
4909
4910 case RELOAD_FOR_OUTPUT:
4911 case RELOAD_FOR_OUTPUT_ADDRESS:
4912 case RELOAD_FOR_OUTADDR_ADDRESS:
4913 /* We already know these can't conflict with a later output. So the
4914 only things to check are later output addresses. */
4915 for (i = opnum + 1; i < reload_n_operands; i++)
4916 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4917 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4918 return 0;
4919
4920 return 1;
4921 }
4922
4923 abort ();
4924 }
4925 \f
4926 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4927 Return 0 otherwise.
4928
4929 This function uses the same algorithm as reload_reg_free_p above. */
4930
4931 static int
4932 reloads_conflict (r1, r2)
4933 int r1, r2;
4934 {
4935 enum reload_type r1_type = reload_when_needed[r1];
4936 enum reload_type r2_type = reload_when_needed[r2];
4937 int r1_opnum = reload_opnum[r1];
4938 int r2_opnum = reload_opnum[r2];
4939
4940 /* RELOAD_OTHER conflicts with everything. */
4941 if (r2_type == RELOAD_OTHER)
4942 return 1;
4943
4944 /* Otherwise, check conflicts differently for each type. */
4945
4946 switch (r1_type)
4947 {
4948 case RELOAD_FOR_INPUT:
4949 return (r2_type == RELOAD_FOR_INSN
4950 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4951 || r2_type == RELOAD_FOR_OPADDR_ADDR
4952 || r2_type == RELOAD_FOR_INPUT
4953 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4954 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4955 && r2_opnum > r1_opnum));
4956
4957 case RELOAD_FOR_INPUT_ADDRESS:
4958 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4959 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4960
4961 case RELOAD_FOR_INPADDR_ADDRESS:
4962 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4963 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4964
4965 case RELOAD_FOR_OUTPUT_ADDRESS:
4966 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4967 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4968
4969 case RELOAD_FOR_OUTADDR_ADDRESS:
4970 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4971 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4972
4973 case RELOAD_FOR_OPERAND_ADDRESS:
4974 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4975 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4976
4977 case RELOAD_FOR_OPADDR_ADDR:
4978 return (r2_type == RELOAD_FOR_INPUT
4979 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4980
4981 case RELOAD_FOR_OUTPUT:
4982 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4983 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4984 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4985 && r2_opnum >= r1_opnum));
4986
4987 case RELOAD_FOR_INSN:
4988 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4989 || r2_type == RELOAD_FOR_INSN
4990 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4991
4992 case RELOAD_FOR_OTHER_ADDRESS:
4993 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4994
4995 case RELOAD_OTHER:
4996 return 1;
4997
4998 default:
4999 abort ();
5000 }
5001 }
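/* For example, by the cases above a RELOAD_FOR_INPUT reload for
   operand 1 conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 2 (that address is computed after operand 1 has been loaded
   into its reload reg), but not with one for operand 0, whose address
   reload is finished before operand 1 is loaded.  */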
5002 \f
5003 /* Vector of reload-numbers showing the order in which the reloads should
5004 be processed. */
5005 short reload_order[MAX_RELOADS];
5006
5007 /* Indexed by reload number, 1 if incoming value
5008 inherited from previous insns. */
5009 char reload_inherited[MAX_RELOADS];
5010
5011 /* For an inherited reload, this is the insn the reload was inherited from,
5012 if we know it. Otherwise, this is 0. */
5013 rtx reload_inheritance_insn[MAX_RELOADS];
5014
5015 /* If non-zero, this is a place to get the value of the reload,
5016 rather than using reload_in. */
5017 rtx reload_override_in[MAX_RELOADS];
5018
5019 /* For each reload, the index in spill_regs of the spill register used,
5020 or -1 if we did not need one of the spill registers for this reload. */
5021 int reload_spill_index[MAX_RELOADS];
5022
5023 /* Find a spill register to use as a reload register for reload R.
5024 LAST_RELOAD is non-zero if this is the last reload for the insn being
5025 processed.
5026
5027 Set reload_reg_rtx[R] to the register allocated.
5028
5029 If NOERROR is nonzero, we return 1 if successful,
5030 or 0 if we couldn't find a spill reg and we didn't change anything. */
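/* If NOERROR is zero and no suitable spill reg can be found, control
   reaches the failure code at the end of the function: a
   compiler-generated insn gets a fatal error, while an asm gets an
   error message, the reload is disabled, and 1 is returned anyway.  */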
5031
5032 static int
5033 allocate_reload_reg (r, insn, last_reload, noerror)
5034 int r;
5035 rtx insn;
5036 int last_reload;
5037 int noerror;
5038 {
5039 int i;
5040 int pass;
5041 int count;
5042 rtx new;
5043 int regno;
5044
5045 /* If we put this reload ahead, thinking it is a group,
5046 then insist on finding a group. Otherwise we can grab a
5047 reg that some other reload needs.
5048 (That can happen when we have a 68000 DATA_OR_FP_REG
5049 which is a group of data regs or one fp reg.)
5050 We need not be so restrictive if there are no more reloads
5051 for this insn.
5052
5053 ??? Really it would be nicer to have smarter handling
5054 for that kind of reg class, where a problem like this is normal.
5055 Perhaps those classes should be avoided for reloading
5056 by use of more alternatives. */
5057
5058 int force_group = reload_nregs[r] > 1 && ! last_reload;
5059
5060 /* If we want a single register and haven't yet found one,
5061 take any reg in the right class and not in use.
5062 If we want a consecutive group, here is where we look for it.
5063
5064 We use two passes so we can first look for reload regs to
5065 reuse, which are already in use for other reloads in this insn,
5066 and only then use additional registers.
5067 I think that maximizing reuse is needed to make sure we don't
5068 run out of reload regs. Suppose we have three reloads, and
5069 reloads A and B can share regs. These need two regs.
5070 Suppose A and B are given different regs.
5071 That leaves none for C. */
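/* Concretely: on pass 0 we consider only spill regs that are already
   in use for some other reload of this insn (but not ones being
   inherited), so reloads share registers where possible; pass 1 then
   considers any suitable spill reg.  */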
5072 for (pass = 0; pass < 2; pass++)
5073 {
5074 /* I is the index in spill_regs.
5075 We advance it round-robin between insns to use all spill regs
5076 equally, so that inherited reloads have a chance
5077 of leapfrogging each other. Don't do this, however, when we have
5078 group needs and failure would be fatal; if we only have a relatively
5079 small number of spill registers, and more than one of them has
5080 group needs, then by starting in the middle, we may end up
5081 allocating the first one in such a way that we are not left with
5082 sufficient groups to handle the rest. */
5083
5084 if (noerror || ! force_group)
5085 i = last_spill_reg;
5086 else
5087 i = -1;
5088
5089 for (count = 0; count < n_spills; count++)
5090 {
5091 int class = (int) reload_reg_class[r];
5092
5093 i = (i + 1) % n_spills;
5094
5095 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5096 reload_when_needed[r])
5097 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5098 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5099 /* Look first for regs to share, then for unshared. But
5100 don't share regs used for inherited reloads; they are
5101 the ones we want to preserve. */
5102 && (pass
5103 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5104 spill_regs[i])
5105 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5106 spill_regs[i]))))
5107 {
5108 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5109 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5110 (on 68000) got us two FP regs. If NR is 1,
5111 we would reject both of them. */
5112 if (force_group)
5113 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5114 /* If we need only one reg, we have already won. */
5115 if (nr == 1)
5116 {
5117 /* But reject a single reg if we demand a group. */
5118 if (force_group)
5119 continue;
5120 break;
5121 }
5122 /* Otherwise check that as many consecutive regs as we need
5123 are available here.
5124 Also, don't use for a group registers that are
5125 needed for nongroups. */
5126 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5127 while (nr > 1)
5128 {
5129 regno = spill_regs[i] + nr - 1;
5130 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5131 && spill_reg_order[regno] >= 0
5132 && reload_reg_free_p (regno, reload_opnum[r],
5133 reload_when_needed[r])
5134 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5135 regno)))
5136 break;
5137 nr--;
5138 }
5139 if (nr == 1)
5140 break;
5141 }
5142 }
5143
5144 /* If we found something on pass 1, omit pass 2. */
5145 if (count < n_spills)
5146 break;
5147 }
5148
5149 /* We should have found a spill register by now. */
5150 if (count == n_spills)
5151 {
5152 if (noerror)
5153 return 0;
5154 goto failure;
5155 }
5156
5157 /* I is the index in SPILL_REG_RTX of the reload register we are to
5158 allocate. Get an rtx for it and find its register number. */
5159
5160 new = spill_reg_rtx[i];
5161
5162 if (new == 0 || GET_MODE (new) != reload_mode[r])
5163 spill_reg_rtx[i] = new
5164 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5165
5166 regno = true_regnum (new);
5167
5168 /* Detect when the reload reg can't hold the reload mode.
5169 This used to be one `if', but the Sequent compiler can't handle that. */
5170 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5171 {
5172 enum machine_mode test_mode = VOIDmode;
5173 if (reload_in[r])
5174 test_mode = GET_MODE (reload_in[r]);
5175 /* If reload_in[r] has VOIDmode, it means we will load it
5176 in whatever mode the reload reg has: to wit, reload_mode[r].
5177 We have already tested that for validity. */
5178 /* Aside from that, we need to test that the expressions
5179 to reload from or into have modes which are valid for this
5180 reload register. Otherwise the reload insns would be invalid. */
5181 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5182 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5183 if (! (reload_out[r] != 0
5184 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5185 {
5186 /* The reg is OK. */
5187 last_spill_reg = i;
5188
5189 /* Mark as in use for this insn the reload regs we use
5190 for this. */
5191 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5192 reload_when_needed[r], reload_mode[r]);
5193
5194 reload_reg_rtx[r] = new;
5195 reload_spill_index[r] = i;
5196 return 1;
5197 }
5198 }
5199
5200 /* The reg is not OK. */
5201 if (noerror)
5202 return 0;
5203
5204 failure:
5205 if (asm_noperands (PATTERN (insn)) < 0)
5206 /* It's the compiler's fault. */
5207 fatal_insn ("Could not find a spill register", insn);
5208
5209 /* It's the user's fault; the operand's mode and constraint
5210 don't match. Disable this reload so we don't crash in final. */
5211 error_for_asm (insn,
5212 "`asm' operand constraint incompatible with operand size");
5213 reload_in[r] = 0;
5214 reload_out[r] = 0;
5215 reload_reg_rtx[r] = 0;
5216 reload_optional[r] = 1;
5217 reload_secondary_p[r] = 1;
5218
5219 return 1;
5220 }
5221 \f
5222 /* Assign hard reg targets for the pseudo-registers we must reload
5223 into hard regs for this insn.
5224 Also output the instructions to copy them in and out of the hard regs.
5225
5226 For machines with register classes, we are responsible for
5227 finding a reload reg in the proper class. */
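/* In outline: we save the register-usage bookkeeping set up above,
   sort the reloads by register class, and assign reload registers --
   when optimizing, first trying to inherit values left in reload regs
   by previous insns.  If an attempt runs out of spill registers, the
   saved state is restored and we retry without inheritance.  Finally
   each claimed inheritance is re-verified and the output reloads are
   recorded in reg_has_output_reload and reg_is_output_reload.  */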
5228
5229 static void
5230 choose_reload_regs (insn, avoid_return_reg)
5231 rtx insn;
5232 rtx avoid_return_reg;
5233 {
5234 register int i, j;
5235 int max_group_size = 1;
5236 enum reg_class group_class = NO_REGS;
5237 int inheritance;
5238
5239 rtx save_reload_reg_rtx[MAX_RELOADS];
5240 char save_reload_inherited[MAX_RELOADS];
5241 rtx save_reload_inheritance_insn[MAX_RELOADS];
5242 rtx save_reload_override_in[MAX_RELOADS];
5243 int save_reload_spill_index[MAX_RELOADS];
5244 HARD_REG_SET save_reload_reg_used;
5245 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5246 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5247 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5248 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5249 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5250 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5251 HARD_REG_SET save_reload_reg_used_in_op_addr;
5252 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5253 HARD_REG_SET save_reload_reg_used_in_insn;
5254 HARD_REG_SET save_reload_reg_used_in_other_addr;
5255 HARD_REG_SET save_reload_reg_used_at_all;
5256
5257 bzero (reload_inherited, MAX_RELOADS);
5258 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5259 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5260
5261 CLEAR_HARD_REG_SET (reload_reg_used);
5262 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5263 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5264 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5265 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5266 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5267
5268 for (i = 0; i < reload_n_operands; i++)
5269 {
5270 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5271 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5272 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5273 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5274 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5275 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5276 }
5277
5278 /* Don't bother with avoiding the return reg
5279 if we have no mandatory reload that could use it. */
5280 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5281 {
5282 int do_avoid = 0;
5283 int regno = REGNO (avoid_return_reg);
5284 int nregs
5285 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5286 int r;
5287
5288 for (r = regno; r < regno + nregs; r++)
5289 if (spill_reg_order[r] >= 0)
5290 for (j = 0; j < n_reloads; j++)
5291 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5292 && (reload_in[j] != 0 || reload_out[j] != 0
5293 || reload_secondary_p[j])
5294 &&
5295 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5296 do_avoid = 1;
5297 if (!do_avoid)
5298 avoid_return_reg = 0;
5299 }
5300
5301 #if 0 /* Not needed, now that we can always retry without inheritance. */
5302 /* See if we have more mandatory reloads than spill regs.
5303 If so, then we cannot risk optimizations that could prevent
5304 reloads from sharing one spill register.
5305
5306 Since we will try finding a better register than reload_reg_rtx
5307 unless it is equal to reload_in or reload_out, count such reloads. */
5308
5309 {
5310 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5311 for (j = 0; j < n_reloads; j++)
5312 if (! reload_optional[j]
5313 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5314 && (reload_reg_rtx[j] == 0
5315 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5316 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5317 tem++;
5318 if (tem > n_spills)
5319 must_reuse = 1;
5320 }
5321 #endif
5322
5323 /* Don't use the subroutine call return reg for a reload
5324 if we are supposed to avoid it. */
5325 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5326 {
5327 int regno = REGNO (avoid_return_reg);
5328 int nregs
5329 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5330 int r;
5331
5332 for (r = regno; r < regno + nregs; r++)
5333 if (spill_reg_order[r] >= 0)
5334 SET_HARD_REG_BIT (reload_reg_used, r);
5335 }
5336
5337 /* In order to be certain of getting the registers we need,
5338 we must sort the reloads into order of increasing register class.
5339 Then our grabbing of reload registers will parallel the process
5340 that provided the reload registers.
5341
5342 Also note whether any of the reloads wants a consecutive group of regs.
5343 If so, record the maximum size of the group desired and what
5344 register class contains all the groups needed by this insn. */
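/* For instance, a reload limited to a small class must pick its
   register before a reload whose larger class contains that small
   class; handling the narrow reload first keeps it from being starved
   by the wider one.  */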
5345
5346 for (j = 0; j < n_reloads; j++)
5347 {
5348 reload_order[j] = j;
5349 reload_spill_index[j] = -1;
5350
5351 reload_mode[j]
5352 = (reload_inmode[j] == VOIDmode
5353 || (GET_MODE_SIZE (reload_outmode[j])
5354 > GET_MODE_SIZE (reload_inmode[j])))
5355 ? reload_outmode[j] : reload_inmode[j];
5356
5357 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5358
5359 if (reload_nregs[j] > 1)
5360 {
5361 max_group_size = MAX (reload_nregs[j], max_group_size);
5362 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5363 }
5364
5365 /* If we have already decided to use a certain register,
5366 don't use it in another way. */
5367 if (reload_reg_rtx[j])
5368 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5369 reload_when_needed[j], reload_mode[j]);
5370 }
5371
5372 if (n_reloads > 1)
5373 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5374
5375 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5376 sizeof reload_reg_rtx);
5377 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5378 bcopy ((char *) reload_inheritance_insn,
5379 (char *) save_reload_inheritance_insn,
5380 sizeof reload_inheritance_insn);
5381 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5382 sizeof reload_override_in);
5383 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5384 sizeof reload_spill_index);
5385 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5386 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5387 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5388 reload_reg_used_in_op_addr);
5389
5390 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5391 reload_reg_used_in_op_addr_reload);
5392
5393 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5394 reload_reg_used_in_insn);
5395 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5396 reload_reg_used_in_other_addr);
5397
5398 for (i = 0; i < reload_n_operands; i++)
5399 {
5400 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5401 reload_reg_used_in_output[i]);
5402 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5403 reload_reg_used_in_input[i]);
5404 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5405 reload_reg_used_in_input_addr[i]);
5406 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5407 reload_reg_used_in_inpaddr_addr[i]);
5408 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5409 reload_reg_used_in_output_addr[i]);
5410 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5411 reload_reg_used_in_outaddr_addr[i]);
5412 }
5413
5414 /* If -O, try first with inheritance, then turning it off.
5415 If not -O, don't do inheritance.
5416 Using inheritance when not optimizing leads to paradoxes
5417 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5418 because one side of the comparison might be inherited. */
5419
5420 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5421 {
5422 /* Process the reloads in order of preference just found.
5423 Beyond this point, subregs can be found in reload_reg_rtx.
5424
5425 This used to look for an existing reloaded home for all
5426 of the reloads, and only then perform any new reloads.
5427 But that could lose if the reloads were done out of reg-class order
5428 because a later reload with a looser constraint might have an old
5429 home in a register needed by an earlier reload with a tighter constraint.
5430
5431 To solve this, we make two passes over the reloads, in the order
5432 described above. In the first pass we try to inherit a reload
5433 from a previous insn. If there is a later reload that needs a
5434 class that is a proper subset of the class being processed, we must
5435 also allocate a spill register during the first pass.
5436
5437 Then make a second pass over the reloads to allocate any reloads
5438 that haven't been given registers yet. */
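/* Concretely, the loop just below is the first pass (it handles
   inheritance); the loop after it, which calls allocate_reload_reg
   for whatever is still unassigned and not optional, is the second
   pass.  */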
5439
5440 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5441
5442 for (j = 0; j < n_reloads; j++)
5443 {
5444 register int r = reload_order[j];
5445
5446 /* Ignore reloads that got marked inoperative. */
5447 if (reload_out[r] == 0 && reload_in[r] == 0
5448 && ! reload_secondary_p[r])
5449 continue;
5450
5451 /* If find_reloads chose to use reload_in or reload_out as a reload
5452 register, we don't need to choose one. Otherwise, try even if it
5453 found one since we might save an insn if we find the value lying
5454 around. */
5455 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5456 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5457 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5458 continue;
5459
5460 #if 0 /* No longer needed for correct operation.
5461 It might give better code, or might not; worth an experiment? */
5462 /* If this is an optional reload, we can't inherit from earlier insns
5463 until we are sure that any non-optional reloads have been allocated.
5464 The following code takes advantage of the fact that optional reloads
5465 are at the end of reload_order. */
5466 if (reload_optional[r] != 0)
5467 for (i = 0; i < j; i++)
5468 if ((reload_out[reload_order[i]] != 0
5469 || reload_in[reload_order[i]] != 0
5470 || reload_secondary_p[reload_order[i]])
5471 && ! reload_optional[reload_order[i]]
5472 && reload_reg_rtx[reload_order[i]] == 0)
5473 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5474 #endif
5475
5476 /* First see if this pseudo is already available as reloaded
5477 for a previous insn. We cannot try to inherit for reloads
5478 that are smaller than the maximum number of registers needed
5479 for groups unless the register we would allocate cannot be used
5480 for the groups.
5481
5482 We could check here to see if this is a secondary reload for
5483 an object that is already in a register of the desired class.
5484 This would avoid the need for the secondary reload register.
5485 But this is complex because we can't easily determine what
5486 objects might want to be loaded via this reload. So let a
5487 register be allocated here. In `emit_reload_insns' we suppress
5488 one of the loads in the case described above. */
5489
5490 if (inheritance)
5491 {
5492 register int regno = -1;
5493 enum machine_mode mode;
5494
5495 if (reload_in[r] == 0)
5496 ;
5497 else if (GET_CODE (reload_in[r]) == REG)
5498 {
5499 regno = REGNO (reload_in[r]);
5500 mode = GET_MODE (reload_in[r]);
5501 }
5502 else if (GET_CODE (reload_in_reg[r]) == REG)
5503 {
5504 regno = REGNO (reload_in_reg[r]);
5505 mode = GET_MODE (reload_in_reg[r]);
5506 }
5507 #if 0
5508 /* This won't work, since REGNO can be a pseudo reg number.
5509 Also, it takes much more hair to keep track of all the things
5510 that can invalidate an inherited reload of part of a pseudoreg. */
5511 else if (GET_CODE (reload_in[r]) == SUBREG
5512 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5513 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5514 #endif
5515
5516 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5517 {
5518 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5519
5520 if (reg_reloaded_contents[i] == regno
5521 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5522 >= GET_MODE_SIZE (mode))
5523 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5524 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5525 spill_regs[i])
5526 && (reload_nregs[r] == max_group_size
5527 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5528 spill_regs[i]))
5529 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5530 reload_when_needed[r])
5531 && reload_reg_free_before_p (spill_regs[i],
5532 reload_opnum[r],
5533 reload_when_needed[r]))
5534 {
5535 /* If a group is needed, verify that all the subsequent
5536 registers still have their values intact. */
5537 int nr
5538 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5539 int k;
5540
5541 for (k = 1; k < nr; k++)
5542 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5543 != regno)
5544 break;
5545
5546 if (k == nr)
5547 {
5548 int i1;
5549
5550 /* We found a register that contains the
5551 value we need. If this register is the
5552 same as an `earlyclobber' operand of the
5553 current insn, just mark it as a place to
5554 reload from since we can't use it as the
5555 reload register itself. */
5556
5557 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5558 if (reg_overlap_mentioned_for_reload_p
5559 (reg_last_reload_reg[regno],
5560 reload_earlyclobbers[i1]))
5561 break;
5562
5563 if (i1 != n_earlyclobbers
5564 /* Don't really use the inherited spill reg
5565 if we need it wider than we've got it. */
5566 || (GET_MODE_SIZE (reload_mode[r])
5567 > GET_MODE_SIZE (mode)))
5568 reload_override_in[r] = reg_last_reload_reg[regno];
5569 else
5570 {
5571 int k;
5572 /* We can use this as a reload reg. */
5573 /* Mark the register as in use for this part of
5574 the insn. */
5575 mark_reload_reg_in_use (spill_regs[i],
5576 reload_opnum[r],
5577 reload_when_needed[r],
5578 reload_mode[r]);
5579 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5580 reload_inherited[r] = 1;
5581 reload_inheritance_insn[r]
5582 = reg_reloaded_insn[i];
5583 reload_spill_index[r] = i;
5584 for (k = 0; k < nr; k++)
5585 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5586 spill_regs[i + k]);
5587 }
5588 }
5589 }
5590 }
5591 }
5592
5593 /* Here's another way to see if the value is already lying around. */
5594 if (inheritance
5595 && reload_in[r] != 0
5596 && ! reload_inherited[r]
5597 && reload_out[r] == 0
5598 && (CONSTANT_P (reload_in[r])
5599 || GET_CODE (reload_in[r]) == PLUS
5600 || GET_CODE (reload_in[r]) == REG
5601 || GET_CODE (reload_in[r]) == MEM)
5602 && (reload_nregs[r] == max_group_size
5603 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5604 {
5605 register rtx equiv
5606 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5607 -1, NULL_PTR, 0, reload_mode[r]);
5608 int regno;
5609
5610 if (equiv != 0)
5611 {
5612 if (GET_CODE (equiv) == REG)
5613 regno = REGNO (equiv);
5614 else if (GET_CODE (equiv) == SUBREG)
5615 {
5616 /* This must be a SUBREG of a hard register.
5617 Make a new REG since this might be used in an
5618 address and not all machines support SUBREGs
5619 there. */
5620 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5621 equiv = gen_rtx (REG, reload_mode[r], regno);
5622 }
5623 else
5624 abort ();
5625 }
5626
5627 /* If we found a spill reg, reject it unless it is free
5628 and of the desired class. */
5629 if (equiv != 0
5630 && ((spill_reg_order[regno] >= 0
5631 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5632 reload_when_needed[r]))
5633 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5634 regno)))
5635 equiv = 0;
5636
5637 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5638 equiv = 0;
5639
5640 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5641 equiv = 0;
5642
5643 /* We found a register that contains the value we need.
5644 If this register is the same as an `earlyclobber' operand
5645 of the current insn, just mark it as a place to reload from
5646 since we can't use it as the reload register itself. */
5647
5648 if (equiv != 0)
5649 for (i = 0; i < n_earlyclobbers; i++)
5650 if (reg_overlap_mentioned_for_reload_p (equiv,
5651 reload_earlyclobbers[i]))
5652 {
5653 reload_override_in[r] = equiv;
5654 equiv = 0;
5655 break;
5656 }
5657
5658 /* JRV: If the equiv register we have found is
5659 explicitly clobbered in the current insn, mark but
5660 don't use, as above. */
5661
5662 if (equiv != 0 && regno_clobbered_p (regno, insn))
5663 {
5664 reload_override_in[r] = equiv;
5665 equiv = 0;
5666 }
5667
5668 /* If we found an equivalent reg, say no code need be generated
5669 to load it, and use it as our reload reg. */
5670 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5671 {
5672 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5673 int k;
5674 reload_reg_rtx[r] = equiv;
5675 reload_inherited[r] = 1;
5676
5677 /* If any of the hard registers in EQUIV are spill
5678 registers, mark them as in use for this insn. */
5679 for (k = 0; k < nr; k++)
5680 {
5681 i = spill_reg_order[regno + k];
5682 if (i >= 0)
5683 {
5684 mark_reload_reg_in_use (regno, reload_opnum[r],
5685 reload_when_needed[r],
5686 reload_mode[r]);
5687 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5688 regno + k);
5689 }
5690 }
5691 }
5692 }
5693
5694 /* If we found a register to use already, or if this is an optional
5695 reload, we are done. */
5696 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5697 continue;
5698
5699 #if 0 /* No longer needed for correct operation. Might or might not
5700 give better code on the average. Want to experiment? */
5701
5702 /* See if there is a later reload that has a class different from our
5703 class that intersects our class or that requires less register
5704 than our reload. If so, we must allocate a register to this
5705 reload now, since that reload might inherit a previous reload
5706 and take the only available register in our class. Don't do this
5707 for optional reloads since they will force all previous reloads
5708 to be allocated. Also don't do this for reloads that have been
5709 turned off. */
5710
5711 for (i = j + 1; i < n_reloads; i++)
5712 {
5713 int s = reload_order[i];
5714
5715 if ((reload_in[s] == 0 && reload_out[s] == 0
5716 && ! reload_secondary_p[s])
5717 || reload_optional[s])
5718 continue;
5719
5720 if ((reload_reg_class[s] != reload_reg_class[r]
5721 && reg_classes_intersect_p (reload_reg_class[r],
5722 reload_reg_class[s]))
5723 || reload_nregs[s] < reload_nregs[r])
5724 break;
5725 }
5726
5727 if (i == n_reloads)
5728 continue;
5729
5730 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5731 #endif
5732 }
5733
5734 /* Now allocate reload registers for anything non-optional that
5735 didn't get one yet. */
5736 for (j = 0; j < n_reloads; j++)
5737 {
5738 register int r = reload_order[j];
5739
5740 /* Ignore reloads that got marked inoperative. */
5741 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5742 continue;
5743
5744 /* Skip reloads that already have a register allocated or are
5745 optional. */
5746 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5747 continue;
5748
5749 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5750 break;
5751 }
5752
5753 /* If that loop got all the way, we have won. */
5754 if (j == n_reloads)
5755 break;
5756
5757 fail:
5758 /* Loop around and try without any inheritance. */
5759 /* First undo everything done by the failed attempt
5760 to allocate with inheritance. */
5761 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5762 sizeof reload_reg_rtx);
5763 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5764 sizeof reload_inherited);
5765 bcopy ((char *) save_reload_inheritance_insn,
5766 (char *) reload_inheritance_insn,
5767 sizeof reload_inheritance_insn);
5768 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5769 sizeof reload_override_in);
5770 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5771 sizeof reload_spill_index);
5772 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5773 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5774 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5775 save_reload_reg_used_in_op_addr);
5776 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5777 save_reload_reg_used_in_op_addr_reload);
5778 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5779 save_reload_reg_used_in_insn);
5780 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5781 save_reload_reg_used_in_other_addr);
5782
5783 for (i = 0; i < reload_n_operands; i++)
5784 {
5785 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5786 save_reload_reg_used_in_input[i]);
5787 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5788 save_reload_reg_used_in_output[i]);
5789 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5790 save_reload_reg_used_in_input_addr[i]);
5791 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5792 save_reload_reg_used_in_inpaddr_addr[i]);
5793 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5794 save_reload_reg_used_in_output_addr[i]);
5795 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5796 save_reload_reg_used_in_outaddr_addr[i]);
5797 }
5798 }
5799
5800 /* If we thought we could inherit a reload, because it seemed that
5801 nothing else wanted the same reload register earlier in the insn,
5802 verify that assumption, now that all reloads have been assigned. */
5803
5804 for (j = 0; j < n_reloads; j++)
5805 {
5806 register int r = reload_order[j];
5807
5808 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5809 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5810 reload_opnum[r],
5811 reload_when_needed[r]))
5812 reload_inherited[r] = 0;
5813
5814 /* If we found a better place to reload from,
5815 validate it in the same fashion, if it is a reload reg. */
5816 if (reload_override_in[r]
5817 && (GET_CODE (reload_override_in[r]) == REG
5818 || GET_CODE (reload_override_in[r]) == SUBREG))
5819 {
5820 int regno = true_regnum (reload_override_in[r]);
5821 if (spill_reg_order[regno] >= 0
5822 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5823 reload_when_needed[r]))
5824 reload_override_in[r] = 0;
5825 }
5826 }
5827
5828 /* Now that reload_override_in is known valid,
5829 actually override reload_in. */
5830 for (j = 0; j < n_reloads; j++)
5831 if (reload_override_in[j])
5832 reload_in[j] = reload_override_in[j];
5833
5834 /* If this reload won't be done because it has been cancelled or is
5835 optional and not inherited, clear reload_reg_rtx so other
5836 routines (such as subst_reloads) don't get confused. */
5837 for (j = 0; j < n_reloads; j++)
5838 if (reload_reg_rtx[j] != 0
5839 && ((reload_optional[j] && ! reload_inherited[j])
5840 || (reload_in[j] == 0 && reload_out[j] == 0
5841 && ! reload_secondary_p[j])))
5842 {
5843 int regno = true_regnum (reload_reg_rtx[j]);
5844
5845 if (spill_reg_order[regno] >= 0)
5846 clear_reload_reg_in_use (regno, reload_opnum[j],
5847 reload_when_needed[j], reload_mode[j]);
5848 reload_reg_rtx[j] = 0;
5849 }
5850
5851 /* Record which pseudos and which spill regs have output reloads. */
5852 for (j = 0; j < n_reloads; j++)
5853 {
5854 register int r = reload_order[j];
5855
5856 i = reload_spill_index[r];
5857
5858 /* I is nonneg if this reload used one of the spill regs.
5859 If reload_reg_rtx[r] is 0, this is an optional reload
5860 that we opted to ignore. */
5861 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5862 && reload_reg_rtx[r] != 0)
5863 {
5864 register int nregno = REGNO (reload_out[r]);
5865 int nr = 1;
5866
5867 if (nregno < FIRST_PSEUDO_REGISTER)
5868 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5869
5870 while (--nr >= 0)
5871 reg_has_output_reload[nregno + nr] = 1;
5872
5873 if (i >= 0)
5874 {
5875 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5876 while (--nr >= 0)
5877 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5878 }
5879
5880 if (reload_when_needed[r] != RELOAD_OTHER
5881 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5882 && reload_when_needed[r] != RELOAD_FOR_INSN)
5883 abort ();
5884 }
5885 }
5886 }
5887 \f
5888 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5889 reloads of the same item for fear that we might not have enough reload
5890 registers. However, normally they will get the same reload register
5891 and hence actually need not be loaded twice.
5892
5893 Here we check for the most common case of this phenomenon: when we have
5894 a number of reloads for the same object, each of which was allocated
5895 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5896 reload, and is not modified in the insn itself. If we find such,
5897 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5898 This will not increase the number of spill registers needed and will
5899 prevent redundant code. */
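/* One way this arises: the same pseudo is needed both for an input
   operand's address and for an output operand's address.  Those two
   input-type reloads may have been kept separate, yet both can end up
   with the same reload register; merging them into one RELOAD_OTHER
   reload means the value is loaded only once.  */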
5900
5901 static void
5902 merge_assigned_reloads (insn)
5903 rtx insn;
5904 {
5905 int i, j;
5906
5907 /* Scan all the reloads looking for ones that only load values and
5908 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5909 assigned and not modified by INSN. */
5910
5911 for (i = 0; i < n_reloads; i++)
5912 {
5913 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5914 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5915 || reg_set_p (reload_reg_rtx[i], insn))
5916 continue;
5917
5918 /* Look at all other reloads. Ensure that the only use of this
5919 reload_reg_rtx is in a reload that just loads the same value
5920 as we do. Note that any secondary reloads must be of the identical
5921 class since the values, modes, and result registers are the
5922 same, so we need not do anything with any secondary reloads. */
5923
5924 for (j = 0; j < n_reloads; j++)
5925 {
5926 if (i == j || reload_reg_rtx[j] == 0
5927 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5928 reload_reg_rtx[i]))
5929 continue;
5930
5931 /* If the reload regs aren't exactly the same (e.g., different modes)
5932 or if the values are different, we can't merge anything with this
5933 reload register. */
5934
5935 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5936 || reload_out[j] != 0 || reload_in[j] == 0
5937 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5938 break;
5939 }
5940
5941 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5942 we, in fact, found any matching reloads. */
5943
5944 if (j == n_reloads)
5945 {
5946 for (j = 0; j < n_reloads; j++)
5947 if (i != j && reload_reg_rtx[j] != 0
5948 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5949 {
5950 reload_when_needed[i] = RELOAD_OTHER;
5951 reload_in[j] = 0;
5952 transfer_replacements (i, j);
5953 }
5954
5955 /* If this is now RELOAD_OTHER, look for any reloads that load
5956 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5957 if they were for inputs, RELOAD_OTHER for outputs. Note that
5958 this test is equivalent to looking for reloads for this operand
5959 number. */
5960
5961 if (reload_when_needed[i] == RELOAD_OTHER)
5962 for (j = 0; j < n_reloads; j++)
5963 if (reload_in[j] != 0
5964 && reload_when_needed[j] != RELOAD_OTHER
5965 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5966 reload_in[i]))
5967 reload_when_needed[j]
5968 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5969 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
5970 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5971 }
5972 }
5973 }
5974
5975 \f
5976 /* Output insns to reload values in and out of the chosen reload regs. */
5977
5978 static void
5979 emit_reload_insns (insn)
5980 rtx insn;
5981 {
5982 register int j;
5983 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5984 rtx other_input_address_reload_insns = 0;
5985 rtx other_input_reload_insns = 0;
5986 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5988 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5989 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5990 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5991 rtx operand_reload_insns = 0;
5992 rtx other_operand_reload_insns = 0;
5993 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5994 rtx following_insn = NEXT_INSN (insn);
5995 rtx before_insn = insn;
5996 int special;
5997 /* Values to be put in spill_reg_store are put here first. */
5998 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5999
6000 for (j = 0; j < reload_n_operands; j++)
6001 input_reload_insns[j] = input_address_reload_insns[j]
6002 = inpaddr_address_reload_insns[j]
6003 = output_reload_insns[j] = output_address_reload_insns[j]
6004 = outaddr_address_reload_insns[j]
6005 = other_output_reload_insns[j] = 0;
6006
6007 /* Now output the instructions to copy the data into and out of the
6008 reload registers. Do these in the order that the reloads were reported,
6009 since reloads of base and index registers precede reloads of operands
6010 and the operands may need the base and index registers reloaded. */
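/* Each class of reload accumulates its insns in its own sequence (the
   rtx variables declared above); push_to_sequence below selects the
   right one, and the finished sequences are later emitted around INSN
   in the required order.  */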
6011
6012 for (j = 0; j < n_reloads; j++)
6013 {
6014 register rtx old;
6015 rtx oldequiv_reg = 0;
6016 rtx this_reload_insn = 0;
6017
6018 if (reload_spill_index[j] >= 0)
6019 new_spill_reg_store[reload_spill_index[j]] = 0;
6020
6021 old = reload_in[j];
6022 if (old != 0 && ! reload_inherited[j]
6023 && ! rtx_equal_p (reload_reg_rtx[j], old)
6024 && reload_reg_rtx[j] != 0)
6025 {
6026 register rtx reloadreg = reload_reg_rtx[j];
6027 rtx oldequiv = 0;
6028 enum machine_mode mode;
6029 rtx *where;
6030
6031 /* Determine the mode to reload in.
6032 This is very tricky because we have three to choose from.
6033 There is the mode the insn operand wants (reload_inmode[J]).
6034 There is the mode of the reload register RELOADREG.
6035 There is the intrinsic mode of the operand, which we could find
6036 by stripping some SUBREGs.
6037 It turns out that RELOADREG's mode is irrelevant:
6038 we can change that arbitrarily.
6039
6040 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6041 then the reload reg may not support QImode moves, so use SImode.
6042 If foo is in memory due to spilling a pseudo reg, this is safe,
6043 because the QImode value is in the least significant part of a
6044 slot big enough for a SImode. If foo is some other sort of
6045 memory reference, then it is impossible to reload this case,
6046 so previous passes had better make sure this never happens.
6047
6048 Then consider a one-word union which has SImode and one of its
6049 members is a float, being fetched as (SUBREG:SF union:SI).
6050 We must fetch that as SFmode because we could be loading into
6051 a float-only register. In this case OLD's mode is correct.
6052
6053 Consider an immediate integer: it has VOIDmode. Here we need
6054 to get a mode from something else.
6055
6056 In some cases, there is a fourth mode, the operand's
6057 containing mode. If the insn specifies a containing mode for
6058 this operand, it overrides all others.
6059
6060 I am not sure whether the algorithm here is always right,
6061 but it does the right things in those cases. */
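/* For example, when reloading the constant (const_int 4) for an
   SImode operand, OLD has VOIDmode, so the mode used below comes from
   reload_inmode[j], i.e. SImode.  */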
6062
6063 mode = GET_MODE (old);
6064 if (mode == VOIDmode)
6065 mode = reload_inmode[j];
6066
6067 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6068 /* If we need a secondary register for this operation, see if
6069 the value is already in a register in that class. Don't
6070 do this if the secondary register will be used as a scratch
6071 register. */
6072
6073 if (reload_secondary_in_reload[j] >= 0
6074 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6075 && optimize)
6076 oldequiv
6077 = find_equiv_reg (old, insn,
6078 reload_reg_class[reload_secondary_in_reload[j]],
6079 -1, NULL_PTR, 0, mode);
6080 #endif
6081
6082 /* If reloading from memory, see if there is a register
6083 that already holds the same value. If so, reload from there.
6084 We can pass 0 as the reload_reg_p argument because
6085 any other reload has either already been emitted,
6086 in which case find_equiv_reg will see the reload-insn,
6087 or has yet to be emitted, in which case it doesn't matter
6088 because we will use this equiv reg right away. */
6089
6090 if (oldequiv == 0 && optimize
6091 && (GET_CODE (old) == MEM
6092 || (GET_CODE (old) == REG
6093 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6094 && reg_renumber[REGNO (old)] < 0)))
6095 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6096 -1, NULL_PTR, 0, mode);
6097
6098 if (oldequiv)
6099 {
6100 int regno = true_regnum (oldequiv);
6101
6102 /* If OLDEQUIV is a spill register, don't use it for this
6103 if any other reload needs it at an earlier stage of this insn
6104 or at this stage. */
6105 if (spill_reg_order[regno] >= 0
6106 && (! reload_reg_free_p (regno, reload_opnum[j],
6107 reload_when_needed[j])
6108 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6109 reload_when_needed[j])))
6110 oldequiv = 0;
6111
6112 /* If OLDEQUIV is not a spill register,
6113 don't use it if any other reload wants it. */
6114 if (spill_reg_order[regno] < 0)
6115 {
6116 int k;
6117 for (k = 0; k < n_reloads; k++)
6118 if (reload_reg_rtx[k] != 0 && k != j
6119 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6120 oldequiv))
6121 {
6122 oldequiv = 0;
6123 break;
6124 }
6125 }
6126
6127 /* If it is no cheaper to copy from OLDEQUIV into the
6128 reload register than it would be to move from memory,
6129 don't use it. Likewise, if we need a secondary register
6130 or memory. */
6131
6132 if (oldequiv != 0
6133 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6134 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6135 reload_reg_class[j])
6136 >= MEMORY_MOVE_COST (mode)))
6137 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6138 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6139 mode, oldequiv)
6140 != NO_REGS)
6141 #endif
6142 #ifdef SECONDARY_MEMORY_NEEDED
6143 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6144 REGNO_REG_CLASS (regno),
6145 mode)
6146 #endif
6147 ))
6148 oldequiv = 0;
6149 }
6150
6151 if (oldequiv == 0)
6152 oldequiv = old;
6153 else if (GET_CODE (oldequiv) == REG)
6154 oldequiv_reg = oldequiv;
6155 else if (GET_CODE (oldequiv) == SUBREG)
6156 oldequiv_reg = SUBREG_REG (oldequiv);
6157
6158 /* If we are reloading from a register that was recently stored in
6159 with an output-reload, see if we can prove there was
6160 actually no need to store the old value in it. */
6161
6162 if (optimize && GET_CODE (oldequiv) == REG
6163 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6164 && spill_reg_order[REGNO (oldequiv)] >= 0
6165 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6166 && find_reg_note (insn, REG_DEAD, reload_in[j])
6167 /* This is unsafe if operand occurs more than once in current
6168 insn. Perhaps some occurrences weren't reloaded. */
6169 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6170 delete_output_reload
6171 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6172
6173 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6174 then load RELOADREG from OLDEQUIV. Note that we cannot use
6175 gen_lowpart_common since it can do the wrong thing when
6176 RELOADREG has a multi-word mode. Note that RELOADREG
6177 must always be a REG here. */
6178
6179 if (GET_MODE (reloadreg) != mode)
6180 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6181 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6182 oldequiv = SUBREG_REG (oldequiv);
6183 if (GET_MODE (oldequiv) != VOIDmode
6184 && mode != GET_MODE (oldequiv))
6185 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6186
6187 /* Switch to the right place to emit the reload insns. */
6188 switch (reload_when_needed[j])
6189 {
6190 case RELOAD_OTHER:
6191 where = &other_input_reload_insns;
6192 break;
6193 case RELOAD_FOR_INPUT:
6194 where = &input_reload_insns[reload_opnum[j]];
6195 break;
6196 case RELOAD_FOR_INPUT_ADDRESS:
6197 where = &input_address_reload_insns[reload_opnum[j]];
6198 break;
6199 case RELOAD_FOR_INPADDR_ADDRESS:
6200 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6201 break;
6202 case RELOAD_FOR_OUTPUT_ADDRESS:
6203 where = &output_address_reload_insns[reload_opnum[j]];
6204 break;
6205 case RELOAD_FOR_OUTADDR_ADDRESS:
6206 where = &outaddr_address_reload_insns[reload_opnum[j]];
6207 break;
6208 case RELOAD_FOR_OPERAND_ADDRESS:
6209 where = &operand_reload_insns;
6210 break;
6211 case RELOAD_FOR_OPADDR_ADDR:
6212 where = &other_operand_reload_insns;
6213 break;
6214 case RELOAD_FOR_OTHER_ADDRESS:
6215 where = &other_input_address_reload_insns;
6216 break;
6217 default:
6218 abort ();
6219 }
6220
6221 push_to_sequence (*where);
6222 special = 0;
6223
6224 /* Auto-increment addresses must be reloaded in a special way. */
6225 if (GET_CODE (oldequiv) == POST_INC
6226 || GET_CODE (oldequiv) == POST_DEC
6227 || GET_CODE (oldequiv) == PRE_INC
6228 || GET_CODE (oldequiv) == PRE_DEC)
6229 {
6230 /* We are not going to bother supporting the case where an
6231 incremented register can't be copied directly from
6232 OLDEQUIV since this seems highly unlikely. */
6233 if (reload_secondary_in_reload[j] >= 0)
6234 abort ();
6235 /* Prevent normal processing of this reload. */
6236 special = 1;
6237 /* Output a special code sequence for this case. */
6238 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6239 }
6240
6241 /* If we are reloading a pseudo-register that was set by the previous
6242 insn, see if we can get rid of that pseudo-register entirely
6243 by redirecting the previous insn into our reload register. */
6244
6245 else if (optimize && GET_CODE (old) == REG
6246 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6247 && dead_or_set_p (insn, old)
6248 /* This is unsafe if some other reload
6249 uses the same reg first. */
6250 && reload_reg_free_before_p (REGNO (reloadreg),
6251 reload_opnum[j],
6252 reload_when_needed[j]))
6253 {
6254 rtx temp = PREV_INSN (insn);
6255 while (temp && GET_CODE (temp) == NOTE)
6256 temp = PREV_INSN (temp);
6257 if (temp
6258 && GET_CODE (temp) == INSN
6259 && GET_CODE (PATTERN (temp)) == SET
6260 && SET_DEST (PATTERN (temp)) == old
6261 /* Make sure we can access insn_operand_constraint. */
6262 && asm_noperands (PATTERN (temp)) < 0
6263 /* This is unsafe if prev insn rejects our reload reg. */
6264 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6265 reloadreg)
6266 /* This is unsafe if operand occurs more than once in current
6267 insn. Perhaps some occurrences aren't reloaded. */
6268 && count_occurrences (PATTERN (insn), old) == 1
6269 /* Don't risk splitting a matching pair of operands. */
6270 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6271 {
6272 /* Store into the reload register instead of the pseudo. */
6273 SET_DEST (PATTERN (temp)) = reloadreg;
6274 /* If these are the only uses of the pseudo reg,
6275 pretend for GDB it lives in the reload reg we used. */
6276 if (REG_N_DEATHS (REGNO (old)) == 1
6277 && REG_N_SETS (REGNO (old)) == 1)
6278 {
6279 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6280 alter_reg (REGNO (old), -1);
6281 }
6282 special = 1;
6283 }
6284 }
6285
6286 /* We can't do that, so output an insn to load RELOADREG. */
6287
6288 if (! special)
6289 {
6290 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6291 rtx second_reload_reg = 0;
6292 enum insn_code icode;
6293
6294 /* If we have a secondary reload, pick up the secondary register
6295 and icode, if any. If OLDEQUIV and OLD are different or
6296 if this is an in-out reload, recompute whether or not we
6297 still need a secondary register and what the icode should
6298 be. If we still need a secondary register and the class or
6299 icode is different, go back to reloading from OLD if using
6300 OLDEQUIV means that we got the wrong type of register. We
6301 cannot have different class or icode due to an in-out reload
6302 because we don't make such reloads when both the input and
6303 output need secondary reload registers. */
6304
6305 if (reload_secondary_in_reload[j] >= 0)
6306 {
6307 int secondary_reload = reload_secondary_in_reload[j];
6308 rtx real_oldequiv = oldequiv;
6309 rtx real_old = old;
6310
6311 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6312 and similarly for OLD.
6313 See comments in get_secondary_reload in reload.c. */
6314 if (GET_CODE (oldequiv) == REG
6315 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6316 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6317 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6318
6319 if (GET_CODE (old) == REG
6320 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6321 && reg_equiv_mem[REGNO (old)] != 0)
6322 real_old = reg_equiv_mem[REGNO (old)];
6323
6324 second_reload_reg = reload_reg_rtx[secondary_reload];
6325 icode = reload_secondary_in_icode[j];
6326
6327 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6328 || (reload_in[j] != 0 && reload_out[j] != 0))
6329 {
6330 enum reg_class new_class
6331 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6332 mode, real_oldequiv);
6333
6334 if (new_class == NO_REGS)
6335 second_reload_reg = 0;
6336 else
6337 {
6338 enum insn_code new_icode;
6339 enum machine_mode new_mode;
6340
6341 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6342 REGNO (second_reload_reg)))
6343 oldequiv = old, real_oldequiv = real_old;
6344 else
6345 {
6346 new_icode = reload_in_optab[(int) mode];
6347 if (new_icode != CODE_FOR_nothing
6348 && ((insn_operand_predicate[(int) new_icode][0]
6349 && ! ((*insn_operand_predicate[(int) new_icode][0])
6350 (reloadreg, mode)))
6351 || (insn_operand_predicate[(int) new_icode][1]
6352 && ! ((*insn_operand_predicate[(int) new_icode][1])
6353 (real_oldequiv, mode)))))
6354 new_icode = CODE_FOR_nothing;
6355
6356 if (new_icode == CODE_FOR_nothing)
6357 new_mode = mode;
6358 else
6359 new_mode = insn_operand_mode[(int) new_icode][2];
6360
6361 if (GET_MODE (second_reload_reg) != new_mode)
6362 {
6363 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6364 new_mode))
6365 oldequiv = old, real_oldequiv = real_old;
6366 else
6367 second_reload_reg
6368 = gen_rtx (REG, new_mode,
6369 REGNO (second_reload_reg));
6370 }
6371 }
6372 }
6373 }
6374
6375 /* If we still need a secondary reload register, check
6376 to see if it is being used as a scratch or intermediate
6377 register and generate code appropriately. If we need
6378 a scratch register, use REAL_OLDEQUIV since the form of
6379 the insn may depend on the actual address if it is
6380 a MEM. */
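/* Concretely (a sketch of the two cases handled just below): when ICODE
   is defined, the secondary register acts as a scratch and a single insn
   of the form (reloadreg, real_oldequiv, second_reload_reg) is emitted;
   otherwise the secondary register is an intermediate: it is loaded
   first (possibly with the help of a tertiary scratch register) and then
   becomes the source of the main input reload. */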
6381
6382 if (second_reload_reg)
6383 {
6384 if (icode != CODE_FOR_nothing)
6385 {
6386 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6387 second_reload_reg));
6388 special = 1;
6389 }
6390 else
6391 {
6392 /* See if we need a scratch register to load the
6393 intermediate register (a tertiary reload). */
6394 enum insn_code tertiary_icode
6395 = reload_secondary_in_icode[secondary_reload];
6396
6397 if (tertiary_icode != CODE_FOR_nothing)
6398 {
6399 rtx third_reload_reg
6400 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6401
6402 emit_insn ((GEN_FCN (tertiary_icode)
6403 (second_reload_reg, real_oldequiv,
6404 third_reload_reg)));
6405 }
6406 else
6407 gen_reload (second_reload_reg, oldequiv,
6408 reload_opnum[j],
6409 reload_when_needed[j]);
6410
6411 oldequiv = second_reload_reg;
6412 }
6413 }
6414 }
6415 #endif
6416
6417 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6418 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6419 reload_when_needed[j]);
6420
6421 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6422 /* We may have to make a REG_DEAD note for the secondary reload
6423 register in the insns we just made. Find the last insn that
6424 mentioned the register. */
6425 if (! special && second_reload_reg
6426 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6427 {
6428 rtx prev;
6429
6430 for (prev = get_last_insn (); prev;
6431 prev = PREV_INSN (prev))
6432 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6433 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6434 PATTERN (prev)))
6435 {
6436 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6437 second_reload_reg,
6438 REG_NOTES (prev));
6439 break;
6440 }
6441 }
6442 #endif
6443 }
6444
6445 this_reload_insn = get_last_insn ();
6446 /* End this sequence. */
6447 *where = get_insns ();
6448 end_sequence ();
6449 }
6450
6451 /* Add a note saying the input reload reg
6452 dies in this insn, if anyone cares. */
6453 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6454 if (old != 0
6455 && reload_reg_rtx[j] != old
6456 && reload_reg_rtx[j] != 0
6457 && reload_out[j] == 0
6458 && ! reload_inherited[j]
6459 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6460 {
6461 register rtx reloadreg = reload_reg_rtx[j];
6462
6463 #if 0
6464 /* We can't abort here because we need to support this for sched.c.
6465 It's not terrible to miss a REG_DEAD note, but we should try
6466 to figure out how to do this correctly. */
6467 /* The code below is incorrect for address-only reloads. */
6468 if (reload_when_needed[j] != RELOAD_OTHER
6469 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6470 abort ();
6471 #endif
6472
6473 /* Add a death note to this insn, for an input reload. */
6474
6475 if ((reload_when_needed[j] == RELOAD_OTHER
6476 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6477 && ! dead_or_set_p (insn, reloadreg))
6478 REG_NOTES (insn)
6479 = gen_rtx (EXPR_LIST, REG_DEAD,
6480 reloadreg, REG_NOTES (insn));
6481 }
6482
6483 /* When we inherit a reload, the last marked death of the reload reg
6484 may no longer really be a death. */
6485 if (reload_reg_rtx[j] != 0
6486 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6487 && reload_inherited[j])
6488 {
6489 /* Handle inheriting an output reload.
6490 Remove the death note from the output reload insn. */
6491 if (reload_spill_index[j] >= 0
6492 && GET_CODE (reload_in[j]) == REG
6493 && spill_reg_store[reload_spill_index[j]] != 0
6494 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6495 REG_DEAD, REGNO (reload_reg_rtx[j])))
6496 remove_death (REGNO (reload_reg_rtx[j]),
6497 spill_reg_store[reload_spill_index[j]]);
6498 /* Likewise for input reloads that were inherited. */
6499 else if (reload_spill_index[j] >= 0
6500 && GET_CODE (reload_in[j]) == REG
6501 && spill_reg_store[reload_spill_index[j]] == 0
6502 && reload_inheritance_insn[j] != 0
6503 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6504 REGNO (reload_reg_rtx[j])))
6505 remove_death (REGNO (reload_reg_rtx[j]),
6506 reload_inheritance_insn[j]);
6507 else
6508 {
6509 rtx prev;
6510
6511 /* We got this register from find_equiv_reg.
6512 Search back for its last death note and get rid of it.
6513 But don't search back too far.
6514 Don't go past a place where this reg is set,
6515 since a death note before that remains valid. */
6516 for (prev = PREV_INSN (insn);
6517 prev && GET_CODE (prev) != CODE_LABEL;
6518 prev = PREV_INSN (prev))
6519 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6520 && dead_or_set_p (prev, reload_reg_rtx[j]))
6521 {
6522 if (find_regno_note (prev, REG_DEAD,
6523 REGNO (reload_reg_rtx[j])))
6524 remove_death (REGNO (reload_reg_rtx[j]), prev);
6525 break;
6526 }
6527 }
6528 }
6529
6530 /* We might have used find_equiv_reg above to choose an alternate
6531 place from which to reload. If so, and it died, we need to remove
6532 that death and move it to one of the insns we just made. */
6533
6534 if (oldequiv_reg != 0
6535 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6536 {
6537 rtx prev, prev1;
6538
6539 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6540 prev = PREV_INSN (prev))
6541 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6542 && dead_or_set_p (prev, oldequiv_reg))
6543 {
6544 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6545 {
6546 for (prev1 = this_reload_insn;
6547 prev1; prev1 = PREV_INSN (prev1))
6548 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
6549 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6550 PATTERN (prev1)))
6551 {
6552 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6553 oldequiv_reg,
6554 REG_NOTES (prev1));
6555 break;
6556 }
6557 remove_death (REGNO (oldequiv_reg), prev);
6558 }
6559 break;
6560 }
6561 }
6562 #endif
6563
6564 /* If we are reloading a register that was recently stored in with an
6565 output-reload, see if we can prove there was
6566 actually no need to store the old value in it. */
6567
6568 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6569 && reload_in[j] != 0
6570 && GET_CODE (reload_in[j]) == REG
6571 #if 0
6572 /* There doesn't seem to be any reason to restrict this to pseudos
6573 and doing so loses in the case where we are copying from a
6574 register of the wrong class. */
6575 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6576 #endif
6577 && spill_reg_store[reload_spill_index[j]] != 0
6578 /* This is unsafe if some other reload uses the same reg first. */
6579 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6580 reload_opnum[j], reload_when_needed[j])
6581 && dead_or_set_p (insn, reload_in[j])
6582 /* This is unsafe if operand occurs more than once in current
6583 insn. Perhaps some occurrences weren't reloaded. */
6584 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6585 delete_output_reload (insn, j,
6586 spill_reg_store[reload_spill_index[j]]);
6587
6588 /* Input-reloading is done. Now do output-reloading,
6589 storing the value from the reload-register after the main insn
6590 if reload_out[j] is nonzero.
6591
6592 ??? At some point we need to support handling output reloads of
6593 JUMP_INSNs or insns that set cc0. */
6594 old = reload_out[j];
6595 if (old != 0
6596 && reload_reg_rtx[j] != old
6597 && reload_reg_rtx[j] != 0)
6598 {
6599 register rtx reloadreg = reload_reg_rtx[j];
6600 register rtx second_reloadreg = 0;
6601 rtx note, p;
6602 enum machine_mode mode;
6603 int special = 0;
6604
6605 /* An output operand that dies right away does need a reload,
6606 but need not be copied from it. Show the new location in the
6607 REG_UNUSED note. */
6608 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6609 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6610 {
6611 XEXP (note, 0) = reload_reg_rtx[j];
6612 continue;
6613 }
6614 /* Likewise for a SUBREG of an operand that dies. */
6615 else if (GET_CODE (old) == SUBREG
6616 && GET_CODE (SUBREG_REG (old)) == REG
6617 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6618 SUBREG_REG (old))))
6619 {
6620 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6621 reload_reg_rtx[j]);
6622 continue;
6623 }
6624 else if (GET_CODE (old) == SCRATCH)
6625 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6626 but we don't want to make an output reload. */
6627 continue;
6628
6629 #if 0
6630 /* Strip off of OLD any size-increasing SUBREGs such as
6631 (SUBREG:SI foo:QI 0). */
6632
6633 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6634 && (GET_MODE_SIZE (GET_MODE (old))
6635 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6636 old = SUBREG_REG (old);
6637 #endif
6638
6639 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6640 if (GET_CODE (insn) == JUMP_INSN)
6641 abort ();
6642
6643 if (reload_when_needed[j] == RELOAD_OTHER)
6644 start_sequence ();
6645 else
6646 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6647
6648 /* Determine the mode to reload in.
6649 See comments above (for input reloading). */
6650
6651 mode = GET_MODE (old);
6652 if (mode == VOIDmode)
6653 {
6654 /* VOIDmode should never happen for an output. */
6655 if (asm_noperands (PATTERN (insn)) < 0)
6656 /* It's the compiler's fault. */
6657 fatal_insn ("VOIDmode on an output", insn);
6658 error_for_asm (insn, "output operand is constant in `asm'");
6659 /* Prevent crash--use something we know is valid. */
6660 mode = word_mode;
6661 old = gen_rtx (REG, mode, REGNO (reloadreg));
6662 }
6663
6664 if (GET_MODE (reloadreg) != mode)
6665 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6666
6667 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6668
6669 /* If we need two reload regs, set RELOADREG to the intermediate
6670 one, since it will be stored into OLD. We might need a secondary
6671 register only for an input reload, so check again here. */
6672
6673 if (reload_secondary_out_reload[j] >= 0)
6674 {
6675 rtx real_old = old;
6676
6677 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6678 && reg_equiv_mem[REGNO (old)] != 0)
6679 real_old = reg_equiv_mem[REGNO (old)];
6680
6681 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6682 mode, real_old)
6683 != NO_REGS))
6684 {
6685 second_reloadreg = reloadreg;
6686 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6687
6688 /* See if RELOADREG is to be used as a scratch register
6689 or as an intermediate register. */
6690 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6691 {
6692 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6693 (real_old, second_reloadreg, reloadreg)));
6694 special = 1;
6695 }
6696 else
6697 {
6698 /* See if we need both a scratch and intermediate reload
6699 register. */
6700
6701 int secondary_reload = reload_secondary_out_reload[j];
6702 enum insn_code tertiary_icode
6703 = reload_secondary_out_icode[secondary_reload];
6704
6705 if (GET_MODE (reloadreg) != mode)
6706 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6707
6708 if (tertiary_icode != CODE_FOR_nothing)
6709 {
6710 rtx third_reloadreg
6711 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6712 rtx tem;
6713
6714 /* Copy the primary reload reg to the secondary reload reg
6715 (note that these have been swapped above), then copy the
6716 secondary reload reg to OLD using our insn. */
6717
6718 /* If REAL_OLD is a paradoxical SUBREG, remove it
6719 and try to put the opposite SUBREG on
6720 RELOADREG. */
6721 if (GET_CODE (real_old) == SUBREG
6722 && (GET_MODE_SIZE (GET_MODE (real_old))
6723 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6724 && 0 != (tem = gen_lowpart_common
6725 (GET_MODE (SUBREG_REG (real_old)),
6726 reloadreg)))
6727 real_old = SUBREG_REG (real_old), reloadreg = tem;
6728
6729 gen_reload (reloadreg, second_reloadreg,
6730 reload_opnum[j], reload_when_needed[j]);
6731 emit_insn ((GEN_FCN (tertiary_icode)
6732 (real_old, reloadreg, third_reloadreg)));
6733 special = 1;
6734 }
6735
6736 else
6737 /* Copy between the reload regs here and then to
6738 OUT later. */
6739
6740 gen_reload (reloadreg, second_reloadreg,
6741 reload_opnum[j], reload_when_needed[j]);
6742 }
6743 }
6744 }
6745 #endif
6746
6747 /* Output the last reload insn. */
6748 if (! special)
6749 gen_reload (old, reloadreg, reload_opnum[j],
6750 reload_when_needed[j]);
6751
6752 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6753 /* If final will look at death notes for this reg,
6754 put one on the last output-reload insn to use it. Similarly
6755 for any secondary register. */
6756 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6757 for (p = get_last_insn (); p; p = PREV_INSN (p))
6758 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6759 && reg_overlap_mentioned_for_reload_p (reloadreg,
6760 PATTERN (p)))
6761 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6762 reloadreg, REG_NOTES (p));
6763
6764 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6765 if (! special && second_reloadreg
6766 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6767 for (p = get_last_insn (); p; p = PREV_INSN (p))
6768 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6769 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6770 PATTERN (p)))
6771 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6772 second_reloadreg, REG_NOTES (p));
6773 #endif
6774 #endif
6775 /* Look at all insns we emitted, just to be safe. */
6776 for (p = get_insns (); p; p = NEXT_INSN (p))
6777 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6778 {
6779 /* If this output reload doesn't come from a spill reg,
6780 clear any memory of reloaded copies of the pseudo reg.
6781 If this output reload comes from a spill reg,
6782 reg_has_output_reload will make this do nothing. */
6783 note_stores (PATTERN (p), forget_old_reloads_1);
6784
6785 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6786 && reload_spill_index[j] >= 0)
6787 new_spill_reg_store[reload_spill_index[j]] = p;
6788 }
6789
6790 if (reload_when_needed[j] == RELOAD_OTHER)
6791 {
6792 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6793 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6794 }
6795 else
6796 output_reload_insns[reload_opnum[j]] = get_insns ();
6797
6798 end_sequence ();
6799 }
6800 }
6801
6802 /* Now write all the insns we made for reloads in the order expected by
6803 the allocation functions. Prior to the insn being reloaded, we write
6804 the following reloads:
6805
6806 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6807
6808 RELOAD_OTHER reloads.
6809
6810 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6811 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6812 RELOAD_FOR_INPUT reload for the operand.
6813
6814 RELOAD_FOR_OPADDR_ADDRS reloads.
6815
6816 RELOAD_FOR_OPERAND_ADDRESS reloads.
6817
6818 After the insn being reloaded, we write the following:
6819
6820 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6821 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6822 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6823 reloads for the operand. The RELOAD_OTHER output reloads are
6824 output in descending order by reload number. */
6825
6826 emit_insns_before (other_input_address_reload_insns, before_insn);
6827 emit_insns_before (other_input_reload_insns, before_insn);
6828
6829 for (j = 0; j < reload_n_operands; j++)
6830 {
6831 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6832 emit_insns_before (input_address_reload_insns[j], before_insn);
6833 emit_insns_before (input_reload_insns[j], before_insn);
6834 }
6835
6836 emit_insns_before (other_operand_reload_insns, before_insn);
6837 emit_insns_before (operand_reload_insns, before_insn);
6838
6839 for (j = 0; j < reload_n_operands; j++)
6840 {
6841 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6842 emit_insns_before (output_address_reload_insns[j], following_insn);
6843 emit_insns_before (output_reload_insns[j], following_insn);
6844 emit_insns_before (other_output_reload_insns[j], following_insn);
6845 }
6846
6847 /* Move death notes from INSN
6848 to output-operand-address and output reload insns. */
6849 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6850 {
6851 rtx insn1;
6852 /* Loop over those insns, last ones first. */
6853 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6854 insn1 = PREV_INSN (insn1))
6855 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6856 {
6857 rtx source = SET_SRC (PATTERN (insn1));
6858 rtx dest = SET_DEST (PATTERN (insn1));
6859
6860 /* The note we will examine next. */
6861 rtx reg_notes = REG_NOTES (insn);
6862 /* The place that pointed to this note. */
6863 rtx *prev_reg_note = &REG_NOTES (insn);
6864
6865 /* If the note is for something used in the source of this
6866 reload insn, or in the output address, move the note. */
6867 while (reg_notes)
6868 {
6869 rtx next_reg_notes = XEXP (reg_notes, 1);
6870 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6871 && GET_CODE (XEXP (reg_notes, 0)) == REG
6872 && ((GET_CODE (dest) != REG
6873 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6874 dest))
6875 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6876 source)))
6877 {
6878 *prev_reg_note = next_reg_notes;
6879 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6880 REG_NOTES (insn1) = reg_notes;
6881 }
6882 else
6883 prev_reg_note = &XEXP (reg_notes, 1);
6884
6885 reg_notes = next_reg_notes;
6886 }
6887 }
6888 }
6889 #endif
6890
6891 /* For all the spill regs newly reloaded in this instruction,
6892 record what they were reloaded from, so subsequent instructions
6893 can inherit the reloads.
6894
6895 Update spill_reg_store for the reloads of this insn.
6896 Copy the elements that were updated in the loop above. */
6897
6898 for (j = 0; j < n_reloads; j++)
6899 {
6900 register int r = reload_order[j];
6901 register int i = reload_spill_index[r];
6902
6903 /* I is nonneg if this reload used one of the spill regs.
6904 If reload_reg_rtx[r] is 0, this is an optional reload
6905 that we opted to ignore. */
6906
6907 if (i >= 0 && reload_reg_rtx[r] != 0)
6908 {
6909 int nr
6910 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6911 int k;
6912 int part_reaches_end = 0;
6913 int all_reaches_end = 1;
6914
6915 /* For a multi register reload, we need to check if all or part
6916 of the value lives to the end. */
6917 for (k = 0; k < nr; k++)
6918 {
6919 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6920 reload_when_needed[r]))
6921 part_reaches_end = 1;
6922 else
6923 all_reaches_end = 0;
6924 }
6925
6926 /* Ignore reloads that don't reach the end of the insn in
6927 their entirety. */
6928 if (all_reaches_end)
6929 {
6930 /* First, clear out memory of what used to be in this spill reg.
6931 If consecutive registers are used, clear them all. */
6932
6933 for (k = 0; k < nr; k++)
6934 {
6935 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6936 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6937 }
6938
6939 /* Maybe the spill reg contains a copy of reload_out. */
6940 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6941 {
6942 register int nregno = REGNO (reload_out[r]);
6943 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6944 : HARD_REGNO_NREGS (nregno,
6945 GET_MODE (reload_reg_rtx[r])));
6946
6947 spill_reg_store[i] = new_spill_reg_store[i];
6948 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6949
6950 /* If NREGNO is a hard register, it may occupy more than
6951 one register. If it does, say what is in the
6952 rest of the registers assuming that both registers
6953 agree on how many words the object takes. If not,
6954 invalidate the subsequent registers. */
6955
6956 if (nregno < FIRST_PSEUDO_REGISTER)
6957 for (k = 1; k < nnr; k++)
6958 reg_last_reload_reg[nregno + k]
6959 = (nr == nnr
6960 ? gen_rtx (REG,
6961 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6962 REGNO (reload_reg_rtx[r]) + k)
6963 : 0);
6964
6965 /* Now do the inverse operation. */
6966 for (k = 0; k < nr; k++)
6967 {
6968 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6969 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6970 ? nregno
6971 : nregno + k);
6972 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6973 }
6974 }
6975
6976 /* Maybe the spill reg contains a copy of reload_in. Only do
6977 something if there will not be an output reload for
6978 the register being reloaded. */
6979 else if (reload_out[r] == 0
6980 && reload_in[r] != 0
6981 && ((GET_CODE (reload_in[r]) == REG
6982 && ! reg_has_output_reload[REGNO (reload_in[r])])
6983 || (GET_CODE (reload_in_reg[r]) == REG
6984 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6985 {
6986 register int nregno;
6987 int nnr;
6988
6989 if (GET_CODE (reload_in[r]) == REG)
6990 nregno = REGNO (reload_in[r]);
6991 else
6992 nregno = REGNO (reload_in_reg[r]);
6993
6994 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6995 : HARD_REGNO_NREGS (nregno,
6996 GET_MODE (reload_reg_rtx[r])));
6997
6998 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6999
7000 if (nregno < FIRST_PSEUDO_REGISTER)
7001 for (k = 1; k < nnr; k++)
7002 reg_last_reload_reg[nregno + k]
7003 = (nr == nnr
7004 ? gen_rtx (REG,
7005 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7006 REGNO (reload_reg_rtx[r]) + k)
7007 : 0);
7008
7009 /* Unless we inherited this reload, show we haven't
7010 recently done a store. */
7011 if (! reload_inherited[r])
7012 spill_reg_store[i] = 0;
7013
7014 for (k = 0; k < nr; k++)
7015 {
7016 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7017 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7018 ? nregno
7019 : nregno + k);
7020 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7021 = insn;
7022 }
7023 }
7024 }
7025
7026 /* However, if part of the reload reaches the end, then we must
7027 invalidate the old info for the part that survives to the end. */
7028 else if (part_reaches_end)
7029 {
7030 for (k = 0; k < nr; k++)
7031 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7032 reload_opnum[r],
7033 reload_when_needed[r]))
7034 {
7035 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7036 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7037 }
7038 }
7039 }
7040
7041 /* The following if-statement was #if 0'd in 1.34 (or before...).
7042 It's reenabled in 1.35 because supposedly nothing else
7043 deals with this problem. */
7044
7045 /* If a register gets output-reloaded from a non-spill register,
7046 that invalidates any previous reloaded copy of it.
7047 But forget_old_reloads_1 won't get to see it, because
7048 it thinks only about the original insn. So invalidate it here. */
7049 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7050 {
7051 register int nregno = REGNO (reload_out[r]);
7052 if (nregno >= FIRST_PSEUDO_REGISTER)
7053 reg_last_reload_reg[nregno] = 0;
7054 else
7055 {
7056 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7057
7058 while (num_regs-- > 0)
7059 reg_last_reload_reg[nregno + num_regs] = 0;
7060 }
7061 }
7062 }
7063 }
7064 \f
7065 /* Emit code to perform a reload from IN (which may be a reload register) to
7066 OUT (which may also be a reload register). IN or OUT is from operand
7067 OPNUM with reload type TYPE.
7068
7069 Returns first insn emitted. */
7070
7071 rtx
7072 gen_reload (out, in, opnum, type)
7073 rtx out;
7074 rtx in;
7075 int opnum;
7076 enum reload_type type;
7077 {
7078 rtx last = get_last_insn ();
7079 rtx tem;
7080
7081 /* If IN is a paradoxical SUBREG, remove it and try to put the
7082 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7083 if (GET_CODE (in) == SUBREG
7084 && (GET_MODE_SIZE (GET_MODE (in))
7085 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7086 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7087 in = SUBREG_REG (in), out = tem;
7088 else if (GET_CODE (out) == SUBREG
7089 && (GET_MODE_SIZE (GET_MODE (out))
7090 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7091 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7092 out = SUBREG_REG (out), in = tem;
7093
7094 /* How to do this reload can get quite tricky. Normally, we are being
7095 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7096 register that didn't get a hard register. In that case we can just
7097 call emit_move_insn.
7098
7099 We can also be asked to reload a PLUS that adds a register or a MEM to
7100 another register, constant or MEM. This can occur during frame pointer
7101 elimination and while reloading addresses. This case is handled by
7102 trying to emit a single insn to perform the add. If it is not valid,
7103 we use a two insn sequence.
7104
7105 Finally, we could be called to handle an 'o' constraint by putting
7106 an address into a register. In that case, we first try to do this
7107 with a named pattern of "reload_load_address". If no such pattern
7108 exists, we just emit a SET insn and hope for the best (it will normally
7109 be valid on machines that use 'o').
7110
7111 This entire process is made complex by two facts: reload will never
7112 process the insns we generate here, so we must ensure that they will
7113 fit their constraints; and parts of IN might be being reloaded
7114 separately and replaced with spill registers.
7115 Because of this, we are, in some sense, just guessing the right approach
7116 here. The one listed above seems to work.
7117
7118 ??? At some point, this whole thing needs to be rethought. */
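/* A concrete, purely illustrative case of the PLUS path: during frame
   pointer elimination we may be asked to reload something like
   (plus:SI (reg:SI sp) (const_int 16)) into OUT. We first try the
   single insn (set OUT (plus:SI (reg:SI sp) (const_int 16))) and keep
   it if recog and constrain_operands accept it; otherwise we fall back
   to moving one operand into OUT and then adding the other to it. */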
7119
7120 if (GET_CODE (in) == PLUS
7121 && (GET_CODE (XEXP (in, 0)) == REG
7122 || GET_CODE (XEXP (in, 0)) == SUBREG
7123 || GET_CODE (XEXP (in, 0)) == MEM)
7124 && (GET_CODE (XEXP (in, 1)) == REG
7125 || GET_CODE (XEXP (in, 1)) == SUBREG
7126 || CONSTANT_P (XEXP (in, 1))
7127 || GET_CODE (XEXP (in, 1)) == MEM))
7128 {
7129 /* We need to compute the sum of a register or a MEM and another
7130 register, constant, or MEM, and put it into the reload
7131 register. The best possible way of doing this is if the machine
7132 has a three-operand ADD insn that accepts the required operands.
7133
7134 The simplest approach is to try to generate such an insn and see if it
7135 is recognized and matches its constraints. If so, it can be used.
7136
7137 It might be better not to actually emit the insn unless it is valid,
7138 but we need to pass the insn as an operand to `recog' and
7139 `insn_extract' and it is simpler to emit and then delete the insn if
7140 not valid than to dummy things up. */
7141
7142 rtx op0, op1, tem, insn;
7143 int code;
7144
7145 op0 = find_replacement (&XEXP (in, 0));
7146 op1 = find_replacement (&XEXP (in, 1));
7147
7148 /* Since constraint checking is strict, commutativity won't be
7149 checked, so we need to do that here to avoid spurious failure
7150 if the add instruction is two-address and the second operand
7151 of the add is the same as the reload reg, which is frequently
7152 the case. If the insn would be A = B + A, rearrange it so
7153 it will be A = A + B as constrain_operands expects. */
7154
7155 if (GET_CODE (XEXP (in, 1)) == REG
7156 && REGNO (out) == REGNO (XEXP (in, 1)))
7157 tem = op0, op0 = op1, op1 = tem;
7158
7159 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7160 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7161
7162 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7163 code = recog_memoized (insn);
7164
7165 if (code >= 0)
7166 {
7167 insn_extract (insn);
7168 /* We want constrain operands to treat this insn strictly in
7169 its validity determination, i.e., the way it would after reload
7170 has completed. */
7171 if (constrain_operands (code, 1))
7172 return insn;
7173 }
7174
7175 delete_insns_since (last);
7176
7177 /* If that failed, we must use a conservative two-insn sequence:
7178 use a move to copy the constant, MEM, or pseudo register to the reload
7179 register, since "move" will be able to handle an arbitrary operand,
7180 unlike add, which can't in general. Then add the registers.
7181
7182 If there is another way to do this for a specific machine, a
7183 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7184 we emit below. */
7185
7186 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7187 || (GET_CODE (op1) == REG
7188 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7189 tem = op0, op0 = op1, op1 = tem;
7190
7191 gen_reload (out, op0, opnum, type);
7192
7193 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7194 This fixes a problem on the 32K where the stack pointer cannot
7195 be used as an operand of an add insn. */
7196
7197 if (rtx_equal_p (op0, op1))
7198 op1 = out;
7199
7200 insn = emit_insn (gen_add2_insn (out, op1));
7201
7202 /* If that failed, copy the address register to the reload register.
7203 Then add the constant to the reload register. */
7204
7205 code = recog_memoized (insn);
7206
7207 if (code >= 0)
7208 {
7209 insn_extract (insn);
7210 /* We want constrain operands to treat this insn strictly in
7211 its validity determination, i.e., the way it would after reload
7212 has completed. */
7213 if (constrain_operands (code, 1))
7214 return insn;
7215 }
7216
7217 delete_insns_since (last);
7218
7219 gen_reload (out, op1, opnum, type);
7220 emit_insn (gen_add2_insn (out, op0));
7221 }
7222
7223 #ifdef SECONDARY_MEMORY_NEEDED
7224 /* If we need a memory location to do the move, do it that way. */
7225 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7226 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7227 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7228 REGNO_REG_CLASS (REGNO (out)),
7229 GET_MODE (out)))
7230 {
7231 /* Get the memory to use and rewrite both registers to its mode. */
7232 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7233
7234 if (GET_MODE (loc) != GET_MODE (out))
7235 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7236
7237 if (GET_MODE (loc) != GET_MODE (in))
7238 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7239
7240 gen_reload (loc, in, opnum, type);
7241 gen_reload (out, loc, opnum, type);
7242 }
7243 #endif
7244
7245 /* If IN is a simple operand, use gen_move_insn. */
7246 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7247 emit_insn (gen_move_insn (out, in));
7248
7249 #ifdef HAVE_reload_load_address
7250 else if (HAVE_reload_load_address)
7251 emit_insn (gen_reload_load_address (out, in));
7252 #endif
7253
7254 /* Otherwise, just write (set OUT IN) and hope for the best. */
7255 else
7256 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7257
7258 /* Return the first insn emitted.
7259 We can not just return get_last_insn, because there may have
7260 been multiple instructions emitted. Also note that gen_move_insn may
7261 emit more than one insn itself, so we can not assume that there is one
7262 insn emitted per emit_insn_before call. */
7263
7264 return last ? NEXT_INSN (last) : get_insns ();
7265 }
7266 \f
7267 /* Delete a previously made output-reload
7268 whose result we now believe is not needed.
7269 First we double-check.
7270
7271 INSN is the insn now being processed.
7272 OUTPUT_RELOAD_INSN is the insn of the output reload.
7273 J is the reload-number for this insn. */
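/* Rough example of when this applies: an earlier insn stored a reload
   register back into pseudo P (the output reload). If INSN now reloads
   P again, nothing between that store and INSN mentions P, and no label
   or jump intervenes, the value reaches INSN through the reload register
   alone, so the earlier store may be deletable. */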
7274
7275 static void
7276 delete_output_reload (insn, j, output_reload_insn)
7277 rtx insn;
7278 int j;
7279 rtx output_reload_insn;
7280 {
7281 register rtx i1;
7282
7283 /* Get the raw pseudo-register referred to. */
7284
7285 rtx reg = reload_in[j];
7286 while (GET_CODE (reg) == SUBREG)
7287 reg = SUBREG_REG (reg);
7288
7289 /* If the pseudo-reg we are reloading is no longer referenced
7290 anywhere between the store into it and here,
7291 and no jumps or labels intervene, then the value can get
7292 here through the reload reg alone.
7293 Otherwise, give up--return. */
7294 for (i1 = NEXT_INSN (output_reload_insn);
7295 i1 != insn; i1 = NEXT_INSN (i1))
7296 {
7297 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7298 return;
7299 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7300 && reg_mentioned_p (reg, PATTERN (i1)))
7301 return;
7302 }
7303
7304 if (cannot_omit_stores[REGNO (reg)])
7305 return;
7306
7307 /* If this insn will store in the pseudo again,
7308 the previous store can be removed. */
7309 if (reload_out[j] == reload_in[j])
7310 delete_insn (output_reload_insn);
7311
7312 /* See if the pseudo reg has been completely replaced
7313 with reload regs. If so, delete the store insn
7314 and forget we had a stack slot for the pseudo. */
7315 else if (REG_N_DEATHS (REGNO (reg)) == 1
7316 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7317 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7318 {
7319 rtx i2;
7320
7321 /* We know that it was used only between here
7322 and the beginning of the current basic block.
7323 (We also know that the last use before INSN was
7324 the output reload we are thinking of deleting, but never mind that.)
7325 Search that range; see if any ref remains. */
7326 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7327 {
7328 rtx set = single_set (i2);
7329
7330 /* Uses which just store in the pseudo don't count,
7331 since if they are the only uses, they are dead. */
7332 if (set != 0 && SET_DEST (set) == reg)
7333 continue;
7334 if (GET_CODE (i2) == CODE_LABEL
7335 || GET_CODE (i2) == JUMP_INSN)
7336 break;
7337 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7338 && reg_mentioned_p (reg, PATTERN (i2)))
7339 /* Some other ref remains;
7340 we can't do anything. */
7341 return;
7342 }
7343
7344 /* Delete the now-dead stores into this pseudo. */
7345 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7346 {
7347 rtx set = single_set (i2);
7348
7349 if (set != 0 && SET_DEST (set) == reg)
7350 {
7351 /* This might be a basic block head,
7352 thus don't use delete_insn. */
7353 PUT_CODE (i2, NOTE);
7354 NOTE_SOURCE_FILE (i2) = 0;
7355 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7356 }
7357 if (GET_CODE (i2) == CODE_LABEL
7358 || GET_CODE (i2) == JUMP_INSN)
7359 break;
7360 }
7361
7362 /* For the debugging info,
7363 say the pseudo lives in this reload reg. */
7364 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7365 alter_reg (REGNO (reg), -1);
7366 }
7367 }
7368 \f
7369 /* Output reload-insns to reload VALUE into RELOADREG.
7370 VALUE is an autoincrement or autodecrement RTX whose operand
7371 is a register or memory location;
7372 so reloading involves incrementing that location.
7373
7374 INC_AMOUNT is the number to increment or decrement by (always positive).
7375 This cannot be deduced from VALUE. */
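/* Illustrative sketch (register numbers invented): for
   VALUE == (post_inc:SI (reg:SI 4)) and INC_AMOUNT == 4 we first emit
   (set RELOADREG (reg:SI 4)) and then try the direct increment
   (set (reg:SI 4) (plus:SI (reg:SI 4) (const_int 4))); if the target
   cannot do that add in place, the increment is instead done in
   RELOADREG, stored back to the location, and then undone in RELOADREG
   so that RELOADREG keeps the pre-increment value. */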
7376
7377 static void
7378 inc_for_reload (reloadreg, value, inc_amount)
7379 rtx reloadreg;
7380 rtx value;
7381 int inc_amount;
7382 {
7383 /* REG or MEM to be copied and incremented. */
7384 rtx incloc = XEXP (value, 0);
7385 /* Nonzero if increment after copying. */
7386 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7387 rtx last;
7388 rtx inc;
7389 rtx add_insn;
7390 int code;
7391
7392 /* No hard register is equivalent to this register after
7393 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7394 we could inc/dec that register as well (maybe even using it for
7395 the source), but I'm not sure it's worth worrying about. */
7396 if (GET_CODE (incloc) == REG)
7397 reg_last_reload_reg[REGNO (incloc)] = 0;
7398
7399 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7400 inc_amount = - inc_amount;
7401
7402 inc = GEN_INT (inc_amount);
7403
7404 /* If this is post-increment, first copy the location to the reload reg. */
7405 if (post)
7406 emit_insn (gen_move_insn (reloadreg, incloc));
7407
7408 /* See if we can directly increment INCLOC. Use a method similar to that
7409 in gen_reload. */
7410
7411 last = get_last_insn ();
7412 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7413 gen_rtx (PLUS, GET_MODE (incloc),
7414 incloc, inc)));
7415
7416 code = recog_memoized (add_insn);
7417 if (code >= 0)
7418 {
7419 insn_extract (add_insn);
7420 if (constrain_operands (code, 1))
7421 {
7422 /* If this is a pre-increment and we have incremented the value
7423 where it lives, copy the incremented value to RELOADREG to
7424 be used as an address. */
7425
7426 if (! post)
7427 emit_insn (gen_move_insn (reloadreg, incloc));
7428
7429 return;
7430 }
7431 }
7432
7433 delete_insns_since (last);
7434
7435 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7436 The way we do this depends on whether this is pre- or post-increment.
7437 For pre-increment, copy INCLOC to the reload register, increment it
7438 there, then save back. */
7439
7440 if (! post)
7441 {
7442 emit_insn (gen_move_insn (reloadreg, incloc));
7443 emit_insn (gen_add2_insn (reloadreg, inc));
7444 emit_insn (gen_move_insn (incloc, reloadreg));
7445 }
7446 else
7447 {
7448 /* Postincrement.
7449 Because this might be a jump insn or a compare, and because RELOADREG
7450 may not be available after the insn in an input reload, we must do
7451 the incrementation before the insn being reloaded for.
7452
7453 We have already copied INCLOC to RELOADREG. Increment the copy in
7454 RELOADREG, save that back, then decrement RELOADREG so it has
7455 the original value. */
7456
7457 emit_insn (gen_add2_insn (reloadreg, inc));
7458 emit_insn (gen_move_insn (incloc, reloadreg));
7459 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7460 }
7461
7462 return;
7463 }
7464 \f
7465 /* Return 1 if we are certain that the constraint-string STRING allows
7466 the hard register REG. Return 0 if we can't be sure of this. */
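/* For instance (informal): with STRING == "g" we return 1 for any
   general register, since every alternative accepts it; with
   STRING == "r,m" we return 0, because the second alternative does not
   obviously accept a register. */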
7467
7468 static int
7469 constraint_accepts_reg_p (string, reg)
7470 char *string;
7471 rtx reg;
7472 {
7473 int value = 0;
7474 int regno = true_regnum (reg);
7475 int c;
7476
7477 /* Initialize for first alternative. */
7478 value = 0;
7479 /* Check that each alternative contains `g' or `r'. */
7480 while (1)
7481 switch (c = *string++)
7482 {
7483 case 0:
7484 /* If an alternative lacks `g' or `r', we lose. */
7485 return value;
7486 case ',':
7487 /* If an alternative lacks `g' or `r', we lose. */
7488 if (value == 0)
7489 return 0;
7490 /* Initialize for next alternative. */
7491 value = 0;
7492 break;
7493 case 'g':
7494 case 'r':
7495 /* Any general reg wins for this alternative. */
7496 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7497 value = 1;
7498 break;
7499 default:
7500 /* Any reg in specified class wins for this alternative. */
7501 {
7502 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7503
7504 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7505 value = 1;
7506 }
7507 }
7508 }
7509 \f
7510 /* Return the number of places FIND appears within X, but don't count
7511 an occurrence if some SET_DEST is FIND. */
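/* Example (assuming the usual sharing of register rtx): in
   (set (reg:SI 65) (plus:SI (reg:SI 65) (reg:SI 66))) with
   FIND == (reg:SI 65), the SET_DEST occurrence is ignored and the
   result is 1. */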
7512
7513 static int
7514 count_occurrences (x, find)
7515 register rtx x, find;
7516 {
7517 register int i, j;
7518 register enum rtx_code code;
7519 register char *format_ptr;
7520 int count;
7521
7522 if (x == find)
7523 return 1;
7524 if (x == 0)
7525 return 0;
7526
7527 code = GET_CODE (x);
7528
7529 switch (code)
7530 {
7531 case REG:
7532 case QUEUED:
7533 case CONST_INT:
7534 case CONST_DOUBLE:
7535 case SYMBOL_REF:
7536 case CODE_LABEL:
7537 case PC:
7538 case CC0:
7539 return 0;
7540
7541 case SET:
7542 if (SET_DEST (x) == find)
7543 return count_occurrences (SET_SRC (x), find);
7544 break;
7545
7546 default:
7547 break;
7548 }
7549
7550 format_ptr = GET_RTX_FORMAT (code);
7551 count = 0;
7552
7553 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7554 {
7555 switch (*format_ptr++)
7556 {
7557 case 'e':
7558 count += count_occurrences (XEXP (x, i), find);
7559 break;
7560
7561 case 'E':
7562 if (XVEC (x, i) != NULL)
7563 {
7564 for (j = 0; j < XVECLEN (x, i); j++)
7565 count += count_occurrences (XVECEXP (x, i, j), find);
7566 }
7567 break;
7568 }
7569 }
7570 return count;
7571 }
7572 \f
7573 /* This array holds values which are equivalent to a hard register
7574 during reload_cse_regs. Each array element is an EXPR_LIST of
7575 values. Each time a hard register is set, we set the corresponding
7576 array element to the value. Each time a hard register is copied
7577 into memory, we add the memory location to the corresponding array
7578 element. We don't store values or memory addresses with side
7579 effects in this array.
7580
7581 If the value is a CONST_INT, then the mode of the containing
7582 EXPR_LIST is the mode in which that CONST_INT was referenced.
7583
7584 We sometimes clobber a specific entry in a list. In that case, we
7585 just set XEXP (list-entry, 0) to 0. */
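/* A rough, hypothetical sketch of one element: after hard reg 3 is set
   to a constant and then stored to memory, reg_values[3] could be an
   EXPR_LIST chain whose entries hold (const_int 7) and
   (mem:SI (reg:SI 6)), with each EXPR_LIST's mode recording the mode in
   which the constant was referenced. */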
7586
7587 static rtx *reg_values;
7588
7589 /* This is a preallocated REG rtx which we use as a temporary in
7590 reload_cse_invalidate_regno, so that we don't need to allocate a
7591 new one each time through a loop in that function. */
7592
7593 static rtx invalidate_regno_rtx;
7594
7595 /* This is a set of registers for which we must remove REG_DEAD notes in
7596 previous insns, because our modifications made them invalid. That can
7597 happen if we introduced the register into the current insn, or we deleted
7598 the current insn which used to set the register. */
7599
7600 static HARD_REG_SET no_longer_dead_regs;
7601
7602 /* Invalidate any entries in reg_values which depend on REGNO,
7603 including those for REGNO itself. This is called if REGNO is
7604 changing. If CLOBBER is true, then always forget anything we
7605 currently know about REGNO. MODE is the mode of the assignment to
7606 REGNO, which is used to determine how many hard registers are being
7607 changed. If MODE is VOIDmode, then only REGNO is being changed;
7608 this is used when invalidating call clobbered registers across a
7609 call. */
7610
7611 static void
7612 reload_cse_invalidate_regno (regno, mode, clobber)
7613 int regno;
7614 enum machine_mode mode;
7615 int clobber;
7616 {
7617 int endregno;
7618 register int i;
7619
7620 /* Our callers don't always go through true_regnum; we may see a
7621 pseudo-register here from a CLOBBER or the like. We probably
7622 won't ever see a pseudo-register that has a real register number,
7623 but we check anyhow for safety. */
7624 if (regno >= FIRST_PSEUDO_REGISTER)
7625 regno = reg_renumber[regno];
7626 if (regno < 0)
7627 return;
7628
7629 if (mode == VOIDmode)
7630 endregno = regno + 1;
7631 else
7632 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7633
7634 if (clobber)
7635 for (i = regno; i < endregno; i++)
7636 reg_values[i] = 0;
7637
7638 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7639 {
7640 rtx x;
7641
7642 for (x = reg_values[i]; x; x = XEXP (x, 1))
7643 {
7644 if (XEXP (x, 0) != 0
7645 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7646 {
7647 /* If this is the only entry on the list, clear
7648 reg_values[i]. Otherwise, just clear this entry on
7649 the list. */
7650 if (XEXP (x, 1) == 0 && x == reg_values[i])
7651 {
7652 reg_values[i] = 0;
7653 break;
7654 }
7655 XEXP (x, 0) = 0;
7656 }
7657 }
7658 }
7659
7660 /* We must look at earlier registers, in case REGNO is part of a
7661 multi word value but is not the first register. If an earlier
7662 register has a value in a mode which overlaps REGNO, then we must
7663 invalidate that earlier register. Note that we do not need to
7664 check REGNO or later registers (we must not check REGNO itself,
7665 because we would incorrectly conclude that there was a conflict). */
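/* For example (hypothetical registers, on a target where DImode needs
   two word registers): if REGNO is 1 and hard reg 0 is recorded as
   holding a DImode value occupying regs 0 and 1, changing reg 1
   invalidates that value, so reg 0's entries must be flushed too. */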
7666
7667 for (i = 0; i < regno; i++)
7668 {
7669 rtx x;
7670
7671 for (x = reg_values[i]; x; x = XEXP (x, 1))
7672 {
7673 if (XEXP (x, 0) != 0)
7674 {
7675 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7676 REGNO (invalidate_regno_rtx) = i;
7677 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7678 NULL_PTR))
7679 {
7680 reload_cse_invalidate_regno (i, VOIDmode, 1);
7681 break;
7682 }
7683 }
7684 }
7685 }
7686 }
7687
7688 /* The memory at address MEM_BASE is being changed.
7689 Return whether this change will invalidate VAL. */
7690
7691 static int
7692 reload_cse_mem_conflict_p (mem_base, val)
7693 rtx mem_base;
7694 rtx val;
7695 {
7696 enum rtx_code code;
7697 char *fmt;
7698 int i;
7699
7700 code = GET_CODE (val);
7701 switch (code)
7702 {
7703 /* Get rid of a few simple cases quickly. */
7704 case REG:
7705 case PC:
7706 case CC0:
7707 case SCRATCH:
7708 case CONST:
7709 case CONST_INT:
7710 case CONST_DOUBLE:
7711 case SYMBOL_REF:
7712 case LABEL_REF:
7713 return 0;
7714
7715 case MEM:
7716 if (GET_MODE (mem_base) == BLKmode
7717 || GET_MODE (val) == BLKmode)
7718 return 1;
7719 if (anti_dependence (val, mem_base))
7720 return 1;
7721 /* The address may contain nested MEMs. */
7722 break;
7723
7724 default:
7725 break;
7726 }
7727
7728 fmt = GET_RTX_FORMAT (code);
7729
7730 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7731 {
7732 if (fmt[i] == 'e')
7733 {
7734 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7735 return 1;
7736 }
7737 else if (fmt[i] == 'E')
7738 {
7739 int j;
7740
7741 for (j = 0; j < XVECLEN (val, i); j++)
7742 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7743 return 1;
7744 }
7745 }
7746
7747 return 0;
7748 }
7749
7750 /* Invalidate any entries in reg_values which are changed because of a
7751 store to MEM_RTX. If this is called because of a non-const call
7752 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7753
7754 static void
7755 reload_cse_invalidate_mem (mem_rtx)
7756 rtx mem_rtx;
7757 {
7758 register int i;
7759
7760 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7761 {
7762 rtx x;
7763
7764 for (x = reg_values[i]; x; x = XEXP (x, 1))
7765 {
7766 if (XEXP (x, 0) != 0
7767 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7768 {
7769 /* If this is the only entry on the list, clear
7770 reg_values[i]. Otherwise, just clear this entry on
7771 the list. */
7772 if (XEXP (x, 1) == 0 && x == reg_values[i])
7773 {
7774 reg_values[i] = 0;
7775 break;
7776 }
7777 XEXP (x, 0) = 0;
7778 }
7779 }
7780 }
7781 }
7782
7783 /* Invalidate DEST, which is being assigned to or clobbered. The
7784 second parameter exists so that this function can be passed to
7785 note_stores; it is ignored. */
7786
7787 static void
7788 reload_cse_invalidate_rtx (dest, ignore)
7789 rtx dest;
7790 rtx ignore;
7791 {
7792 while (GET_CODE (dest) == STRICT_LOW_PART
7793 || GET_CODE (dest) == SIGN_EXTRACT
7794 || GET_CODE (dest) == ZERO_EXTRACT
7795 || GET_CODE (dest) == SUBREG)
7796 dest = XEXP (dest, 0);
7797
7798 if (GET_CODE (dest) == REG)
7799 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7800 else if (GET_CODE (dest) == MEM)
7801 reload_cse_invalidate_mem (dest);
7802 }
7803
7804 /* Possibly delete death notes on the insns before INSN if modifying INSN
7805 extended the lifespan of the registers. */
7806
7807 static void
7808 reload_cse_delete_death_notes (insn)
7809 rtx insn;
7810 {
7811 int dreg;
7812
7813 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
7814 {
7815 rtx trial;
7816
7817 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
7818 continue;
7819
7820 for (trial = prev_nonnote_insn (insn);
7821 (trial
7822 && GET_CODE (trial) != CODE_LABEL
7823 && GET_CODE (trial) != BARRIER);
7824 trial = prev_nonnote_insn (trial))
7825 {
7826 if (find_regno_note (trial, REG_DEAD, dreg))
7827 {
7828 remove_death (dreg, trial);
7829 break;
7830 }
7831 }
7832 }
7833 }
7834
7835 /* Record that the current insn uses hard reg REGNO in mode MODE. This
7836 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
7837 notes for this register. */
7838
7839 static void
7840 reload_cse_no_longer_dead (regno, mode)
7841 int regno;
7842 enum machine_mode mode;
7843 {
7844 int nregs = HARD_REGNO_NREGS (regno, mode);
7845 while (nregs-- > 0)
7846 {
7847 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
7848 regno++;
7849 }
7850 }
7851
7852
7853 /* Do a very simple CSE pass over the hard registers.
7854
7855 This function detects no-op moves where we happened to assign two
7856 different pseudo-registers to the same hard register, and then
7857 copied one to the other. Reload will generate a useless
7858 instruction copying a register to itself.
7859
7860 This function also detects cases where we load a value from memory
7861 into two different registers, and (if memory is more expensive than
7862 registers) changes it to simply copy the first register into the
7863 second register.
7864
7865 Another optimization is performed that scans the operands of each
7866 instruction to see whether the value is already available in a
7867 hard register. It then replaces the operand with the hard register
7868 if possible, much like an optional reload would. */
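/* A minimal example of the no-op case (register numbers invented): if
   pseudos 100 and 101 were both assigned hard reg 3, a copy that began
   as (set (reg:SI 101) (reg:SI 100)) becomes (set (reg:SI 3) (reg:SI 3))
   after reload; this pass recognizes it as a no-op and turns the insn
   into a deleted-insn note. */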
7869
7870 void
7871 reload_cse_regs (first)
7872 rtx first;
7873 {
7874 char *firstobj;
7875 rtx callmem;
7876 register int i;
7877 rtx insn;
7878
7879 init_alias_analysis ();
7880
7881 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7882 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7883 reg_values[i] = 0;
7884
7885 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7886 free them when we are done. */
7887 push_obstacks (&reload_obstack, &reload_obstack);
7888 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7889
7890 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7891 memory for a non-const call instruction. */
7892 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7893
7894 /* This is used in reload_cse_invalidate_regno to avoid consing a
7895 new REG in a loop in that function. */
7896 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7897
7898 for (insn = first; insn; insn = NEXT_INSN (insn))
7899 {
7900 rtx body;
7901
7902 if (GET_CODE (insn) == CODE_LABEL)
7903 {
7904 /* Forget all the register values at a code label. We don't
7905 try to do anything clever around jumps. */
7906 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7907 reg_values[i] = 0;
7908
7909 continue;
7910 }
7911
7912 #ifdef NON_SAVING_SETJMP
7913 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7914 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7915 {
7916 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7917 reg_values[i] = 0;
7918
7919 continue;
7920 }
7921 #endif
7922
7923 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7924 continue;
7925
7926 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7927
7928 /* If this is a call instruction, forget anything stored in a
7929 call clobbered register, or, if this is not a const call, in
7930 memory. */
7931 if (GET_CODE (insn) == CALL_INSN)
7932 {
7933 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7934 if (call_used_regs[i])
7935 reload_cse_invalidate_regno (i, VOIDmode, 1);
7936
7937 if (! CONST_CALL_P (insn))
7938 reload_cse_invalidate_mem (callmem);
7939 }
7940
7941 body = PATTERN (insn);
7942 if (GET_CODE (body) == SET)
7943 {
7944 int count = 0;
7945 if (reload_cse_noop_set_p (body, insn))
7946 {
7947 PUT_CODE (insn, NOTE);
7948 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7949 NOTE_SOURCE_FILE (insn) = 0;
7950 reload_cse_delete_death_notes (insn);
7951
7952 /* We're done with this insn. */
7953 continue;
7954 }
7955
7956 /* It's not a no-op, but we can try to simplify it. */
7957 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7958 count += reload_cse_simplify_set (body, insn);
7959
7960 if (count > 0 && apply_change_group ())
7961 reload_cse_delete_death_notes (insn);
7962 else if (reload_cse_simplify_operands (insn))
7963 reload_cse_delete_death_notes (insn);
7964
7965 reload_cse_record_set (body, body);
7966 }
7967 else if (GET_CODE (body) == PARALLEL)
7968 {
7969 int count = 0;
7970
7971 /* If every action in a PARALLEL is a noop, we can delete
7972 the entire PARALLEL. */
7973 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7974 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7975 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7976 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7977 break;
7978 if (i < 0)
7979 {
7980 PUT_CODE (insn, NOTE);
7981 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7982 NOTE_SOURCE_FILE (insn) = 0;
7983 reload_cse_delete_death_notes (insn);
7984
7985 /* We're done with this insn. */
7986 continue;
7987 }
7988
7989 /* It's not a no-op, but we can try to simplify it. */
7990 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7991 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7992 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
7993 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
7994
7995 if (count > 0 && apply_change_group ())
7996 reload_cse_delete_death_notes (insn);
7997 else if (reload_cse_simplify_operands (insn))
7998 reload_cse_delete_death_notes (insn);
7999
8000 /* Look through the PARALLEL and record the values being
8001 set, if possible. Also handle any CLOBBERs. */
8002 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8003 {
8004 rtx x = XVECEXP (body, 0, i);
8005
8006 if (GET_CODE (x) == SET)
8007 reload_cse_record_set (x, body);
8008 else
8009 note_stores (x, reload_cse_invalidate_rtx);
8010 }
8011 }
8012 else
8013 note_stores (body, reload_cse_invalidate_rtx);
8014
8015 #ifdef AUTO_INC_DEC
8016 /* Clobber any registers which appear in REG_INC notes. We
8017 could keep track of the changes to their values, but it is
8018 unlikely to help. */
8019 {
8020 rtx x;
8021
8022 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8023 if (REG_NOTE_KIND (x) == REG_INC)
8024 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8025 }
8026 #endif
8027
8028 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8029 after we have processed the insn. */
8030 if (GET_CODE (insn) == CALL_INSN)
8031 {
8032 rtx x;
8033
8034 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8035 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8036 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8037 }
8038 }
8039
8040 /* Free all the temporary structures we created, and go back to the
8041 regular obstacks. */
8042 obstack_free (&reload_obstack, firstobj);
8043 pop_obstacks ();
8044 }
8045
8046 /* Return whether the values known for REGNO are equal to VAL. MODE
8047 is the mode of the object that VAL is being copied to; this matters
8048 if VAL is a CONST_INT. */
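/* Illustration of the CONST_INT subtlety (modes chosen arbitrarily): if
   REGNO is recorded as holding (const_int 255) referenced in SImode, a
   query with MODE == SImode matches; a query with the narrower
   MODE == QImode matches only if truncating SImode to QImode is a no-op
   on the target and the recorded value does not span several big-endian
   registers; a query with a wider MODE does not match. */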
8049
8050 static int
8051 reload_cse_regno_equal_p (regno, val, mode)
8052 int regno;
8053 rtx val;
8054 enum machine_mode mode;
8055 {
8056 rtx x;
8057
8058 if (val == 0)
8059 return 0;
8060
8061 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8062 if (XEXP (x, 0) != 0
8063 && rtx_equal_p (XEXP (x, 0), val)
8064 && (GET_CODE (val) != CONST_INT
8065 || mode == GET_MODE (x)
8066 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8067 /* On a big endian machine if the value spans more than
8068 one register then this register holds the high part of
8069 it and we can't use it.
8070
8071 ??? We should also compare with the high part of the
8072 value. */
8073 && !(WORDS_BIG_ENDIAN
8074 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8075 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8076 GET_MODE_BITSIZE (GET_MODE (x))))))
8077 return 1;
8078
8079 return 0;
8080 }
8081
8082 /* See whether a single set is a noop. SET is the set instruction we
8083 should check, and INSN is the instruction from which it came. */
8084
8085 static int
8086 reload_cse_noop_set_p (set, insn)
8087 rtx set;
8088 rtx insn;
8089 {
8090 rtx src, dest;
8091 enum machine_mode dest_mode;
8092 int dreg, sreg;
8093 int ret;
8094
8095 src = SET_SRC (set);
8096 dest = SET_DEST (set);
8097 dest_mode = GET_MODE (dest);
8098
8099 if (side_effects_p (src))
8100 return 0;
8101
8102 dreg = true_regnum (dest);
8103 sreg = true_regnum (src);
8104
8105 /* Check for setting a register to itself. In this case, we don't
8106 have to worry about REG_DEAD notes. */
8107 if (dreg >= 0 && dreg == sreg)
8108 return 1;
8109
8110 ret = 0;
8111 if (dreg >= 0)
8112 {
8113 /* Check for setting a register to itself. */
8114 if (dreg == sreg)
8115 ret = 1;
8116
8117 /* Check for setting a register to a value which we already know
8118 is in the register. */
8119 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8120 ret = 1;
8121
8122 /* Check for setting a register DREG to another register SREG
8123 where SREG is equal to a value which is already in DREG. */
8124 else if (sreg >= 0)
8125 {
8126 rtx x;
8127
8128 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8129 {
8130 rtx tmp;
8131
8132 if (XEXP (x, 0) == 0)
8133 continue;
8134
8135 if (dest_mode == GET_MODE (x))
8136 tmp = XEXP (x, 0);
8137 else if (GET_MODE_BITSIZE (dest_mode)
8138 < GET_MODE_BITSIZE (GET_MODE (x)))
8139 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8140 else
8141 continue;
8142
8143 if (tmp
8144 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8145 {
8146 ret = 1;
8147 break;
8148 }
8149 }
8150 }
8151 }
8152 else if (GET_CODE (dest) == MEM)
8153 {
8154 /* Check for storing a register to memory when we know that the
8155 register is equivalent to the memory location. */
8156 if (sreg >= 0
8157 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8158 && ! side_effects_p (dest))
8159 ret = 1;
8160 }
8161
8162 /* If we can delete this SET, then we need to look for an earlier
8163 REG_DEAD note on DREG, and remove it if it exists. */
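/* (Roughly why: deleting the SET means DREG keeps the value it already
   holds, so DREG must now stay live across the point where a REG_DEAD
   note previously said it died.)  */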
8164 if (ret && dreg >= 0)
8165 {
8166 if (! find_regno_note (insn, REG_UNUSED, dreg))
8167 reload_cse_no_longer_dead (dreg, dest_mode);
8168 }
8169
8170 return ret;
8171 }
8172
8173 /* Try to simplify a single SET instruction. SET is the set pattern.
8174 INSN is the instruction it came from.
8175 This function only handles one case: if we set a register to a value
8176 which is not a register, we try to find that value in some other register
8177 and change the set into a register copy. */
8178
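/* (A sketch of the one transformation performed here, with made-up
   register numbers: given

       (set (reg:SI 0) (mem:SI (plus:SI (reg:SI 6) (const_int 16))))

   where (reg:SI 3) is already known to hold the same value, the source
   is replaced so that the insn becomes

       (set (reg:SI 0) (reg:SI 3))

   provided the register copy is no more expensive than the original
   source.)  */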
8179 static int
8180 reload_cse_simplify_set (set, insn)
8181 rtx set;
8182 rtx insn;
8183 {
8184 int dreg;
8185 rtx src;
8186 enum machine_mode dest_mode;
8187 enum reg_class dclass;
8188 register int i;
8189
8190 dreg = true_regnum (SET_DEST (set));
8191 if (dreg < 0)
8192 return 0;
8193
8194 src = SET_SRC (set);
8195 if (side_effects_p (src) || true_regnum (src) >= 0)
8196 return 0;
8197
8198 /* If memory loads are cheaper than register copies, don't change
8199 them. */
8200 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8201 return 0;
8202
8203 dest_mode = GET_MODE (SET_DEST (set));
8204 dclass = REGNO_REG_CLASS (dreg);
8205 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8206 {
8207 if (i != dreg
8208 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8209 && reload_cse_regno_equal_p (i, src, dest_mode))
8210 {
8211 int validated;
8212
8213 /* Pop back to the real obstacks while changing the insn. */
8214 pop_obstacks ();
8215
8216 validated = validate_change (insn, &SET_SRC (set),
8217 gen_rtx (REG, dest_mode, i), 1);
8218
8219 /* Go back to the obstack we are using for temporary
8220 storage. */
8221 push_obstacks (&reload_obstack, &reload_obstack);
8222
8223 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8224 {
8225 reload_cse_no_longer_dead (i, dest_mode);
8226 return 1;
8227 }
8228 }
8229 }
8230 return 0;
8231 }
8232
8233 /* Try to replace operands in INSN with equivalent values that are already
8234 in registers. This can be viewed as optional reloading.
8235
8236 For each non-register operand in the insn, see if any hard regs are
8237 known to be equivalent to that operand. Record the alternatives which
8238 can accept these hard registers. Among all alternatives, select the
8239 ones which are better or equal to the one currently matching, where
8240 "better" is in terms of '?' and '!' constraints. Among the remaining
8241 alternatives, select the one which replaces most operands with
8242 hard registers. */
8243
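/* (Sketch of the overall effect, with made-up operands: if an insn uses
   (const_int 10) as an input operand, some hard register is known to
   contain the value 10, and at least one alternative that is no worse
   than the currently matching one can accept that register, then the
   constant operand is replaced by the register.)  */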
8244 static int
8245 reload_cse_simplify_operands (insn)
8246 rtx insn;
8247 {
8248 #ifdef REGISTER_CONSTRAINTS
8249 int insn_code_number, n_operands, n_alternatives;
8250 int i, j;
8251
8252 char *constraints[MAX_RECOG_OPERANDS];
8253
8254 /* Vector recording how bad an alternative is. */
8255 int *alternative_reject;
8256 /* Vector recording how many registers can be introduced by choosing
8257 this alternative. */
8258 int *alternative_nregs;
8259 /* Array of vectors recording, for each operand and each alternative,
8260 which hard register to substitute, or -1 if the operand should be
8261 left as it is. */
8262 int *op_alt_regno[MAX_RECOG_OPERANDS];
8263 /* Array of alternatives, sorted in order of decreasing desirability. */
8264 int *alternative_order;
8265
8266 /* Find out some information about this insn. */
8267 insn_code_number = recog_memoized (insn);
8268 /* We don't modify asm instructions. */
8269 if (insn_code_number < 0)
8270 return 0;
8271
8272 n_operands = insn_n_operands[insn_code_number];
8273 n_alternatives = insn_n_alternatives[insn_code_number];
8274
8275 if (n_alternatives == 0 || n_operands == 0)
8276 return 0;
8277 insn_extract (insn);
8278
8279 /* Figure out which alternative currently matches. */
8280 if (! constrain_operands (insn_code_number, 1))
8281 abort ();
8282
8283 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8284 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8285 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8286 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8287 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8288
8289 for (i = 0; i < n_operands; i++)
8290 {
8291 enum machine_mode mode;
8292 int regno;
8293 char *p;
8294
8295 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8296 for (j = 0; j < n_alternatives; j++)
8297 op_alt_regno[i][j] = -1;
8298
8299 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8300 mode = insn_operand_mode[insn_code_number][i];
8301
8302 /* Add the reject values for each alternative given by the constraints
8303 for this operand. */
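/* (For instance, a purely illustrative constraint string of "r,?m,!o"
   leaves alternative 0 unpenalized, adds 3 to alternative 1 and 300 to
   alternative 2.)  */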
8304 j = 0;
8305 while (*p != '\0')
8306 {
8307 char c = *p++;
8308 if (c == ',')
8309 j++;
8310 else if (c == '?')
8311 alternative_reject[j] += 3;
8312 else if (c == '!')
8313 alternative_reject[j] += 300;
8314 }
8315
8316 /* We won't change operands which are already registers. We
8317 also don't want to modify output operands. */
8318 regno = true_regnum (recog_operand[i]);
8319 if (regno >= 0
8320 || constraints[i][0] == '='
8321 || constraints[i][0] == '+')
8322 continue;
8323
8324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8325 {
8326 int class = (int) NO_REGS;
8327
8328 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8329 continue;
8330
8331 /* We found a register equal to this operand. Now look for all
8332 alternatives that can accept this register and have not been
8333 assigned a register they can use yet. */
8334 j = 0;
8335 p = constraints[i];
8336 for (;;)
8337 {
8338 char c = *p++;
8339
8340 switch (c)
8341 {
8342 case '=': case '+': case '?':
8343 case '#': case '&': case '!':
8344 case '*': case '%':
8345 case '0': case '1': case '2': case '3': case '4':
8346 case 'm': case '<': case '>': case 'V': case 'o':
8347 case 'E': case 'F': case 'G': case 'H':
8348 case 's': case 'i': case 'n':
8349 case 'I': case 'J': case 'K': case 'L':
8350 case 'M': case 'N': case 'O': case 'P':
8351 #ifdef EXTRA_CONSTRAINT
8352 case 'Q': case 'R': case 'S': case 'T': case 'U':
8353 #endif
8354 case 'p': case 'X':
8355 /* These don't say anything we care about. */
8356 break;
8357
8358 case 'g': case 'r':
8359 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8360 break;
8361
8362 default:
8363 class
8364 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8365 break;
8366
8367 case ',': case '\0':
8368 /* See if REGNO fits this alternative, and set it up as the
8369 replacement register if we don't have one for this
8370 alternative yet. */
8371 if (op_alt_regno[i][j] == -1
8372 && reg_fits_class_p (gen_rtx (REG, mode, regno), class,
8373 0, mode))
8374 {
8375 alternative_nregs[j]++;
8376 op_alt_regno[i][j] = regno;
8377 }
8378 j++;
8379 break;
8380 }
8381
8382 if (c == '\0')
8383 break;
8384 }
8385 }
8386 }
8387
8388 /* Record all alternatives which are better or equal to the currently
8389 matching one in the alternative_order array. */
8390 for (i = j = 0; i < n_alternatives; i++)
8391 if (alternative_reject[i] <= alternative_reject[which_alternative])
8392 alternative_order[j++] = i;
8393 n_alternatives = j;
8394
8395 /* Sort it. Given a small number of alternatives, a dumb algorithm
8396 won't hurt too much. */
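/* (This is a simple selection sort: alternatives with smaller reject
   values come first, and among equally rejected alternatives the one
   that substitutes more hard registers wins.)  */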
8397 for (i = 0; i < n_alternatives - 1; i++)
8398 {
8399 int best = i;
8400 int best_reject = alternative_reject[alternative_order[i]];
8401 int best_nregs = alternative_nregs[alternative_order[i]];
8402 int tmp;
8403
8404 for (j = i + 1; j < n_alternatives; j++)
8405 {
8406 int this_reject = alternative_reject[alternative_order[j]];
8407 int this_nregs = alternative_nregs[alternative_order[j]];
8408
8409 if (this_reject < best_reject
8410 || (this_reject == best_reject && this_nregs > best_nregs))
8411 {
8412 best = j;
8413 best_reject = this_reject;
8414 best_nregs = this_nregs;
8415 }
8416 }
8417
8418 tmp = alternative_order[best];
8419 alternative_order[best] = alternative_order[i];
8420 alternative_order[i] = tmp;
8421 }
8422
8423 /* Substitute the operands as determined by op_alt_regno for the best
8424 alternative. */
8425 j = alternative_order[0];
8426 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8427
8428 /* Pop back to the real obstacks while changing the insn. */
8429 pop_obstacks ();
8430
8431 for (i = 0; i < n_operands; i++)
8432 {
8433 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8434 if (op_alt_regno[i][j] == -1)
8435 continue;
8436
8437 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8438 validate_change (insn, recog_operand_loc[i],
8439 gen_rtx (REG, mode, op_alt_regno[i][j]), 1);
8440 }
8441
8442 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8443 {
8444 int op = recog_dup_num[i];
8445 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8446
8447 if (op_alt_regno[op][j] == -1)
8448 continue;
8449
8450 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8451 validate_change (insn, recog_dup_loc[i],
8452 gen_rtx (REG, mode, op_alt_regno[op][j]), 1);
8453 }
8454
8455 /* Go back to the obstack we are using for temporary
8456 storage. */
8457 push_obstacks (&reload_obstack, &reload_obstack);
8458
8459 return apply_change_group ();
8460 #else
8461 return 0;
8462 #endif
8463 }
8464
8465 /* These two variables are used to pass information from
8466 reload_cse_record_set to reload_cse_check_clobber. */
8467
8468 static int reload_cse_check_clobbered;
8469 static rtx reload_cse_check_src;
8470
8471 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8472 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8473 second argument, which is passed by note_stores, is ignored. */
8474
8475 static void
8476 reload_cse_check_clobber (dest, ignore)
8477 rtx dest;
8478 rtx ignore;
8479 {
8480 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8481 reload_cse_check_clobbered = 1;
8482 }
8483
8484 /* Record the result of a SET instruction. SET is the set pattern.
8485 BODY is the pattern of the insn that it came from. */
8486
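/* (Two illustrative cases, with made-up register numbers: a copy
   (set (reg:SI 2) (reg:SI 5)) makes every value known for register 5
   also known for register 2, narrowed with gen_lowpart_common when the
   modes differ; a store (set (mem:SI (reg:SI 6)) (reg:SI 2)) first
   invalidates anything known about overlapping memory and then records
   that memory location as a value held by register 2.)  */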
8487 static void
8488 reload_cse_record_set (set, body)
8489 rtx set;
8490 rtx body;
8491 {
8492 rtx dest, src, x;
8493 int dreg, sreg;
8494 enum machine_mode dest_mode;
8495
8496 dest = SET_DEST (set);
8497 src = SET_SRC (set);
8498 dreg = true_regnum (dest);
8499 sreg = true_regnum (src);
8500 dest_mode = GET_MODE (dest);
8501
8502 /* Some machines don't define AUTO_INC_DEC, but they still use push
8503 instructions. We need to catch that case here in order to
8504 invalidate the stack pointer correctly. Note that invalidating
8505 the stack pointer is different from invalidating DEST. */
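/* (For example, a push such as (set (mem:SI (pre_dec:SI (reg:SI sp)))
   (reg:SI 0)) modifies the stack pointer even though the stack pointer
   is not the SET_DEST; the RTL shown is illustrative only.)  */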
8506 x = dest;
8507 while (GET_CODE (x) == SUBREG
8508 || GET_CODE (x) == ZERO_EXTRACT
8509 || GET_CODE (x) == SIGN_EXTRACT
8510 || GET_CODE (x) == STRICT_LOW_PART)
8511 x = XEXP (x, 0);
8512 if (push_operand (x, GET_MODE (x)))
8513 {
8514 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8515 reload_cse_invalidate_rtx (dest, NULL_RTX);
8516 return;
8517 }
8518
8519 /* We can only handle an assignment to a register, or a store of a
8520 register to a memory location. For other cases, we just clobber
8521 the destination. We also have to just clobber if there are side
8522 effects in SRC or DEST. */
8523 if ((dreg < 0 && GET_CODE (dest) != MEM)
8524 || side_effects_p (src)
8525 || side_effects_p (dest))
8526 {
8527 reload_cse_invalidate_rtx (dest, NULL_RTX);
8528 return;
8529 }
8530
8531 #ifdef HAVE_cc0
8532 /* We don't try to handle values involving CC, because it's a pain
8533 to keep track of when they have to be invalidated. */
8534 if (reg_mentioned_p (cc0_rtx, src)
8535 || reg_mentioned_p (cc0_rtx, dest))
8536 {
8537 reload_cse_invalidate_rtx (dest, NULL_RTX);
8538 return;
8539 }
8540 #endif
8541
8542 /* If BODY is a PARALLEL, then we need to see whether the source of
8543 SET is clobbered by some other instruction in the PARALLEL. */
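/* (For instance, in a PARALLEL that both copies (reg 1) into (reg 0) and
   clobbers (reg 1), we must not record (reg 1) as a value held by
   (reg 0), since (reg 1) no longer holds it after the insn.  Register
   numbers illustrative.)  */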
8544 if (GET_CODE (body) == PARALLEL)
8545 {
8546 int i;
8547
8548 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8549 {
8550 rtx x;
8551
8552 x = XVECEXP (body, 0, i);
8553 if (x == set)
8554 continue;
8555
8556 reload_cse_check_clobbered = 0;
8557 reload_cse_check_src = src;
8558 note_stores (x, reload_cse_check_clobber);
8559 if (reload_cse_check_clobbered)
8560 {
8561 reload_cse_invalidate_rtx (dest, NULL_RTX);
8562 return;
8563 }
8564 }
8565 }
8566
8567 if (dreg >= 0)
8568 {
8569 int i;
8570
8571 /* This is an assignment to a register. Update the value we
8572 have stored for the register. */
8573 if (sreg >= 0)
8574 {
8575 rtx x;
8576
8577 /* This is a copy from one register to another. Any values
8578 which were valid for SREG are now valid for DREG. If the
8579 mode changes, we use gen_lowpart_common to extract only
8580 the part of the value that is copied. */
8581 reg_values[dreg] = 0;
8582 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8583 {
8584 rtx tmp;
8585
8586 if (XEXP (x, 0) == 0)
8587 continue;
8588 if (dest_mode == GET_MODE (XEXP (x, 0)))
8589 tmp = XEXP (x, 0);
8590 else if (GET_MODE_BITSIZE (dest_mode)
8591 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8592 continue;
8593 else
8594 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8595 if (tmp)
8596 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
8597 reg_values[dreg]);
8598 }
8599 }
8600 else
8601 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8602
8603 /* We've changed DREG, so invalidate any values held by other
8604 registers that depend upon it. */
8605 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8606
8607 /* If this assignment changes more than one hard register,
8608 forget anything we know about the others. */
8609 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8610 reg_values[dreg + i] = 0;
8611 }
8612 else if (GET_CODE (dest) == MEM)
8613 {
8614 /* Invalidate conflicting memory locations. */
8615 reload_cse_invalidate_mem (dest);
8616
8617 /* If we're storing a register to memory, add DEST to the list
8618 in REG_VALUES. */
8619 if (sreg >= 0 && ! side_effects_p (dest))
8620 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8621 reg_values[sreg]);
8622 }
8623 else
8624 {
8625 /* We should have bailed out earlier. */
8626 abort ();
8627 }
8628 }