gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
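/* For example: a reload needed only to form an input operand's address
   (RELOAD_FOR_INPUT_ADDRESS) belongs to the input part, and an output
   reload (RELOAD_FOR_OUTPUT) belongs to the output part, so the two may
   be given the same reload register; a RELOAD_OTHER reload never shares. */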
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "reload.h"
104 #include "regs.h"
105 #include "addresses.h"
106 #include "hard-reg-set.h"
107 #include "flags.h"
108 #include "real.h"
109 #include "output.h"
110 #include "function.h"
111 #include "toplev.h"
112 #include "params.h"
113 #include "target.h"
114 #include "df.h"
115
116 /* True if X is a constant that can be forced into the constant pool. */
117 #define CONST_POOL_OK_P(X) \
118 (CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (X))
121
122 /* True if C is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
124 #define SMALL_REGISTER_CLASS_P(C) \
125 (reg_class_size [(C)] == 1 \
126 || (reg_class_size [(C)] >= 1 && CLASS_LIKELY_SPILLED_P (C)))
127
128 \f
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
133
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
138
139 int reload_n_operands;
140
141 /* Replacing reloads.
142
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
147
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
150
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
153 {
154 rtx *where; /* Location to store in */
155 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
156 a SUBREG; 0 otherwise. */
157 int what; /* which reload this is for */
158 enum machine_mode mode; /* mode it must have */
159 };
160
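/* Presumably sized for the worst case of one replacement for each operand
   itself plus two for every register that can appear in that operand's
   address, i.e. (2 * MAX_REGS_PER_ADDRESS + 1) entries per operand. */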
161 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162
163 /* Number of replacements currently recorded. */
164 static int n_replacements;
165
166 /* Used to track what is modified by an operand. */
167 struct decomposition
168 {
169 int reg_flag; /* Nonzero if referencing a register. */
170 int safe; /* Nonzero if this can't conflict with anything. */
171 rtx base; /* Base address for MEM. */
172 HOST_WIDE_INT start; /* Starting offset or register number. */
173 HOST_WIDE_INT end; /* Ending offset or register number. */
174 };
175
176 #ifdef SECONDARY_MEMORY_NEEDED
177
178 /* Save MEMs needed to copy from one class of registers to another. One MEM
179 is used per mode, but normally only one or two modes are ever used.
180
181 We keep two versions, before and after register elimination. The one
182 after register elimination is recorded separately for each operand. This
183 is done in case the address is not valid, to be sure that we reload
184 each one separately. */
185
186 static rtx secondary_memlocs[NUM_MACHINE_MODES];
187 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
188 static int secondary_memlocs_elim_used = 0;
189 #endif
190
191 /* The instruction we are doing reloads for;
192 so we can test whether a register dies in it. */
193 static rtx this_insn;
194
195 /* Nonzero if this instruction is a user-specified asm with operands. */
196 static int this_insn_is_asm;
197
198 /* If hard_regs_live_known is nonzero,
199 we can tell which hard regs are currently live,
200 at least enough to succeed in choosing dummy reloads. */
201 static int hard_regs_live_known;
202
203 /* Indexed by hard reg number,
204 element is nonnegative if hard reg has been spilled.
205 This vector is passed to `find_reloads' as an argument
206 and is not changed here. */
207 static short *static_reload_reg_p;
208
209 /* Set to 1 in subst_reg_equivs if it changes anything. */
210 static int subst_reg_equivs_changed;
211
212 /* On return from push_reload, holds the reload-number for the OUT
213 operand, which can be different from that for the input operand. */
214 static int output_reloadnum;
215
216 /* Compare two RTX's. */
217 #define MATCHES(x, y) \
218 (x == y || (x != 0 && (REG_P (x) \
219 ? REG_P (y) && REGNO (x) == REGNO (y) \
220 : rtx_equal_p (x, y) && ! side_effects_p (x))))
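/* E.g. two distinct REG rtx's for hard register 3 MATCH each other, since
   REGs are compared by register number alone; any other pair must be
   rtx_equal_p and free of side effects to match. */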
221
222 /* Indicates if two reload purposes are for similar enough things that we
223 can merge their reloads. */
224 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
225 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
226 || ((when1) == (when2) && (op1) == (op2)) \
227 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
228 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
229 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
230 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
231 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
232
233 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
234 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
235 ((when1) != (when2) \
236 || ! ((op1) == (op2) \
237 || (when1) == RELOAD_FOR_INPUT \
238 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
239 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
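/* For example, two RELOAD_FOR_INPUT reloads for different operands are
   mergeable and the result stays RELOAD_FOR_INPUT, two
   RELOAD_FOR_INPUT_ADDRESS reloads are mergeable only when they are for
   the same operand, and merging with a RELOAD_OTHER reload always yields
   RELOAD_OTHER. */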
240
241 /* If we are going to reload an address, compute the reload type to
242 use. */
243 #define ADDR_TYPE(type) \
244 ((type) == RELOAD_FOR_INPUT_ADDRESS \
245 ? RELOAD_FOR_INPADDR_ADDRESS \
246 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
247 ? RELOAD_FOR_OUTADDR_ADDRESS \
248 : (type)))
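/* I.e. ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is RELOAD_FOR_INPADDR_ADDRESS,
   ADDR_TYPE (RELOAD_FOR_OUTPUT_ADDRESS) is RELOAD_FOR_OUTADDR_ADDRESS, and
   any other type is returned unchanged. */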
249
250 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
251 enum machine_mode, enum reload_type,
252 enum insn_code *, secondary_reload_info *);
253 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
254 int, unsigned int);
255 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, enum reg_class, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_p (enum machine_mode, rtx, rtx *);
271 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
272 int, enum reload_type, int, rtx);
273 static rtx subst_reg_equivs (rtx, rtx);
274 static rtx subst_indexed_address (rtx);
275 static void update_auto_inc_notes (rtx, int, int);
276 static int find_reloads_address_1 (enum machine_mode, rtx, int,
277 enum rtx_code, enum rtx_code, rtx *,
278 int, enum reload_type, int, rtx);
279 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
280 enum machine_mode, int,
281 enum reload_type, int);
282 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
283 int, rtx);
284 static void copy_replacements_1 (rtx *, rtx *, int);
285 static int find_inc_amount (rtx, rtx);
286 static int refers_to_mem_for_reload_p (rtx);
287 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
288 rtx, rtx *);
289
290 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
291 list yet. */
292
293 static void
294 push_reg_equiv_alt_mem (int regno, rtx mem)
295 {
296 rtx it;
297
298 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
299 if (rtx_equal_p (XEXP (it, 0), mem))
300 return;
301
302 reg_equiv_alt_mem_list [regno]
303 = alloc_EXPR_LIST (REG_EQUIV, mem,
304 reg_equiv_alt_mem_list [regno]);
305 }
306 \f
307 /* Determine if any secondary reloads are needed for loading (if IN_P is
308 nonzero) or storing (if IN_P is zero) X to or from a reload register of
309 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
310 are needed, push them.
311
312 Return the reload number of the secondary reload we made, or -1 if
313 we didn't need one. *PICODE is set to the insn_code to use if we do
314 need a secondary reload. */
315
316 static int
317 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
318 enum reg_class reload_class,
319 enum machine_mode reload_mode, enum reload_type type,
320 enum insn_code *picode, secondary_reload_info *prev_sri)
321 {
322 enum reg_class class = NO_REGS;
323 enum reg_class scratch_class;
324 enum machine_mode mode = reload_mode;
325 enum insn_code icode = CODE_FOR_nothing;
326 enum insn_code t_icode = CODE_FOR_nothing;
327 enum reload_type secondary_type;
328 int s_reload, t_reload = -1;
329 const char *scratch_constraint;
330 char letter;
331 secondary_reload_info sri;
332
333 if (type == RELOAD_FOR_INPUT_ADDRESS
334 || type == RELOAD_FOR_OUTPUT_ADDRESS
335 || type == RELOAD_FOR_INPADDR_ADDRESS
336 || type == RELOAD_FOR_OUTADDR_ADDRESS)
337 secondary_type = type;
338 else
339 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
340
341 *picode = CODE_FOR_nothing;
342
343 /* If X is a paradoxical SUBREG, use the inner value to determine both the
344 mode and object being reloaded. */
345 if (GET_CODE (x) == SUBREG
346 && (GET_MODE_SIZE (GET_MODE (x))
347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
348 {
349 x = SUBREG_REG (x);
350 reload_mode = GET_MODE (x);
351 }
352
353 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
354 is still a pseudo-register by now, it *must* have an equivalent MEM
355 but we don't want to assume that), use that equivalent when seeing if
356 a secondary reload is needed since whether or not a reload is needed
357 might be sensitive to the form of the MEM. */
358
359 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
360 && reg_equiv_mem[REGNO (x)] != 0)
361 x = reg_equiv_mem[REGNO (x)];
362
363 sri.icode = CODE_FOR_nothing;
364 sri.prev_sri = prev_sri;
365 class = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
366 icode = sri.icode;
367
368 /* If we don't need any secondary registers, done. */
369 if (class == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
371
372 if (class != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, class,
374 reload_mode, type, &t_icode, &sri);
375
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
378
379 if (icode != CODE_FOR_nothing)
380 {
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
385
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
390
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (class == NO_REGS);
396
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
405 scratch_constraint));
406
407 class = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
409 }
410
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
416
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from class. If you have such a case, you
419 cannot use secondary reloads; you must work around the problem some
420 other way.
421
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
424
425 gcc_assert (!in_p || class != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
427
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (class, rld[s_reload].class)
432 || reg_class_subset_p (rld[s_reload].class, class))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
440 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
441 opnum, rld[s_reload].opnum))
442 {
443 if (in_p)
444 rld[s_reload].inmode = mode;
445 if (! in_p)
446 rld[s_reload].outmode = mode;
447
448 if (reg_class_subset_p (class, rld[s_reload].class))
449 rld[s_reload].class = class;
450
451 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
452 rld[s_reload].optional &= optional;
453 rld[s_reload].secondary_p = 1;
454 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
455 opnum, rld[s_reload].opnum))
456 rld[s_reload].when_needed = RELOAD_OTHER;
457
458 break;
459 }
460
461 if (s_reload == n_reloads)
462 {
463 #ifdef SECONDARY_MEMORY_NEEDED
464 /* If we need a memory location to copy between the two reload regs,
465 set it up now. Note that we do the input case before making
466 the reload and the output case after. This is due to the
467 way reloads are output. */
468
469 if (in_p && icode == CODE_FOR_nothing
470 && SECONDARY_MEMORY_NEEDED (class, reload_class, mode))
471 {
472 get_secondary_mem (x, reload_mode, opnum, type);
473
474 /* We may have just added new reloads. Make sure we add
475 the new reload at the end. */
476 s_reload = n_reloads;
477 }
478 #endif
479
480 /* We need to make a new secondary reload for this register class. */
481 rld[s_reload].in = rld[s_reload].out = 0;
482 rld[s_reload].class = class;
483
484 rld[s_reload].inmode = in_p ? mode : VOIDmode;
485 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
486 rld[s_reload].reg_rtx = 0;
487 rld[s_reload].optional = optional;
488 rld[s_reload].inc = 0;
489 /* Maybe we could combine these, but it seems too tricky. */
490 rld[s_reload].nocombine = 1;
491 rld[s_reload].in_reg = 0;
492 rld[s_reload].out_reg = 0;
493 rld[s_reload].opnum = opnum;
494 rld[s_reload].when_needed = secondary_type;
495 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
496 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
497 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
498 rld[s_reload].secondary_out_icode
499 = ! in_p ? t_icode : CODE_FOR_nothing;
500 rld[s_reload].secondary_p = 1;
501
502 n_reloads++;
503
504 #ifdef SECONDARY_MEMORY_NEEDED
505 if (! in_p && icode == CODE_FOR_nothing
506 && SECONDARY_MEMORY_NEEDED (reload_class, class, mode))
507 get_secondary_mem (x, mode, opnum, type);
508 #endif
509 }
510
511 *picode = icode;
512 return s_reload;
513 }
514
515 /* If a secondary reload is needed, return its class. If both an intermediate
516 register and a scratch register are needed, we return the class of the
517 intermediate register. */
518 enum reg_class
519 secondary_reload_class (bool in_p, enum reg_class class,
520 enum machine_mode mode, rtx x)
521 {
522 enum insn_code icode;
523 secondary_reload_info sri;
524
525 sri.icode = CODE_FOR_nothing;
526 sri.prev_sri = NULL;
527 class = targetm.secondary_reload (in_p, x, class, mode, &sri);
528 icode = sri.icode;
529
530 /* If there are no secondary reloads at all, we return NO_REGS.
531 If an intermediate register is needed, we return its class. */
532 if (icode == CODE_FOR_nothing || class != NO_REGS)
533 return class;
534
535 /* No intermediate register is needed, but we have a special reload
536 pattern, which we assume for now needs a scratch register. */
537 return scratch_reload_class (icode);
538 }
539
540 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
541 three operands, verify that operand 2 is an output operand, and return
542 its register class.
543 ??? We'd like to be able to handle any pattern with at least 2 operands,
544 for zero or more scratch registers, but that needs more infrastructure. */
545 enum reg_class
546 scratch_reload_class (enum insn_code icode)
547 {
548 const char *scratch_constraint;
549 char scratch_letter;
550 enum reg_class class;
551
552 gcc_assert (insn_data[(int) icode].n_operands == 3);
553 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
554 gcc_assert (*scratch_constraint == '=');
555 scratch_constraint++;
556 if (*scratch_constraint == '&')
557 scratch_constraint++;
558 scratch_letter = *scratch_constraint;
559 if (scratch_letter == 'r')
560 return GENERAL_REGS;
561 class = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
562 scratch_constraint);
563 gcc_assert (class != NO_REGS);
564 return class;
565 }
566 \f
567 #ifdef SECONDARY_MEMORY_NEEDED
568
569 /* Return a memory location that will be used to copy X in mode MODE.
570 If we haven't already made a location for this mode in this insn,
571 call find_reloads_address on the location being returned. */
572
573 rtx
574 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
575 int opnum, enum reload_type type)
576 {
577 rtx loc;
578 int mem_valid;
579
580 /* By default, if MODE is narrower than a word, widen it to a word.
581 This is required because most machines that require these memory
582 locations do not support short loads and stores from all registers
583 (e.g., FP registers). */
584
585 #ifdef SECONDARY_MEMORY_NEEDED_MODE
586 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
587 #else
588 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
589 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
590 #endif
591
592 /* If we already have made a MEM for this operand in MODE, return it. */
593 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
594 return secondary_memlocs_elim[(int) mode][opnum];
595
596 /* If this is the first time we've tried to get a MEM for this mode,
597 allocate a new one. `something_changed' in reload will get set
598 by noticing that the frame size has changed. */
599
600 if (secondary_memlocs[(int) mode] == 0)
601 {
602 #ifdef SECONDARY_MEMORY_NEEDED_RTX
603 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
604 #else
605 secondary_memlocs[(int) mode]
606 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
607 #endif
608 }
609
610 /* Get a version of the address doing any eliminations needed. If that
611 didn't give us a new MEM, make a new one if it isn't valid. */
612
613 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
614 mem_valid = strict_memory_address_p (mode, XEXP (loc, 0));
615
616 if (! mem_valid && loc == secondary_memlocs[(int) mode])
617 loc = copy_rtx (loc);
618
619 /* The only time the call below will do anything is if the stack
620 offset is too large. In that case IND_LEVELS doesn't matter, so we
621 can just pass a zero. Adjust the type to be the address of the
622 corresponding object. If the address was valid, save the eliminated
623 address. If it wasn't valid, we need to make a reload each time, so
624 don't save it. */
625
626 if (! mem_valid)
627 {
628 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
629 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
630 : RELOAD_OTHER);
631
632 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
633 opnum, type, 0, 0);
634 }
635
636 secondary_memlocs_elim[(int) mode][opnum] = loc;
637 if (secondary_memlocs_elim_used <= (int) mode)
638 secondary_memlocs_elim_used = (int) mode + 1;
639 return loc;
640 }
641
642 /* Clear any secondary memory locations we've made. */
643
644 void
645 clear_secondary_mem (void)
646 {
647 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
648 }
649 #endif /* SECONDARY_MEMORY_NEEDED */
650 \f
651
652 /* Find the largest class which has at least one register valid in
653 mode INNER, and which for every such register, that register number
654 plus N is also valid in OUTER (if in range) and is cheap to move
655 into REGNO. Such a class must exist. */
656
657 static enum reg_class
658 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
659 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
660 unsigned int dest_regno ATTRIBUTE_UNUSED)
661 {
662 int best_cost = -1;
663 int class;
664 int regno;
665 enum reg_class best_class = NO_REGS;
666 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
667 unsigned int best_size = 0;
668 int cost;
669
670 for (class = 1; class < N_REG_CLASSES; class++)
671 {
672 int bad = 0;
673 int good = 0;
674 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
675 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno))
676 {
677 if (HARD_REGNO_MODE_OK (regno, inner))
678 {
679 good = 1;
680 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno + n)
681 || ! HARD_REGNO_MODE_OK (regno + n, outer))
682 bad = 1;
683 }
684 }
685
686 if (bad || !good)
687 continue;
688 cost = REGISTER_MOVE_COST (outer, class, dest_class);
689
690 if ((reg_class_size[class] > best_size
691 && (best_cost < 0 || best_cost >= cost))
692 || best_cost > cost)
693 {
694 best_class = class;
695 best_size = reg_class_size[class];
696 best_cost = REGISTER_MOVE_COST (outer, class, dest_class);
697 }
698 }
699
700 gcc_assert (best_size != 0);
701
702 return best_class;
703 }
704 \f
705 /* Return the number of a previously made reload that can be combined with
706 a new one, or n_reloads if none of the existing reloads can be used.
707 OUT, CLASS, TYPE and OPNUM are the same arguments as passed to
708 push_reload, they determine the kind of the new reload that we try to
709 combine. P_IN points to the corresponding value of IN, which can be
710 modified by this function.
711 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
712
713 static int
714 find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
715 enum reload_type type, int opnum, int dont_share)
716 {
717 rtx in = *p_in;
718 int i;
719 /* We can't merge two reloads if the output of either one is
720 earlyclobbered. */
721
722 if (earlyclobber_operand_p (out))
723 return n_reloads;
724
725 /* We can use an existing reload if the class is right
726 and at least one of IN and OUT is a match
727 and the other is at worst neutral.
728 (A zero compared against anything is neutral.)
729
730 If SMALL_REGISTER_CLASSES, don't use existing reloads unless they are
731 for the same thing since that can cause us to need more reload registers
732 than we otherwise would. */
733
734 for (i = 0; i < n_reloads; i++)
735 if ((reg_class_subset_p (class, rld[i].class)
736 || reg_class_subset_p (rld[i].class, class))
737 /* If the existing reload has a register, it must fit our class. */
738 && (rld[i].reg_rtx == 0
739 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
740 true_regnum (rld[i].reg_rtx)))
741 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
742 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
743 || (out != 0 && MATCHES (rld[i].out, out)
744 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
745 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
746 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
747 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
748 return i;
749
750 /* Reloading a plain reg for input can match a reload to postincrement
751 that reg, since the postincrement's value is the right value.
752 Likewise, it can match a preincrement reload, since we regard
753 the preincrementation as happening before any ref in this insn
754 to that register. */
755 for (i = 0; i < n_reloads; i++)
756 if ((reg_class_subset_p (class, rld[i].class)
757 || reg_class_subset_p (rld[i].class, class))
758 /* If the existing reload has a register, it must fit our
759 class. */
760 && (rld[i].reg_rtx == 0
761 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
762 true_regnum (rld[i].reg_rtx)))
763 && out == 0 && rld[i].out == 0 && rld[i].in != 0
764 && ((REG_P (in)
765 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
766 && MATCHES (XEXP (rld[i].in, 0), in))
767 || (REG_P (rld[i].in)
768 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
769 && MATCHES (XEXP (in, 0), rld[i].in)))
770 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
771 && (SMALL_REGISTER_CLASS_P (class) || SMALL_REGISTER_CLASSES)
772 && MERGABLE_RELOADS (type, rld[i].when_needed,
773 opnum, rld[i].opnum))
774 {
775 /* Make sure reload_in ultimately has the increment,
776 not the plain register. */
777 if (REG_P (in))
778 *p_in = rld[i].in;
779 return i;
780 }
781 return n_reloads;
782 }
783
784 /* Return nonzero if X is a SUBREG which will require reloading of its
785 SUBREG_REG expression. */
786
787 static int
788 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
789 {
790 rtx inner;
791
792 /* Only SUBREGs are problematical. */
793 if (GET_CODE (x) != SUBREG)
794 return 0;
795
796 inner = SUBREG_REG (x);
797
798 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
799 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
800 return 1;
801
802 /* If INNER is not a hard register, then INNER will not need to
803 be reloaded. */
804 if (!REG_P (inner)
805 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
806 return 0;
807
808 /* If INNER is not ok for MODE, then INNER will need reloading. */
809 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
810 return 1;
811
812 /* If the outer part is a word or smaller, INNER larger than a
813 word and the number of regs for INNER is not the same as the
814 number of words in INNER, then INNER will need reloading. */
815 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
816 && output
817 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
818 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
819 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
820 }
821
822 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
823 requiring an extra reload register. The caller has already found that
824 IN contains some reference to REGNO, so check that we can produce the
825 new value in a single step. E.g. if we have
826 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
827 instruction that adds one to a register, this should succeed.
828 However, if we have something like
829 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
830 needs to be loaded into a register first, we need a separate reload
831 register.
832 Such PLUS reloads are generated by find_reloads_address_part.
833 The out-of-range PLUS expressions are usually introduced in the instruction
834 patterns by register elimination and substituting pseudos without a home
835 by their function-invariant equivalences. */
836 static int
837 can_reload_into (rtx in, int regno, enum machine_mode mode)
838 {
839 rtx dst, test_insn;
840 int r = 0;
841 struct recog_data save_recog_data;
842
843 /* For matching constraints, we often get notional input reloads where
844 we want to use the original register as the reload register. I.e.
845 technically this is a non-optional input-output reload, but IN is
846 already a valid register, and has been chosen as the reload register.
847 Speed this up, since it trivially works. */
848 if (REG_P (in))
849 return 1;
850
851 /* To test MEMs properly, we'd have to take into account all the reloads
852 that are already scheduled, which can become quite complicated.
853 And since we've already handled address reloads for this MEM, it
854 should always succeed anyway. */
855 if (MEM_P (in))
856 return 1;
857
858 /* If we can make a simple SET insn that does the job, everything should
859 be fine. */
860 dst = gen_rtx_REG (mode, regno);
861 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
862 save_recog_data = recog_data;
863 if (recog_memoized (test_insn) >= 0)
864 {
865 extract_insn (test_insn);
866 r = constrain_operands (1);
867 }
868 recog_data = save_recog_data;
869 return r;
870 }
871
872 /* Record one reload that needs to be performed.
873 IN is an rtx saying where the data are to be found before this instruction.
874 OUT says where they must be stored after the instruction.
875 (IN is zero for data not read, and OUT is zero for data not written.)
876 INLOC and OUTLOC point to the places in the instructions where
877 IN and OUT were found.
878 If IN and OUT are both nonzero, it means the same register must be used
879 to reload both IN and OUT.
880
881 CLASS is a register class required for the reloaded data.
882 INMODE is the machine mode that the instruction requires
883 for the reg that replaces IN and OUTMODE is likewise for OUT.
884
885 If IN is zero, then OUT's location and mode should be passed as
886 INLOC and INMODE.
887
888 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
889
890 OPTIONAL nonzero means this reload does not need to be performed:
891 it can be discarded if that is more convenient.
892
893 OPNUM and TYPE say what the purpose of this reload is.
894
895 The return value is the reload-number for this reload.
896
897 If both IN and OUT are nonzero, in some rare cases we might
898 want to make two separate reloads. (Actually we never do this now.)
899 Therefore, the reload-number for OUT is stored in
900 output_reloadnum when we return; the return value applies to IN.
901 Usually (presently always), when IN and OUT are nonzero,
902 the two reload-numbers are equal, but the caller should be careful to
903 distinguish them. */
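/* (For illustration: a read-write operand is typically pushed as a single
   reload with IN and OUT both nonzero and referring to the same location,
   so one reload register serves for both directions.) */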
904
905 int
906 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
907 enum reg_class class, enum machine_mode inmode,
908 enum machine_mode outmode, int strict_low, int optional,
909 int opnum, enum reload_type type)
910 {
911 int i;
912 int dont_share = 0;
913 int dont_remove_subreg = 0;
914 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
915 int secondary_in_reload = -1, secondary_out_reload = -1;
916 enum insn_code secondary_in_icode = CODE_FOR_nothing;
917 enum insn_code secondary_out_icode = CODE_FOR_nothing;
918
919 /* INMODE and/or OUTMODE could be VOIDmode if no mode
920 has been specified for the operand. In that case,
921 use the operand's mode as the mode to reload. */
922 if (inmode == VOIDmode && in != 0)
923 inmode = GET_MODE (in);
924 if (outmode == VOIDmode && out != 0)
925 outmode = GET_MODE (out);
926
927 /* If find_reloads and friends have so far failed to replace a pseudo
928 with its reg_equiv_constant, something went wrong
929 beforehand.
930 Note that it can't simply be done here if we missed it earlier
931 since the constant might need to be pushed into the literal pool
932 and the resulting memref would probably need further
933 reloading. */
934 if (in != 0 && REG_P (in))
935 {
936 int regno = REGNO (in);
937
938 gcc_assert (regno < FIRST_PSEUDO_REGISTER
939 || reg_renumber[regno] >= 0
940 || reg_equiv_constant[regno] == NULL_RTX);
941 }
942
943 /* reg_equiv_constant only contains constants which are obviously
944 not appropriate as a destination. So if we would need to replace
945 the destination pseudo with a constant we are in real
946 trouble. */
947 if (out != 0 && REG_P (out))
948 {
949 int regno = REGNO (out);
950
951 gcc_assert (regno < FIRST_PSEUDO_REGISTER
952 || reg_renumber[regno] >= 0
953 || reg_equiv_constant[regno] == NULL_RTX);
954 }
955
956 /* If we have a read-write operand with an address side-effect,
957 change either IN or OUT so the side-effect happens only once. */
958 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
959 switch (GET_CODE (XEXP (in, 0)))
960 {
961 case POST_INC: case POST_DEC: case POST_MODIFY:
962 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
963 break;
964
965 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
966 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
967 break;
968
969 default:
970 break;
971 }
972
973 /* If we are reloading a (SUBREG constant ...), really reload just the
974 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
975 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
976 a pseudo and hence will become a MEM) with M1 wider than M2 and the
977 register is a pseudo, also reload the inside expression.
978 For machines that extend byte loads, do this for any SUBREG of a pseudo
979 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
980 M2 is an integral mode that gets extended when loaded.
981 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
982 either M1 is not valid for R or M2 is wider than a word but we only
983 need one word to store an M2-sized quantity in R.
984 (However, if OUT is nonzero, we need to reload the reg *and*
985 the subreg, so do nothing here, and let following statement handle it.)
986
987 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
988 we can't handle it here because CONST_INT does not indicate a mode.
989
990 Similarly, we must reload the inside expression if we have a
991 STRICT_LOW_PART (presumably, in == out in this case).
992
993 Also reload the inner expression if it does not require a secondary
994 reload but the SUBREG does.
995
996 Finally, reload the inner expression if it is a register that is in
997 the class whose registers cannot be referenced in a different size
998 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
999 cannot reload just the inside since we might end up with the wrong
1000 register class. But if it is inside a STRICT_LOW_PART, we have
1001 no choice, so we hope we do get the right register class there. */
1002
1003 if (in != 0 && GET_CODE (in) == SUBREG
1004 && (subreg_lowpart_p (in) || strict_low)
1005 #ifdef CANNOT_CHANGE_MODE_CLASS
1006 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, class)
1007 #endif
1008 && (CONSTANT_P (SUBREG_REG (in))
1009 || GET_CODE (SUBREG_REG (in)) == PLUS
1010 || strict_low
1011 || (((REG_P (SUBREG_REG (in))
1012 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1013 || MEM_P (SUBREG_REG (in)))
1014 && ((GET_MODE_SIZE (inmode)
1015 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1016 #ifdef LOAD_EXTEND_OP
1017 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1018 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1019 <= UNITS_PER_WORD)
1020 && (GET_MODE_SIZE (inmode)
1021 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1022 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1023 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1024 #endif
1025 #ifdef WORD_REGISTER_OPERATIONS
1026 || ((GET_MODE_SIZE (inmode)
1027 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1028 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1029 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1030 / UNITS_PER_WORD)))
1031 #endif
1032 ))
1033 || (REG_P (SUBREG_REG (in))
1034 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1035 /* The case where out is nonzero
1036 is handled differently in the following statement. */
1037 && (out == 0 || subreg_lowpart_p (in))
1038 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1039 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1040 > UNITS_PER_WORD)
1041 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1042 / UNITS_PER_WORD)
1043 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1044 [GET_MODE (SUBREG_REG (in))]))
1045 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1046 || (secondary_reload_class (1, class, inmode, in) != NO_REGS
1047 && (secondary_reload_class (1, class, GET_MODE (SUBREG_REG (in)),
1048 SUBREG_REG (in))
1049 == NO_REGS))
1050 #ifdef CANNOT_CHANGE_MODE_CLASS
1051 || (REG_P (SUBREG_REG (in))
1052 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1053 && REG_CANNOT_CHANGE_MODE_P
1054 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1055 #endif
1056 ))
1057 {
1058 in_subreg_loc = inloc;
1059 inloc = &SUBREG_REG (in);
1060 in = *inloc;
1061 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1062 if (MEM_P (in))
1063 /* This is supposed to happen only for paradoxical subregs made by
1064 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1065 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1066 #endif
1067 inmode = GET_MODE (in);
1068 }
1069
1070 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1071 either M1 is not valid for R or M2 is wider than a word but we only
1072 need one word to store an M2-sized quantity in R.
1073
1074 However, we must reload the inner reg *as well as* the subreg in
1075 that case. */
1076
1077 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1078 code above. This can happen if SUBREG_BYTE != 0. */
1079
1080 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1081 {
1082 enum reg_class in_class = class;
1083
1084 if (REG_P (SUBREG_REG (in)))
1085 in_class
1086 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1087 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1088 GET_MODE (SUBREG_REG (in)),
1089 SUBREG_BYTE (in),
1090 GET_MODE (in)),
1091 REGNO (SUBREG_REG (in)));
1092
1093 /* This relies on the fact that emit_reload_insns outputs the
1094 instructions for input reloads of type RELOAD_OTHER in the same
1095 order as the reloads. Thus if the outer reload is also of type
1096 RELOAD_OTHER, we are guaranteed that this inner reload will be
1097 output before the outer reload. */
1098 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1099 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1100 dont_remove_subreg = 1;
1101 }
1102
1103 /* Similarly for paradoxical and problematical SUBREGs on the output.
1104 Note that there is no reason we need worry about the previous value
1105 of SUBREG_REG (out); even if wider than out,
1106 storing in a subreg is entitled to clobber it all
1107 (except in the case of STRICT_LOW_PART,
1108 and in that case the constraint should label it input-output.) */
1109 if (out != 0 && GET_CODE (out) == SUBREG
1110 && (subreg_lowpart_p (out) || strict_low)
1111 #ifdef CANNOT_CHANGE_MODE_CLASS
1112 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, class)
1113 #endif
1114 && (CONSTANT_P (SUBREG_REG (out))
1115 || strict_low
1116 || (((REG_P (SUBREG_REG (out))
1117 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1118 || MEM_P (SUBREG_REG (out)))
1119 && ((GET_MODE_SIZE (outmode)
1120 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1121 #ifdef WORD_REGISTER_OPERATIONS
1122 || ((GET_MODE_SIZE (outmode)
1123 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1124 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1125 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1126 / UNITS_PER_WORD)))
1127 #endif
1128 ))
1129 || (REG_P (SUBREG_REG (out))
1130 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1131 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1132 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1133 > UNITS_PER_WORD)
1134 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1135 / UNITS_PER_WORD)
1136 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1137 [GET_MODE (SUBREG_REG (out))]))
1138 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1139 || (secondary_reload_class (0, class, outmode, out) != NO_REGS
1140 && (secondary_reload_class (0, class, GET_MODE (SUBREG_REG (out)),
1141 SUBREG_REG (out))
1142 == NO_REGS))
1143 #ifdef CANNOT_CHANGE_MODE_CLASS
1144 || (REG_P (SUBREG_REG (out))
1145 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1146 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1147 GET_MODE (SUBREG_REG (out)),
1148 outmode))
1149 #endif
1150 ))
1151 {
1152 out_subreg_loc = outloc;
1153 outloc = &SUBREG_REG (out);
1154 out = *outloc;
1155 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1156 gcc_assert (!MEM_P (out)
1157 || GET_MODE_SIZE (GET_MODE (out))
1158 <= GET_MODE_SIZE (outmode));
1159 #endif
1160 outmode = GET_MODE (out);
1161 }
1162
1163 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1164 either M1 is not valid for R or M2 is wider than a word but we only
1165 need one word to store an M2-sized quantity in R.
1166
1167 However, we must reload the inner reg *as well as* the subreg in
1168 that case. In this case, the inner reg is an in-out reload. */
1169
1170 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1171 {
1172 /* This relies on the fact that emit_reload_insns outputs the
1173 instructions for output reloads of type RELOAD_OTHER in reverse
1174 order of the reloads. Thus if the outer reload is also of type
1175 RELOAD_OTHER, we are guaranteed that this inner reload will be
1176 output after the outer reload. */
1177 dont_remove_subreg = 1;
1178 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1179 &SUBREG_REG (out),
1180 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1181 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1182 GET_MODE (SUBREG_REG (out)),
1183 SUBREG_BYTE (out),
1184 GET_MODE (out)),
1185 REGNO (SUBREG_REG (out))),
1186 VOIDmode, VOIDmode, 0, 0,
1187 opnum, RELOAD_OTHER);
1188 }
1189
1190 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1191 if (in != 0 && out != 0 && MEM_P (out)
1192 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1193 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1194 dont_share = 1;
1195
1196 /* If IN is a SUBREG of a hard register, make a new REG. This
1197 simplifies some of the cases below. */
1198
1199 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1200 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1201 && ! dont_remove_subreg)
1202 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1203
1204 /* Similarly for OUT. */
1205 if (out != 0 && GET_CODE (out) == SUBREG
1206 && REG_P (SUBREG_REG (out))
1207 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1208 && ! dont_remove_subreg)
1209 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1210
1211 /* Narrow down the class of register wanted if that is
1212 desirable on this machine for efficiency. */
1213 {
1214 enum reg_class preferred_class = class;
1215
1216 if (in != 0)
1217 preferred_class = PREFERRED_RELOAD_CLASS (in, class);
1218
1219 /* Output reloads may need analogous treatment, different in detail. */
1220 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1221 if (out != 0)
1222 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1223 #endif
1224
1225 /* Discard what the target said if we cannot do it. */
1226 if (preferred_class != NO_REGS
1227 || (optional && type == RELOAD_FOR_OUTPUT))
1228 class = preferred_class;
1229 }
1230
1231 /* Make sure we use a class that can handle the actual pseudo
1232 inside any subreg. For example, on the 386, QImode regs
1233 can appear within SImode subregs. Although GENERAL_REGS
1234 can handle SImode, QImode needs a smaller class. */
1235 #ifdef LIMIT_RELOAD_CLASS
1236 if (in_subreg_loc)
1237 class = LIMIT_RELOAD_CLASS (inmode, class);
1238 else if (in != 0 && GET_CODE (in) == SUBREG)
1239 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), class);
1240
1241 if (out_subreg_loc)
1242 class = LIMIT_RELOAD_CLASS (outmode, class);
1243 if (out != 0 && GET_CODE (out) == SUBREG)
1244 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), class);
1245 #endif
1246
1247 /* Verify that this class is at least possible for the mode that
1248 is specified. */
1249 if (this_insn_is_asm)
1250 {
1251 enum machine_mode mode;
1252 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1253 mode = inmode;
1254 else
1255 mode = outmode;
1256 if (mode == VOIDmode)
1257 {
1258 error_for_asm (this_insn, "cannot reload integer constant "
1259 "operand in %<asm%>");
1260 mode = word_mode;
1261 if (in != 0)
1262 inmode = word_mode;
1263 if (out != 0)
1264 outmode = word_mode;
1265 }
1266 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1267 if (HARD_REGNO_MODE_OK (i, mode)
1268 && in_hard_reg_set_p (reg_class_contents[(int) class], mode, i))
1269 break;
1270 if (i == FIRST_PSEUDO_REGISTER)
1271 {
1272 error_for_asm (this_insn, "impossible register constraint "
1273 "in %<asm%>");
1274 /* Avoid further trouble with this insn. */
1275 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1276 /* We used to continue here setting class to ALL_REGS, but it triggers
1277 a sanity check on i386 for:
1278 void foo(long double d)
1279 {
1280 asm("" :: "a" (d));
1281 }
1282 Returning zero here ought to be safe as we take care in
1283 find_reloads to not process the reloads when the instruction was
1284 replaced by USE. */
1285
1286 return 0;
1287 }
1288 }
1289
1290 /* Optional output reloads are always OK even if we have no register class,
1291 since the function of these reloads is only to have spill_reg_store etc.
1292 set, so that the storing insn can be deleted later. */
1293 gcc_assert (class != NO_REGS
1294 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1295
1296 i = find_reusable_reload (&in, out, class, type, opnum, dont_share);
1297
1298 if (i == n_reloads)
1299 {
1300 /* See if we need a secondary reload register to move between CLASS
1301 and IN or CLASS and OUT. Get the icode and push any required reloads
1302 needed for each of them if so. */
1303
1304 if (in != 0)
1305 secondary_in_reload
1306 = push_secondary_reload (1, in, opnum, optional, class, inmode, type,
1307 &secondary_in_icode, NULL);
1308 if (out != 0 && GET_CODE (out) != SCRATCH)
1309 secondary_out_reload
1310 = push_secondary_reload (0, out, opnum, optional, class, outmode,
1311 type, &secondary_out_icode, NULL);
1312
1313 /* We found no existing reload suitable for re-use.
1314 So add an additional reload. */
1315
1316 #ifdef SECONDARY_MEMORY_NEEDED
1317 /* If a memory location is needed for the copy, make one. */
1318 if (in != 0
1319 && (REG_P (in)
1320 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1321 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1322 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1323 class, inmode))
1324 get_secondary_mem (in, inmode, opnum, type);
1325 #endif
1326
1327 i = n_reloads;
1328 rld[i].in = in;
1329 rld[i].out = out;
1330 rld[i].class = class;
1331 rld[i].inmode = inmode;
1332 rld[i].outmode = outmode;
1333 rld[i].reg_rtx = 0;
1334 rld[i].optional = optional;
1335 rld[i].inc = 0;
1336 rld[i].nocombine = 0;
1337 rld[i].in_reg = inloc ? *inloc : 0;
1338 rld[i].out_reg = outloc ? *outloc : 0;
1339 rld[i].opnum = opnum;
1340 rld[i].when_needed = type;
1341 rld[i].secondary_in_reload = secondary_in_reload;
1342 rld[i].secondary_out_reload = secondary_out_reload;
1343 rld[i].secondary_in_icode = secondary_in_icode;
1344 rld[i].secondary_out_icode = secondary_out_icode;
1345 rld[i].secondary_p = 0;
1346
1347 n_reloads++;
1348
1349 #ifdef SECONDARY_MEMORY_NEEDED
1350 if (out != 0
1351 && (REG_P (out)
1352 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1353 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1354 && SECONDARY_MEMORY_NEEDED (class,
1355 REGNO_REG_CLASS (reg_or_subregno (out)),
1356 outmode))
1357 get_secondary_mem (out, outmode, opnum, type);
1358 #endif
1359 }
1360 else
1361 {
1362 /* We are reusing an existing reload,
1363 but we may have additional information for it.
1364 For example, we may now have both IN and OUT
1365 while the old one may have just one of them. */
1366
1367 /* The modes can be different. If they are, we want to reload in
1368 the larger mode, so that the value is valid for both modes. */
1369 if (inmode != VOIDmode
1370 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1371 rld[i].inmode = inmode;
1372 if (outmode != VOIDmode
1373 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1374 rld[i].outmode = outmode;
1375 if (in != 0)
1376 {
1377 rtx in_reg = inloc ? *inloc : 0;
1378 /* If we merge reloads for two distinct rtl expressions that
1379 are identical in content, there might be duplicate address
1380 reloads. Remove the extra set now, so that if we later find
1381 that we can inherit this reload, we can get rid of the
1382 address reloads altogether.
1383
1384 Do not do this if both reloads are optional since the result
1385 would be an optional reload which could potentially leave
1386 unresolved address replacements.
1387
1388 It is not sufficient to call transfer_replacements since
1389 choose_reload_regs will remove the replacements for address
1390 reloads of inherited reloads which results in the same
1391 problem. */
1392 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1393 && ! (rld[i].optional && optional))
1394 {
1395 /* We must keep the address reload with the lower operand
1396 number alive. */
1397 if (opnum > rld[i].opnum)
1398 {
1399 remove_address_replacements (in);
1400 in = rld[i].in;
1401 in_reg = rld[i].in_reg;
1402 }
1403 else
1404 remove_address_replacements (rld[i].in);
1405 }
1406 rld[i].in = in;
1407 rld[i].in_reg = in_reg;
1408 }
1409 if (out != 0)
1410 {
1411 rld[i].out = out;
1412 rld[i].out_reg = outloc ? *outloc : 0;
1413 }
1414 if (reg_class_subset_p (class, rld[i].class))
1415 rld[i].class = class;
1416 rld[i].optional &= optional;
1417 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1418 opnum, rld[i].opnum))
1419 rld[i].when_needed = RELOAD_OTHER;
1420 rld[i].opnum = MIN (rld[i].opnum, opnum);
1421 }
1422
1423 /* If the ostensible rtx being reloaded differs from the rtx found
1424 in the location to substitute, this reload is not safe to combine
1425 because we cannot reliably tell whether it appears in the insn. */
1426
1427 if (in != 0 && in != *inloc)
1428 rld[i].nocombine = 1;
1429
1430 #if 0
1431 /* This was replaced by changes in find_reloads_address_1 and the new
1432 function inc_for_reload, which go with a new meaning of reload_inc. */
1433
1434 /* If this is an IN/OUT reload in an insn that sets the CC,
1435 it must be for an autoincrement. It doesn't work to store
1436 the incremented value after the insn because that would clobber the CC.
1437 So we must do the increment of the value reloaded from,
1438 increment it, store it back, then decrement again. */
1439 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1440 {
1441 out = 0;
1442 rld[i].out = 0;
1443 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1444 /* If we did not find a nonzero amount-to-increment-by,
1445 that contradicts the belief that IN is being incremented
1446 in an address in this insn. */
1447 gcc_assert (rld[i].inc != 0);
1448 }
1449 #endif
1450
1451 /* If we will replace IN and OUT with the reload-reg,
1452 record where they are located so that substitution need
1453 not do a tree walk. */
1454
1455 if (replace_reloads)
1456 {
1457 if (inloc != 0)
1458 {
1459 struct replacement *r = &replacements[n_replacements++];
1460 r->what = i;
1461 r->subreg_loc = in_subreg_loc;
1462 r->where = inloc;
1463 r->mode = inmode;
1464 }
1465 if (outloc != 0 && outloc != inloc)
1466 {
1467 struct replacement *r = &replacements[n_replacements++];
1468 r->what = i;
1469 r->where = outloc;
1470 r->subreg_loc = out_subreg_loc;
1471 r->mode = outmode;
1472 }
1473 }
1474
1475 /* If this reload is just being introduced and it has both
1476 an incoming quantity and an outgoing quantity that are
1477 supposed to be made to match, see if either one of the two
1478 can serve as the place to reload into.
1479
1480 If one of them is acceptable, set rld[i].reg_rtx
1481 to that one. */
1482
1483 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1484 {
1485 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1486 inmode, outmode,
1487 rld[i].class, i,
1488 earlyclobber_operand_p (out));
1489
1490 /* If the outgoing register already contains the same value
1491 as the incoming one, we can dispense with loading it.
1492 The easiest way to tell the caller that is to give a phony
1493 value for the incoming operand (same as outgoing one). */
1494 if (rld[i].reg_rtx == out
1495 && (REG_P (in) || CONSTANT_P (in))
1496 && 0 != find_equiv_reg (in, this_insn, 0, REGNO (out),
1497 static_reload_reg_p, i, inmode))
1498 rld[i].in = out;
1499 }
1500
1501 /* If this is an input reload and the operand contains a register that
1502 dies in this insn and is used nowhere else, see if it is of the right class
1503 to be used for this reload. Use it if so. (This occurs most commonly
1504 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1505 this if it is also an output reload that mentions the register unless
1506 the output is a SUBREG that clobbers an entire register.
1507
1508 Note that the operand might be one of the spill regs, if it is a
1509 pseudo reg and we are in a block where spilling has not taken place.
1510 But if there is no spilling in this block, that is OK.
1511 An explicitly used hard reg cannot be a spill reg. */
1512
1513 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1514 {
1515 rtx note;
1516 int regno;
1517 enum machine_mode rel_mode = inmode;
1518
1519 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1520 rel_mode = outmode;
1521
1522 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1523 if (REG_NOTE_KIND (note) == REG_DEAD
1524 && REG_P (XEXP (note, 0))
1525 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1526 && reg_mentioned_p (XEXP (note, 0), in)
1527 /* Check that a former pseudo is valid; see find_dummy_reload. */
1528 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1529 || (!bitmap_bit_p (DF_LIVE_OUT (ENTRY_BLOCK_PTR),
1530 ORIGINAL_REGNO (XEXP (note, 0)))
1531 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1532 && ! refers_to_regno_for_reload_p (regno,
1533 end_hard_regno (rel_mode,
1534 regno),
1535 PATTERN (this_insn), inloc)
1536 /* If this is also an output reload, IN cannot be used as
1537 the reload register if it is set in this insn unless IN
1538 is also OUT. */
1539 && (out == 0 || in == out
1540 || ! hard_reg_set_here_p (regno,
1541 end_hard_regno (rel_mode, regno),
1542 PATTERN (this_insn)))
1543 /* ??? Why is this code so different from the previous?
1544 Is there any simple coherent way to describe the two together?
1545 What's going on here? */
1546 && (in != out
1547 || (GET_CODE (in) == SUBREG
1548 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1549 / UNITS_PER_WORD)
1550 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1551 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1552 /* Make sure the operand fits in the reg that dies. */
1553 && (GET_MODE_SIZE (rel_mode)
1554 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1555 && HARD_REGNO_MODE_OK (regno, inmode)
1556 && HARD_REGNO_MODE_OK (regno, outmode))
1557 {
1558 unsigned int offs;
1559 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1560 hard_regno_nregs[regno][outmode]);
1561
1562 for (offs = 0; offs < nregs; offs++)
1563 if (fixed_regs[regno + offs]
1564 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1565 regno + offs))
1566 break;
1567
1568 if (offs == nregs
1569 && (! (refers_to_regno_for_reload_p
1570 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1571 || can_reload_into (in, regno, inmode)))
1572 {
1573 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1574 break;
1575 }
1576 }
1577 }
1578
1579 if (out)
1580 output_reloadnum = i;
1581
1582 return i;
1583 }
1584
1585 /* Record an additional place we must replace a value
1586 for which we have already recorded a reload.
1587 RELOADNUM is the value returned by push_reload
1588 when the reload was recorded.
1589 This is used in insn patterns that use match_dup. */
1590
1591 static void
1592 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1593 {
1594 if (replace_reloads)
1595 {
1596 struct replacement *r = &replacements[n_replacements++];
1597 r->what = reloadnum;
1598 r->where = loc;
1599 r->subreg_loc = 0;
1600 r->mode = mode;
1601 }
1602 }
1603
1604 /* Duplicate any replacement we have recorded to apply at
1605 location ORIG_LOC to also be performed at DUP_LOC.
1606 This is used in insn patterns that use match_dup. */
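/* Illustrative example (the operand numbers and modes are hypothetical):
   in a pattern such as (set (match_operand:SI 0 "" "")
   (plus:SI (match_dup 0) (match_operand:SI 1 "" ""))), a replacement
   recorded at the location of operand 0 must also be performed at the
   location of the match_dup, so that both occurrences end up referring
   to the chosen reload register.  */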
1607
1608 static void
1609 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1610 {
1611 int i, n = n_replacements;
1612
1613 for (i = 0; i < n; i++)
1614 {
1615 struct replacement *r = &replacements[i];
1616 if (r->where == orig_loc)
1617 push_replacement (dup_loc, r->what, r->mode);
1618 }
1619 }
1620 \f
1621 /* Transfer all replacements that used to be in reload FROM to be in
1622 reload TO. */
1623
1624 void
1625 transfer_replacements (int to, int from)
1626 {
1627 int i;
1628
1629 for (i = 0; i < n_replacements; i++)
1630 if (replacements[i].what == from)
1631 replacements[i].what = to;
1632 }
1633 \f
1634 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1635 or a subpart of it. If we have any replacements registered for IN_RTX,
1636 cancel the reloads that were supposed to load them.
1637 Return nonzero if we canceled any reloads. */
1638 int
1639 remove_address_replacements (rtx in_rtx)
1640 {
1641 int i, j;
1642 char reload_flags[MAX_RELOADS];
1643 int something_changed = 0;
1644
1645 memset (reload_flags, 0, sizeof reload_flags);
1646 for (i = 0, j = 0; i < n_replacements; i++)
1647 {
1648 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1649 reload_flags[replacements[i].what] |= 1;
1650 else
1651 {
1652 replacements[j++] = replacements[i];
1653 reload_flags[replacements[i].what] |= 2;
1654 }
1655 }
1656 /* Note that the following store must be done before the recursive calls. */
1657 n_replacements = j;
1658
1659 for (i = n_reloads - 1; i >= 0; i--)
1660 {
1661 if (reload_flags[i] == 1)
1662 {
1663 deallocate_reload_reg (i);
1664 remove_address_replacements (rld[i].in);
1665 rld[i].in = 0;
1666 something_changed = 1;
1667 }
1668 }
1669 return something_changed;
1670 }
1671 \f
1672 /* If there is only one output reload, and it is not for an earlyclobber
1673 operand, try to combine it with a (logically unrelated) input reload
1674 to reduce the number of reload registers needed.
1675
1676 This is safe if the input reload does not appear in
1677 the value being output-reloaded, because this implies
1678 it is not needed any more once the original insn completes.
1679
1680 If that doesn't work, see if we can use any of the registers that
1681 die in this insn as a reload register. We can if one is of the right
1682 class and does not appear in the value being output-reloaded. */
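/* Illustrative sketch (pseudo numbers are hypothetical): for an insn like
   (set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 5))) where pseudos 70
   and 71 both live in memory, the input reload of 71 and the output reload
   of 70 can share one reload register R: load 71's slot into R, rewrite the
   insn as (set R (plus:SI R (const_int 5))), then store R into 70's slot.
   The loaded copy of 71 is not needed once the insn has executed, and 71
   does not appear in the output operand, so sharing R is safe.  */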
1683
1684 static void
1685 combine_reloads (void)
1686 {
1687 int i, regno;
1688 int output_reload = -1;
1689 int secondary_out = -1;
1690 rtx note;
1691
1692 /* Find the output reload; return unless there is exactly one
1693 and that one is mandatory. */
1694
1695 for (i = 0; i < n_reloads; i++)
1696 if (rld[i].out != 0)
1697 {
1698 if (output_reload >= 0)
1699 return;
1700 output_reload = i;
1701 }
1702
1703 if (output_reload < 0 || rld[output_reload].optional)
1704 return;
1705
1706 /* An input-output reload isn't combinable. */
1707
1708 if (rld[output_reload].in != 0)
1709 return;
1710
1711 /* If this reload is for an earlyclobber operand, we can't do anything. */
1712 if (earlyclobber_operand_p (rld[output_reload].out))
1713 return;
1714
1715 /* If there is a reload for part of the address of this operand, we would
1716 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1717 its life to the point where doing this combine would not lower the
1718 number of spill registers needed. */
1719 for (i = 0; i < n_reloads; i++)
1720 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1721 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1722 && rld[i].opnum == rld[output_reload].opnum)
1723 return;
1724
1725 /* Check each input reload; can we combine it? */
1726
1727 for (i = 0; i < n_reloads; i++)
1728 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1729 /* Life span of this reload must not extend past main insn. */
1730 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1731 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1732 && rld[i].when_needed != RELOAD_OTHER
1733 && (CLASS_MAX_NREGS (rld[i].class, rld[i].inmode)
1734 == CLASS_MAX_NREGS (rld[output_reload].class,
1735 rld[output_reload].outmode))
1736 && rld[i].inc == 0
1737 && rld[i].reg_rtx == 0
1738 #ifdef SECONDARY_MEMORY_NEEDED
1739 /* Don't combine two reloads with different secondary
1740 memory locations. */
1741 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1742 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1743 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1744 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1745 #endif
1746 && (SMALL_REGISTER_CLASSES
1747 ? (rld[i].class == rld[output_reload].class)
1748 : (reg_class_subset_p (rld[i].class,
1749 rld[output_reload].class)
1750 || reg_class_subset_p (rld[output_reload].class,
1751 rld[i].class)))
1752 && (MATCHES (rld[i].in, rld[output_reload].out)
1753 /* Args reversed because the first arg seems to be
1754 the one that we imagine being modified
1755 while the second is the one that might be affected. */
1756 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1757 rld[i].in)
1758 /* However, if the input is a register that appears inside
1759 the output, then we also can't share.
1760 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1761 If the same reload reg is used for both reg 69 and the
1762 result to be stored in memory, then that result
1763 will clobber the address of the memory ref. */
1764 && ! (REG_P (rld[i].in)
1765 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1766 rld[output_reload].out))))
1767 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1768 rld[i].when_needed != RELOAD_FOR_INPUT)
1769 && (reg_class_size[(int) rld[i].class]
1770 || SMALL_REGISTER_CLASSES)
1771 /* We will allow making things slightly worse by combining an
1772 input and an output, but no worse than that. */
1773 && (rld[i].when_needed == RELOAD_FOR_INPUT
1774 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1775 {
1776 int j;
1777
1778 /* We have found a reload to combine with! */
1779 rld[i].out = rld[output_reload].out;
1780 rld[i].out_reg = rld[output_reload].out_reg;
1781 rld[i].outmode = rld[output_reload].outmode;
1782 /* Mark the old output reload as inoperative. */
1783 rld[output_reload].out = 0;
1784 /* The combined reload is needed for the entire insn. */
1785 rld[i].when_needed = RELOAD_OTHER;
1786 /* If the output reload had a secondary reload, copy it. */
1787 if (rld[output_reload].secondary_out_reload != -1)
1788 {
1789 rld[i].secondary_out_reload
1790 = rld[output_reload].secondary_out_reload;
1791 rld[i].secondary_out_icode
1792 = rld[output_reload].secondary_out_icode;
1793 }
1794
1795 #ifdef SECONDARY_MEMORY_NEEDED
1796 /* Copy any secondary MEM. */
1797 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1798 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1799 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1800 #endif
1801 /* If required, minimize the register class. */
1802 if (reg_class_subset_p (rld[output_reload].class,
1803 rld[i].class))
1804 rld[i].class = rld[output_reload].class;
1805
1806 /* Transfer all replacements from the old reload to the combined. */
1807 for (j = 0; j < n_replacements; j++)
1808 if (replacements[j].what == output_reload)
1809 replacements[j].what = i;
1810
1811 return;
1812 }
1813
1814 /* If this insn has only one operand that is modified or written (assumed
1815 to be the first), it must be the one corresponding to this reload. It
1816 is safe to use anything that dies in this insn for that output provided
1817 that it does not occur in the output (we already know it isn't an
1818 earlyclobber). If this is an asm insn, give up. */
1819
1820 if (INSN_CODE (this_insn) == -1)
1821 return;
1822
1823 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1824 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1825 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1826 return;
1827
1828 /* See if some hard register that dies in this insn and is not used in
1829 the output is of the right class. This only works if the register we
1830 pick up can fully hold our output reload. */
1831 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1832 if (REG_NOTE_KIND (note) == REG_DEAD
1833 && REG_P (XEXP (note, 0))
1834 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1835 rld[output_reload].out)
1836 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1837 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1838 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].class],
1839 regno)
1840 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1841 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1842 /* Ensure that a secondary or tertiary reload for this output
1843 won't want this register. */
1844 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1845 || (!(TEST_HARD_REG_BIT
1846 (reg_class_contents[(int) rld[secondary_out].class], regno))
1847 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1848 || !(TEST_HARD_REG_BIT
1849 (reg_class_contents[(int) rld[secondary_out].class],
1850 regno)))))
1851 && !fixed_regs[regno]
1852 /* Check that a former pseudo is valid; see find_dummy_reload. */
1853 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1854 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1855 ORIGINAL_REGNO (XEXP (note, 0)))
1856 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1857 {
1858 rld[output_reload].reg_rtx
1859 = gen_rtx_REG (rld[output_reload].outmode, regno);
1860 return;
1861 }
1862 }
1863 \f
1864 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1865 See if one of IN and OUT is a register that may be used;
1866 this is desirable since a spill-register won't be needed.
1867 If so, return the register rtx that proves acceptable.
1868
1869 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1870 CLASS is the register class required for the reload.
1871
1872 If FOR_REAL is >= 0, it is the number of the reload,
1873 and in some cases when it can be discovered that OUT doesn't need
1874 to be computed, clear out rld[FOR_REAL].out.
1875
1876 If FOR_REAL is -1, this should not be done, because this call
1877 is just to see if a register can be found, not to find and install it.
1878
1879 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1880 puts an additional constraint on being able to use IN for OUT since
1881 IN must not appear elsewhere in the insn (it is assumed that IN itself
1882 is safe from the earlyclobber). */
1883
1884 static rtx
1885 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1886 enum machine_mode inmode, enum machine_mode outmode,
1887 enum reg_class class, int for_real, int earlyclobber)
1888 {
1889 rtx in = real_in;
1890 rtx out = real_out;
1891 int in_offset = 0;
1892 int out_offset = 0;
1893 rtx value = 0;
1894
1895 /* If operands exceed a word, we can't use either of them
1896 unless they have the same size. */
1897 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1898 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1899 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1900 return 0;
1901
1902 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1903 respectively refers to a hard register. */
1904
1905 /* Find the inside of any subregs. */
1906 while (GET_CODE (out) == SUBREG)
1907 {
1908 if (REG_P (SUBREG_REG (out))
1909 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1910 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1911 GET_MODE (SUBREG_REG (out)),
1912 SUBREG_BYTE (out),
1913 GET_MODE (out));
1914 out = SUBREG_REG (out);
1915 }
1916 while (GET_CODE (in) == SUBREG)
1917 {
1918 if (REG_P (SUBREG_REG (in))
1919 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1920 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1921 GET_MODE (SUBREG_REG (in)),
1922 SUBREG_BYTE (in),
1923 GET_MODE (in));
1924 in = SUBREG_REG (in);
1925 }
1926
1927 /* Narrow down the reg class, the same way push_reload will;
1928 otherwise we might find a dummy now, but push_reload won't. */
1929 {
1930 enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, class);
1931 if (preferred_class != NO_REGS)
1932 class = preferred_class;
1933 }
1934
1935 /* See if OUT will do. */
1936 if (REG_P (out)
1937 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1938 {
1939 unsigned int regno = REGNO (out) + out_offset;
1940 unsigned int nwords = hard_regno_nregs[regno][outmode];
1941 rtx saved_rtx;
1942
1943 /* When we consider whether the insn uses OUT,
1944 ignore references within IN. They don't prevent us
1945 from copying IN into OUT, because those refs would
1946 move into the insn that reloads IN.
1947
1948 However, we only ignore IN in its role as this reload.
1949 If the insn uses IN elsewhere and it contains OUT,
1950 that counts. We can't be sure it's the "same" operand
1951 so it might not go through this reload. */
1952 saved_rtx = *inloc;
1953 *inloc = const0_rtx;
1954
1955 if (regno < FIRST_PSEUDO_REGISTER
1956 && HARD_REGNO_MODE_OK (regno, outmode)
1957 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1958 PATTERN (this_insn), outloc))
1959 {
1960 unsigned int i;
1961
1962 for (i = 0; i < nwords; i++)
1963 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1964 regno + i))
1965 break;
1966
1967 if (i == nwords)
1968 {
1969 if (REG_P (real_out))
1970 value = real_out;
1971 else
1972 value = gen_rtx_REG (outmode, regno);
1973 }
1974 }
1975
1976 *inloc = saved_rtx;
1977 }
1978
1979 /* Consider using IN if OUT was not acceptable
1980 or if OUT dies in this insn (like the quotient in a divmod insn).
1981 We can't use IN unless it dies in this insn,
1982 which means we must know accurately which hard regs are live.
1983 Also, the result can't go in IN if IN is used within OUT,
1984 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
1985 if (hard_regs_live_known
1986 && REG_P (in)
1987 && REGNO (in) < FIRST_PSEUDO_REGISTER
1988 && (value == 0
1989 || find_reg_note (this_insn, REG_UNUSED, real_out))
1990 && find_reg_note (this_insn, REG_DEAD, real_in)
1991 && !fixed_regs[REGNO (in)]
1992 && HARD_REGNO_MODE_OK (REGNO (in),
1993 /* The only case where out and real_out might
1994 have different modes is where real_out
1995 is a subreg, and in that case, out
1996 has a real mode. */
1997 (GET_MODE (out) != VOIDmode
1998 ? GET_MODE (out) : outmode))
1999 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2000 /* However only do this if we can be sure that this input
2001 operand doesn't correspond with an uninitialized pseudo.
2002 global can assign some hardreg to it that is the same as
2003 the one assigned to a different, also live pseudo (as it
2004 can ignore the conflict). We must never introduce writes
2005 to such hardregs, as they would clobber the other live
2006 pseudo. See PR 20973. */
2007 || (!bitmap_bit_p (DF_LIVE_OUT (ENTRY_BLOCK_PTR),
2008 ORIGINAL_REGNO (in))
2009 /* Similarly, only do this if we can be sure that the death
2010 note is still valid. global can assign some hardreg to
2011 the pseudo referenced in the note and simultaneously a
2012 subword of this hardreg to a different, also live pseudo,
2013 because only another subword of the hardreg is actually
2014 used in the insn. This cannot happen if the pseudo has
2015 been assigned exactly one hardreg. See PR 33732. */
2016 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2017 {
2018 unsigned int regno = REGNO (in) + in_offset;
2019 unsigned int nwords = hard_regno_nregs[regno][inmode];
2020
2021 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2022 && ! hard_reg_set_here_p (regno, regno + nwords,
2023 PATTERN (this_insn))
2024 && (! earlyclobber
2025 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2026 PATTERN (this_insn), inloc)))
2027 {
2028 unsigned int i;
2029
2030 for (i = 0; i < nwords; i++)
2031 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2032 regno + i))
2033 break;
2034
2035 if (i == nwords)
2036 {
2037 /* If we were going to use OUT as the reload reg
2038 and changed our mind, it means OUT is a dummy that
2039 dies here. So don't bother copying value to it. */
2040 if (for_real >= 0 && value == real_out)
2041 rld[for_real].out = 0;
2042 if (REG_P (real_in))
2043 value = real_in;
2044 else
2045 value = gen_rtx_REG (inmode, regno);
2046 }
2047 }
2048 }
2049
2050 return value;
2051 }
2052 \f
2053 /* This page contains subroutines used mainly for determining
2054 whether the IN or an OUT of a reload can serve as the
2055 reload register. */
2056
2057 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2058
2059 int
2060 earlyclobber_operand_p (rtx x)
2061 {
2062 int i;
2063
2064 for (i = 0; i < n_earlyclobbers; i++)
2065 if (reload_earlyclobbers[i] == x)
2066 return 1;
2067
2068 return 0;
2069 }
2070
2071 /* Return 1 if expression X alters a hard reg in the range
2072 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2073 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2074 X should be the body of an instruction. */
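/* Illustrative example (register numbers are hypothetical, and SImode is
   assumed to occupy a single hard register): for a body
   (parallel [(set (reg:SI 3) (reg:SI 5)) (clobber (reg:SI 4))]),
   hard_reg_set_here_p (3, 5, body) returns 1, since hard regs 3 and 4
   are both altered and both fall in the range [3, 5).  */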
2075
2076 static int
2077 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2078 {
2079 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2080 {
2081 rtx op0 = SET_DEST (x);
2082
2083 while (GET_CODE (op0) == SUBREG)
2084 op0 = SUBREG_REG (op0);
2085 if (REG_P (op0))
2086 {
2087 unsigned int r = REGNO (op0);
2088
2089 /* See if this reg overlaps range under consideration. */
2090 if (r < end_regno
2091 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2092 return 1;
2093 }
2094 }
2095 else if (GET_CODE (x) == PARALLEL)
2096 {
2097 int i = XVECLEN (x, 0) - 1;
2098
2099 for (; i >= 0; i--)
2100 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2101 return 1;
2102 }
2103
2104 return 0;
2105 }
2106
2107 /* Return 1 if ADDR is a valid memory address for mode MODE,
2108 and check that each pseudo reg has the proper kind of
2109 hard reg. */
2110
2111 int
2112 strict_memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
2113 {
2114 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2115 return 0;
2116
2117 win:
2118 return 1;
2119 }
2120 \f
2121 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2122 if they are the same hard reg, and has special hacks for
2123 autoincrement and autodecrement.
2124 This is specifically intended for find_reloads to use
2125 in determining whether two operands match.
2126 X is the operand whose number is the lower of the two.
2127
2128 The value is 2 if Y contains a pre-increment that matches
2129 a non-incrementing address in X. */
2130
2131 /* ??? To be completely correct, we should arrange to pass
2132 for X the output operand and for Y the input operand.
2133 For now, we assume that the output operand has the lower number
2134 because that is natural in (SET output (... input ...)). */
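/* Illustrative example (register numbers are hypothetical): matching
   X = (mem:SI (reg:SI 2)) against Y = (mem:SI (pre_inc:SI (reg:SI 2)))
   returns 2, because Y contains a pre-increment of an address that X
   uses without modification.  */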
2135
2136 int
2137 operands_match_p (rtx x, rtx y)
2138 {
2139 int i;
2140 RTX_CODE code = GET_CODE (x);
2141 const char *fmt;
2142 int success_2;
2143
2144 if (x == y)
2145 return 1;
2146 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2147 && (REG_P (y) || (GET_CODE (y) == SUBREG
2148 && REG_P (SUBREG_REG (y)))))
2149 {
2150 int j;
2151
2152 if (code == SUBREG)
2153 {
2154 i = REGNO (SUBREG_REG (x));
2155 if (i >= FIRST_PSEUDO_REGISTER)
2156 goto slow;
2157 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2158 GET_MODE (SUBREG_REG (x)),
2159 SUBREG_BYTE (x),
2160 GET_MODE (x));
2161 }
2162 else
2163 i = REGNO (x);
2164
2165 if (GET_CODE (y) == SUBREG)
2166 {
2167 j = REGNO (SUBREG_REG (y));
2168 if (j >= FIRST_PSEUDO_REGISTER)
2169 goto slow;
2170 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2171 GET_MODE (SUBREG_REG (y)),
2172 SUBREG_BYTE (y),
2173 GET_MODE (y));
2174 }
2175 else
2176 j = REGNO (y);
2177
2178 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2179 multiple hard register group of scalar integer registers, so that
2180 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2181 register. */
2182 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2183 && SCALAR_INT_MODE_P (GET_MODE (x))
2184 && i < FIRST_PSEUDO_REGISTER)
2185 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2186 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2187 && SCALAR_INT_MODE_P (GET_MODE (y))
2188 && j < FIRST_PSEUDO_REGISTER)
2189 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2190
2191 return i == j;
2192 }
2193 /* If two operands must match, because they are really a single
2194 operand of an assembler insn, then two postincrements are invalid
2195 because the assembler insn would increment only once.
2196 On the other hand, a postincrement matches ordinary indexing
2197 if the postincrement is the output operand. */
2198 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2199 return operands_match_p (XEXP (x, 0), y);
2200 /* Two preincrements are invalid
2201 because the assembler insn would increment only once.
2202 On the other hand, a preincrement matches ordinary indexing
2203 if the preincrement is the input operand.
2204 In this case, return 2, since some callers need to do special
2205 things when this happens. */
2206 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2207 || GET_CODE (y) == PRE_MODIFY)
2208 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2209
2210 slow:
2211
2212 /* Now we have disposed of all the cases in which different rtx codes
2213 can match. */
2214 if (code != GET_CODE (y))
2215 return 0;
2216
2217 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2218 if (GET_MODE (x) != GET_MODE (y))
2219 return 0;
2220
2221 switch (code)
2222 {
2223 case CONST_INT:
2224 case CONST_DOUBLE:
2225 case CONST_FIXED:
2226 return 0;
2227
2228 case LABEL_REF:
2229 return XEXP (x, 0) == XEXP (y, 0);
2230 case SYMBOL_REF:
2231 return XSTR (x, 0) == XSTR (y, 0);
2232
2233 default:
2234 break;
2235 }
2236
2237 /* Compare the elements. If any pair of corresponding elements
2238 fails to match, return 0 for the whole thing. */
2239
2240 success_2 = 0;
2241 fmt = GET_RTX_FORMAT (code);
2242 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2243 {
2244 int val, j;
2245 switch (fmt[i])
2246 {
2247 case 'w':
2248 if (XWINT (x, i) != XWINT (y, i))
2249 return 0;
2250 break;
2251
2252 case 'i':
2253 if (XINT (x, i) != XINT (y, i))
2254 return 0;
2255 break;
2256
2257 case 'e':
2258 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2259 if (val == 0)
2260 return 0;
2261 /* If any subexpression returns 2,
2262 we should return 2 if we are successful. */
2263 if (val == 2)
2264 success_2 = 1;
2265 break;
2266
2267 case '0':
2268 break;
2269
2270 case 'E':
2271 if (XVECLEN (x, i) != XVECLEN (y, i))
2272 return 0;
2273 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2274 {
2275 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2276 if (val == 0)
2277 return 0;
2278 if (val == 2)
2279 success_2 = 1;
2280 }
2281 break;
2282
2283 /* It is believed that rtx's at this level will never
2284 contain anything but integers and other rtx's,
2285 except for within LABEL_REFs and SYMBOL_REFs. */
2286 default:
2287 gcc_unreachable ();
2288 }
2289 }
2290 return 1 + success_2;
2291 }
2292 \f
2293 /* Describe the range of registers or memory referenced by X.
2294 If X is a register, set REG_FLAG and put the first register
2295 number into START and the last plus one into END.
2296 If X is a memory reference, put a base address into BASE
2297 and a range of integer offsets into START and END.
2298 If X is pushing on the stack, we can assume it causes no trouble,
2299 so we set the SAFE field. */
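/* Illustrative example (assuming SImode is 4 bytes; register numbers are
   hypothetical): decomposing (mem:SI (plus:SI (reg:SI 6) (const_int 8)))
   gives base = (reg:SI 6), start = 8 and end = 12, i.e. the byte range
   [8, 12) relative to that base.  */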
2300
2301 static struct decomposition
2302 decompose (rtx x)
2303 {
2304 struct decomposition val;
2305 int all_const = 0;
2306
2307 memset (&val, 0, sizeof (val));
2308
2309 switch (GET_CODE (x))
2310 {
2311 case MEM:
2312 {
2313 rtx base = NULL_RTX, offset = 0;
2314 rtx addr = XEXP (x, 0);
2315
2316 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2317 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2318 {
2319 val.base = XEXP (addr, 0);
2320 val.start = -GET_MODE_SIZE (GET_MODE (x));
2321 val.end = GET_MODE_SIZE (GET_MODE (x));
2322 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2323 return val;
2324 }
2325
2326 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2327 {
2328 if (GET_CODE (XEXP (addr, 1)) == PLUS
2329 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2330 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2331 {
2332 val.base = XEXP (addr, 0);
2333 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2334 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2335 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2336 return val;
2337 }
2338 }
2339
2340 if (GET_CODE (addr) == CONST)
2341 {
2342 addr = XEXP (addr, 0);
2343 all_const = 1;
2344 }
2345 if (GET_CODE (addr) == PLUS)
2346 {
2347 if (CONSTANT_P (XEXP (addr, 0)))
2348 {
2349 base = XEXP (addr, 1);
2350 offset = XEXP (addr, 0);
2351 }
2352 else if (CONSTANT_P (XEXP (addr, 1)))
2353 {
2354 base = XEXP (addr, 0);
2355 offset = XEXP (addr, 1);
2356 }
2357 }
2358
2359 if (offset == 0)
2360 {
2361 base = addr;
2362 offset = const0_rtx;
2363 }
2364 if (GET_CODE (offset) == CONST)
2365 offset = XEXP (offset, 0);
2366 if (GET_CODE (offset) == PLUS)
2367 {
2368 if (GET_CODE (XEXP (offset, 0)) == CONST_INT)
2369 {
2370 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2371 offset = XEXP (offset, 0);
2372 }
2373 else if (GET_CODE (XEXP (offset, 1)) == CONST_INT)
2374 {
2375 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2376 offset = XEXP (offset, 1);
2377 }
2378 else
2379 {
2380 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2381 offset = const0_rtx;
2382 }
2383 }
2384 else if (GET_CODE (offset) != CONST_INT)
2385 {
2386 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2387 offset = const0_rtx;
2388 }
2389
2390 if (all_const && GET_CODE (base) == PLUS)
2391 base = gen_rtx_CONST (GET_MODE (base), base);
2392
2393 gcc_assert (GET_CODE (offset) == CONST_INT);
2394
2395 val.start = INTVAL (offset);
2396 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2397 val.base = base;
2398 }
2399 break;
2400
2401 case REG:
2402 val.reg_flag = 1;
2403 val.start = true_regnum (x);
2404 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2405 {
2406 /* A pseudo with no hard reg. */
2407 val.start = REGNO (x);
2408 val.end = val.start + 1;
2409 }
2410 else
2411 /* A hard reg. */
2412 val.end = end_hard_regno (GET_MODE (x), val.start);
2413 break;
2414
2415 case SUBREG:
2416 if (!REG_P (SUBREG_REG (x)))
2417 /* This could be more precise, but it's good enough. */
2418 return decompose (SUBREG_REG (x));
2419 val.reg_flag = 1;
2420 val.start = true_regnum (x);
2421 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2422 return decompose (SUBREG_REG (x));
2423 else
2424 /* A hard reg. */
2425 val.end = val.start + subreg_nregs (x);
2426 break;
2427
2428 case SCRATCH:
2429 /* This hasn't been assigned yet, so it can't conflict yet. */
2430 val.safe = 1;
2431 break;
2432
2433 default:
2434 gcc_assert (CONSTANT_P (x));
2435 val.safe = 1;
2436 break;
2437 }
2438 return val;
2439 }
2440
2441 /* Return 1 if altering Y will not modify the value of X.
2442 Y is also described by YDATA, which should be decompose (Y). */
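/* Illustrative example (assuming SImode is 4 bytes): two frame slots
   (mem:SI (plus:SI (reg fp) (const_int -4))) and
   (mem:SI (plus:SI (reg fp) (const_int -8))) decompose to the same base
   with disjoint [start, end) ranges, so altering either one leaves the
   other unchanged and immune_p returns 1.  */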
2443
2444 static int
2445 immune_p (rtx x, rtx y, struct decomposition ydata)
2446 {
2447 struct decomposition xdata;
2448
2449 if (ydata.reg_flag)
2450 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2451 if (ydata.safe)
2452 return 1;
2453
2454 gcc_assert (MEM_P (y));
2455 /* If Y is memory and X is not, Y can't affect X. */
2456 if (!MEM_P (x))
2457 return 1;
2458
2459 xdata = decompose (x);
2460
2461 if (! rtx_equal_p (xdata.base, ydata.base))
2462 {
2463 /* If bases are distinct symbolic constants, there is no overlap. */
2464 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2465 return 1;
2466 /* Constants and stack slots never overlap. */
2467 if (CONSTANT_P (xdata.base)
2468 && (ydata.base == frame_pointer_rtx
2469 || ydata.base == hard_frame_pointer_rtx
2470 || ydata.base == stack_pointer_rtx))
2471 return 1;
2472 if (CONSTANT_P (ydata.base)
2473 && (xdata.base == frame_pointer_rtx
2474 || xdata.base == hard_frame_pointer_rtx
2475 || xdata.base == stack_pointer_rtx))
2476 return 1;
2477 /* If either base is variable, we don't know anything. */
2478 return 0;
2479 }
2480
2481 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2482 }
2483
2484 /* Similar, but calls decompose. */
2485
2486 int
2487 safe_from_earlyclobber (rtx op, rtx clobber)
2488 {
2489 struct decomposition early_data;
2490
2491 early_data = decompose (clobber);
2492 return immune_p (op, clobber, early_data);
2493 }
2494 \f
2495 /* Main entry point of this file: search the body of INSN
2496 for values that need reloading and record them with push_reload.
2497 REPLACE nonzero means record also where the values occur
2498 so that subst_reloads can be used.
2499
2500 IND_LEVELS says how many levels of indirection are supported by this
2501 machine; a value of zero means that a memory reference is not a valid
2502 memory address.
2503
2504 LIVE_KNOWN says we have valid information about which hard
2505 regs are live at each point in the program; this is true when
2506 we are called from global_alloc but false when stupid register
2507 allocation has been done.
2508
2509 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2510 which is nonnegative if the reg has been commandeered for reloading into.
2511 It is copied into STATIC_RELOAD_REG_P and referenced from there
2512 by various subroutines.
2513
2514 Return TRUE if some operands need to be changed, because of swapping
2515 commutative operands, reg_equiv_address substitution, or whatever. */
2516
2517 int
2518 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2519 short *reload_reg_p)
2520 {
2521 int insn_code_number;
2522 int i, j;
2523 int noperands;
2524 /* These start out as the constraints for the insn
2525 and they are chewed up as we consider alternatives. */
2526 const char *constraints[MAX_RECOG_OPERANDS];
2527 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2528 a register. */
2529 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2530 char pref_or_nothing[MAX_RECOG_OPERANDS];
2531 /* Nonzero for a MEM operand whose entire address needs a reload.
2532 May be -1 to indicate the entire address may or may not need a reload. */
2533 int address_reloaded[MAX_RECOG_OPERANDS];
2534 /* Nonzero for an address operand that needs to be completely reloaded.
2535 May be -1 to indicate the entire operand may or may not need a reload. */
2536 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2537 /* Value of enum reload_type to use for operand. */
2538 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2539 /* Value of enum reload_type to use within address of operand. */
2540 enum reload_type address_type[MAX_RECOG_OPERANDS];
2541 /* Save the usage of each operand. */
2542 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2543 int no_input_reloads = 0, no_output_reloads = 0;
2544 int n_alternatives;
2545 int this_alternative[MAX_RECOG_OPERANDS];
2546 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2547 char this_alternative_win[MAX_RECOG_OPERANDS];
2548 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2549 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2550 int this_alternative_matches[MAX_RECOG_OPERANDS];
2551 int swapped;
2552 int goal_alternative[MAX_RECOG_OPERANDS];
2553 int this_alternative_number;
2554 int goal_alternative_number = 0;
2555 int operand_reloadnum[MAX_RECOG_OPERANDS];
2556 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2557 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2558 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2559 char goal_alternative_win[MAX_RECOG_OPERANDS];
2560 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2561 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2562 int goal_alternative_swapped;
2563 int best;
2564 int commutative;
2565 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2566 rtx substed_operand[MAX_RECOG_OPERANDS];
2567 rtx body = PATTERN (insn);
2568 rtx set = single_set (insn);
2569 int goal_earlyclobber = 0, this_earlyclobber;
2570 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2571 int retval = 0;
2572
2573 this_insn = insn;
2574 n_reloads = 0;
2575 n_replacements = 0;
2576 n_earlyclobbers = 0;
2577 replace_reloads = replace;
2578 hard_regs_live_known = live_known;
2579 static_reload_reg_p = reload_reg_p;
2580
2581 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2582 neither are insns that SET cc0. Insns that use CC0 are not allowed
2583 to have any input reloads. */
2584 if (JUMP_P (insn) || CALL_P (insn))
2585 no_output_reloads = 1;
2586
2587 #ifdef HAVE_cc0
2588 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2589 no_input_reloads = 1;
2590 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2591 no_output_reloads = 1;
2592 #endif
2593
2594 #ifdef SECONDARY_MEMORY_NEEDED
2595 /* The eliminated forms of any secondary memory locations are per-insn, so
2596 clear them out here. */
2597
2598 if (secondary_memlocs_elim_used)
2599 {
2600 memset (secondary_memlocs_elim, 0,
2601 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2602 secondary_memlocs_elim_used = 0;
2603 }
2604 #endif
2605
2606 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2607 is cheap to move between them. If it is not, there may not be an insn
2608 to do the copy, so we may need a reload. */
2609 if (GET_CODE (body) == SET
2610 && REG_P (SET_DEST (body))
2611 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2612 && REG_P (SET_SRC (body))
2613 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2614 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2615 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2616 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2617 return 0;
2618
2619 extract_insn (insn);
2620
2621 noperands = reload_n_operands = recog_data.n_operands;
2622 n_alternatives = recog_data.n_alternatives;
2623
2624 /* Just return "no reloads" if insn has no operands with constraints. */
2625 if (noperands == 0 || n_alternatives == 0)
2626 return 0;
2627
2628 insn_code_number = INSN_CODE (insn);
2629 this_insn_is_asm = insn_code_number < 0;
2630
2631 memcpy (operand_mode, recog_data.operand_mode,
2632 noperands * sizeof (enum machine_mode));
2633 memcpy (constraints, recog_data.constraints,
2634 noperands * sizeof (const char *));
2635
2636 commutative = -1;
2637
2638 /* If we will need to know, later, whether some pair of operands
2639 are the same, we must compare them now and save the result.
2640 Reloading the base and index registers will clobber them
2641 and afterward they will fail to match. */
2642
2643 for (i = 0; i < noperands; i++)
2644 {
2645 const char *p;
2646 int c;
2647 char *end;
2648
2649 substed_operand[i] = recog_data.operand[i];
2650 p = constraints[i];
2651
2652 modified[i] = RELOAD_READ;
2653
2654 /* Scan this operand's constraint to see if it is an output operand,
2655 an in-out operand, is commutative, or should match another. */
2656
2657 while ((c = *p))
2658 {
2659 p += CONSTRAINT_LEN (c, p);
2660 switch (c)
2661 {
2662 case '=':
2663 modified[i] = RELOAD_WRITE;
2664 break;
2665 case '+':
2666 modified[i] = RELOAD_READ_WRITE;
2667 break;
2668 case '%':
2669 {
2670 /* The last operand should not be marked commutative. */
2671 gcc_assert (i != noperands - 1);
2672
2673 /* We currently only support one commutative pair of
2674 operands. Some existing asm code currently uses more
2675 than one pair. Previously, that would usually work,
2676 but sometimes it would crash the compiler. We
2677 continue supporting that case as well as we can by
2678 silently ignoring all but the first pair. In the
2679 future we may handle it correctly. */
2680 if (commutative < 0)
2681 commutative = i;
2682 else
2683 gcc_assert (this_insn_is_asm);
2684 }
2685 break;
2686 /* Use of ISDIGIT is tempting here, but it may get expensive because
2687 of locale support we don't want. */
2688 case '0': case '1': case '2': case '3': case '4':
2689 case '5': case '6': case '7': case '8': case '9':
2690 {
2691 c = strtoul (p - 1, &end, 10);
2692 p = end;
2693
2694 operands_match[c][i]
2695 = operands_match_p (recog_data.operand[c],
2696 recog_data.operand[i]);
2697
2698 /* An operand may not match itself. */
2699 gcc_assert (c != i);
2700
2701 /* If C can be commuted with C+1, and C might need to match I,
2702 then C+1 might also need to match I. */
2703 if (commutative >= 0)
2704 {
2705 if (c == commutative || c == commutative + 1)
2706 {
2707 int other = c + (c == commutative ? 1 : -1);
2708 operands_match[other][i]
2709 = operands_match_p (recog_data.operand[other],
2710 recog_data.operand[i]);
2711 }
2712 if (i == commutative || i == commutative + 1)
2713 {
2714 int other = i + (i == commutative ? 1 : -1);
2715 operands_match[c][other]
2716 = operands_match_p (recog_data.operand[c],
2717 recog_data.operand[other]);
2718 }
2719 /* Note that C is supposed to be less than I.
2720 No need to consider altering both C and I because in
2721 that case we would alter one into the other. */
2722 }
2723 }
2724 }
2725 }
2726 }
2727
2728 /* Examine each operand that is a memory reference or memory address
2729 and reload parts of the addresses into index registers.
2730 Also here any references to pseudo regs that didn't get hard regs
2731 but are equivalent to constants get replaced in the insn itself
2732 with those constants. Nobody will ever see them again.
2733
2734 Finally, set up the preferred classes of each operand. */
2735
2736 for (i = 0; i < noperands; i++)
2737 {
2738 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2739
2740 address_reloaded[i] = 0;
2741 address_operand_reloaded[i] = 0;
2742 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2743 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2744 : RELOAD_OTHER);
2745 address_type[i]
2746 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2747 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2748 : RELOAD_OTHER);
2749
2750 if (*constraints[i] == 0)
2751 /* Ignore things like match_operator operands. */
2752 ;
2753 else if (constraints[i][0] == 'p'
2754 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2755 {
2756 address_operand_reloaded[i]
2757 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2758 recog_data.operand[i],
2759 recog_data.operand_loc[i],
2760 i, operand_type[i], ind_levels, insn);
2761
2762 /* If we now have a simple operand where we used to have a
2763 PLUS or MULT, re-recognize and try again. */
2764 if ((OBJECT_P (*recog_data.operand_loc[i])
2765 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2766 && (GET_CODE (recog_data.operand[i]) == MULT
2767 || GET_CODE (recog_data.operand[i]) == PLUS))
2768 {
2769 INSN_CODE (insn) = -1;
2770 retval = find_reloads (insn, replace, ind_levels, live_known,
2771 reload_reg_p);
2772 return retval;
2773 }
2774
2775 recog_data.operand[i] = *recog_data.operand_loc[i];
2776 substed_operand[i] = recog_data.operand[i];
2777
2778 /* Address operands are reloaded in their existing mode,
2779 no matter what is specified in the machine description. */
2780 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2781 }
2782 else if (code == MEM)
2783 {
2784 address_reloaded[i]
2785 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2786 recog_data.operand_loc[i],
2787 XEXP (recog_data.operand[i], 0),
2788 &XEXP (recog_data.operand[i], 0),
2789 i, address_type[i], ind_levels, insn);
2790 recog_data.operand[i] = *recog_data.operand_loc[i];
2791 substed_operand[i] = recog_data.operand[i];
2792 }
2793 else if (code == SUBREG)
2794 {
2795 rtx reg = SUBREG_REG (recog_data.operand[i]);
2796 rtx op
2797 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2798 ind_levels,
2799 set != 0
2800 && &SET_DEST (set) == recog_data.operand_loc[i],
2801 insn,
2802 &address_reloaded[i]);
2803
2804 /* If we made a MEM to load (a part of) the stack slot of a pseudo
2805 that didn't get a hard register, emit a USE with a REG_EQUAL
2806 note in front so that we might inherit a previous, possibly
2807 wider reload. */
2808
2809 if (replace
2810 && MEM_P (op)
2811 && REG_P (reg)
2812 && (GET_MODE_SIZE (GET_MODE (reg))
2813 >= GET_MODE_SIZE (GET_MODE (op)))
2814 && reg_equiv_constant[REGNO (reg)] == 0)
2815 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2816 insn),
2817 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2818
2819 substed_operand[i] = recog_data.operand[i] = op;
2820 }
2821 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2822 /* We can get a PLUS as an "operand" as a result of register
2823 elimination. See eliminate_regs and gen_reload. We handle
2824 a unary operator by reloading the operand. */
2825 substed_operand[i] = recog_data.operand[i]
2826 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2827 ind_levels, 0, insn,
2828 &address_reloaded[i]);
2829 else if (code == REG)
2830 {
2831 /* This is equivalent to calling find_reloads_toplev.
2832 The code is duplicated for speed.
2833 When we find a pseudo always equivalent to a constant,
2834 we replace it by the constant. We must be sure, however,
2835 that we don't try to replace it in the insn in which it
2836 is being set. */
2837 int regno = REGNO (recog_data.operand[i]);
2838 if (reg_equiv_constant[regno] != 0
2839 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2840 {
2841 /* Record the existing mode so that the check whether constants
2842 are allowed will work when operand_mode isn't specified. */
2843
2844 if (operand_mode[i] == VOIDmode)
2845 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2846
2847 substed_operand[i] = recog_data.operand[i]
2848 = reg_equiv_constant[regno];
2849 }
2850 if (reg_equiv_memory_loc[regno] != 0
2851 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2852 /* We need not give a valid is_set_dest argument since the case
2853 of a constant equivalence was checked above. */
2854 substed_operand[i] = recog_data.operand[i]
2855 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2856 ind_levels, 0, insn,
2857 &address_reloaded[i]);
2858 }
2859 /* If the operand is still a register (we didn't replace it with an
2860 equivalent), get the preferred class to reload it into. */
2861 code = GET_CODE (recog_data.operand[i]);
2862 preferred_class[i]
2863 = ((code == REG && REGNO (recog_data.operand[i])
2864 >= FIRST_PSEUDO_REGISTER)
2865 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2866 : NO_REGS);
2867 pref_or_nothing[i]
2868 = (code == REG
2869 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2870 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2871 }
2872
2873 /* If this is simply a copy from operand 1 to operand 0, merge the
2874 preferred classes for the operands. */
2875 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2876 && recog_data.operand[1] == SET_SRC (set))
2877 {
2878 preferred_class[0] = preferred_class[1]
2879 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2880 pref_or_nothing[0] |= pref_or_nothing[1];
2881 pref_or_nothing[1] |= pref_or_nothing[0];
2882 }
2883
2884 /* Now see what we need for pseudo-regs that didn't get hard regs
2885 or got the wrong kind of hard reg. For this, we must consider
2886 all the operands together against the register constraints. */
2887
2888 best = MAX_RECOG_OPERANDS * 2 + 600;
2889
2890 swapped = 0;
2891 goal_alternative_swapped = 0;
2892 try_swapped:
2893
2894 /* The constraints are made of several alternatives.
2895 Each operand's constraint looks like foo,bar,... with commas
2896 separating the alternatives. The first alternatives for all
2897 operands go together, the second alternatives go together, etc.
2898
2899 First loop over alternatives. */
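/* Illustrative example (a hypothetical two-operand insn): with constraint
   strings "=r,m" for operand 0 and "r,r" for operand 1, alternative 0
   pairs "=r" with "r" and alternative 1 pairs "m" with "r"; each pass of
   this loop judges one such column of constraints.  */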
2900
2901 for (this_alternative_number = 0;
2902 this_alternative_number < n_alternatives;
2903 this_alternative_number++)
2904 {
2905 /* Loop over operands for one constraint alternative. */
2906 /* LOSERS counts those that don't fit this alternative
2907 and would require loading. */
2908 int losers = 0;
2909 /* BAD is set to 1 if some operand can't fit this alternative
2910 even after reloading. */
2911 int bad = 0;
2912 /* REJECT is a count of how undesirable this alternative says it is
2913 if any reloading is required. If the alternative matches exactly
2914 then REJECT is ignored, but otherwise it gets this much
2915 counted against it in addition to the reloading needed. Each
2916 ? counts three times here since we want the disparagement caused
2917 by a bad register class to only count 1/3 as much. */
2918 int reject = 0;
2919
2920 if (!recog_data.alternative_enabled_p[this_alternative_number])
2921 {
2922 int i;
2923
2924 for (i = 0; i < recog_data.n_operands; i++)
2925 constraints[i] = skip_alternative (constraints[i]);
2926
2927 continue;
2928 }
2929
2930 this_earlyclobber = 0;
2931
2932 for (i = 0; i < noperands; i++)
2933 {
2934 const char *p = constraints[i];
2935 char *end;
2936 int len;
2937 int win = 0;
2938 int did_match = 0;
2939 /* 0 => this operand can be reloaded somehow for this alternative. */
2940 int badop = 1;
2941 /* 0 => this operand can be reloaded if the alternative allows regs. */
2942 int winreg = 0;
2943 int c;
2944 int m;
2945 rtx operand = recog_data.operand[i];
2946 int offset = 0;
2947 /* Nonzero means this is a MEM that must be reloaded into a reg
2948 regardless of what the constraint says. */
2949 int force_reload = 0;
2950 int offmemok = 0;
2951 /* Nonzero if a constant forced into memory would be OK for this
2952 operand. */
2953 int constmemok = 0;
2954 int earlyclobber = 0;
2955
2956 /* If the predicate accepts a unary operator, it means that
2957 we need to reload the operand, but do not do this for
2958 match_operator and friends. */
2959 if (UNARY_P (operand) && *p != 0)
2960 operand = XEXP (operand, 0);
2961
2962 /* If the operand is a SUBREG, extract
2963 the REG or MEM (or maybe even a constant) within.
2964 (Constants can occur as a result of reg_equiv_constant.) */
2965
2966 while (GET_CODE (operand) == SUBREG)
2967 {
2968 /* Offset only matters when the operand is a REG and
2969 it is a hard reg. This is because the offset is passed
2970 to reg_fits_class_p only if the operand is a REG, and all
2971 pseudos return 0 from that function. */
2972 if (REG_P (SUBREG_REG (operand))
2973 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
2974 {
2975 if (!subreg_offset_representable_p
2976 (REGNO (SUBREG_REG (operand)),
2977 GET_MODE (SUBREG_REG (operand)),
2978 SUBREG_BYTE (operand),
2979 GET_MODE (operand)))
2980 force_reload = 1;
2981 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
2982 GET_MODE (SUBREG_REG (operand)),
2983 SUBREG_BYTE (operand),
2984 GET_MODE (operand));
2985 }
2986 operand = SUBREG_REG (operand);
2987 /* Force reload if this is a constant or PLUS or if there may
2988 be a problem accessing OPERAND in the outer mode. */
2989 if (CONSTANT_P (operand)
2990 || GET_CODE (operand) == PLUS
2991 /* We must force a reload of paradoxical SUBREGs
2992 of a MEM because the alignment of the inner value
2993 may not be enough to do the outer reference. On
2994 big-endian machines, it may also reference outside
2995 the object.
2996
2997 On machines that extend byte operations, if we have a
2998 SUBREG where both the inner and outer modes are no wider
2999 than a word and the inner mode is narrower, integral,
3000 and gets extended when loaded from memory, combine.c has
3001 made assumptions about the behavior of the machine for
3002 such register accesses. If the data is, in fact, in memory we
3003 must always load using the size assumed to be in the
3004 register and let the insn do the different-sized
3005 accesses.
3006
3007 This is doubly true if WORD_REGISTER_OPERATIONS. In
3008 this case eliminate_regs has left non-paradoxical
3009 subregs for push_reload to see. Make sure it does
3010 by forcing the reload.
3011
3012 ??? When is it right at this stage to have a subreg
3013 of a mem that is _not_ to be handled specially? IMO
3014 those should have been reduced to just a mem. */
3015 || ((MEM_P (operand)
3016 || (REG_P (operand)
3017 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3018 #ifndef WORD_REGISTER_OPERATIONS
3019 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3020 < BIGGEST_ALIGNMENT)
3021 && (GET_MODE_SIZE (operand_mode[i])
3022 > GET_MODE_SIZE (GET_MODE (operand))))
3023 || BYTES_BIG_ENDIAN
3024 #ifdef LOAD_EXTEND_OP
3025 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3026 && (GET_MODE_SIZE (GET_MODE (operand))
3027 <= UNITS_PER_WORD)
3028 && (GET_MODE_SIZE (operand_mode[i])
3029 > GET_MODE_SIZE (GET_MODE (operand)))
3030 && INTEGRAL_MODE_P (GET_MODE (operand))
3031 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3032 #endif
3033 )
3034 #endif
3035 )
3036 )
3037 force_reload = 1;
3038 }
3039
3040 this_alternative[i] = (int) NO_REGS;
3041 this_alternative_win[i] = 0;
3042 this_alternative_match_win[i] = 0;
3043 this_alternative_offmemok[i] = 0;
3044 this_alternative_earlyclobber[i] = 0;
3045 this_alternative_matches[i] = -1;
3046
3047 /* An empty constraint or empty alternative
3048 allows anything which matched the pattern. */
3049 if (*p == 0 || *p == ',')
3050 win = 1, badop = 0;
3051
3052 /* Scan this alternative's specs for this operand;
3053 set WIN if the operand fits any letter in this alternative.
3054 Otherwise, clear BADOP if this operand could
3055 fit some letter after reloads,
3056 or set WINREG if this operand could fit after reloads
3057 provided the constraint allows some registers. */
3058
3059 do
3060 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3061 {
3062 case '\0':
3063 len = 0;
3064 break;
3065 case ',':
3066 c = '\0';
3067 break;
3068
3069 case '=': case '+': case '*':
3070 break;
3071
3072 case '%':
3073 /* We only support one commutative marker, the first
3074 one. We already set commutative above. */
3075 break;
3076
3077 case '?':
3078 reject += 6;
3079 break;
3080
3081 case '!':
3082 reject = 600;
3083 break;
3084
3085 case '#':
3086 /* Ignore rest of this alternative as far as
3087 reloading is concerned. */
3088 do
3089 p++;
3090 while (*p && *p != ',');
3091 len = 0;
3092 break;
3093
3094 case '0': case '1': case '2': case '3': case '4':
3095 case '5': case '6': case '7': case '8': case '9':
3096 m = strtoul (p, &end, 10);
3097 p = end;
3098 len = 0;
3099
3100 this_alternative_matches[i] = m;
3101 /* We are supposed to match a previous operand.
3102 If we do, we win if that one did.
3103 If we do not, count both of the operands as losers.
3104 (This is too conservative, since most of the time
3105 only a single reload insn will be needed to make
3106 the two operands win. As a result, this alternative
3107 may be rejected when it is actually desirable.) */
3108 if ((swapped && (m != commutative || i != commutative + 1))
3109 /* If we are matching as if two operands were swapped,
3110 also pretend that operands_match had been computed
3111 with swapped.
3112 But if I is the second of those and C is the first,
3113 don't exchange them, because operands_match is valid
3114 only on one side of its diagonal. */
3115 ? (operands_match
3116 [(m == commutative || m == commutative + 1)
3117 ? 2 * commutative + 1 - m : m]
3118 [(i == commutative || i == commutative + 1)
3119 ? 2 * commutative + 1 - i : i])
3120 : operands_match[m][i])
3121 {
3122 /* If we are matching a non-offsettable address where an
3123 offsettable address was expected, then we must reject
3124 this combination, because we can't reload it. */
3125 if (this_alternative_offmemok[m]
3126 && MEM_P (recog_data.operand[m])
3127 && this_alternative[m] == (int) NO_REGS
3128 && ! this_alternative_win[m])
3129 bad = 1;
3130
3131 did_match = this_alternative_win[m];
3132 }
3133 else
3134 {
3135 /* Operands don't match. */
3136 rtx value;
3137 int loc1, loc2;
3138 /* Retroactively mark the operand we had to match
3139 as a loser, if it wasn't already. */
3140 if (this_alternative_win[m])
3141 losers++;
3142 this_alternative_win[m] = 0;
3143 if (this_alternative[m] == (int) NO_REGS)
3144 bad = 1;
3145 /* But count the pair only once in the total badness of
3146 this alternative, if the pair can be a dummy reload.
3147 The pointers in operand_loc are not swapped; swap
3148 them by hand if necessary. */
3149 if (swapped && i == commutative)
3150 loc1 = commutative + 1;
3151 else if (swapped && i == commutative + 1)
3152 loc1 = commutative;
3153 else
3154 loc1 = i;
3155 if (swapped && m == commutative)
3156 loc2 = commutative + 1;
3157 else if (swapped && m == commutative + 1)
3158 loc2 = commutative;
3159 else
3160 loc2 = m;
3161 value
3162 = find_dummy_reload (recog_data.operand[i],
3163 recog_data.operand[m],
3164 recog_data.operand_loc[loc1],
3165 recog_data.operand_loc[loc2],
3166 operand_mode[i], operand_mode[m],
3167 this_alternative[m], -1,
3168 this_alternative_earlyclobber[m]);
3169
3170 if (value != 0)
3171 losers--;
3172 }
3173 /* This can be fixed with reloads if the operand
3174 we are supposed to match can be fixed with reloads. */
3175 badop = 0;
3176 this_alternative[i] = this_alternative[m];
3177
3178 /* If we have to reload this operand and some previous
3179 operand also had to match the same thing as this
3180 operand, we don't know how to do that. So reject this
3181 alternative. */
3182 if (! did_match || force_reload)
3183 for (j = 0; j < i; j++)
3184 if (this_alternative_matches[j]
3185 == this_alternative_matches[i])
3186 badop = 1;
3187 break;
3188
3189 case 'p':
3190 /* All necessary reloads for an address_operand
3191 were handled in find_reloads_address. */
3192 this_alternative[i]
3193 = (int) base_reg_class (VOIDmode, ADDRESS, SCRATCH);
3194 win = 1;
3195 badop = 0;
3196 break;
3197
3198 case TARGET_MEM_CONSTRAINT:
3199 if (force_reload)
3200 break;
3201 if (MEM_P (operand)
3202 || (REG_P (operand)
3203 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3204 && reg_renumber[REGNO (operand)] < 0))
3205 win = 1;
3206 if (CONST_POOL_OK_P (operand))
3207 badop = 0;
3208 constmemok = 1;
3209 break;
3210
3211 case '<':
3212 if (MEM_P (operand)
3213 && ! address_reloaded[i]
3214 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3215 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3216 win = 1;
3217 break;
3218
3219 case '>':
3220 if (MEM_P (operand)
3221 && ! address_reloaded[i]
3222 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3223 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3224 win = 1;
3225 break;
3226
3227 /* Memory operand whose address is not offsettable. */
3228 case 'V':
3229 if (force_reload)
3230 break;
3231 if (MEM_P (operand)
3232 && ! (ind_levels ? offsettable_memref_p (operand)
3233 : offsettable_nonstrict_memref_p (operand))
3234 /* Certain mem addresses will become offsettable
3235 after they themselves are reloaded. This is important;
3236 we don't want our own handling of unoffsettables
3237 to override the handling of reg_equiv_address. */
3238 && !(REG_P (XEXP (operand, 0))
3239 && (ind_levels == 0
3240 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3241 win = 1;
3242 break;
3243
3244 /* Memory operand whose address is offsettable. */
3245 case 'o':
3246 if (force_reload)
3247 break;
3248 if ((MEM_P (operand)
3249 /* If IND_LEVELS, find_reloads_address won't reload a
3250 pseudo that didn't get a hard reg, so we have to
3251 reject that case. */
3252 && ((ind_levels ? offsettable_memref_p (operand)
3253 : offsettable_nonstrict_memref_p (operand))
3254 /* A reloaded address is offsettable because it is now
3255 just a simple register indirect. */
3256 || address_reloaded[i] == 1))
3257 || (REG_P (operand)
3258 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3259 && reg_renumber[REGNO (operand)] < 0
3260 /* If reg_equiv_address is nonzero, we will be
3261 loading it into a register; hence it will be
3262 offsettable, but we cannot say that reg_equiv_mem
3263 is offsettable without checking. */
3264 && ((reg_equiv_mem[REGNO (operand)] != 0
3265 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3266 || (reg_equiv_address[REGNO (operand)] != 0))))
3267 win = 1;
3268 if (CONST_POOL_OK_P (operand)
3269 || MEM_P (operand))
3270 badop = 0;
3271 constmemok = 1;
3272 offmemok = 1;
3273 break;
3274
3275 case '&':
3276 /* Output operand that is stored before the need for the
3277 input operands (and their index registers) is over. */
3278 earlyclobber = 1, this_earlyclobber = 1;
3279 break;
3280
3281 case 'E':
3282 case 'F':
3283 if (GET_CODE (operand) == CONST_DOUBLE
3284 || (GET_CODE (operand) == CONST_VECTOR
3285 && (GET_MODE_CLASS (GET_MODE (operand))
3286 == MODE_VECTOR_FLOAT)))
3287 win = 1;
3288 break;
3289
3290 case 'G':
3291 case 'H':
3292 if (GET_CODE (operand) == CONST_DOUBLE
3293 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3294 win = 1;
3295 break;
3296
3297 case 's':
3298 if (GET_CODE (operand) == CONST_INT
3299 || (GET_CODE (operand) == CONST_DOUBLE
3300 && GET_MODE (operand) == VOIDmode))
3301 break;
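/* Otherwise fall through: for 's', any remaining constant is checked
   by the 'i' code below.  */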
3302 case 'i':
3303 if (CONSTANT_P (operand)
3304 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3305 win = 1;
3306 break;
3307
3308 case 'n':
3309 if (GET_CODE (operand) == CONST_INT
3310 || (GET_CODE (operand) == CONST_DOUBLE
3311 && GET_MODE (operand) == VOIDmode))
3312 win = 1;
3313 break;
3314
3315 case 'I':
3316 case 'J':
3317 case 'K':
3318 case 'L':
3319 case 'M':
3320 case 'N':
3321 case 'O':
3322 case 'P':
3323 if (GET_CODE (operand) == CONST_INT
3324 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3325 win = 1;
3326 break;
3327
3328 case 'X':
3329 force_reload = 0;
3330 win = 1;
3331 break;
3332
3333 case 'g':
3334 if (! force_reload
3335 /* A PLUS is never a valid operand, but reload can make
3336 it from a register when eliminating registers. */
3337 && GET_CODE (operand) != PLUS
3338 /* A SCRATCH is not a valid operand. */
3339 && GET_CODE (operand) != SCRATCH
3340 && (! CONSTANT_P (operand)
3341 || ! flag_pic
3342 || LEGITIMATE_PIC_OPERAND_P (operand))
3343 && (GENERAL_REGS == ALL_REGS
3344 || !REG_P (operand)
3345 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3346 && reg_renumber[REGNO (operand)] < 0)))
3347 win = 1;
3348 /* Drop through into 'r' case. */
3349
3350 case 'r':
3351 this_alternative[i]
3352 = (int) reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3353 goto reg;
3354
3355 default:
3356 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3357 {
3358 #ifdef EXTRA_CONSTRAINT_STR
3359 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3360 {
3361 if (force_reload)
3362 break;
3363 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3364 win = 1;
3365 /* If the address was already reloaded,
3366 we win as well. */
3367 else if (MEM_P (operand)
3368 && address_reloaded[i] == 1)
3369 win = 1;
3370 /* Likewise if the address will be reloaded because
3371 reg_equiv_address is nonzero. For reg_equiv_mem
3372 we have to check. */
3373 else if (REG_P (operand)
3374 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3375 && reg_renumber[REGNO (operand)] < 0
3376 && ((reg_equiv_mem[REGNO (operand)] != 0
3377 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3378 || (reg_equiv_address[REGNO (operand)] != 0)))
3379 win = 1;
3380
3381 /* If we didn't already win, we can reload
3382 constants via force_const_mem, and other
3383 MEMs by reloading the address like for 'o'. */
3384 if (CONST_POOL_OK_P (operand)
3385 || MEM_P (operand))
3386 badop = 0;
3387 constmemok = 1;
3388 offmemok = 1;
3389 break;
3390 }
3391 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3392 {
3393 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3394 win = 1;
3395
3396 /* If we didn't already win, we can reload
3397 the address into a base register. */
3398 this_alternative[i]
3399 = (int) base_reg_class (VOIDmode, ADDRESS, SCRATCH);
3400 badop = 0;
3401 break;
3402 }
3403
3404 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3405 win = 1;
3406 #endif
3407 break;
3408 }
3409
3410 this_alternative[i]
3411 = (int) (reg_class_subunion
3412 [this_alternative[i]]
3413 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3414 reg:
3415 if (GET_MODE (operand) == BLKmode)
3416 break;
3417 winreg = 1;
3418 if (REG_P (operand)
3419 && reg_fits_class_p (operand, this_alternative[i],
3420 offset, GET_MODE (recog_data.operand[i])))
3421 win = 1;
3422 break;
3423 }
3424 while ((p += len), c);
3425
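/* P now points just past the ',' that ended this alternative's
   constraint for this operand (or at the terminating '\0'); remember it
   so that the scan of the next alternative can resume from here.  */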
3426 constraints[i] = p;
3427
3428 /* If this operand could be handled with a reg,
3429 and some reg is allowed, then this operand can be handled. */
3430 if (winreg && this_alternative[i] != (int) NO_REGS)
3431 badop = 0;
3432
3433 /* Record which operands fit this alternative. */
3434 this_alternative_earlyclobber[i] = earlyclobber;
3435 if (win && ! force_reload)
3436 this_alternative_win[i] = 1;
3437 else if (did_match && ! force_reload)
3438 this_alternative_match_win[i] = 1;
3439 else
3440 {
3441 int const_to_mem = 0;
3442
3443 this_alternative_offmemok[i] = offmemok;
3444 losers++;
3445 if (badop)
3446 bad = 1;
3447 /* Alternative loses if it has no regs for a reg operand. */
3448 if (REG_P (operand)
3449 && this_alternative[i] == (int) NO_REGS
3450 && this_alternative_matches[i] < 0)
3451 bad = 1;
3452
3453 /* If this is a constant that is reloaded into the desired
3454 class by copying it to memory first, count that as another
3455 reload. This is consistent with other code and is
3456 required to avoid choosing another alternative when
3457 the constant is moved into memory by this function on
3458 an early reload pass. Note that the test here is
3459 precisely the same as in the code below that calls
3460 force_const_mem. */
3461 if (CONST_POOL_OK_P (operand)
3462 && ((PREFERRED_RELOAD_CLASS (operand,
3463 (enum reg_class) this_alternative[i])
3464 == NO_REGS)
3465 || no_input_reloads)
3466 && operand_mode[i] != VOIDmode)
3467 {
3468 const_to_mem = 1;
3469 if (this_alternative[i] != (int) NO_REGS)
3470 losers++;
3471 }
3472
3473 /* Alternative loses if it requires a type of reload not
3474 permitted for this insn. We can always reload SCRATCH
3475 and objects with a REG_UNUSED note. */
3476 if (GET_CODE (operand) != SCRATCH
3477 && modified[i] != RELOAD_READ && no_output_reloads
3478 && ! find_reg_note (insn, REG_UNUSED, operand))
3479 bad = 1;
3480 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3481 && ! const_to_mem)
3482 bad = 1;
3483
3484 /* If we can't reload this value at all, reject this
3485 alternative. Note that we could also lose due to
3486 LIMIT_RELOAD_CLASS, but we don't check that
3487 here. */
3488
3489 if (! CONSTANT_P (operand)
3490 && (enum reg_class) this_alternative[i] != NO_REGS)
3491 {
3492 if (PREFERRED_RELOAD_CLASS
3493 (operand, (enum reg_class) this_alternative[i])
3494 == NO_REGS)
3495 reject = 600;
3496
3497 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3498 if (operand_type[i] == RELOAD_FOR_OUTPUT
3499 && PREFERRED_OUTPUT_RELOAD_CLASS
3500 (operand, (enum reg_class) this_alternative[i])
3501 == NO_REGS)
3502 reject = 600;
3503 #endif
3504 }
3505
3506 /* We prefer to reload pseudos over reloading other things,
3507 since such reloads may be able to be eliminated later.
3508 If we are reloading a SCRATCH, we won't be generating any
3509 insns, just using a register, so it is also preferred.
3510 So bump REJECT in other cases. Don't do this in the
3511 case where we are forcing a constant into memory and
3512 it will then win, since we don't want a different
3513 alternative to match in that case. */
3514 if (! (REG_P (operand)
3515 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3516 && GET_CODE (operand) != SCRATCH
3517 && ! (const_to_mem && constmemok))
3518 reject += 2;
3519
3520 /* Input reloads can be inherited more often than output
3521 reloads can be removed, so penalize output reloads. */
3522 if (operand_type[i] != RELOAD_FOR_INPUT
3523 && GET_CODE (operand) != SCRATCH)
3524 reject++;
3525 }
3526
3527 /* If this operand is a pseudo register that didn't get a hard
3528 reg and this alternative accepts some register, see if the
3529 class that we want is a subset of the preferred class for this
3530 register. If not, but it intersects that class, use the
3531 preferred class instead. If it does not intersect the preferred
3532 class, show that usage of this alternative should be discouraged;
3533 it will be discouraged more still if the register is `preferred
3534 or nothing'. We do this because it increases the chance of
3535 reusing our spill register in a later insn and avoiding a pair
3536 of memory stores and loads.
3537
3538 Don't bother with this if this alternative will accept this
3539 operand.
3540
3541 Don't do this for a multiword operand, since it is only a
3542 small win and has the risk of requiring more spill registers,
3543 which could cause a large loss.
3544
3545 Don't do this if the preferred class has only one register
3546 because we might otherwise exhaust the class. */
3547
3548 if (! win && ! did_match
3549 && this_alternative[i] != (int) NO_REGS
3550 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3551 && reg_class_size[(int) preferred_class[i]] > 0
3552 && ! SMALL_REGISTER_CLASS_P (preferred_class[i]))
3553 {
3554 if (! reg_class_subset_p (this_alternative[i],
3555 preferred_class[i]))
3556 {
3557 /* Since we don't have a way of forming the intersection,
3558 we just do something special if the preferred class
3559 is a subset of the class we have; that's the most
3560 common case anyway. */
3561 if (reg_class_subset_p (preferred_class[i],
3562 this_alternative[i]))
3563 this_alternative[i] = (int) preferred_class[i];
3564 else
3565 reject += (2 + 2 * pref_or_nothing[i]);
3566 }
3567 }
3568 }
3569
3570 /* Now see if any output operands that are marked "earlyclobber"
3571 in this alternative conflict with any input operands
3572 or any memory addresses. */
3573
3574 for (i = 0; i < noperands; i++)
3575 if (this_alternative_earlyclobber[i]
3576 && (this_alternative_win[i] || this_alternative_match_win[i]))
3577 {
3578 struct decomposition early_data;
3579
3580 early_data = decompose (recog_data.operand[i]);
3581
3582 gcc_assert (modified[i] != RELOAD_READ);
3583
3584 if (this_alternative[i] == NO_REGS)
3585 {
3586 this_alternative_earlyclobber[i] = 0;
3587 gcc_assert (this_insn_is_asm);
3588 error_for_asm (this_insn,
3589 "%<&%> constraint used with no register class");
3590 }
3591
3592 for (j = 0; j < noperands; j++)
3593 /* Is this an input operand or a memory ref? */
3594 if ((MEM_P (recog_data.operand[j])
3595 || modified[j] != RELOAD_WRITE)
3596 && j != i
3597 /* Ignore things like match_operator operands. */
3598 && *recog_data.constraints[j] != 0
3599 /* Don't count an input operand that is constrained to match
3600 the early clobber operand. */
3601 && ! (this_alternative_matches[j] == i
3602 && rtx_equal_p (recog_data.operand[i],
3603 recog_data.operand[j]))
3604 /* Is it altered by storing the earlyclobber operand? */
3605 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3606 early_data))
3607 {
3608 /* If the output is in a non-empty few-regs class,
3609 it's costly to reload it, so reload the input instead. */
3610 if (SMALL_REGISTER_CLASS_P (this_alternative[i])
3611 && (REG_P (recog_data.operand[j])
3612 || GET_CODE (recog_data.operand[j]) == SUBREG))
3613 {
3614 losers++;
3615 this_alternative_win[j] = 0;
3616 this_alternative_match_win[j] = 0;
3617 }
3618 else
3619 break;
3620 }
3621 /* If an earlyclobber operand conflicts with something,
3622 it must be reloaded, so request this and count the cost. */
3623 if (j != noperands)
3624 {
3625 losers++;
3626 this_alternative_win[i] = 0;
3627 this_alternative_match_win[j] = 0;
3628 for (j = 0; j < noperands; j++)
3629 if (this_alternative_matches[j] == i
3630 && this_alternative_match_win[j])
3631 {
3632 this_alternative_win[j] = 0;
3633 this_alternative_match_win[j] = 0;
3634 losers++;
3635 }
3636 }
3637 }
3638
3639 /* If one alternative accepts all the operands, no reload required,
3640 choose that alternative; don't consider the remaining ones. */
3641 if (losers == 0)
3642 {
3643 /* Unswap these so that they are never swapped at `finish'. */
3644 if (commutative >= 0)
3645 {
3646 recog_data.operand[commutative] = substed_operand[commutative];
3647 recog_data.operand[commutative + 1]
3648 = substed_operand[commutative + 1];
3649 }
3650 for (i = 0; i < noperands; i++)
3651 {
3652 goal_alternative_win[i] = this_alternative_win[i];
3653 goal_alternative_match_win[i] = this_alternative_match_win[i];
3654 goal_alternative[i] = this_alternative[i];
3655 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3656 goal_alternative_matches[i] = this_alternative_matches[i];
3657 goal_alternative_earlyclobber[i]
3658 = this_alternative_earlyclobber[i];
3659 }
3660 goal_alternative_number = this_alternative_number;
3661 goal_alternative_swapped = swapped;
3662 goal_earlyclobber = this_earlyclobber;
3663 goto finish;
3664 }
3665
3666 /* REJECT, set by the ! and ? constraint characters and when a register
3667 would be reloaded into a non-preferred class, discourages the use of
3668 this alternative for a reload goal. REJECT is incremented by six
3669 for each ? and two for each non-preferred class. */
3670 losers = losers * 6 + reject;
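/* With this weighting each reload needed (each "loser") costs the same
   as one '?', while the value 600 used for '!' and for operands whose
   preferred reload class is NO_REGS dwarfs the small per-operand costs
   added above.  */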
3671
3672 /* If this alternative can be made to work by reloading,
3673 and it needs less reloading than the others checked so far,
3674 record it as the chosen goal for reloading. */
3675 if (! bad && best > losers)
3676 {
3677 for (i = 0; i < noperands; i++)
3678 {
3679 goal_alternative[i] = this_alternative[i];
3680 goal_alternative_win[i] = this_alternative_win[i];
3681 goal_alternative_match_win[i] = this_alternative_match_win[i];
3682 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3683 goal_alternative_matches[i] = this_alternative_matches[i];
3684 goal_alternative_earlyclobber[i]
3685 = this_alternative_earlyclobber[i];
3686 }
3687 goal_alternative_swapped = swapped;
3688 best = losers;
3689 goal_alternative_number = this_alternative_number;
3690 goal_earlyclobber = this_earlyclobber;
3691 }
3692 }
3693
3694 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3695 then we need to try each alternative twice,
3696 the second time matching those two operands
3697 as if we had exchanged them.
3698 To do this, really exchange them in operands.
3699
3700 If we have just tried the alternatives the second time,
3701 return operands to normal and drop through. */
3702
3703 if (commutative >= 0)
3704 {
3705 swapped = !swapped;
3706 if (swapped)
3707 {
3708 enum reg_class tclass;
3709 int t;
3710
3711 recog_data.operand[commutative] = substed_operand[commutative + 1];
3712 recog_data.operand[commutative + 1] = substed_operand[commutative];
3713 /* Swap the duplicates too. */
3714 for (i = 0; i < recog_data.n_dups; i++)
3715 if (recog_data.dup_num[i] == commutative
3716 || recog_data.dup_num[i] == commutative + 1)
3717 *recog_data.dup_loc[i]
3718 = recog_data.operand[(int) recog_data.dup_num[i]];
3719
3720 tclass = preferred_class[commutative];
3721 preferred_class[commutative] = preferred_class[commutative + 1];
3722 preferred_class[commutative + 1] = tclass;
3723
3724 t = pref_or_nothing[commutative];
3725 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3726 pref_or_nothing[commutative + 1] = t;
3727
3728 t = address_reloaded[commutative];
3729 address_reloaded[commutative] = address_reloaded[commutative + 1];
3730 address_reloaded[commutative + 1] = t;
3731
3732 memcpy (constraints, recog_data.constraints,
3733 noperands * sizeof (const char *));
3734 goto try_swapped;
3735 }
3736 else
3737 {
3738 recog_data.operand[commutative] = substed_operand[commutative];
3739 recog_data.operand[commutative + 1]
3740 = substed_operand[commutative + 1];
3741 /* Unswap the duplicates too. */
3742 for (i = 0; i < recog_data.n_dups; i++)
3743 if (recog_data.dup_num[i] == commutative
3744 || recog_data.dup_num[i] == commutative + 1)
3745 *recog_data.dup_loc[i]
3746 = recog_data.operand[(int) recog_data.dup_num[i]];
3747 }
3748 }
3749
3750 /* The operands don't meet the constraints.
3751 goal_alternative describes the alternative
3752 that we could reach by reloading the fewest operands.
3753 Reload so as to fit it. */
3754
3755 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3756 {
3757 /* No alternative works with reloads?? */
3758 if (insn_code_number >= 0)
3759 fatal_insn ("unable to generate reloads for:", insn);
3760 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3761 /* Avoid further trouble with this insn. */
3762 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3763 n_reloads = 0;
3764 return 0;
3765 }
3766
3767 /* Jump to `finish' from above if all operands are valid already.
3768 In that case, goal_alternative_win is all 1. */
3769 finish:
3770
3771 /* Right now, for any pair of operands I and J that are required to match,
3772 with I < J,
3773 goal_alternative_matches[J] is I.
3774 Set up goal_alternative_matched as the inverse function:
3775 goal_alternative_matched[I] = J. */
3776
3777 for (i = 0; i < noperands; i++)
3778 goal_alternative_matched[i] = -1;
3779
3780 for (i = 0; i < noperands; i++)
3781 if (! goal_alternative_win[i]
3782 && goal_alternative_matches[i] >= 0)
3783 goal_alternative_matched[goal_alternative_matches[i]] = i;
3784
3785 for (i = 0; i < noperands; i++)
3786 goal_alternative_win[i] |= goal_alternative_match_win[i];
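/* From here on, a nonzero goal_alternative_win[I] means operand I needs
   no reload of its own: it either satisfied its constraint directly or
   matched another operand that did.  */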
3787
3788 /* If the best alternative is with operands 1 and 2 swapped,
3789 consider them swapped before reporting the reloads. Update the
3790 operand numbers of any reloads already pushed. */
3791
3792 if (goal_alternative_swapped)
3793 {
3794 rtx tem;
3795
3796 tem = substed_operand[commutative];
3797 substed_operand[commutative] = substed_operand[commutative + 1];
3798 substed_operand[commutative + 1] = tem;
3799 tem = recog_data.operand[commutative];
3800 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3801 recog_data.operand[commutative + 1] = tem;
3802 tem = *recog_data.operand_loc[commutative];
3803 *recog_data.operand_loc[commutative]
3804 = *recog_data.operand_loc[commutative + 1];
3805 *recog_data.operand_loc[commutative + 1] = tem;
3806
3807 for (i = 0; i < n_reloads; i++)
3808 {
3809 if (rld[i].opnum == commutative)
3810 rld[i].opnum = commutative + 1;
3811 else if (rld[i].opnum == commutative + 1)
3812 rld[i].opnum = commutative;
3813 }
3814 }
3815
3816 for (i = 0; i < noperands; i++)
3817 {
3818 operand_reloadnum[i] = -1;
3819
3820 /* If this is an earlyclobber operand, we need to widen the scope.
3821 The reload must remain valid from the start of the insn being
3822 reloaded until after the operand is stored into its destination.
3823 We approximate this with RELOAD_OTHER even though we know that we
3824 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3825
3826 One special case that is worth checking is when we have an
3827 output that is earlyclobber but isn't used past the insn (typically
3828 a SCRATCH). In this case, we only need have the reload live
3829 through the insn itself, but not for any of our input or output
3830 reloads.
3831 But we must not accidentally narrow the scope of an existing
3832 RELOAD_OTHER reload - leave these alone.
3833
3834 In any case, anything needed to address this operand can remain
3835 however it was previously categorized. */
3836
3837 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3838 operand_type[i]
3839 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3840 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3841 }
3842
3843 /* Any constants that aren't allowed and can't be reloaded
3844 into registers are here changed into memory references. */
3845 for (i = 0; i < noperands; i++)
3846 if (! goal_alternative_win[i]
3847 && CONST_POOL_OK_P (recog_data.operand[i])
3848 && ((PREFERRED_RELOAD_CLASS (recog_data.operand[i],
3849 (enum reg_class) goal_alternative[i])
3850 == NO_REGS)
3851 || no_input_reloads)
3852 && operand_mode[i] != VOIDmode)
3853 {
3854 int this_address_reloaded;
3855
3856 this_address_reloaded = 0;
3857 substed_operand[i] = recog_data.operand[i]
3858 = find_reloads_toplev (force_const_mem (operand_mode[i],
3859 recog_data.operand[i]),
3860 i, address_type[i], ind_levels, 0, insn,
3861 &this_address_reloaded);
3862 if (alternative_allows_const_pool_ref (this_address_reloaded == 0
3863 ? substed_operand[i]
3864 : NULL,
3865 recog_data.constraints[i],
3866 goal_alternative_number))
3867 goal_alternative_win[i] = 1;
3868 }
3869
3870 /* Likewise any invalid constants appearing as operand of a PLUS
3871 that is to be reloaded. */
3872 for (i = 0; i < noperands; i++)
3873 if (! goal_alternative_win[i]
3874 && GET_CODE (recog_data.operand[i]) == PLUS
3875 && CONST_POOL_OK_P (XEXP (recog_data.operand[i], 1))
3876 && (PREFERRED_RELOAD_CLASS (XEXP (recog_data.operand[i], 1),
3877 (enum reg_class) goal_alternative[i])
3878 == NO_REGS)
3879 && operand_mode[i] != VOIDmode)
3880 {
3881 rtx tem = force_const_mem (operand_mode[i],
3882 XEXP (recog_data.operand[i], 1));
3883 tem = gen_rtx_PLUS (operand_mode[i],
3884 XEXP (recog_data.operand[i], 0), tem);
3885
3886 substed_operand[i] = recog_data.operand[i]
3887 = find_reloads_toplev (tem, i, address_type[i],
3888 ind_levels, 0, insn, NULL);
3889 }
3890
3891 /* Record the values of the earlyclobber operands for the caller. */
3892 if (goal_earlyclobber)
3893 for (i = 0; i < noperands; i++)
3894 if (goal_alternative_earlyclobber[i])
3895 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3896
3897 /* Now record reloads for all the operands that need them. */
3898 for (i = 0; i < noperands; i++)
3899 if (! goal_alternative_win[i])
3900 {
3901 /* Operands that match previous ones have already been handled. */
3902 if (goal_alternative_matches[i] >= 0)
3903 ;
3904 /* Handle an operand with a nonoffsettable address
3905 appearing where an offsettable address will do
3906 by reloading the address into a base register.
3907
3908 ??? We can also do this when the operand is a register and
3909 reg_equiv_mem is not offsettable, but this is a bit tricky,
3910 so we don't bother with it. It may not be worth doing. */
3911 else if (goal_alternative_matched[i] == -1
3912 && goal_alternative_offmemok[i]
3913 && MEM_P (recog_data.operand[i]))
3914 {
3915 /* If the address to be reloaded is a VOIDmode constant,
3916 use Pmode as mode of the reload register, as would have
3917 been done by find_reloads_address. */
3918 enum machine_mode address_mode;
3919 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3920 if (address_mode == VOIDmode)
3921 address_mode = Pmode;
3922
3923 operand_reloadnum[i]
3924 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3925 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3926 base_reg_class (VOIDmode, MEM, SCRATCH),
3927 address_mode,
3928 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3929 rld[operand_reloadnum[i]].inc
3930 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3931
3932 /* If this operand is an output, we will have made any
3933 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3934 now we are treating part of the operand as an input, so
3935 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3936
3937 if (modified[i] == RELOAD_WRITE)
3938 {
3939 for (j = 0; j < n_reloads; j++)
3940 {
3941 if (rld[j].opnum == i)
3942 {
3943 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3944 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
3945 else if (rld[j].when_needed
3946 == RELOAD_FOR_OUTADDR_ADDRESS)
3947 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
3948 }
3949 }
3950 }
3951 }
3952 else if (goal_alternative_matched[i] == -1)
3953 {
3954 operand_reloadnum[i]
3955 = push_reload ((modified[i] != RELOAD_WRITE
3956 ? recog_data.operand[i] : 0),
3957 (modified[i] != RELOAD_READ
3958 ? recog_data.operand[i] : 0),
3959 (modified[i] != RELOAD_WRITE
3960 ? recog_data.operand_loc[i] : 0),
3961 (modified[i] != RELOAD_READ
3962 ? recog_data.operand_loc[i] : 0),
3963 (enum reg_class) goal_alternative[i],
3964 (modified[i] == RELOAD_WRITE
3965 ? VOIDmode : operand_mode[i]),
3966 (modified[i] == RELOAD_READ
3967 ? VOIDmode : operand_mode[i]),
3968 (insn_code_number < 0 ? 0
3969 : insn_data[insn_code_number].operand[i].strict_low),
3970 0, i, operand_type[i]);
3971 }
3972 /* In a matching pair of operands, one must be input only
3973 and the other must be output only.
3974 Pass the input operand as IN and the other as OUT. */
3975 else if (modified[i] == RELOAD_READ
3976 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
3977 {
3978 operand_reloadnum[i]
3979 = push_reload (recog_data.operand[i],
3980 recog_data.operand[goal_alternative_matched[i]],
3981 recog_data.operand_loc[i],
3982 recog_data.operand_loc[goal_alternative_matched[i]],
3983 (enum reg_class) goal_alternative[i],
3984 operand_mode[i],
3985 operand_mode[goal_alternative_matched[i]],
3986 0, 0, i, RELOAD_OTHER);
3987 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
3988 }
3989 else if (modified[i] == RELOAD_WRITE
3990 && modified[goal_alternative_matched[i]] == RELOAD_READ)
3991 {
3992 operand_reloadnum[goal_alternative_matched[i]]
3993 = push_reload (recog_data.operand[goal_alternative_matched[i]],
3994 recog_data.operand[i],
3995 recog_data.operand_loc[goal_alternative_matched[i]],
3996 recog_data.operand_loc[i],
3997 (enum reg_class) goal_alternative[i],
3998 operand_mode[goal_alternative_matched[i]],
3999 operand_mode[i],
4000 0, 0, i, RELOAD_OTHER);
4001 operand_reloadnum[i] = output_reloadnum;
4002 }
4003 else
4004 {
4005 gcc_assert (insn_code_number < 0);
4006 error_for_asm (insn, "inconsistent operand constraints "
4007 "in an %<asm%>");
4008 /* Avoid further trouble with this insn. */
4009 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4010 n_reloads = 0;
4011 return 0;
4012 }
4013 }
4014 else if (goal_alternative_matched[i] < 0
4015 && goal_alternative_matches[i] < 0
4016 && address_operand_reloaded[i] != 1
4017 && optimize)
4018 {
4019 /* For each non-matching operand that's a MEM or a pseudo-register
4020 that didn't get a hard register, make an optional reload.
4021 This may get done even if the insn needs no reloads otherwise. */
4022
4023 rtx operand = recog_data.operand[i];
4024
4025 while (GET_CODE (operand) == SUBREG)
4026 operand = SUBREG_REG (operand);
4027 if ((MEM_P (operand)
4028 || (REG_P (operand)
4029 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4030 /* If this is only for an output, the optional reload would not
4031 actually cause us to use a register now, just note that
4032 something is stored here. */
4033 && ((enum reg_class) goal_alternative[i] != NO_REGS
4034 || modified[i] == RELOAD_WRITE)
4035 && ! no_input_reloads
4036 /* An optional output reload might allow us to delete INSN later.
4037 We mustn't make in-out reloads on insns that are not permitted
4038 to have output reloads.
4039 If this is an asm, we can't delete it; we must not even call
4040 push_reload for an optional output reload in this case,
4041 because we can't be sure that the constraint allows a register,
4042 and push_reload verifies the constraints for asms. */
4043 && (modified[i] == RELOAD_READ
4044 || (! no_output_reloads && ! this_insn_is_asm)))
4045 operand_reloadnum[i]
4046 = push_reload ((modified[i] != RELOAD_WRITE
4047 ? recog_data.operand[i] : 0),
4048 (modified[i] != RELOAD_READ
4049 ? recog_data.operand[i] : 0),
4050 (modified[i] != RELOAD_WRITE
4051 ? recog_data.operand_loc[i] : 0),
4052 (modified[i] != RELOAD_READ
4053 ? recog_data.operand_loc[i] : 0),
4054 (enum reg_class) goal_alternative[i],
4055 (modified[i] == RELOAD_WRITE
4056 ? VOIDmode : operand_mode[i]),
4057 (modified[i] == RELOAD_READ
4058 ? VOIDmode : operand_mode[i]),
4059 (insn_code_number < 0 ? 0
4060 : insn_data[insn_code_number].operand[i].strict_low),
4061 1, i, operand_type[i]);
4062 /* If a memory reference remains (either as a MEM or a pseudo that
4063 did not get a hard register), yet we can't make an optional
4064 reload, check if this is actually a pseudo register reference;
4065 we then need to emit a USE and/or a CLOBBER so that reload
4066 inheritance will do the right thing. */
4067 else if (replace
4068 && (MEM_P (operand)
4069 || (REG_P (operand)
4070 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4071 && reg_renumber [REGNO (operand)] < 0)))
4072 {
4073 operand = *recog_data.operand_loc[i];
4074
4075 while (GET_CODE (operand) == SUBREG)
4076 operand = SUBREG_REG (operand);
4077 if (REG_P (operand))
4078 {
4079 if (modified[i] != RELOAD_WRITE)
4080 /* We mark the USE with QImode so that we recognize
4081 it as one that can be safely deleted at the end
4082 of reload. */
4083 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4084 insn), QImode);
4085 if (modified[i] != RELOAD_READ)
4086 emit_insn_after (gen_clobber (operand), insn);
4087 }
4088 }
4089 }
4090 else if (goal_alternative_matches[i] >= 0
4091 && goal_alternative_win[goal_alternative_matches[i]]
4092 && modified[i] == RELOAD_READ
4093 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4094 && ! no_input_reloads && ! no_output_reloads
4095 && optimize)
4096 {
4097 /* Similarly, make an optional reload for a pair of matching
4098 objects that are in MEM or a pseudo that didn't get a hard reg. */
4099
4100 rtx operand = recog_data.operand[i];
4101
4102 while (GET_CODE (operand) == SUBREG)
4103 operand = SUBREG_REG (operand);
4104 if ((MEM_P (operand)
4105 || (REG_P (operand)
4106 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4107 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4108 != NO_REGS))
4109 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4110 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4111 recog_data.operand[i],
4112 recog_data.operand_loc[goal_alternative_matches[i]],
4113 recog_data.operand_loc[i],
4114 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4115 operand_mode[goal_alternative_matches[i]],
4116 operand_mode[i],
4117 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4118 }
4119
4120 /* Perform whatever substitutions on the operands we are supposed
4121 to make due to commutativity or replacement of registers
4122 with equivalent constants or memory slots. */
4123
4124 for (i = 0; i < noperands; i++)
4125 {
4126 /* We only do this on the last pass through reload, because it is
4127 possible for some data (like reg_equiv_address) to be changed during
4128 later passes. Moreover, we lose the opportunity to get a useful
4129 reload_{in,out}_reg when we do these replacements. */
4130
4131 if (replace)
4132 {
4133 rtx substitution = substed_operand[i];
4134
4135 *recog_data.operand_loc[i] = substitution;
4136
4137 /* If we're replacing an operand with a LABEL_REF, we need to
4138 make sure that there's a REG_LABEL_OPERAND note attached to
4139 this instruction. */
4140 if (GET_CODE (substitution) == LABEL_REF
4141 && !find_reg_note (insn, REG_LABEL_OPERAND,
4142 XEXP (substitution, 0))
4143 /* For a JUMP_P, if it was a branch target it must have
4144 already been recorded as such. */
4145 && (!JUMP_P (insn)
4146 || !label_is_jump_target_p (XEXP (substitution, 0),
4147 insn)))
4148 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL_OPERAND,
4149 XEXP (substitution, 0),
4150 REG_NOTES (insn));
4151 }
4152 else
4153 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4154 }
4155
4156 /* If this insn pattern contains any MATCH_DUP's, make sure that
4157 they will be substituted if the operands they match are substituted.
4158 Also apply now any substitutions we have already made to the operands.
4159
4160 Don't do this if we aren't making replacements because we might be
4161 propagating things allocated by frame pointer elimination into places
4162 it doesn't expect. */
4163
4164 if (insn_code_number >= 0 && replace)
4165 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4166 {
4167 int opno = recog_data.dup_num[i];
4168 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4169 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4170 }
4171
4172 #if 0
4173 /* This loses because reloading of prior insns can invalidate the equivalence
4174 (or at least find_equiv_reg isn't smart enough to find it any more),
4175 causing this insn to need more reload regs than it needed before.
4176 It may be too late to make the reload regs available.
4177 Now this optimization is done safely in choose_reload_regs. */
4178
4179 /* For each reload of a reg into some other class of reg,
4180 search for an existing equivalent reg (same value now) in the right class.
4181 We can use it as long as we don't need to change its contents. */
4182 for (i = 0; i < n_reloads; i++)
4183 if (rld[i].reg_rtx == 0
4184 && rld[i].in != 0
4185 && REG_P (rld[i].in)
4186 && rld[i].out == 0)
4187 {
4188 rld[i].reg_rtx
4189 = find_equiv_reg (rld[i].in, insn, rld[i].class, -1,
4190 static_reload_reg_p, 0, rld[i].inmode);
4191 /* Prevent generation of insn to load the value
4192 because the one we found already has the value. */
4193 if (rld[i].reg_rtx)
4194 rld[i].in = rld[i].reg_rtx;
4195 }
4196 #endif
4197
4198 /* If we detected an error and replaced the asm instruction with a USE,
4199 forget about the reloads. */
4200 if (GET_CODE (PATTERN (insn)) == USE
4201 && GET_CODE (XEXP (PATTERN (insn), 0)) == CONST_INT)
4202 n_reloads = 0;
4203
4204 /* Perhaps an output reload can be combined with another
4205 to reduce needs by one. */
4206 if (!goal_earlyclobber)
4207 combine_reloads ();
4208
4209 /* If we have a pair of reloads for parts of an address, they are reloading
4210 the same object, the operands themselves were not reloaded, and they
4211 are for two operands that are supposed to match, merge the reloads and
4212 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4213
4214 for (i = 0; i < n_reloads; i++)
4215 {
4216 int k;
4217
4218 for (j = i + 1; j < n_reloads; j++)
4219 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4220 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4221 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4222 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4223 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4224 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4225 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4226 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4227 && rtx_equal_p (rld[i].in, rld[j].in)
4228 && (operand_reloadnum[rld[i].opnum] < 0
4229 || rld[operand_reloadnum[rld[i].opnum]].optional)
4230 && (operand_reloadnum[rld[j].opnum] < 0
4231 || rld[operand_reloadnum[rld[j].opnum]].optional)
4232 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4233 || (goal_alternative_matches[rld[j].opnum]
4234 == rld[i].opnum)))
4235 {
4236 for (k = 0; k < n_replacements; k++)
4237 if (replacements[k].what == j)
4238 replacements[k].what = i;
4239
4240 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4241 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4242 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4243 else
4244 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4245 rld[j].in = 0;
4246 }
4247 }
4248
4249 /* Scan all the reloads and update their type.
4250 If a reload is for the address of an operand and we didn't reload
4251 that operand, change the type. Similarly, change the operand number
4252 of a reload when two operands match. If a reload is optional, treat it
4253 as though the operand isn't reloaded.
4254
4255 ??? This latter case is somewhat odd because if we do the optional
4256 reload, it means the object is hanging around. Thus we need only
4257 do the address reload if the optional reload was NOT done.
4258
4259 Change secondary reloads to be the address type of their operand, not
4260 the normal type.
4261
4262 If an operand's reload is now RELOAD_OTHER, change any
4263 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4264 RELOAD_FOR_OTHER_ADDRESS. */
4265
4266 for (i = 0; i < n_reloads; i++)
4267 {
4268 if (rld[i].secondary_p
4269 && rld[i].when_needed == operand_type[rld[i].opnum])
4270 rld[i].when_needed = address_type[rld[i].opnum];
4271
4272 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4273 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4274 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4275 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4276 && (operand_reloadnum[rld[i].opnum] < 0
4277 || rld[operand_reloadnum[rld[i].opnum]].optional))
4278 {
4279 /* If we have a secondary reload to go along with this reload,
4280 change its type to RELOAD_FOR_OPADDR_ADDR. */
4281
4282 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4283 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4284 && rld[i].secondary_in_reload != -1)
4285 {
4286 int secondary_in_reload = rld[i].secondary_in_reload;
4287
4288 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4289
4290 /* If there's a tertiary reload we have to change it also. */
4291 if (secondary_in_reload > 0
4292 && rld[secondary_in_reload].secondary_in_reload != -1)
4293 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4294 = RELOAD_FOR_OPADDR_ADDR;
4295 }
4296
4297 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 && rld[i].secondary_out_reload != -1)
4300 {
4301 int secondary_out_reload = rld[i].secondary_out_reload;
4302
4303 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4304
4305 /* If there's a tertiary reload we have to change it also. */
4306 if (secondary_out_reload
4307 && rld[secondary_out_reload].secondary_out_reload != -1)
4308 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4309 = RELOAD_FOR_OPADDR_ADDR;
4310 }
4311
4312 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4315 else
4316 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4317 }
4318
4319 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4320 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4321 && operand_reloadnum[rld[i].opnum] >= 0
4322 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4323 == RELOAD_OTHER))
4324 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4325
4326 if (goal_alternative_matches[rld[i].opnum] >= 0)
4327 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4328 }
4329
4330 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4331 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4332 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4333
4334 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4335 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4336 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4337 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4338 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4339 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4340 This is complicated by the fact that a single operand can have more
4341 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4342 choose_reload_regs without affecting code quality, and cases that
4343 actually fail are extremely rare, so it turns out to be better to fix
4344 the problem here by not generating cases that choose_reload_regs will
4345 fail for. */
4346 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4347 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4348 a single operand.
4349 We can reduce the register pressure by exploiting that a
4350 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4351 does not conflict with any of them, if it is only used for the first of
4352 the RELOAD_FOR_X_ADDRESS reloads. */
4353 {
4354 int first_op_addr_num = -2;
4355 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4356 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4357 int need_change = 0;
4358 /* We use first_op_addr_num and the contents of the above arrays
4359 first as flags: -2 means no instance encountered, -1 means exactly
4360 one instance encountered.
4361 If more than one instance has been encountered, we store the reload
4362 number of the first reload of the kind in question; reload numbers
4363 are known to be non-negative. */
4364 for (i = 0; i < noperands; i++)
4365 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
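/* For example, FIRST_OP_ADDR_NUM goes from -2 to -1 when the first
   RELOAD_FOR_OPERAND_ADDRESS reload is seen; on any later one the
   pre-increment makes it non-negative, so it is overwritten with that
   reload's number and NEED_CHANGE is set.  Since the loop runs from the
   last reload down to reload 0, it ends up holding the lowest-numbered
   reload of that kind.  */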
4366 for (i = n_reloads - 1; i >= 0; i--)
4367 {
4368 switch (rld[i].when_needed)
4369 {
4370 case RELOAD_FOR_OPERAND_ADDRESS:
4371 if (++first_op_addr_num >= 0)
4372 {
4373 first_op_addr_num = i;
4374 need_change = 1;
4375 }
4376 break;
4377 case RELOAD_FOR_INPUT_ADDRESS:
4378 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4379 {
4380 first_inpaddr_num[rld[i].opnum] = i;
4381 need_change = 1;
4382 }
4383 break;
4384 case RELOAD_FOR_OUTPUT_ADDRESS:
4385 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4386 {
4387 first_outpaddr_num[rld[i].opnum] = i;
4388 need_change = 1;
4389 }
4390 break;
4391 default:
4392 break;
4393 }
4394 }
4395
4396 if (need_change)
4397 {
4398 for (i = 0; i < n_reloads; i++)
4399 {
4400 int first_num;
4401 enum reload_type type;
4402
4403 switch (rld[i].when_needed)
4404 {
4405 case RELOAD_FOR_OPADDR_ADDR:
4406 first_num = first_op_addr_num;
4407 type = RELOAD_FOR_OPERAND_ADDRESS;
4408 break;
4409 case RELOAD_FOR_INPADDR_ADDRESS:
4410 first_num = first_inpaddr_num[rld[i].opnum];
4411 type = RELOAD_FOR_INPUT_ADDRESS;
4412 break;
4413 case RELOAD_FOR_OUTADDR_ADDRESS:
4414 first_num = first_outpaddr_num[rld[i].opnum];
4415 type = RELOAD_FOR_OUTPUT_ADDRESS;
4416 break;
4417 default:
4418 continue;
4419 }
4420 if (first_num < 0)
4421 continue;
4422 else if (i > first_num)
4423 rld[i].when_needed = type;
4424 else
4425 {
4426 /* Check if the only TYPE reload that uses reload I is
4427 reload FIRST_NUM. */
4428 for (j = n_reloads - 1; j > first_num; j--)
4429 {
4430 if (rld[j].when_needed == type
4431 && (rld[i].secondary_p
4432 ? rld[j].secondary_in_reload == i
4433 : reg_mentioned_p (rld[i].in, rld[j].in)))
4434 {
4435 rld[i].when_needed = type;
4436 break;
4437 }
4438 }
4439 }
4440 }
4441 }
4442 }
4443
4444 /* See if we have any reloads that are now allowed to be merged
4445 because we've changed when the reload is needed to
4446 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4447 check for the most common cases. */
4448
4449 for (i = 0; i < n_reloads; i++)
4450 if (rld[i].in != 0 && rld[i].out == 0
4451 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4452 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4453 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4454 for (j = 0; j < n_reloads; j++)
4455 if (i != j && rld[j].in != 0 && rld[j].out == 0
4456 && rld[j].when_needed == rld[i].when_needed
4457 && MATCHES (rld[i].in, rld[j].in)
4458 && rld[i].class == rld[j].class
4459 && !rld[i].nocombine && !rld[j].nocombine
4460 && rld[i].reg_rtx == rld[j].reg_rtx)
4461 {
4462 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4463 transfer_replacements (i, j);
4464 rld[j].in = 0;
4465 }
4466
4467 #ifdef HAVE_cc0
4468 /* If we made any reloads for addresses, see if they violate a
4469 "no input reloads" requirement for this insn. But loads that we
4470 do after the insn (such as for output addresses) are fine. */
4471 if (no_input_reloads)
4472 for (i = 0; i < n_reloads; i++)
4473 gcc_assert (rld[i].in == 0
4474 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4475 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4476 #endif
4477
4478 /* Compute reload_mode and reload_nregs. */
4479 for (i = 0; i < n_reloads; i++)
4480 {
4481 rld[i].mode
4482 = (rld[i].inmode == VOIDmode
4483 || (GET_MODE_SIZE (rld[i].outmode)
4484 > GET_MODE_SIZE (rld[i].inmode)))
4485 ? rld[i].outmode : rld[i].inmode;
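/* I.e. the reload's mode is the wider of its input and output modes
   (or the output mode when there is no input).  */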
4486
4487 rld[i].nregs = CLASS_MAX_NREGS (rld[i].class, rld[i].mode);
4488 }
4489
4490 /* Special case a simple move with an input reload and a
4491 destination of a hard reg, if the hard reg is ok, use it. */
4492 for (i = 0; i < n_reloads; i++)
4493 if (rld[i].when_needed == RELOAD_FOR_INPUT
4494 && GET_CODE (PATTERN (insn)) == SET
4495 && REG_P (SET_DEST (PATTERN (insn)))
4496 && (SET_SRC (PATTERN (insn)) == rld[i].in
4497 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4498 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4499 {
4500 rtx dest = SET_DEST (PATTERN (insn));
4501 unsigned int regno = REGNO (dest);
4502
4503 if (regno < FIRST_PSEUDO_REGISTER
4504 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno)
4505 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4506 {
4507 int nr = hard_regno_nregs[regno][rld[i].mode];
4508 int ok = 1, nri;
4509
4510 for (nri = 1; nri < nr; nri ++)
4511 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno + nri))
4512 ok = 0;
4513
4514 if (ok)
4515 rld[i].reg_rtx = dest;
4516 }
4517 }
4518
4519 return retval;
4520 }
4521
4522 /* Return true if alternative number ALTNUM in constraint-string
4523 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4524 MEM gives the reference if it didn't need any reloads, otherwise it
4525 is null. */
4526
4527 static bool
4528 alternative_allows_const_pool_ref (rtx mem, const char *constraint, int altnum)
4529 {
4530 int c;
4531
4532 /* Skip alternatives before the one requested. */
4533 while (altnum > 0)
4534 {
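/* The loop body is intentionally empty; this just advances CONSTRAINT
   past the next ','.  */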
4535 while (*constraint++ != ',');
4536 altnum--;
4537 }
4538 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4539 If one of them is present, this alternative accepts the result of
4540 passing a constant-pool reference through find_reloads_toplev.
4541
4542 The same is true of extra memory constraints if the address
4543 was reloaded into a register. However, the target may elect
4544 to disallow the original constant address, forcing it to be
4545 reloaded into a register instead. */
4546 for (; (c = *constraint) && c != ',' && c != '#';
4547 constraint += CONSTRAINT_LEN (c, constraint))
4548 {
4549 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4550 return true;
4551 #ifdef EXTRA_CONSTRAINT_STR
4552 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4553 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4554 return true;
4555 #endif
4556 }
4557 return false;
4558 }
4559 \f
4560 /* Scan X for memory references and scan the addresses for reloading.
4561 Also checks for references to "constant" regs that we want to eliminate
4562 and replaces them with the values they stand for.
4563 We may alter X destructively if it contains a reference to such.
4564 If X is just a constant reg, we return the equivalent value
4565 instead of X.
4566
4567 IND_LEVELS says how many levels of indirect addressing this machine
4568 supports.
4569
4570 OPNUM and TYPE identify the purpose of the reload.
4571
4572 IS_SET_DEST is true if X is the destination of a SET, which is not
4573 appropriate to be replaced by a constant.
4574
4575 INSN, if nonzero, is the insn in which we do the reload. It is used
4576 to determine if we may generate output reloads, and where to put USEs
4577 for pseudos that we have to replace with stack slots.
4578
4579 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4580 result of find_reloads_address. */
4581
4582 static rtx
4583 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4584 int ind_levels, int is_set_dest, rtx insn,
4585 int *address_reloaded)
4586 {
4587 RTX_CODE code = GET_CODE (x);
4588
4589 const char *fmt = GET_RTX_FORMAT (code);
4590 int i;
4591 int copied;
4592
4593 if (code == REG)
4594 {
4595 /* This code is duplicated for speed in find_reloads. */
4596 int regno = REGNO (x);
4597 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4598 x = reg_equiv_constant[regno];
4599 #if 0
4600 /* This creates (subreg (mem...)) which would cause an unnecessary
4601 reload of the mem. */
4602 else if (reg_equiv_mem[regno] != 0)
4603 x = reg_equiv_mem[regno];
4604 #endif
4605 else if (reg_equiv_memory_loc[regno]
4606 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4607 {
4608 rtx mem = make_memloc (x, regno);
4609 if (reg_equiv_address[regno]
4610 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4611 {
4612 /* If this is not a toplevel operand, find_reloads doesn't see
4613 this substitution. We have to emit a USE of the pseudo so
4614 that delete_output_reload can see it. */
4615 if (replace_reloads && recog_data.operand[opnum] != x)
4616 /* We mark the USE with QImode so that we recognize it
4617 as one that can be safely deleted at the end of
4618 reload. */
4619 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4620 QImode);
4621 x = mem;
4622 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4623 opnum, type, ind_levels, insn);
4624 if (!rtx_equal_p (x, mem))
4625 push_reg_equiv_alt_mem (regno, x);
4626 if (address_reloaded)
4627 *address_reloaded = i;
4628 }
4629 }
4630 return x;
4631 }
4632 if (code == MEM)
4633 {
4634 rtx tem = x;
4635
4636 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4637 opnum, type, ind_levels, insn);
4638 if (address_reloaded)
4639 *address_reloaded = i;
4640
4641 return tem;
4642 }
4643
4644 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4645 {
4646 /* Check for SUBREG containing a REG that's equivalent to a
4647 constant. If the constant has a known value, truncate it
4648 right now. Similarly if we are extracting a single-word of a
4649 multi-word constant. If the constant is symbolic, allow it
4650 to be substituted normally. push_reload will strip the
4651 subreg later. The constant must not be VOIDmode, because we
4652 will lose the mode of the register (this should never happen
4653 because one of the cases above should handle it). */
4654
4655 int regno = REGNO (SUBREG_REG (x));
4656 rtx tem;
4657
4658 if (regno >= FIRST_PSEUDO_REGISTER
4659 && reg_renumber[regno] < 0
4660 && reg_equiv_constant[regno] != 0)
4661 {
4662 tem =
4663 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4664 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4665 gcc_assert (tem);
4666 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4667 {
4668 tem = force_const_mem (GET_MODE (x), tem);
4669 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4670 &XEXP (tem, 0), opnum, type,
4671 ind_levels, insn);
4672 if (address_reloaded)
4673 *address_reloaded = i;
4674 }
4675 return tem;
4676 }
4677
4678 /* If the subreg contains a reg that will be converted to a mem,
4679 convert the subreg to a narrower memref now.
4680 Otherwise, we would get (subreg (mem ...) ...),
4681 which would force reload of the mem.
4682
4683 We also need to do this if there is an equivalent MEM that is
4684 not offsettable. In that case, alter_subreg would produce an
4685 invalid address on big-endian machines.
4686
4687 For machines that extend byte loads, we must not reload using
4688 a wider mode if we have a paradoxical SUBREG. find_reloads will
4689 force a reload in that case. So we should not do anything here. */
4690
4691 if (regno >= FIRST_PSEUDO_REGISTER
4692 #ifdef LOAD_EXTEND_OP
4693 && (GET_MODE_SIZE (GET_MODE (x))
4694 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4695 #endif
4696 && (reg_equiv_address[regno] != 0
4697 || (reg_equiv_mem[regno] != 0
4698 && (! strict_memory_address_p (GET_MODE (x),
4699 XEXP (reg_equiv_mem[regno], 0))
4700 || ! offsettable_memref_p (reg_equiv_mem[regno])
4701 || num_not_at_initial_offset))))
4702 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4703 insn);
4704 }
4705
4706 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4707 {
4708 if (fmt[i] == 'e')
4709 {
4710 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4711 ind_levels, is_set_dest, insn,
4712 address_reloaded);
4713 /* If we have replaced a reg with its equivalent memory loc -
4714 that can still be handled here e.g. if it's in a paradoxical
4715 subreg - we must make the change in a copy, rather than using
4716 a destructive change. This way, find_reloads can still elect
4717 not to do the change. */
4718 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4719 {
4720 x = shallow_copy_rtx (x);
4721 copied = 1;
4722 }
4723 XEXP (x, i) = new_part;
4724 }
4725 }
4726 return x;
4727 }
4728
4729 /* Return a mem ref for the memory equivalent of reg REGNO.
4730 This mem ref is not shared with anything. */
4731
4732 static rtx
4733 make_memloc (rtx ad, int regno)
4734 {
4735 /* We must rerun eliminate_regs, in case the elimination
4736 offsets have changed. */
4737 rtx tem
4738 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], 0, NULL_RTX), 0);
4739
4740 /* If TEM might contain a pseudo, we must copy it to avoid
4741 modifying it when we do the substitution for the reload. */
4742 if (rtx_varies_p (tem, 0))
4743 tem = copy_rtx (tem);
4744
4745 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4746 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4747
4748 /* Copy the result if it's still the same as the equivalence, to avoid
4749 modifying it when we do the substitution for the reload. */
4750 if (tem == reg_equiv_memory_loc[regno])
4751 tem = copy_rtx (tem);
4752 return tem;
4753 }
4754
4755 /* Returns true if AD could be turned into a valid memory reference
4756 to mode MODE by reloading the part pointed to by PART into a
4757 register. */
4758
4759 static int
4760 maybe_memory_address_p (enum machine_mode mode, rtx ad, rtx *part)
4761 {
4762 int retv;
4763 rtx tem = *part;
4764 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4765
4766 *part = reg;
4767 retv = memory_address_p (mode, ad);
4768 *part = tem;
4769
4770 return retv;
4771 }
4772
4773 /* Record all reloads needed for handling memory address AD
4774 which appears in *LOC in a memory reference to mode MODE
4775 which itself is found in location *MEMREFLOC.
4776 Note that we take shortcuts assuming that no multi-reg machine mode
4777 occurs as part of an address.
4778
4779 OPNUM and TYPE specify the purpose of this reload.
4780
4781 IND_LEVELS says how many levels of indirect addressing this machine
4782 supports.
4783
4784 INSN, if nonzero, is the insn in which we do the reload. It is used
4785 to determine if we may generate output reloads, and where to put USEs
4786 for pseudos that we have to replace with stack slots.
4787
4788 Value is one if this address is reloaded or replaced as a whole; it is
4789 zero if the top level of this address was not reloaded or replaced, and
4790 it is -1 if it may or may not have been reloaded or replaced.
4791
4792 Note that there is no verification that the address will be valid after
4793 this routine does its work. Instead, we rely on the fact that the address
4794 was valid when reload started. So we need only undo things that reload
4795 could have broken. These are wrong register types, pseudos not allocated
4796 to a hard register, and frame pointer elimination. */
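/* A rough illustration, with made-up register numbers: when AD is
   (reg:SI 120) and pseudo 120 received no hard register, the register
   is normally pushed as a reload into a base-register class and 1 is
   returned; when only an inner part of something like
   (plus (reg:SI 120) (const_int 4)) needs fixing, the top level is
   left in place and 0 is returned; and a target's
   LEGITIMIZE_RELOAD_ADDRESS hook may rewrite the address itself, in
   which case -1 is returned.  */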
4797
4798 static int
4799 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4800 rtx *loc, int opnum, enum reload_type type,
4801 int ind_levels, rtx insn)
4802 {
4803 int regno;
4804 int removed_and = 0;
4805 int op_index;
4806 rtx tem;
4807
4808 /* If the address is a register, see if it is a legitimate address and
4809 reload if not. We first handle the cases where we need not reload
4810 or where we must reload in a non-standard way. */
4811
4812 if (REG_P (ad))
4813 {
4814 regno = REGNO (ad);
4815
4816 if (reg_equiv_constant[regno] != 0)
4817 {
4818 find_reloads_address_part (reg_equiv_constant[regno], loc,
4819 base_reg_class (mode, MEM, SCRATCH),
4820 GET_MODE (ad), opnum, type, ind_levels);
4821 return 1;
4822 }
4823
4824 tem = reg_equiv_memory_loc[regno];
4825 if (tem != 0)
4826 {
4827 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4828 {
4829 tem = make_memloc (ad, regno);
4830 if (! strict_memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
4831 {
4832 rtx orig = tem;
4833
4834 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4835 &XEXP (tem, 0), opnum,
4836 ADDR_TYPE (type), ind_levels, insn);
4837 if (!rtx_equal_p (tem, orig))
4838 push_reg_equiv_alt_mem (regno, tem);
4839 }
4840 /* We can avoid a reload if the register's equivalent memory
4841 expression is valid as an indirect memory address.
4842 But not all addresses are valid in a mem used as an indirect
4843 address: only reg or reg+constant. */
4844
4845 if (ind_levels > 0
4846 && strict_memory_address_p (mode, tem)
4847 && (REG_P (XEXP (tem, 0))
4848 || (GET_CODE (XEXP (tem, 0)) == PLUS
4849 && REG_P (XEXP (XEXP (tem, 0), 0))
4850 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4851 {
4852 /* If TEM is not the same as what we'll be replacing the
4853 pseudo with after reload, put a USE in front of INSN
4854 in the final reload pass. */
4855 if (replace_reloads
4856 && num_not_at_initial_offset
4857 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4858 {
4859 *loc = tem;
4860 /* We mark the USE with QImode so that we
4861 recognize it as one that can be safely
4862 deleted at the end of reload. */
4863 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4864 insn), QImode);
4865
4866 /* This doesn't really count as replacing the address
4867 as a whole, since it is still a memory access. */
4868 }
4869 return 0;
4870 }
4871 ad = tem;
4872 }
4873 }
4874
4875 /* The only remaining case where we can avoid a reload is if this is a
4876 hard register that is valid as a base register and which is not the
4877 subject of a CLOBBER in this insn. */
4878
4879 else if (regno < FIRST_PSEUDO_REGISTER
4880 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4881 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4882 return 0;
4883
4884 /* If we do not have one of the cases above, we must do the reload. */
4885 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4886 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4887 return 1;
4888 }
4889
4890 if (strict_memory_address_p (mode, ad))
4891 {
4892 /* The address appears valid, so reloads are not needed.
4893 But the address may contain an eliminable register.
4894 This can happen because a machine with indirect addressing
4895 may consider a pseudo register by itself a valid address even when
4896 it has failed to get a hard reg.
4897 So do a tree-walk to find and eliminate all such regs. */
4898
4899 /* But first quickly dispose of a common case. */
4900 if (GET_CODE (ad) == PLUS
4901 && GET_CODE (XEXP (ad, 1)) == CONST_INT
4902 && REG_P (XEXP (ad, 0))
4903 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4904 return 0;
4905
4906 subst_reg_equivs_changed = 0;
4907 *loc = subst_reg_equivs (ad, insn);
4908
4909 if (! subst_reg_equivs_changed)
4910 return 0;
4911
4912 /* Check result for validity after substitution. */
4913 if (strict_memory_address_p (mode, ad))
4914 return 0;
4915 }
4916
4917 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4918 do
4919 {
4920 if (memrefloc)
4921 {
4922 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4923 ind_levels, win);
4924 }
4925 break;
4926 win:
4927 *memrefloc = copy_rtx (*memrefloc);
4928 XEXP (*memrefloc, 0) = ad;
4929 move_replacements (&ad, &XEXP (*memrefloc, 0));
4930 return -1;
4931 }
4932 while (0);
4933 #endif
4934
4935 /* The address is not valid. We have to figure out why. First see if
4936 we have an outer AND and remove it if so. Then analyze what's inside. */
4937
4938 if (GET_CODE (ad) == AND)
4939 {
4940 removed_and = 1;
4941 loc = &XEXP (ad, 0);
4942 ad = *loc;
4943 }
4944
4945 /* One possibility for why the address is invalid is that it is itself
4946 a MEM. This can happen when the frame pointer is being eliminated, a
4947 pseudo is not allocated to a hard register, and the offset between the
4948 frame and stack pointers is not its initial value. In that case the
4949 pseudo will have been replaced by a MEM referring to the
4950 stack pointer. */
4951 if (MEM_P (ad))
4952 {
4953 /* First ensure that the address in this MEM is valid. Then, unless
4954 indirect addresses are valid, reload the MEM into a register. */
4955 tem = ad;
4956 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
4957 opnum, ADDR_TYPE (type),
4958 ind_levels == 0 ? 0 : ind_levels - 1, insn);
4959
4960 /* If tem was changed, then we must create a new memory reference to
4961 hold it and store it back into memrefloc. */
4962 if (tem != ad && memrefloc)
4963 {
4964 *memrefloc = copy_rtx (*memrefloc);
4965 copy_replacements (tem, XEXP (*memrefloc, 0));
4966 loc = &XEXP (*memrefloc, 0);
4967 if (removed_and)
4968 loc = &XEXP (*loc, 0);
4969 }
4970
4971 /* Check the same cases as for indirect addresses above, except
4972 that we can allow pseudos and a MEM, since they should have been
4973 taken care of above.
4974
4975 if (ind_levels == 0
4976 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
4977 || MEM_P (XEXP (tem, 0))
4978 || ! (REG_P (XEXP (tem, 0))
4979 || (GET_CODE (XEXP (tem, 0)) == PLUS
4980 && REG_P (XEXP (XEXP (tem, 0), 0))
4981 && GET_CODE (XEXP (XEXP (tem, 0), 1)) == CONST_INT)))
4982 {
4983 /* Must use TEM here, not AD, since it is the one that will
4984 have any subexpressions reloaded, if needed. */
4985 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
4986 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
4987 VOIDmode, 0,
4988 0, opnum, type);
4989 return ! removed_and;
4990 }
4991 else
4992 return 0;
4993 }
4994
4995 /* If we have address of a stack slot but it's not valid because the
4996 displacement is too large, compute the sum in a register.
4997 Handle all base registers here, not just fp/ap/sp, because on some
4998 targets (namely SH) we can also get too large displacements from
4999 big-endian corrections. */
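/* For instance, on a hypothetical target whose base+displacement range
   is +-4K, AD might be (plus (reg fp) (const_int 32760)), which is out
   of range.  If double_reg_address_ok, only the constant is reloaded,
   into an index register, so the address becomes base reg + index reg;
   otherwise the whole sum is reloaded into a single base register.  */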
5000 else if (GET_CODE (ad) == PLUS
5001 && REG_P (XEXP (ad, 0))
5002 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5003 && GET_CODE (XEXP (ad, 1)) == CONST_INT
5004 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5005 CONST_INT))
5006
5007 {
5008 /* Unshare the MEM rtx so we can safely alter it. */
5009 if (memrefloc)
5010 {
5011 *memrefloc = copy_rtx (*memrefloc);
5012 loc = &XEXP (*memrefloc, 0);
5013 if (removed_and)
5014 loc = &XEXP (*loc, 0);
5015 }
5016
5017 if (double_reg_address_ok)
5018 {
5019 /* Unshare the sum as well. */
5020 *loc = ad = copy_rtx (ad);
5021
5022 /* Reload the displacement into an index reg.
5023 We assume the frame pointer or arg pointer is a base reg. */
5024 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5025 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5026 type, ind_levels);
5027 return 0;
5028 }
5029 else
5030 {
5031 /* If the sum of two regs is not necessarily valid,
5032 reload the sum into a base reg.
5033 That will at least work. */
5034 find_reloads_address_part (ad, loc,
5035 base_reg_class (mode, MEM, SCRATCH),
5036 Pmode, opnum, type, ind_levels);
5037 }
5038 return ! removed_and;
5039 }
5040
5041 /* If we have an indexed stack slot, there are three possible reasons why
5042 it might be invalid: The index might need to be reloaded, the address
5043 might have been made by frame pointer elimination and hence have a
5044 constant out of range, or both reasons might apply.
5045
5046 We can easily check for an index needing reload, but even if that is the
5047 case, we might also have an invalid constant. To avoid making the
5048 conservative assumption and requiring two reloads, we see if this address
5049 is valid when not interpreted strictly. If it is, the only problem is
5050 that the index needs a reload and find_reloads_address_1 will take care
5051 of it.
5052
5053 Handle all base registers here, not just fp/ap/sp, because on some
5054 targets (namely SPARC) we can also get invalid addresses from preventive
5055 subreg big-endian corrections made by find_reloads_toplev. We
5056 can also get expressions involving LO_SUM (rather than PLUS) from
5057 find_reloads_subreg_address.
5058
5059 If we decide to do something, it must be that `double_reg_address_ok'
5060 is true. We generate a reload of the base register + constant and
5061 rework the sum so that the reload register will be added to the index.
5062 This is safe because we know the address isn't shared.
5063
5064 We check for the base register as both the first and second operand of
5065 the innermost PLUS and/or LO_SUM. */
5066
5067 for (op_index = 0; op_index < 2; ++op_index)
5068 {
5069 rtx operand, addend;
5070 enum rtx_code inner_code;
5071
5072 if (GET_CODE (ad) != PLUS)
5073 continue;
5074
5075 inner_code = GET_CODE (XEXP (ad, 0));
5076 if (!(GET_CODE (ad) == PLUS
5077 && GET_CODE (XEXP (ad, 1)) == CONST_INT
5078 && (inner_code == PLUS || inner_code == LO_SUM)))
5079 continue;
5080
5081 operand = XEXP (XEXP (ad, 0), op_index);
5082 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5083 continue;
5084
5085 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5086
5087 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5088 GET_CODE (addend))
5089 || operand == frame_pointer_rtx
5090 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5091 || operand == hard_frame_pointer_rtx
5092 #endif
5093 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5094 || operand == arg_pointer_rtx
5095 #endif
5096 || operand == stack_pointer_rtx)
5097 && ! maybe_memory_address_p (mode, ad,
5098 &XEXP (XEXP (ad, 0), 1 - op_index)))
5099 {
5100 rtx offset_reg;
5101 enum reg_class cls;
5102
5103 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5104
5105 /* Form the adjusted address. */
5106 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5107 ad = gen_rtx_PLUS (GET_MODE (ad),
5108 op_index == 0 ? offset_reg : addend,
5109 op_index == 0 ? addend : offset_reg);
5110 else
5111 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5112 op_index == 0 ? offset_reg : addend,
5113 op_index == 0 ? addend : offset_reg);
5114 *loc = ad;
5115
5116 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5117 find_reloads_address_part (XEXP (ad, op_index),
5118 &XEXP (ad, op_index), cls,
5119 GET_MODE (ad), opnum, type, ind_levels);
5120 find_reloads_address_1 (mode,
5121 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5122 GET_CODE (XEXP (ad, op_index)),
5123 &XEXP (ad, 1 - op_index), opnum,
5124 type, 0, insn);
5125
5126 return 0;
5127 }
5128 }
5129
5130 /* See if address becomes valid when an eliminable register
5131 in a sum is replaced. */
5132
5133 tem = ad;
5134 if (GET_CODE (ad) == PLUS)
5135 tem = subst_indexed_address (ad);
5136 if (tem != ad && strict_memory_address_p (mode, tem))
5137 {
5138 /* Ok, we win that way. Replace any additional eliminable
5139 registers. */
5140
5141 subst_reg_equivs_changed = 0;
5142 tem = subst_reg_equivs (tem, insn);
5143
5144 /* Make sure that didn't make the address invalid again. */
5145
5146 if (! subst_reg_equivs_changed || strict_memory_address_p (mode, tem))
5147 {
5148 *loc = tem;
5149 return 0;
5150 }
5151 }
5152
5153 /* If constants aren't valid addresses, reload the constant address
5154 into a register. */
5155 if (CONSTANT_P (ad) && ! strict_memory_address_p (mode, ad))
5156 {
5157 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5158 Unshare it so we can safely alter it. */
5159 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5160 && CONSTANT_POOL_ADDRESS_P (ad))
5161 {
5162 *memrefloc = copy_rtx (*memrefloc);
5163 loc = &XEXP (*memrefloc, 0);
5164 if (removed_and)
5165 loc = &XEXP (*loc, 0);
5166 }
5167
5168 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5169 Pmode, opnum, type, ind_levels);
5170 return ! removed_and;
5171 }
5172
5173 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5174 ind_levels, insn);
5175 }
5176 \f
5177 /* Find all pseudo regs appearing in AD
5178 that are eliminable in favor of equivalent values
5179 and do not have hard regs; replace them by their equivalents.
5180 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5181 front of it for pseudos that we have to replace with stack slots. */
5182
5183 static rtx
5184 subst_reg_equivs (rtx ad, rtx insn)
5185 {
5186 RTX_CODE code = GET_CODE (ad);
5187 int i;
5188 const char *fmt;
5189
5190 switch (code)
5191 {
5192 case HIGH:
5193 case CONST_INT:
5194 case CONST:
5195 case CONST_DOUBLE:
5196 case CONST_FIXED:
5197 case CONST_VECTOR:
5198 case SYMBOL_REF:
5199 case LABEL_REF:
5200 case PC:
5201 case CC0:
5202 return ad;
5203
5204 case REG:
5205 {
5206 int regno = REGNO (ad);
5207
5208 if (reg_equiv_constant[regno] != 0)
5209 {
5210 subst_reg_equivs_changed = 1;
5211 return reg_equiv_constant[regno];
5212 }
5213 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5214 {
5215 rtx mem = make_memloc (ad, regno);
5216 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5217 {
5218 subst_reg_equivs_changed = 1;
5219 /* We mark the USE with QImode so that we recognize it
5220 as one that can be safely deleted at the end of
5221 reload. */
5222 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5223 QImode);
5224 return mem;
5225 }
5226 }
5227 }
5228 return ad;
5229
5230 case PLUS:
5231 /* Quickly dispose of a common case. */
5232 if (XEXP (ad, 0) == frame_pointer_rtx
5233 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
5234 return ad;
5235 break;
5236
5237 default:
5238 break;
5239 }
5240
5241 fmt = GET_RTX_FORMAT (code);
5242 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5243 if (fmt[i] == 'e')
5244 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5245 return ad;
5246 }
5247 \f
5248 /* Compute the sum of X and Y, making canonicalizations assumed in an
5249 address, namely: sum constant integers, surround the sum of two
5250 constants with a CONST, put the constant as the second operand, and
5251 group the constant on the outermost sum.
5252
5253 This routine assumes both inputs are already in canonical form. */
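/* A few illustrative results, with a made-up register number and symbol:
     form_sum ((const_int 4), (reg 1))
       => (plus (reg 1) (const_int 4))
     form_sum ((plus (reg 1) (const_int 4)), (const_int 8))
       => (plus (reg 1) (const_int 12))
     form_sum ((symbol_ref "x"), (const_int 4))
       => (const (plus (symbol_ref "x") (const_int 4)))  */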
5254
5255 rtx
5256 form_sum (rtx x, rtx y)
5257 {
5258 rtx tem;
5259 enum machine_mode mode = GET_MODE (x);
5260
5261 if (mode == VOIDmode)
5262 mode = GET_MODE (y);
5263
5264 if (mode == VOIDmode)
5265 mode = Pmode;
5266
5267 if (GET_CODE (x) == CONST_INT)
5268 return plus_constant (y, INTVAL (x));
5269 else if (GET_CODE (y) == CONST_INT)
5270 return plus_constant (x, INTVAL (y));
5271 else if (CONSTANT_P (x))
5272 tem = x, x = y, y = tem;
5273
5274 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5275 return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
5276
5277 /* Note that if the operands of Y are specified in the opposite
5278 order in the recursive calls below, infinite recursion will occur. */
5279 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5280 return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
5281
5282 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5283 constant will have been placed second. */
5284 if (CONSTANT_P (x) && CONSTANT_P (y))
5285 {
5286 if (GET_CODE (x) == CONST)
5287 x = XEXP (x, 0);
5288 if (GET_CODE (y) == CONST)
5289 y = XEXP (y, 0);
5290
5291 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5292 }
5293
5294 return gen_rtx_PLUS (mode, x, y);
5295 }
5296 \f
5297 /* If ADDR is a sum containing a pseudo register that should be
5298 replaced with a constant (from reg_equiv_constant),
5299 return the result of doing so, and also apply the associative
5300 law so that the result is more likely to be a valid address.
5301 (But it is not guaranteed to be one.)
5302
5303 Note that at most one register is replaced, even if more are
5304 replaceable. Also, we try to put the result into a canonical form
5305 so it is more likely to be a valid address.
5306
5307 In all other cases, return ADDR. */
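/* A worked example, with made-up numbers: if pseudo 117 has no hard
   register and reg_equiv_constant[117] is (symbol_ref "x"), then
   ADDR = (plus (reg 117) (const_int 4)) becomes
   (const (plus (symbol_ref "x") (const_int 4))) via form_sum.  */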
5308
5309 static rtx
5310 subst_indexed_address (rtx addr)
5311 {
5312 rtx op0 = 0, op1 = 0, op2 = 0;
5313 rtx tem;
5314 int regno;
5315
5316 if (GET_CODE (addr) == PLUS)
5317 {
5318 /* Try to find a register to replace. */
5319 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5320 if (REG_P (op0)
5321 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5322 && reg_renumber[regno] < 0
5323 && reg_equiv_constant[regno] != 0)
5324 op0 = reg_equiv_constant[regno];
5325 else if (REG_P (op1)
5326 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5327 && reg_renumber[regno] < 0
5328 && reg_equiv_constant[regno] != 0)
5329 op1 = reg_equiv_constant[regno];
5330 else if (GET_CODE (op0) == PLUS
5331 && (tem = subst_indexed_address (op0)) != op0)
5332 op0 = tem;
5333 else if (GET_CODE (op1) == PLUS
5334 && (tem = subst_indexed_address (op1)) != op1)
5335 op1 = tem;
5336 else
5337 return addr;
5338
5339 /* Pick out up to three things to add. */
5340 if (GET_CODE (op1) == PLUS)
5341 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5342 else if (GET_CODE (op0) == PLUS)
5343 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5344
5345 /* Compute the sum. */
5346 if (op2 != 0)
5347 op1 = form_sum (op1, op2);
5348 if (op1 != 0)
5349 op0 = form_sum (op0, op1);
5350
5351 return op0;
5352 }
5353 return addr;
5354 }
5355 \f
5356 /* Update the REG_INC notes for an insn. It updates all REG_INC
5357 notes for the instruction which refer to REGNO so that they
5358 refer to the reload number instead.
5359
5360 INSN is the insn for which any REG_INC notes need updating.
5361
5362 REGNO is the register number which has been reloaded.
5363
5364 RELOADNUM is the reload number. */
5365
5366 static void
5367 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5368 int reloadnum ATTRIBUTE_UNUSED)
5369 {
5370 #ifdef AUTO_INC_DEC
5371 rtx link;
5372
5373 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5374 if (REG_NOTE_KIND (link) == REG_INC
5375 && (int) REGNO (XEXP (link, 0)) == regno)
5376 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5377 #endif
5378 }
5379 \f
5380 /* Record the pseudo registers we must reload into hard registers in a
5381 subexpression of a would-be memory address, X referring to a value
5382 in mode MODE. (This function is not called if the address we find
5383 is strictly valid.)
5384
5385 CONTEXT = 1 means we are considering regs as index regs,
5386 = 0 means we are considering them as base regs.
5387 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5388 or an autoinc code.
5389 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5390 is the code of the index part of the address. Otherwise, pass SCRATCH
5391 for this argument.
5392 OPNUM and TYPE specify the purpose of any reloads made.
5393
5394 IND_LEVELS says how many levels of indirect addressing are
5395 supported at this point in the address.
5396
5397 INSN, if nonzero, is the insn in which we do the reload. It is used
5398 to determine if we may generate output reloads.
5399
5400 We return nonzero if X, as a whole, is reloaded or replaced. */
5401
5402 /* Note that we take shortcuts assuming that no multi-reg machine mode
5403 occurs as part of an address.
5404 Also, this is not fully machine-customizable; it works for machines
5405 such as VAXen and 68000's and 32000's, but other possible machines
5406 could have addressing modes that this does not handle right.
5407 If you add push_reload calls here, you need to make sure gen_reload
5408 handles those cases gracefully. */
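/* A worked example with made-up register numbers: for
   X = (plus (mult (reg 65) (const_int 4)) (reg 66)), the MULT operand
   is processed with CONTEXT == 1, so (reg 65) must satisfy
   REGNO_OK_FOR_INDEX_P, while (reg 66) is processed with CONTEXT == 0
   and OUTER_CODE PLUS / INDEX_CODE MULT, so it must satisfy
   regno_ok_for_base_p; whichever operand fails is pushed as a reload
   of the corresponding class.  */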
5409
5410 static int
5411 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5412 enum rtx_code outer_code, enum rtx_code index_code,
5413 rtx *loc, int opnum, enum reload_type type,
5414 int ind_levels, rtx insn)
5415 {
5416 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5417 ((CONTEXT) == 0 \
5418 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5419 : REGNO_OK_FOR_INDEX_P (REGNO))
5420
5421 enum reg_class context_reg_class;
5422 RTX_CODE code = GET_CODE (x);
5423
5424 if (context == 1)
5425 context_reg_class = INDEX_REG_CLASS;
5426 else
5427 context_reg_class = base_reg_class (mode, outer_code, index_code);
5428
5429 switch (code)
5430 {
5431 case PLUS:
5432 {
5433 rtx orig_op0 = XEXP (x, 0);
5434 rtx orig_op1 = XEXP (x, 1);
5435 RTX_CODE code0 = GET_CODE (orig_op0);
5436 RTX_CODE code1 = GET_CODE (orig_op1);
5437 rtx op0 = orig_op0;
5438 rtx op1 = orig_op1;
5439
5440 if (GET_CODE (op0) == SUBREG)
5441 {
5442 op0 = SUBREG_REG (op0);
5443 code0 = GET_CODE (op0);
5444 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5445 op0 = gen_rtx_REG (word_mode,
5446 (REGNO (op0) +
5447 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5448 GET_MODE (SUBREG_REG (orig_op0)),
5449 SUBREG_BYTE (orig_op0),
5450 GET_MODE (orig_op0))));
5451 }
5452
5453 if (GET_CODE (op1) == SUBREG)
5454 {
5455 op1 = SUBREG_REG (op1);
5456 code1 = GET_CODE (op1);
5457 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5458 /* ??? Why is this given op1's mode and above for
5459 ??? op0 SUBREGs we use word_mode? */
5460 op1 = gen_rtx_REG (GET_MODE (op1),
5461 (REGNO (op1) +
5462 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5463 GET_MODE (SUBREG_REG (orig_op1)),
5464 SUBREG_BYTE (orig_op1),
5465 GET_MODE (orig_op1))));
5466 }
5467 /* A PLUS in the index register may be created only as a result of
5468 register rematerialization for an expression like &localvar*4. Reload it.
5469 It may be possible to combine the displacement on the outer level,
5470 but it is probably not worthwhile to do so. */
5471 if (context == 1)
5472 {
5473 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5474 opnum, ADDR_TYPE (type), ind_levels, insn);
5475 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5476 context_reg_class,
5477 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5478 return 1;
5479 }
5480
5481 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5482 || code0 == ZERO_EXTEND || code1 == MEM)
5483 {
5484 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5485 &XEXP (x, 0), opnum, type, ind_levels,
5486 insn);
5487 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5488 &XEXP (x, 1), opnum, type, ind_levels,
5489 insn);
5490 }
5491
5492 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5493 || code1 == ZERO_EXTEND || code0 == MEM)
5494 {
5495 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5496 &XEXP (x, 0), opnum, type, ind_levels,
5497 insn);
5498 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5499 &XEXP (x, 1), opnum, type, ind_levels,
5500 insn);
5501 }
5502
5503 else if (code0 == CONST_INT || code0 == CONST
5504 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5505 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5506 &XEXP (x, 1), opnum, type, ind_levels,
5507 insn);
5508
5509 else if (code1 == CONST_INT || code1 == CONST
5510 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5511 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5512 &XEXP (x, 0), opnum, type, ind_levels,
5513 insn);
5514
5515 else if (code0 == REG && code1 == REG)
5516 {
5517 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5518 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5519 return 0;
5520 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5521 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5522 return 0;
5523 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5524 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5525 &XEXP (x, 1), opnum, type, ind_levels,
5526 insn);
5527 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5528 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5529 &XEXP (x, 0), opnum, type, ind_levels,
5530 insn);
5531 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5532 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5533 &XEXP (x, 0), opnum, type, ind_levels,
5534 insn);
5535 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5536 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5537 &XEXP (x, 1), opnum, type, ind_levels,
5538 insn);
5539 else
5540 {
5541 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5542 &XEXP (x, 0), opnum, type, ind_levels,
5543 insn);
5544 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5545 &XEXP (x, 1), opnum, type, ind_levels,
5546 insn);
5547 }
5548 }
5549
5550 else if (code0 == REG)
5551 {
5552 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5553 &XEXP (x, 0), opnum, type, ind_levels,
5554 insn);
5555 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5556 &XEXP (x, 1), opnum, type, ind_levels,
5557 insn);
5558 }
5559
5560 else if (code1 == REG)
5561 {
5562 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5563 &XEXP (x, 1), opnum, type, ind_levels,
5564 insn);
5565 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5566 &XEXP (x, 0), opnum, type, ind_levels,
5567 insn);
5568 }
5569 }
5570
5571 return 0;
5572
5573 case POST_MODIFY:
5574 case PRE_MODIFY:
5575 {
5576 rtx op0 = XEXP (x, 0);
5577 rtx op1 = XEXP (x, 1);
5578 enum rtx_code index_code;
5579 int regno;
5580 int reloadnum;
5581
5582 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5583 return 0;
5584
5585 /* Currently, we only support {PRE,POST}_MODIFY constructs
5586 where a base register is {inc,dec}remented by the contents
5587 of another register or by a constant value. Thus, these
5588 operands must match. */
5589 gcc_assert (op0 == XEXP (op1, 0));
5590
5591 /* Require index register (or constant). Let's just handle the
5592 register case in the meantime... If the target allows
5593 auto-modify by a constant then we could try replacing a pseudo
5594 register with its equivalent constant where applicable.
5595
5596 We also handle the case where the register was eliminated
5597 resulting in a PLUS subexpression.
5598
5599 If we later decide to reload the whole PRE_MODIFY or
5600 POST_MODIFY, inc_for_reload might clobber the reload register
5601 before reading the index. The index register might therefore
5602 need to live longer than a TYPE reload normally would, so be
5603 conservative and class it as RELOAD_OTHER. */
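/* Schematically, with made-up register numbers:
   (pre_modify (reg 66) (plus (reg 66) (reg 65))) increments base
   register 66 by index register 65; both occurrences of the base must
   be the same register, (reg 65) gets a RELOAD_OTHER reload below if
   it is not a valid index register, and the base itself is reloaded
   further down if it is not valid as a base.  */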
5604 if ((REG_P (XEXP (op1, 1))
5605 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5606 || GET_CODE (XEXP (op1, 1)) == PLUS)
5607 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5608 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5609 ind_levels, insn);
5610
5611 gcc_assert (REG_P (XEXP (op1, 0)));
5612
5613 regno = REGNO (XEXP (op1, 0));
5614 index_code = GET_CODE (XEXP (op1, 1));
5615
5616 /* A register that is incremented cannot be constant! */
5617 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5618 || reg_equiv_constant[regno] == 0);
5619
5620 /* Handle a register that is equivalent to a memory location
5621 which cannot be addressed directly. */
5622 if (reg_equiv_memory_loc[regno] != 0
5623 && (reg_equiv_address[regno] != 0
5624 || num_not_at_initial_offset))
5625 {
5626 rtx tem = make_memloc (XEXP (x, 0), regno);
5627
5628 if (reg_equiv_address[regno]
5629 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5630 {
5631 rtx orig = tem;
5632
5633 /* First reload the memory location's address.
5634 We can't use ADDR_TYPE (type) here, because we need to
5635 write back the value after reading it, hence we actually
5636 need two registers. */
5637 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5638 &XEXP (tem, 0), opnum,
5639 RELOAD_OTHER,
5640 ind_levels, insn);
5641
5642 if (!rtx_equal_p (tem, orig))
5643 push_reg_equiv_alt_mem (regno, tem);
5644
5645 /* Then reload the memory location into a base
5646 register. */
5647 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5648 &XEXP (op1, 0),
5649 base_reg_class (mode, code,
5650 index_code),
5651 GET_MODE (x), GET_MODE (x), 0,
5652 0, opnum, RELOAD_OTHER);
5653
5654 update_auto_inc_notes (this_insn, regno, reloadnum);
5655 return 0;
5656 }
5657 }
5658
5659 if (reg_renumber[regno] >= 0)
5660 regno = reg_renumber[regno];
5661
5662 /* We require a base register here... */
5663 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5664 {
5665 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5666 &XEXP (op1, 0), &XEXP (x, 0),
5667 base_reg_class (mode, code, index_code),
5668 GET_MODE (x), GET_MODE (x), 0, 0,
5669 opnum, RELOAD_OTHER);
5670
5671 update_auto_inc_notes (this_insn, regno, reloadnum);
5672 return 0;
5673 }
5674 }
5675 return 0;
5676
5677 case POST_INC:
5678 case POST_DEC:
5679 case PRE_INC:
5680 case PRE_DEC:
5681 if (REG_P (XEXP (x, 0)))
5682 {
5683 int regno = REGNO (XEXP (x, 0));
5684 int value = 0;
5685 rtx x_orig = x;
5686
5687 /* A register that is incremented cannot be constant! */
5688 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5689 || reg_equiv_constant[regno] == 0);
5690
5691 /* Handle a register that is equivalent to a memory location
5692 which cannot be addressed directly. */
5693 if (reg_equiv_memory_loc[regno] != 0
5694 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5695 {
5696 rtx tem = make_memloc (XEXP (x, 0), regno);
5697 if (reg_equiv_address[regno]
5698 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5699 {
5700 rtx orig = tem;
5701
5702 /* First reload the memory location's address.
5703 We can't use ADDR_TYPE (type) here, because we need to
5704 write back the value after reading it, hence we actually
5705 need two registers. */
5706 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5707 &XEXP (tem, 0), opnum, type,
5708 ind_levels, insn);
5709 if (!rtx_equal_p (tem, orig))
5710 push_reg_equiv_alt_mem (regno, tem);
5711 /* Put this inside a new increment-expression. */
5712 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5713 /* Proceed to reload that, as if it contained a register. */
5714 }
5715 }
5716
5717 /* If we have a hard register that is ok in this incdec context,
5718 don't make a reload. If the register isn't nice enough for
5719 autoincdec, we can reload it. But if an autoincrement of a
5720 register that we have just verified as playing nice is still
5721 not "valid" outside, it must be that no autoincrement is "valid".
5722 If that is true and something made an autoincrement anyway,
5723 this must be a special context where one is allowed.
5724 (For example, a "push" instruction.)
5725 We can't improve this address, so leave it alone. */
5726
5727 /* Otherwise, reload the autoincrement into a suitable hard reg
5728 and record how much to increment by. */
5729
5730 if (reg_renumber[regno] >= 0)
5731 regno = reg_renumber[regno];
5732 if (regno >= FIRST_PSEUDO_REGISTER
5733 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5734 index_code))
5735 {
5736 int reloadnum;
5737
5738 /* If we can output the register afterwards, do so; this
5739 saves the extra update.
5740 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5741 CALL_INSN - and it does not set CC0.
5742 But don't do this if we cannot directly address the
5743 memory location, since this will make it harder to
5744 reuse address reloads, and increases register pressure.
5745 Also don't do this if we can probably update x directly. */
5746 rtx equiv = (MEM_P (XEXP (x, 0))
5747 ? XEXP (x, 0)
5748 : reg_equiv_mem[regno]);
5749 int icode = (int) optab_handler (add_optab, Pmode)->insn_code;
5750 if (insn && NONJUMP_INSN_P (insn) && equiv
5751 && memory_operand (equiv, GET_MODE (equiv))
5752 #ifdef HAVE_cc0
5753 && ! sets_cc0_p (PATTERN (insn))
5754 #endif
5755 && ! (icode != CODE_FOR_nothing
5756 && ((*insn_data[icode].operand[0].predicate)
5757 (equiv, Pmode))
5758 && ((*insn_data[icode].operand[1].predicate)
5759 (equiv, Pmode))))
5760 {
5761 /* We use the original pseudo for loc, so that
5762 emit_reload_insns() knows which pseudo this
5763 reload refers to and updates the pseudo rtx, not
5764 its equivalent memory location, as well as the
5765 corresponding entry in reg_last_reload_reg. */
5766 loc = &XEXP (x_orig, 0);
5767 x = XEXP (x, 0);
5768 reloadnum
5769 = push_reload (x, x, loc, loc,
5770 context_reg_class,
5771 GET_MODE (x), GET_MODE (x), 0, 0,
5772 opnum, RELOAD_OTHER);
5773 }
5774 else
5775 {
5776 reloadnum
5777 = push_reload (x, x, loc, (rtx*) 0,
5778 context_reg_class,
5779 GET_MODE (x), GET_MODE (x), 0, 0,
5780 opnum, type);
5781 rld[reloadnum].inc
5782 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5783
5784 value = 1;
5785 }
5786
5787 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5788 reloadnum);
5789 }
5790 return value;
5791 }
5792 return 0;
5793
5794 case TRUNCATE:
5795 case SIGN_EXTEND:
5796 case ZERO_EXTEND:
5797 /* Look for parts to reload in the inner expression and reload them
5798 too, in addition to this operation. Reloading all inner parts in
5799 addition to this one shouldn't be necessary, but at this point,
5800 we don't know if we can possibly omit any part that *can* be
5801 reloaded. Targets that are better off reloading just either part
5802 (or perhaps even a different part of an outer expression), should
5803 define LEGITIMIZE_RELOAD_ADDRESS. */
5804 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5805 context, code, SCRATCH, &XEXP (x, 0), opnum,
5806 type, ind_levels, insn);
5807 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5808 context_reg_class,
5809 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5810 return 1;
5811
5812 case MEM:
5813 /* This is probably the result of a substitution, by eliminate_regs, of
5814 an equivalent address for a pseudo that was not allocated to a hard
5815 register. Verify that the specified address is valid and reload it
5816 into a register.
5817
5818 Since we know we are going to reload this item, don't decrement for
5819 the indirection level.
5820
5821 Note that this is actually conservative: it would be slightly more
5822 efficient to use the value of SPILL_INDIRECT_LEVELS from
5823 reload1.c here. */
5824
5825 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5826 opnum, ADDR_TYPE (type), ind_levels, insn);
5827 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5828 context_reg_class,
5829 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5830 return 1;
5831
5832 case REG:
5833 {
5834 int regno = REGNO (x);
5835
5836 if (reg_equiv_constant[regno] != 0)
5837 {
5838 find_reloads_address_part (reg_equiv_constant[regno], loc,
5839 context_reg_class,
5840 GET_MODE (x), opnum, type, ind_levels);
5841 return 1;
5842 }
5843
5844 #if 0 /* This might screw up code in reload1.c that deletes a prior
5845 output reload feeding this insn. */
5846 if (reg_equiv_mem[regno] != 0)
5847 {
5848 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5849 context_reg_class,
5850 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5851 return 1;
5852 }
5853 #endif
5854
5855 if (reg_equiv_memory_loc[regno]
5856 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5857 {
5858 rtx tem = make_memloc (x, regno);
5859 if (reg_equiv_address[regno] != 0
5860 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5861 {
5862 x = tem;
5863 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5864 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5865 ind_levels, insn);
5866 if (!rtx_equal_p (x, tem))
5867 push_reg_equiv_alt_mem (regno, x);
5868 }
5869 }
5870
5871 if (reg_renumber[regno] >= 0)
5872 regno = reg_renumber[regno];
5873
5874 if (regno >= FIRST_PSEUDO_REGISTER
5875 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5876 index_code))
5877 {
5878 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5879 context_reg_class,
5880 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5881 return 1;
5882 }
5883
5884 /* If a register appearing in an address is the subject of a CLOBBER
5885 in this insn, reload it into some other register to be safe.
5886 The CLOBBER is supposed to make the register unavailable
5887 from before this insn to after it. */
5888 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5889 {
5890 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5891 context_reg_class,
5892 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5893 return 1;
5894 }
5895 }
5896 return 0;
5897
5898 case SUBREG:
5899 if (REG_P (SUBREG_REG (x)))
5900 {
5901 /* If this is a SUBREG of a hard register and the resulting register
5902 is of the wrong class, reload the whole SUBREG. This avoids
5903 needless copies if SUBREG_REG is multi-word. */
5904 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5905 {
5906 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5907
5908 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5909 index_code))
5910 {
5911 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5912 context_reg_class,
5913 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5914 return 1;
5915 }
5916 }
5917 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5918 is larger than the class size, then reload the whole SUBREG. */
5919 else
5920 {
5921 enum reg_class class = context_reg_class;
5922 if ((unsigned) CLASS_MAX_NREGS (class, GET_MODE (SUBREG_REG (x)))
5923 > reg_class_size[class])
5924 {
5925 x = find_reloads_subreg_address (x, 0, opnum,
5926 ADDR_TYPE (type),
5927 ind_levels, insn);
5928 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
5929 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5930 return 1;
5931 }
5932 }
5933 }
5934 break;
5935
5936 default:
5937 break;
5938 }
5939
5940 {
5941 const char *fmt = GET_RTX_FORMAT (code);
5942 int i;
5943
5944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5945 {
5946 if (fmt[i] == 'e')
5947 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
5948 we get here. */
5949 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
5950 &XEXP (x, i), opnum, type, ind_levels, insn);
5951 }
5952 }
5953
5954 #undef REG_OK_FOR_CONTEXT
5955 return 0;
5956 }
5957 \f
5958 /* X, which is found at *LOC, is a part of an address that needs to be
5959 reloaded into a register of class CLASS. If X is a constant, or if
5960 X is a PLUS that contains a constant, check that the constant is a
5961 legitimate operand and that we are supposed to be able to load
5962 it into the register.
5963
5964 If not, force the constant into memory and reload the MEM instead.
5965
5966 MODE is the mode to use, in case X is an integer constant.
5967
5968 OPNUM and TYPE describe the purpose of any reloads made.
5969
5970 IND_LEVELS says how many levels of indirect addressing this machine
5971 supports. */
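/* For example, on a hypothetical target where (symbol_ref "x") is not
   a legitimate immediate operand, X is forced into the constant pool
   by force_const_mem, the address of the resulting MEM is processed
   by find_reloads_address, and the MEM rather than the bare constant
   is pushed as the reload.  */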
5972
5973 static void
5974 find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
5975 enum machine_mode mode, int opnum,
5976 enum reload_type type, int ind_levels)
5977 {
5978 if (CONSTANT_P (x)
5979 && (! LEGITIMATE_CONSTANT_P (x)
5980 || PREFERRED_RELOAD_CLASS (x, class) == NO_REGS))
5981 {
5982 x = force_const_mem (mode, x);
5983 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
5984 opnum, type, ind_levels, 0);
5985 }
5986
5987 else if (GET_CODE (x) == PLUS
5988 && CONSTANT_P (XEXP (x, 1))
5989 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
5990 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), class) == NO_REGS))
5991 {
5992 rtx tem;
5993
5994 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
5995 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
5996 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
5997 opnum, type, ind_levels, 0);
5998 }
5999
6000 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
6001 mode, VOIDmode, 0, 0, opnum, type);
6002 }
6003 \f
6004 /* X, a subreg of a pseudo, is a part of an address that needs to be
6005 reloaded.
6006
6007 If the pseudo is equivalent to a memory location that cannot be directly
6008 addressed, make the necessary address reloads.
6009
6010 If address reloads have been necessary, or if the address is changed
6011 by register elimination, return the rtx of the memory location;
6012 otherwise, return X.
6013
6014 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6015 memory location.
6016
6017 OPNUM and TYPE identify the purpose of the reload.
6018
6019 IND_LEVELS says how many levels of indirect addressing are
6020 supported at this point in the address.
6021
6022 INSN, if nonzero, is the insn in which we do the reload. It is used
6023 to determine where to put USEs for pseudos that we have to replace with
6024 stack slots. */
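/* A sketch with made-up numbers, assuming a replacement is needed:
   if pseudo 120 lives in the stack slot
   (mem:SI (plus (reg fp) (const_int -16))), then
   X = (subreg:HI (reg:SI 120) 2) is rewritten here to
   (mem:HI (plus (reg fp) (const_int -14))); SUBREG_BYTE is folded into
   the address and the MEM takes the SUBREG's mode, after which any
   needed address reloads are made.  */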
6025
6026 static rtx
6027 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6028 enum reload_type type, int ind_levels, rtx insn)
6029 {
6030 int regno = REGNO (SUBREG_REG (x));
6031
6032 if (reg_equiv_memory_loc[regno])
6033 {
6034 /* If the address is not directly addressable, or if the address is not
6035 offsettable, then it must be replaced. */
6036 if (! force_replace
6037 && (reg_equiv_address[regno]
6038 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6039 force_replace = 1;
6040
6041 if (force_replace || num_not_at_initial_offset)
6042 {
6043 rtx tem = make_memloc (SUBREG_REG (x), regno);
6044
6045 /* If the address changes because of register elimination, then
6046 it must be replaced. */
6047 if (force_replace
6048 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6049 {
6050 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6051 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6052 int offset;
6053 rtx orig = tem;
6054 enum machine_mode orig_mode = GET_MODE (orig);
6055 int reloaded;
6056
6057 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6058 hold the correct (negative) byte offset. */
6059 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6060 offset = inner_size - outer_size;
6061 else
6062 offset = SUBREG_BYTE (x);
6063
6064 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6065 PUT_MODE (tem, GET_MODE (x));
6066 if (MEM_OFFSET (tem))
6067 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6068
6069 /* If this was a paradoxical subreg that we replaced, the
6070 resulting memory must be sufficiently aligned to allow
6071 us to widen the mode of the memory. */
6072 if (outer_size > inner_size)
6073 {
6074 rtx base;
6075
6076 base = XEXP (tem, 0);
6077 if (GET_CODE (base) == PLUS)
6078 {
6079 if (GET_CODE (XEXP (base, 1)) == CONST_INT
6080 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6081 return x;
6082 base = XEXP (base, 0);
6083 }
6084 if (!REG_P (base)
6085 || (REGNO_POINTER_ALIGN (REGNO (base))
6086 < outer_size * BITS_PER_UNIT))
6087 return x;
6088 }
6089
6090 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6091 XEXP (tem, 0), &XEXP (tem, 0),
6092 opnum, type, ind_levels, insn);
6093 /* ??? Do we need to handle nonzero offsets somehow? */
6094 if (!offset && !rtx_equal_p (tem, orig))
6095 push_reg_equiv_alt_mem (regno, tem);
6096
6097 /* For some processors an address may be valid in the
6098 original mode but not in a smaller mode. For
6099 example, ARM accepts a scaled index register in
6100 SImode but not in HImode. find_reloads_address
6101 assumes that we pass it a valid address, and doesn't
6102 force a reload. This will probably be fine if
6103 find_reloads_address finds some reloads. But if it
6104 doesn't find any, then we may have just converted a
6105 valid address into an invalid one. Check for that
6106 here. */
6107 if (reloaded != 1
6108 && strict_memory_address_p (orig_mode, XEXP (tem, 0))
6109 && !strict_memory_address_p (GET_MODE (tem),
6110 XEXP (tem, 0)))
6111 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6112 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6113 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6114 opnum, type);
6115
6116 /* If this is not a toplevel operand, find_reloads doesn't see
6117 this substitution. We have to emit a USE of the pseudo so
6118 that delete_output_reload can see it. */
6119 if (replace_reloads && recog_data.operand[opnum] != x)
6120 /* We mark the USE with QImode so that we recognize it
6121 as one that can be safely deleted at the end of
6122 reload. */
6123 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6124 SUBREG_REG (x)),
6125 insn), QImode);
6126 x = tem;
6127 }
6128 }
6129 }
6130 return x;
6131 }
6132 \f
6133 /* Substitute into the current INSN the registers into which we have reloaded
6134 the things that need reloading. The array `replacements'
6135 contains the locations of all pointers that must be changed
6136 and says what to replace them with.
6137
6138 Return the rtx that X translates into; usually X, but modified. */
6139
6140 void
6141 subst_reloads (rtx insn)
6142 {
6143 int i;
6144
6145 for (i = 0; i < n_replacements; i++)
6146 {
6147 struct replacement *r = &replacements[i];
6148 rtx reloadreg = rld[r->what].reg_rtx;
6149 if (reloadreg)
6150 {
6151 #ifdef DEBUG_RELOAD
6152 /* This checking takes a very long time on some platforms
6153 causing the gcc.c-torture/compile/limits-fnargs.c test
6154 to time out during testing. See PR 31850.
6155
6156 Internal consistency test. Check that we don't modify
6157 anything in the equivalence arrays. Whenever something from
6158 those arrays needs to be reloaded, it must be unshared before
6159 being substituted into; the equivalence must not be modified.
6160 Otherwise, if the equivalence is used after that, it will
6161 have been modified, and the thing substituted (probably a
6162 register) is likely overwritten and not a usable equivalence. */
6163 int check_regno;
6164
6165 for (check_regno = 0; check_regno < max_regno; check_regno++)
6166 {
6167 #define CHECK_MODF(ARRAY) \
6168 gcc_assert (!ARRAY[check_regno] \
6169 || !loc_mentioned_in_p (r->where, \
6170 ARRAY[check_regno]))
6171
6172 CHECK_MODF (reg_equiv_constant);
6173 CHECK_MODF (reg_equiv_memory_loc);
6174 CHECK_MODF (reg_equiv_address);
6175 CHECK_MODF (reg_equiv_mem);
6176 #undef CHECK_MODF
6177 }
6178 #endif /* DEBUG_RELOAD */
6179
6180 /* If we're replacing a LABEL_REF with a register, there must
6181 already be an indication (to e.g. flow) which label this
6182 register refers to. */
6183 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6184 || !JUMP_P (insn)
6185 || find_reg_note (insn,
6186 REG_LABEL_OPERAND,
6187 XEXP (*r->where, 0))
6188 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6189
6190 /* Encapsulate RELOADREG so its machine mode matches what
6191 used to be there. Note that gen_lowpart_common will
6192 do the wrong thing if RELOADREG is multi-word. RELOADREG
6193 will always be a REG here. */
6194 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6195 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6196
6197 /* If we are putting this into a SUBREG and RELOADREG is a
6198 SUBREG, we would be making nested SUBREGs, so we have to fix
6199 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6200
6201 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6202 {
6203 if (GET_MODE (*r->subreg_loc)
6204 == GET_MODE (SUBREG_REG (reloadreg)))
6205 *r->subreg_loc = SUBREG_REG (reloadreg);
6206 else
6207 {
6208 int final_offset =
6209 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6210
6211 /* When working with SUBREGs the rule is that the byte
6212 offset must be a multiple of the size of the SUBREG's mode. */
6213 final_offset = (final_offset /
6214 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6215 final_offset = (final_offset *
6216 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6217
6218 *r->where = SUBREG_REG (reloadreg);
6219 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6220 }
6221 }
6222 else
6223 *r->where = reloadreg;
6224 }
6225 /* If reload got no reg and isn't optional, something's wrong. */
6226 else
6227 gcc_assert (rld[r->what].optional);
6228 }
6229 }
6230 \f
6231 /* Make a copy of any replacements being done into X and move those
6232 copies to locations in Y, a copy of X. */
6233
6234 void
6235 copy_replacements (rtx x, rtx y)
6236 {
6237 /* We can't support X being a SUBREG because we might then need to know its
6238 location if something inside it was replaced. */
6239 gcc_assert (GET_CODE (x) != SUBREG);
6240
6241 copy_replacements_1 (&x, &y, n_replacements);
6242 }
6243
6244 static void
6245 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6246 {
6247 int i, j;
6248 rtx x, y;
6249 struct replacement *r;
6250 enum rtx_code code;
6251 const char *fmt;
6252
6253 for (j = 0; j < orig_replacements; j++)
6254 {
6255 if (replacements[j].subreg_loc == px)
6256 {
6257 r = &replacements[n_replacements++];
6258 r->where = replacements[j].where;
6259 r->subreg_loc = py;
6260 r->what = replacements[j].what;
6261 r->mode = replacements[j].mode;
6262 }
6263 else if (replacements[j].where == px)
6264 {
6265 r = &replacements[n_replacements++];
6266 r->where = py;
6267 r->subreg_loc = 0;
6268 r->what = replacements[j].what;
6269 r->mode = replacements[j].mode;
6270 }
6271 }
6272
6273 x = *px;
6274 y = *py;
6275 code = GET_CODE (x);
6276 fmt = GET_RTX_FORMAT (code);
6277
6278 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6279 {
6280 if (fmt[i] == 'e')
6281 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6282 else if (fmt[i] == 'E')
6283 for (j = XVECLEN (x, i); --j >= 0; )
6284 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6285 orig_replacements);
6286 }
6287 }
6288
6289 /* Change any replacements being done to *X to be done to *Y. */
6290
6291 void
6292 move_replacements (rtx *x, rtx *y)
6293 {
6294 int i;
6295
6296 for (i = 0; i < n_replacements; i++)
6297 if (replacements[i].subreg_loc == x)
6298 replacements[i].subreg_loc = y;
6299 else if (replacements[i].where == x)
6300 {
6301 replacements[i].where = y;
6302 replacements[i].subreg_loc = 0;
6303 }
6304 }
6305 \f
6306 /* If LOC was scheduled to be replaced by something, return the replacement.
6307 Otherwise, return *LOC. */
6308
6309 rtx
6310 find_replacement (rtx *loc)
6311 {
6312 struct replacement *r;
6313
6314 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6315 {
6316 rtx reloadreg = rld[r->what].reg_rtx;
6317
6318 if (reloadreg && r->where == loc)
6319 {
6320 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6321 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6322
6323 return reloadreg;
6324 }
6325 else if (reloadreg && r->subreg_loc == loc)
6326 {
6327 /* RELOADREG must be either a REG or a SUBREG.
6328
6329 ??? Is it actually still ever a SUBREG? If so, why? */
6330
6331 if (REG_P (reloadreg))
6332 return gen_rtx_REG (GET_MODE (*loc),
6333 (REGNO (reloadreg) +
6334 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6335 GET_MODE (SUBREG_REG (*loc)),
6336 SUBREG_BYTE (*loc),
6337 GET_MODE (*loc))));
6338 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6339 return reloadreg;
6340 else
6341 {
6342 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6343
6344 /* When working with SUBREGs the rule is that the byte
6345 offset must be a multiple of the size of the SUBREG's mode. */
6346 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6347 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6348 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6349 final_offset);
6350 }
6351 }
6352 }
6353
6354 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6355 what's inside and make a new rtx if so. */
6356 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6357 || GET_CODE (*loc) == MULT)
6358 {
6359 rtx x = find_replacement (&XEXP (*loc, 0));
6360 rtx y = find_replacement (&XEXP (*loc, 1));
6361
6362 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6363 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6364 }
6365
6366 return *loc;
6367 }
6368 \f
6369 /* Return nonzero if register in range [REGNO, ENDREGNO)
6370 appears either explicitly or implicitly in X
6371 other than being stored into (except for earlyclobber operands).
6372
6373 References contained within the substructure at LOC do not count.
6374 LOC may be zero, meaning don't ignore anything.
6375
6376 This is similar to refers_to_regno_p in rtlanal.c except that we
6377 look at equivalences for pseudos that didn't get hard registers. */
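/* For instance, with made-up numbers: if pseudo 200 received no hard
   register and reg_equiv_memory_loc[200] is
   (mem:SI (plus (reg fp) (const_int -4))), then a query about the
   frame pointer's register number reports (reg:SI 200) as referring
   to it, which the plain rtlanal.c routine would not.  */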
6378
6379 static int
6380 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6381 rtx x, rtx *loc)
6382 {
6383 int i;
6384 unsigned int r;
6385 RTX_CODE code;
6386 const char *fmt;
6387
6388 if (x == 0)
6389 return 0;
6390
6391 repeat:
6392 code = GET_CODE (x);
6393
6394 switch (code)
6395 {
6396 case REG:
6397 r = REGNO (x);
6398
6399 /* If this is a pseudo, a hard register must not have been allocated.
6400 X must therefore either be a constant or be in memory. */
6401 if (r >= FIRST_PSEUDO_REGISTER)
6402 {
6403 if (reg_equiv_memory_loc[r])
6404 return refers_to_regno_for_reload_p (regno, endregno,
6405 reg_equiv_memory_loc[r],
6406 (rtx*) 0);
6407
6408 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6409 return 0;
6410 }
6411
6412 return (endregno > r
6413 && regno < r + (r < FIRST_PSEUDO_REGISTER
6414 ? hard_regno_nregs[r][GET_MODE (x)]
6415 : 1));
6416
6417 case SUBREG:
6418 /* If this is a SUBREG of a hard reg, we can see exactly which
6419 registers are being modified. Otherwise, handle normally. */
6420 if (REG_P (SUBREG_REG (x))
6421 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6422 {
6423 unsigned int inner_regno = subreg_regno (x);
6424 unsigned int inner_endregno
6425 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6426 ? subreg_nregs (x) : 1);
6427
6428 return endregno > inner_regno && regno < inner_endregno;
6429 }
6430 break;
6431
6432 case CLOBBER:
6433 case SET:
6434 if (&SET_DEST (x) != loc
6435 /* Note setting a SUBREG counts as referring to the REG it is in for
6436 a pseudo but not for hard registers since we can
6437 treat each word individually. */
6438 && ((GET_CODE (SET_DEST (x)) == SUBREG
6439 && loc != &SUBREG_REG (SET_DEST (x))
6440 && REG_P (SUBREG_REG (SET_DEST (x)))
6441 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6442 && refers_to_regno_for_reload_p (regno, endregno,
6443 SUBREG_REG (SET_DEST (x)),
6444 loc))
6445 /* If the output is an earlyclobber operand, this is
6446 a conflict. */
6447 || ((!REG_P (SET_DEST (x))
6448 || earlyclobber_operand_p (SET_DEST (x)))
6449 && refers_to_regno_for_reload_p (regno, endregno,
6450 SET_DEST (x), loc))))
6451 return 1;
6452
6453 if (code == CLOBBER || loc == &SET_SRC (x))
6454 return 0;
6455 x = SET_SRC (x);
6456 goto repeat;
6457
6458 default:
6459 break;
6460 }
6461
6462 /* X does not match, so try its subexpressions. */
6463
6464 fmt = GET_RTX_FORMAT (code);
6465 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6466 {
6467 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6468 {
6469 if (i == 0)
6470 {
6471 x = XEXP (x, 0);
6472 goto repeat;
6473 }
6474 else
6475 if (refers_to_regno_for_reload_p (regno, endregno,
6476 XEXP (x, i), loc))
6477 return 1;
6478 }
6479 else if (fmt[i] == 'E')
6480 {
6481 int j;
6482 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6483 if (loc != &XVECEXP (x, i, j)
6484 && refers_to_regno_for_reload_p (regno, endregno,
6485 XVECEXP (x, i, j), loc))
6486 return 1;
6487 }
6488 }
6489 return 0;
6490 }
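/* For example, with made-up hard register numbers: the call
   refers_to_regno_for_reload_p (4, 6, x, 0) asks whether hard register
   4 or 5 is used anywhere in X.  If X contains (reg:DI 4) on a target
   where DImode needs two hard registers, that reference covers
   registers 4 and 5 and the result is 1; a lone use of (reg:SI 6)
   would not count, since 6 lies outside the half-open range [4, 6).  */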
6491
6492 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6493 we check if any register number in X conflicts with the relevant register
6494 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6495 contains a MEM (we don't bother checking for memory addresses that can't
6496 conflict because we expect this to be a rare case).
6497
6498 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6499 that we look at equivalences for pseudos that didn't get hard registers. */
6500
6501 int
6502 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6503 {
6504 int regno, endregno;
6505
6506 /* Overly conservative. */
6507 if (GET_CODE (x) == STRICT_LOW_PART
6508 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6509 x = XEXP (x, 0);
6510
6511 /* If either argument is a constant, then modifying X cannot affect IN. */
6512 if (CONSTANT_P (x) || CONSTANT_P (in))
6513 return 0;
6514 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6515 return refers_to_mem_for_reload_p (in);
6516 else if (GET_CODE (x) == SUBREG)
6517 {
6518 regno = REGNO (SUBREG_REG (x));
6519 if (regno < FIRST_PSEUDO_REGISTER)
6520 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6521 GET_MODE (SUBREG_REG (x)),
6522 SUBREG_BYTE (x),
6523 GET_MODE (x));
6524 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6525 ? subreg_nregs (x) : 1);
6526
6527 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6528 }
6529 else if (REG_P (x))
6530 {
6531 regno = REGNO (x);
6532
6533 /* If this is a pseudo, it must not have been assigned a hard register.
6534 Therefore, it must either be in memory or be a constant. */
6535
6536 if (regno >= FIRST_PSEUDO_REGISTER)
6537 {
6538 if (reg_equiv_memory_loc[regno])
6539 return refers_to_mem_for_reload_p (in);
6540 gcc_assert (reg_equiv_constant[regno]);
6541 return 0;
6542 }
6543
6544 endregno = END_HARD_REGNO (x);
6545
6546 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6547 }
6548 else if (MEM_P (x))
6549 return refers_to_mem_for_reload_p (in);
6550 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6551 || GET_CODE (x) == CC0)
6552 return reg_mentioned_p (x, in);
6553 else
6554 {
6555 gcc_assert (GET_CODE (x) == PLUS);
6556
6557 /* We actually want to know if X is mentioned somewhere inside IN.
6558 We must not say that (plus (sp) (const_int 124)) is in
6559 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6560 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6561 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6562 while (MEM_P (in))
6563 in = XEXP (in, 0);
6564 if (REG_P (in))
6565 return 0;
6566 else if (GET_CODE (in) == PLUS)
6567 return (rtx_equal_p (x, in)
6568 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6569 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6570 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6571 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6572 }
6573
6574 gcc_unreachable ();
6575 }
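/* Illustration with made-up register numbers: if X is (reg:SI 2) and IN
   is (plus:SI (reg:SI 2) (const_int 8)), modifying X clearly affects IN
   and the result is 1.  If X is a pseudo that did not get a hard
   register but has a memory equivalence, the answer is simply whether
   IN mentions any MEM at all, which is deliberately conservative.  */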
6576
6577 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6578 registers. */
6579
6580 static int
6581 refers_to_mem_for_reload_p (rtx x)
6582 {
6583 const char *fmt;
6584 int i;
6585
6586 if (MEM_P (x))
6587 return 1;
6588
6589 if (REG_P (x))
6590 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6591 && reg_equiv_memory_loc[REGNO (x)]);
6592
6593 fmt = GET_RTX_FORMAT (GET_CODE (x));
6594 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6595 if (fmt[i] == 'e'
6596 && (MEM_P (XEXP (x, i))
6597 || refers_to_mem_for_reload_p (XEXP (x, i))))
6598 return 1;
6599
6600 return 0;
6601 }
6602 \f
6603 /* Check the insns before INSN to see if there is a suitable register
6604 containing the same value as GOAL.
6605 If OTHER is -1, look for a register in class CLASS.
6606 Otherwise, just see if register number OTHER shares GOAL's value.
6607
6608 Return an rtx for the register found, or zero if none is found.
6609
6610 If RELOAD_REG_P is (short *)1,
6611 we reject any hard reg that appears in reload_reg_rtx
6612 because such a hard reg is also needed coming into this insn.
6613
6614 If RELOAD_REG_P is any other nonzero value,
6615 it is a vector indexed by hard reg number
6616 and we reject any hard reg whose element in the vector is nonnegative
6617 as well as any that appears in reload_reg_rtx.
6618
6619 If GOAL is zero, then GOALREG is a register number; we look
6620 for an equivalent for that register.
6621
6622 MODE is the machine mode of the value we want an equivalence for.
6623 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6624
6625 This function is used by jump.c as well as in the reload pass.
6626
6627 If GOAL is the sum of the stack pointer and a constant, we treat it
6628 as if it were a constant except that sp is required to be unchanging. */
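/* A typical use, sketched with hypothetical arguments: to ask whether
   some hard register already holds the value of a pseudo's memory
   equivalence just before INSN, one can call

     find_equiv_reg (reg_equiv_memory_loc[r], insn, GENERAL_REGS,
                     -1, NULL, 0, SImode);

   and reuse the returned REG, if any, instead of emitting a fresh
   load.  The class, the mode and the NULL RELOAD_REG_P here are only
   placeholders for whatever the caller actually needs.  */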
6629
6630 rtx
6631 find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
6632 short *reload_reg_p, int goalreg, enum machine_mode mode)
6633 {
6634 rtx p = insn;
6635 rtx goaltry, valtry, value, where;
6636 rtx pat;
6637 int regno = -1;
6638 int valueno;
6639 int goal_mem = 0;
6640 int goal_const = 0;
6641 int goal_mem_addr_varies = 0;
6642 int need_stable_sp = 0;
6643 int nregs;
6644 int valuenregs;
6645 int num = 0;
6646
6647 if (goal == 0)
6648 regno = goalreg;
6649 else if (REG_P (goal))
6650 regno = REGNO (goal);
6651 else if (MEM_P (goal))
6652 {
6653 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6654 if (MEM_VOLATILE_P (goal))
6655 return 0;
6656 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6657 return 0;
6658 /* An address with side effects must be reexecuted. */
6659 switch (code)
6660 {
6661 case POST_INC:
6662 case PRE_INC:
6663 case POST_DEC:
6664 case PRE_DEC:
6665 case POST_MODIFY:
6666 case PRE_MODIFY:
6667 return 0;
6668 default:
6669 break;
6670 }
6671 goal_mem = 1;
6672 }
6673 else if (CONSTANT_P (goal))
6674 goal_const = 1;
6675 else if (GET_CODE (goal) == PLUS
6676 && XEXP (goal, 0) == stack_pointer_rtx
6677 && CONSTANT_P (XEXP (goal, 1)))
6678 goal_const = need_stable_sp = 1;
6679 else if (GET_CODE (goal) == PLUS
6680 && XEXP (goal, 0) == frame_pointer_rtx
6681 && CONSTANT_P (XEXP (goal, 1)))
6682 goal_const = 1;
6683 else
6684 return 0;
6685
6686 num = 0;
6687 /* Scan insns back from INSN, looking for one that copies
6688 a value into or out of GOAL.
6689 Stop and give up if we reach a label. */
6690
6691 while (1)
6692 {
6693 p = PREV_INSN (p);
6694 num++;
6695 if (p == 0 || LABEL_P (p)
6696 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6697 return 0;
6698
6699 if (NONJUMP_INSN_P (p)
6700 /* If we don't want spill regs ... */
6701 && (! (reload_reg_p != 0
6702 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6703 /* ... then ignore insns introduced by reload; they aren't
6704 useful and can cause results in reload_as_needed to be
6705 different from what they were when calculating the need for
6706 spills. If we notice an input-reload insn here, we will
6707 reject it below, but it might hide a usable equivalent.
6708 That makes bad code. It may even fail: perhaps no reg was
6709 spilled for this insn because it was assumed we would find
6710 that equivalent. */
6711 || INSN_UID (p) < reload_first_uid))
6712 {
6713 rtx tem;
6714 pat = single_set (p);
6715
6716 /* First check for something that sets some reg equal to GOAL. */
6717 if (pat != 0
6718 && ((regno >= 0
6719 && true_regnum (SET_SRC (pat)) == regno
6720 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6721 ||
6722 (regno >= 0
6723 && true_regnum (SET_DEST (pat)) == regno
6724 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6725 ||
6726 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6727 /* When looking for stack pointer + const,
6728 make sure we don't use a stack adjust. */
6729 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6730 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6731 || (goal_mem
6732 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6733 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6734 || (goal_mem
6735 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6736 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6737 /* If we are looking for a constant,
6738 and something equivalent to that constant was copied
6739 into a reg, we can use that reg. */
6740 || (goal_const && REG_NOTES (p) != 0
6741 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6742 && ((rtx_equal_p (XEXP (tem, 0), goal)
6743 && (valueno
6744 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6745 || (REG_P (SET_DEST (pat))
6746 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6747 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6748 && GET_CODE (goal) == CONST_INT
6749 && 0 != (goaltry
6750 = operand_subword (XEXP (tem, 0), 0, 0,
6751 VOIDmode))
6752 && rtx_equal_p (goal, goaltry)
6753 && (valtry
6754 = operand_subword (SET_DEST (pat), 0, 0,
6755 VOIDmode))
6756 && (valueno = true_regnum (valtry)) >= 0)))
6757 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6758 NULL_RTX))
6759 && REG_P (SET_DEST (pat))
6760 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6761 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6762 && GET_CODE (goal) == CONST_INT
6763 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6764 VOIDmode))
6765 && rtx_equal_p (goal, goaltry)
6766 && (valtry
6767 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6768 && (valueno = true_regnum (valtry)) >= 0)))
6769 {
6770 if (other >= 0)
6771 {
6772 if (valueno != other)
6773 continue;
6774 }
6775 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6776 continue;
6777 else if (!in_hard_reg_set_p (reg_class_contents[(int) class],
6778 mode, valueno))
6779 continue;
6780 value = valtry;
6781 where = p;
6782 break;
6783 }
6784 }
6785 }
6786
6787 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6788 (or copying VALUE into GOAL, if GOAL is also a register).
6789 Now verify that VALUE is really valid. */
6790
6791 /* VALUENO is the register number of VALUE; a hard register. */
6792
6793 /* Don't try to re-use something that is killed in this insn. We want
6794 to be able to trust REG_UNUSED notes. */
6795 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6796 return 0;
6797
6798 /* If we propose to get the value from the stack pointer or if GOAL is
6799 a MEM based on the stack pointer, we need a stable SP. */
6800 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6801 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6802 goal)))
6803 need_stable_sp = 1;
6804
6805 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6806 if (GET_MODE (value) != mode)
6807 return 0;
6808
6809 /* Reject VALUE if it was loaded from GOAL
6810 and is also a register that appears in the address of GOAL. */
6811
6812 if (goal_mem && value == SET_DEST (single_set (where))
6813 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6814 goal, (rtx*) 0))
6815 return 0;
6816
6817 /* Reject registers that overlap GOAL. */
6818
6819 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6820 nregs = hard_regno_nregs[regno][mode];
6821 else
6822 nregs = 1;
6823 valuenregs = hard_regno_nregs[valueno][mode];
6824
6825 if (!goal_mem && !goal_const
6826 && regno + nregs > valueno && regno < valueno + valuenregs)
6827 return 0;
6828
6829 /* Reject VALUE if it is one of the regs reserved for reloads.
6830 Reload1 knows how to reuse them anyway, and it would get
6831 confused if we allocated one without its knowledge.
6832 (Now that insns introduced by reload are ignored above,
6833 this case shouldn't happen, but I'm not positive.) */
6834
6835 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6836 {
6837 int i;
6838 for (i = 0; i < valuenregs; ++i)
6839 if (reload_reg_p[valueno + i] >= 0)
6840 return 0;
6841 }
6842
6843 /* Reject VALUE if it is a register being used for an input reload
6844 even if it is not one of those reserved. */
6845
6846 if (reload_reg_p != 0)
6847 {
6848 int i;
6849 for (i = 0; i < n_reloads; i++)
6850 if (rld[i].reg_rtx != 0 && rld[i].in)
6851 {
6852 int regno1 = REGNO (rld[i].reg_rtx);
6853 int nregs1 = hard_regno_nregs[regno1]
6854 [GET_MODE (rld[i].reg_rtx)];
6855 if (regno1 < valueno + valuenregs
6856 && regno1 + nregs1 > valueno)
6857 return 0;
6858 }
6859 }
6860
6861 if (goal_mem)
6862 /* We must treat the frame pointer as varying here,
6863 since it can vary, for example in a nonlocal goto as generated by expand_goto. */
6864 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6865
6866 /* Now verify that the values of GOAL and VALUE remain unaltered
6867 until INSN is reached. */
6868
6869 p = insn;
6870 while (1)
6871 {
6872 p = PREV_INSN (p);
6873 if (p == where)
6874 return value;
6875
6876 /* Don't trust the conversion past a function call
6877 if either of the two is in a call-clobbered register, or memory. */
6878 if (CALL_P (p))
6879 {
6880 int i;
6881
6882 if (goal_mem || need_stable_sp)
6883 return 0;
6884
6885 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6886 for (i = 0; i < nregs; ++i)
6887 if (call_used_regs[regno + i]
6888 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6889 return 0;
6890
6891 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6892 for (i = 0; i < valuenregs; ++i)
6893 if (call_used_regs[valueno + i]
6894 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6895 return 0;
6896 }
6897
6898 if (INSN_P (p))
6899 {
6900 pat = PATTERN (p);
6901
6902 /* Watch out for unspec_volatile, and volatile asms. */
6903 if (volatile_insn_p (pat))
6904 return 0;
6905
6906 /* If this insn P stores in either GOAL or VALUE, return 0.
6907 If GOAL is a memory ref and this insn writes memory, return 0.
6908 If GOAL is a memory ref and its address is not constant,
6909 and this insn P changes a register used in GOAL, return 0. */
6910
6911 if (GET_CODE (pat) == COND_EXEC)
6912 pat = COND_EXEC_CODE (pat);
6913 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6914 {
6915 rtx dest = SET_DEST (pat);
6916 while (GET_CODE (dest) == SUBREG
6917 || GET_CODE (dest) == ZERO_EXTRACT
6918 || GET_CODE (dest) == STRICT_LOW_PART)
6919 dest = XEXP (dest, 0);
6920 if (REG_P (dest))
6921 {
6922 int xregno = REGNO (dest);
6923 int xnregs;
6924 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6925 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6926 else
6927 xnregs = 1;
6928 if (xregno < regno + nregs && xregno + xnregs > regno)
6929 return 0;
6930 if (xregno < valueno + valuenregs
6931 && xregno + xnregs > valueno)
6932 return 0;
6933 if (goal_mem_addr_varies
6934 && reg_overlap_mentioned_for_reload_p (dest, goal))
6935 return 0;
6936 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6937 return 0;
6938 }
6939 else if (goal_mem && MEM_P (dest)
6940 && ! push_operand (dest, GET_MODE (dest)))
6941 return 0;
6942 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6943 && reg_equiv_memory_loc[regno] != 0)
6944 return 0;
6945 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6946 return 0;
6947 }
6948 else if (GET_CODE (pat) == PARALLEL)
6949 {
6950 int i;
6951 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6952 {
6953 rtx v1 = XVECEXP (pat, 0, i);
6954 if (GET_CODE (v1) == COND_EXEC)
6955 v1 = COND_EXEC_CODE (v1);
6956 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6957 {
6958 rtx dest = SET_DEST (v1);
6959 while (GET_CODE (dest) == SUBREG
6960 || GET_CODE (dest) == ZERO_EXTRACT
6961 || GET_CODE (dest) == STRICT_LOW_PART)
6962 dest = XEXP (dest, 0);
6963 if (REG_P (dest))
6964 {
6965 int xregno = REGNO (dest);
6966 int xnregs;
6967 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6968 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6969 else
6970 xnregs = 1;
6971 if (xregno < regno + nregs
6972 && xregno + xnregs > regno)
6973 return 0;
6974 if (xregno < valueno + valuenregs
6975 && xregno + xnregs > valueno)
6976 return 0;
6977 if (goal_mem_addr_varies
6978 && reg_overlap_mentioned_for_reload_p (dest,
6979 goal))
6980 return 0;
6981 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6982 return 0;
6983 }
6984 else if (goal_mem && MEM_P (dest)
6985 && ! push_operand (dest, GET_MODE (dest)))
6986 return 0;
6987 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6988 && reg_equiv_memory_loc[regno] != 0)
6989 return 0;
6990 else if (need_stable_sp
6991 && push_operand (dest, GET_MODE (dest)))
6992 return 0;
6993 }
6994 }
6995 }
6996
6997 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
6998 {
6999 rtx link;
7000
7001 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7002 link = XEXP (link, 1))
7003 {
7004 pat = XEXP (link, 0);
7005 if (GET_CODE (pat) == CLOBBER)
7006 {
7007 rtx dest = SET_DEST (pat);
7008
7009 if (REG_P (dest))
7010 {
7011 int xregno = REGNO (dest);
7012 int xnregs
7013 = hard_regno_nregs[xregno][GET_MODE (dest)];
7014
7015 if (xregno < regno + nregs
7016 && xregno + xnregs > regno)
7017 return 0;
7018 else if (xregno < valueno + valuenregs
7019 && xregno + xnregs > valueno)
7020 return 0;
7021 else if (goal_mem_addr_varies
7022 && reg_overlap_mentioned_for_reload_p (dest,
7023 goal))
7024 return 0;
7025 }
7026
7027 else if (goal_mem && MEM_P (dest)
7028 && ! push_operand (dest, GET_MODE (dest)))
7029 return 0;
7030 else if (need_stable_sp
7031 && push_operand (dest, GET_MODE (dest)))
7032 return 0;
7033 }
7034 }
7035 }
7036
7037 #ifdef AUTO_INC_DEC
7038 /* If this insn auto-increments or auto-decrements
7039 either regno or valueno, return 0 now.
7040 If GOAL is a memory ref and its address is not constant,
7041 and this insn P increments a register used in GOAL, return 0. */
7042 {
7043 rtx link;
7044
7045 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7046 if (REG_NOTE_KIND (link) == REG_INC
7047 && REG_P (XEXP (link, 0)))
7048 {
7049 int incno = REGNO (XEXP (link, 0));
7050 if (incno < regno + nregs && incno >= regno)
7051 return 0;
7052 if (incno < valueno + valuenregs && incno >= valueno)
7053 return 0;
7054 if (goal_mem_addr_varies
7055 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7056 goal))
7057 return 0;
7058 }
7059 }
7060 #endif
7061 }
7062 }
7063 }
7064 \f
7065 /* Find a place where INCED appears in an increment or decrement operator
7066 within X, and return the amount INCED is incremented or decremented by.
7067 The value is always positive. */
7068
7069 static int
7070 find_inc_amount (rtx x, rtx inced)
7071 {
7072 enum rtx_code code = GET_CODE (x);
7073 const char *fmt;
7074 int i;
7075
7076 if (code == MEM)
7077 {
7078 rtx addr = XEXP (x, 0);
7079 if ((GET_CODE (addr) == PRE_DEC
7080 || GET_CODE (addr) == POST_DEC
7081 || GET_CODE (addr) == PRE_INC
7082 || GET_CODE (addr) == POST_INC)
7083 && XEXP (addr, 0) == inced)
7084 return GET_MODE_SIZE (GET_MODE (x));
7085 else if ((GET_CODE (addr) == PRE_MODIFY
7086 || GET_CODE (addr) == POST_MODIFY)
7087 && GET_CODE (XEXP (addr, 1)) == PLUS
7088 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7089 && XEXP (addr, 0) == inced
7090 && GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
7091 {
7092 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7093 return i < 0 ? -i : i;
7094 }
7095 }
7096
7097 fmt = GET_RTX_FORMAT (code);
7098 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7099 {
7100 if (fmt[i] == 'e')
7101 {
7102 int tem = find_inc_amount (XEXP (x, i), inced);
7103 if (tem != 0)
7104 return tem;
7105 }
7106 if (fmt[i] == 'E')
7107 {
7108 int j;
7109 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7110 {
7111 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7112 if (tem != 0)
7113 return tem;
7114 }
7115 }
7116 }
7117
7118 return 0;
7119 }
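/* For instance, assuming SImode is 4 bytes wide: for
   (mem:SI (post_inc:SI (reg:SI 1))) with INCED being (reg:SI 1) the
   result is 4, while for
   (mem:SI (pre_modify:SI (reg:SI 1) (plus:SI (reg:SI 1) (const_int -8))))
   it is 8, since the returned amount is always the absolute value of
   the adjustment.  */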
7120 \f
7121 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7122 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7123
7124 #ifdef AUTO_INC_DEC
7125 static int
7126 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7127 rtx insn)
7128 {
7129 rtx link;
7130
7131 gcc_assert (insn);
7132
7133 if (! INSN_P (insn))
7134 return 0;
7135
7136 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7137 if (REG_NOTE_KIND (link) == REG_INC)
7138 {
7139 unsigned int test = (int) REGNO (XEXP (link, 0));
7140 if (test >= regno && test < endregno)
7141 return 1;
7142 }
7143 return 0;
7144 }
7145 #else
7146
7147 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7148
7149 #endif
7150
7151 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7152 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7153 REG_INC. REGNO must refer to a hard register. */
7154
7155 int
7156 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7157 int sets)
7158 {
7159 unsigned int nregs, endregno;
7160
7161 /* regno must be a hard register. */
7162 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7163
7164 nregs = hard_regno_nregs[regno][mode];
7165 endregno = regno + nregs;
7166
7167 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7168 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7169 && REG_P (XEXP (PATTERN (insn), 0)))
7170 {
7171 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7172
7173 return test >= regno && test < endregno;
7174 }
7175
7176 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7177 return 1;
7178
7179 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7180 {
7181 int i = XVECLEN (PATTERN (insn), 0) - 1;
7182
7183 for (; i >= 0; i--)
7184 {
7185 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7186 if ((GET_CODE (elt) == CLOBBER
7187 || (sets == 1 && GET_CODE (elt) == SET))
7188 && REG_P (XEXP (elt, 0)))
7189 {
7190 unsigned int test = REGNO (XEXP (elt, 0));
7191
7192 if (test >= regno && test < endregno)
7193 return 1;
7194 }
7195 if (sets == 2
7196 && reg_inc_found_and_valid_p (regno, endregno, elt))
7197 return 1;
7198 }
7199 }
7200
7201 return 0;
7202 }
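/* Example with hypothetical numbers: if the pattern of INSN is
   (clobber (reg:SI 10)), then regno_clobbered_p (10, insn, DImode, 0)
   returns 1 on a target where DImode spans hard registers 10 and 11,
   because the clobbered register lies inside the queried range
   [10, 12).  */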
7203
7204 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7205 rtx
7206 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7207 {
7208 int regno;
7209
7210 if (GET_MODE (reloadreg) == mode)
7211 return reloadreg;
7212
7213 regno = REGNO (reloadreg);
7214
7215 if (WORDS_BIG_ENDIAN)
7216 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7217 - (int) hard_regno_nregs[regno][mode];
7218
7219 return gen_rtx_REG (mode, regno);
7220 }
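/* Sketch of the effect, assuming a big-endian target with 4-byte words:
   if RELOADREG is (reg:DI 8), occupying hard registers 8 and 9, then
   reload_adjust_reg_for_mode (reloadreg, SImode) returns (reg:SI 9),
   the register holding the low part; on a little-endian target the
   result would be (reg:SI 8).  */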
7221
7222 static const char *const reload_when_needed_name[] =
7223 {
7224 "RELOAD_FOR_INPUT",
7225 "RELOAD_FOR_OUTPUT",
7226 "RELOAD_FOR_INSN",
7227 "RELOAD_FOR_INPUT_ADDRESS",
7228 "RELOAD_FOR_INPADDR_ADDRESS",
7229 "RELOAD_FOR_OUTPUT_ADDRESS",
7230 "RELOAD_FOR_OUTADDR_ADDRESS",
7231 "RELOAD_FOR_OPERAND_ADDRESS",
7232 "RELOAD_FOR_OPADDR_ADDR",
7233 "RELOAD_OTHER",
7234 "RELOAD_FOR_OTHER_ADDRESS"
7235 };
7236
7237 /* These functions are used to print the variables set by 'find_reloads'. */
7238
7239 void
7240 debug_reload_to_stream (FILE *f)
7241 {
7242 int r;
7243 const char *prefix;
7244
7245 if (! f)
7246 f = stderr;
7247 for (r = 0; r < n_reloads; r++)
7248 {
7249 fprintf (f, "Reload %d: ", r);
7250
7251 if (rld[r].in != 0)
7252 {
7253 fprintf (f, "reload_in (%s) = ",
7254 GET_MODE_NAME (rld[r].inmode));
7255 print_inline_rtx (f, rld[r].in, 24);
7256 fprintf (f, "\n\t");
7257 }
7258
7259 if (rld[r].out != 0)
7260 {
7261 fprintf (f, "reload_out (%s) = ",
7262 GET_MODE_NAME (rld[r].outmode));
7263 print_inline_rtx (f, rld[r].out, 24);
7264 fprintf (f, "\n\t");
7265 }
7266
7267 fprintf (f, "%s, ", reg_class_names[(int) rld[r].class]);
7268
7269 fprintf (f, "%s (opnum = %d)",
7270 reload_when_needed_name[(int) rld[r].when_needed],
7271 rld[r].opnum);
7272
7273 if (rld[r].optional)
7274 fprintf (f, ", optional");
7275
7276 if (rld[r].nongroup)
7277 fprintf (f, ", nongroup");
7278
7279 if (rld[r].inc != 0)
7280 fprintf (f, ", inc by %d", rld[r].inc);
7281
7282 if (rld[r].nocombine)
7283 fprintf (f, ", can't combine");
7284
7285 if (rld[r].secondary_p)
7286 fprintf (f, ", secondary_reload_p");
7287
7288 if (rld[r].in_reg != 0)
7289 {
7290 fprintf (f, "\n\treload_in_reg: ");
7291 print_inline_rtx (f, rld[r].in_reg, 24);
7292 }
7293
7294 if (rld[r].out_reg != 0)
7295 {
7296 fprintf (f, "\n\treload_out_reg: ");
7297 print_inline_rtx (f, rld[r].out_reg, 24);
7298 }
7299
7300 if (rld[r].reg_rtx != 0)
7301 {
7302 fprintf (f, "\n\treload_reg_rtx: ");
7303 print_inline_rtx (f, rld[r].reg_rtx, 24);
7304 }
7305
7306 prefix = "\n\t";
7307 if (rld[r].secondary_in_reload != -1)
7308 {
7309 fprintf (f, "%ssecondary_in_reload = %d",
7310 prefix, rld[r].secondary_in_reload);
7311 prefix = ", ";
7312 }
7313
7314 if (rld[r].secondary_out_reload != -1)
7315 fprintf (f, "%ssecondary_out_reload = %d\n",
7316 prefix, rld[r].secondary_out_reload);
7317
7318 prefix = "\n\t";
7319 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7320 {
7321 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7322 insn_data[rld[r].secondary_in_icode].name);
7323 prefix = ", ";
7324 }
7325
7326 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7327 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7328 insn_data[rld[r].secondary_out_icode].name);
7329
7330 fprintf (f, "\n");
7331 }
7332 }
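/* Calling debug_reload () from a debugger prints one such entry per
   reload, roughly of the form (contents abbreviated and hypothetical):

     Reload 0: reload_in (SI) = (reg:SI 65)
        GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
        reload_in_reg: (reg:SI 65)

   with the register class, reload type and optional fields depending on
   what find_reloads recorded.  */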
7333
7334 void
7335 debug_reload (void)
7336 {
7337 debug_reload_to_stream (stderr);
7338 }