1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
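/* A rough sketch of the calling sequence described above, as seen from the
   driver in reload1.c.  This is illustrative only; the real driver
   interleaves these calls with register elimination and spill selection:

     init_reload ();
     ... for each INSN that needs reloads ...
       find_reloads (INSN, replace, ind_levels, live_known, reload_reg_p);
       ... choose a hard reg for every reload R with rld[R].reg_rtx == 0,
           emit loads before INSN and stores after it as required ...
       subst_reloads (INSN);  */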
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
114 #include "toplev.h" /* exact_log2 may be used by targets */
115
116 /* True if X is a constant that can be forced into the constant pool. */
117 #define CONST_POOL_OK_P(X) \
118 (CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (X))
121
122 /* True if RCLASS is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
124
125 static inline bool
126 small_register_class_p (reg_class_t rclass)
127 {
128 return (reg_class_size [(int) rclass] == 1
129 || (reg_class_size [(int) rclass] >= 1
130 && targetm.class_likely_spilled_p (rclass)));
131 }
132
133 \f
134 /* All reloads of the current insn are recorded here. See reload.h for
135 comments. */
136 int n_reloads;
137 struct reload rld[MAX_RELOADS];
138
139 /* All the "earlyclobber" operands of the current insn
140 are recorded here. */
141 int n_earlyclobbers;
142 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
143
144 int reload_n_operands;
145
146 /* Replacing reloads.
147
148 If `replace_reloads' is nonzero, then as each reload is recorded
149 an entry is made for it in the table `replacements'.
150 Then later `subst_reloads' can look through that table and
151 perform all the replacements needed. */
152
153 /* Nonzero means record the places to replace. */
154 static int replace_reloads;
155
156 /* Each replacement is recorded with a structure like this. */
157 struct replacement
158 {
159 rtx *where; /* Location to store in */
160 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
161 a SUBREG; 0 otherwise. */
162 int what; /* which reload this is for */
163 enum machine_mode mode; /* mode it must have */
164 };
165
166 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
167
168 /* Number of replacements currently recorded. */
169 static int n_replacements;
170
171 /* Used to track what is modified by an operand. */
172 struct decomposition
173 {
174 int reg_flag; /* Nonzero if referencing a register. */
175 int safe; /* Nonzero if this can't conflict with anything. */
176 rtx base; /* Base address for MEM. */
177 HOST_WIDE_INT start; /* Starting offset or register number. */
178 HOST_WIDE_INT end; /* Ending offset or register number. */
179 };
180
181 #ifdef SECONDARY_MEMORY_NEEDED
182
183 /* Save MEMs needed to copy from one class of registers to another. One MEM
184 is used per mode, but normally only one or two modes are ever used.
185
186 We keep two versions, before and after register elimination. The one
187 after register elimination is recorded separately for each operand. This
188 is done in case the address is not valid, to be sure that we reload
189 each one separately. */
190
191 static rtx secondary_memlocs[NUM_MACHINE_MODES];
192 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
193 static int secondary_memlocs_elim_used = 0;
194 #endif
195
196 /* The instruction we are doing reloads for;
197 so we can test whether a register dies in it. */
198 static rtx this_insn;
199
200 /* Nonzero if this instruction is a user-specified asm with operands. */
201 static int this_insn_is_asm;
202
203 /* If hard_regs_live_known is nonzero,
204 we can tell which hard regs are currently live,
205 at least enough to succeed in choosing dummy reloads. */
206 static int hard_regs_live_known;
207
208 /* Indexed by hard reg number,
209 element is nonnegative if hard reg has been spilled.
210 This vector is passed to `find_reloads' as an argument
211 and is not changed here. */
212 static short *static_reload_reg_p;
213
214 /* Set to 1 in subst_reg_equivs if it changes anything. */
215 static int subst_reg_equivs_changed;
216
217 /* On return from push_reload, holds the reload-number for the OUT
218 operand, which can be different from that for the input operand. */
219 static int output_reloadnum;
220
221 /* Compare two RTX's. */
222 #define MATCHES(x, y) \
223 (x == y || (x != 0 && (REG_P (x) \
224 ? REG_P (y) && REGNO (x) == REGNO (y) \
225 : rtx_equal_p (x, y) && ! side_effects_p (x))))
226
227 /* Indicates if two reload purposes are for similar enough things that we
228 can merge their reloads. */
229 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
230 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
231 || ((when1) == (when2) && (op1) == (op2)) \
232 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
233 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
234 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
235 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
236 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
237
238 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
239 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
240 ((when1) != (when2) \
241 || ! ((op1) == (op2) \
242 || (when1) == RELOAD_FOR_INPUT \
243 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
244 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
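/* An example of how the two macros above interact (not tied to any
   particular target): two RELOAD_FOR_INPUT reloads for different operands
   are MERGABLE_RELOADS, and MERGE_TO_OTHER is false for that pair, so the
   merged reload stays RELOAD_FOR_INPUT.  A RELOAD_FOR_INPUT reload may also
   be merged with a RELOAD_OTHER reload, but then MERGE_TO_OTHER is true and
   the result is demoted to RELOAD_OTHER, which cannot share its register
   with any other reload.  */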
245
246 /* If we are going to reload an address, compute the reload type to
247 use. */
248 #define ADDR_TYPE(type) \
249 ((type) == RELOAD_FOR_INPUT_ADDRESS \
250 ? RELOAD_FOR_INPADDR_ADDRESS \
251 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
252 ? RELOAD_FOR_OUTADDR_ADDRESS \
253 : (type)))
254
255 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
256 enum machine_mode, enum reload_type,
257 enum insn_code *, secondary_reload_info *);
258 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
259 int, unsigned int);
260 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
261 static void push_replacement (rtx *, int, enum machine_mode);
262 static void dup_replacements (rtx *, rtx *);
263 static void combine_reloads (void);
264 static int find_reusable_reload (rtx *, rtx, enum reg_class,
265 enum reload_type, int, int);
266 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
267 enum machine_mode, enum reg_class, int, int);
268 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
269 static struct decomposition decompose (rtx);
270 static int immune_p (rtx, rtx, struct decomposition);
271 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
272 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
273 int *);
274 static rtx make_memloc (rtx, int);
275 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
276 addr_space_t, rtx *);
277 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
278 int, enum reload_type, int, rtx);
279 static rtx subst_reg_equivs (rtx, rtx);
280 static rtx subst_indexed_address (rtx);
281 static void update_auto_inc_notes (rtx, int, int);
282 static int find_reloads_address_1 (enum machine_mode, rtx, int,
283 enum rtx_code, enum rtx_code, rtx *,
284 int, enum reload_type,int, rtx);
285 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
286 enum machine_mode, int,
287 enum reload_type, int);
288 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
289 int, rtx);
290 static void copy_replacements_1 (rtx *, rtx *, int);
291 static int find_inc_amount (rtx, rtx);
292 static int refers_to_mem_for_reload_p (rtx);
293 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
294 rtx, rtx *);
295
296 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
297 list yet. */
298
299 static void
300 push_reg_equiv_alt_mem (int regno, rtx mem)
301 {
302 rtx it;
303
304 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
305 if (rtx_equal_p (XEXP (it, 0), mem))
306 return;
307
308 reg_equiv_alt_mem_list [regno]
309 = alloc_EXPR_LIST (REG_EQUIV, mem,
310 reg_equiv_alt_mem_list [regno]);
311 }
312 \f
313 /* Determine if any secondary reloads are needed for loading (if IN_P is
314 nonzero) or storing (if IN_P is zero) X to or from a reload register of
315 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
316 are needed, push them.
317
318 Return the reload number of the secondary reload we made, or -1 if
319 we didn't need one. *PICODE is set to the insn_code to use if we do
320 need a secondary reload. */
321
322 static int
323 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
324 enum reg_class reload_class,
325 enum machine_mode reload_mode, enum reload_type type,
326 enum insn_code *picode, secondary_reload_info *prev_sri)
327 {
328 enum reg_class rclass = NO_REGS;
329 enum reg_class scratch_class;
330 enum machine_mode mode = reload_mode;
331 enum insn_code icode = CODE_FOR_nothing;
332 enum insn_code t_icode = CODE_FOR_nothing;
333 enum reload_type secondary_type;
334 int s_reload, t_reload = -1;
335 const char *scratch_constraint;
336 char letter;
337 secondary_reload_info sri;
338
339 if (type == RELOAD_FOR_INPUT_ADDRESS
340 || type == RELOAD_FOR_OUTPUT_ADDRESS
341 || type == RELOAD_FOR_INPADDR_ADDRESS
342 || type == RELOAD_FOR_OUTADDR_ADDRESS)
343 secondary_type = type;
344 else
345 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
346
347 *picode = CODE_FOR_nothing;
348
349 /* If X is a paradoxical SUBREG, use the inner value to determine both the
350 mode and object being reloaded. */
351 if (GET_CODE (x) == SUBREG
352 && (GET_MODE_SIZE (GET_MODE (x))
353 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
354 {
355 x = SUBREG_REG (x);
356 reload_mode = GET_MODE (x);
357 }
358
359 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
360 is still a pseudo-register by now, it *must* have an equivalent MEM
361 but we don't want to assume that), use that equivalent when seeing if
362 a secondary reload is needed since whether or not a reload is needed
363 might be sensitive to the form of the MEM. */
364
365 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
366 && reg_equiv_mem[REGNO (x)] != 0)
367 x = reg_equiv_mem[REGNO (x)];
368
369 sri.icode = CODE_FOR_nothing;
370 sri.prev_sri = prev_sri;
371 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
372 reload_mode, &sri);
373 icode = (enum insn_code) sri.icode;
374
375 /* If we don't need any secondary registers, done. */
376 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
377 return -1;
378
379 if (rclass != NO_REGS)
380 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
381 reload_mode, type, &t_icode, &sri);
382
383 /* If we will be using an insn, the secondary reload is for a
384 scratch register. */
385
386 if (icode != CODE_FOR_nothing)
387 {
388 /* If IN_P is nonzero, the reload register will be the output in
389 operand 0. If IN_P is zero, the reload register will be the input
390 in operand 1. Outputs should have an initial "=", which we must
391 skip. */
392
393 /* ??? It would be useful to be able to handle only two, or more than
394 three, operands, but for now we can only handle the case of having
395 exactly three: output, input and one temp/scratch. */
396 gcc_assert (insn_data[(int) icode].n_operands == 3);
397
398 /* ??? We currently have no way to represent a reload that needs
399 an icode to reload from an intermediate tertiary reload register.
400 We should probably have a new field in struct reload to tag a
401 chain of scratch operand reloads onto. */
402 gcc_assert (rclass == NO_REGS);
403
404 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
405 gcc_assert (*scratch_constraint == '=');
406 scratch_constraint++;
407 if (*scratch_constraint == '&')
408 scratch_constraint++;
409 letter = *scratch_constraint;
410 scratch_class = (letter == 'r' ? GENERAL_REGS
411 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
412 scratch_constraint));
413
414 rclass = scratch_class;
415 mode = insn_data[(int) icode].operand[2].mode;
416 }
417
418 /* This case isn't valid, so fail. Reload is allowed to use the same
419 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
420 in the case of a secondary register, we actually need two different
421 registers for correct code. We fail here to prevent the possibility of
422 silently generating incorrect code later.
423
424 The convention is that secondary input reloads are valid only if the
425 secondary_class is different from class. If you have such a case, you
426 cannot use secondary reloads; you must work around the problem some
427 other way.
428
429 Allow this when a reload_in/out pattern is being used. I.e. assume
430 that the generated code handles this case. */
431
432 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
433 || t_icode != CODE_FOR_nothing);
434
435 /* See if we can reuse an existing secondary reload. */
436 for (s_reload = 0; s_reload < n_reloads; s_reload++)
437 if (rld[s_reload].secondary_p
438 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
439 || reg_class_subset_p (rld[s_reload].rclass, rclass))
440 && ((in_p && rld[s_reload].inmode == mode)
441 || (! in_p && rld[s_reload].outmode == mode))
442 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
443 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
444 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
445 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
446 && (small_register_class_p (rclass)
447 || targetm.small_register_classes_for_mode_p (VOIDmode))
448 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
449 opnum, rld[s_reload].opnum))
450 {
451 if (in_p)
452 rld[s_reload].inmode = mode;
453 if (! in_p)
454 rld[s_reload].outmode = mode;
455
456 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
457 rld[s_reload].rclass = rclass;
458
459 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
460 rld[s_reload].optional &= optional;
461 rld[s_reload].secondary_p = 1;
462 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
463 opnum, rld[s_reload].opnum))
464 rld[s_reload].when_needed = RELOAD_OTHER;
465
466 break;
467 }
468
469 if (s_reload == n_reloads)
470 {
471 #ifdef SECONDARY_MEMORY_NEEDED
472 /* If we need a memory location to copy between the two reload regs,
473 set it up now. Note that we do the input case before making
474 the reload and the output case after. This is due to the
475 way reloads are output. */
476
477 if (in_p && icode == CODE_FOR_nothing
478 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
479 {
480 get_secondary_mem (x, reload_mode, opnum, type);
481
482 /* We may have just added new reloads. Make sure we add
483 the new reload at the end. */
484 s_reload = n_reloads;
485 }
486 #endif
487
488 /* We need to make a new secondary reload for this register class. */
489 rld[s_reload].in = rld[s_reload].out = 0;
490 rld[s_reload].rclass = rclass;
491
492 rld[s_reload].inmode = in_p ? mode : VOIDmode;
493 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
494 rld[s_reload].reg_rtx = 0;
495 rld[s_reload].optional = optional;
496 rld[s_reload].inc = 0;
497 /* Maybe we could combine these, but it seems too tricky. */
498 rld[s_reload].nocombine = 1;
499 rld[s_reload].in_reg = 0;
500 rld[s_reload].out_reg = 0;
501 rld[s_reload].opnum = opnum;
502 rld[s_reload].when_needed = secondary_type;
503 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
504 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
505 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
506 rld[s_reload].secondary_out_icode
507 = ! in_p ? t_icode : CODE_FOR_nothing;
508 rld[s_reload].secondary_p = 1;
509
510 n_reloads++;
511
512 #ifdef SECONDARY_MEMORY_NEEDED
513 if (! in_p && icode == CODE_FOR_nothing
514 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
515 get_secondary_mem (x, mode, opnum, type);
516 #endif
517 }
518
519 *picode = icode;
520 return s_reload;
521 }
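/* An illustration of the two shapes of secondary reload handled above (the
   target details are hypothetical, not taken from a particular port).  If
   the secondary_reload hook returns an intermediate class, say GENERAL_REGS
   for a copy between memory and a floating-point class with no direct
   memory access, the reload pushed at the end of this function gets that
   class, and the recursive call only checks whether the intermediate itself
   needs a further (tertiary) reload.  If instead the hook leaves the class
   as NO_REGS but sets sri.icode to a reload_in/reload_out pattern, the
   reload pushed here is for that pattern's scratch operand, whose class and
   mode are read from the "=" constraint of operand 2.  */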
522
523 /* If a secondary reload is needed, return its class. If both an intermediate
524 register and a scratch register are needed, we return the class of the
525 intermediate register. */
526 reg_class_t
527 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
528 rtx x)
529 {
530 enum insn_code icode;
531 secondary_reload_info sri;
532
533 sri.icode = CODE_FOR_nothing;
534 sri.prev_sri = NULL;
535 rclass
536 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
537 icode = (enum insn_code) sri.icode;
538
539 /* If there are no secondary reloads at all, we return NO_REGS.
540 If an intermediate register is needed, we return its class. */
541 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
542 return rclass;
543
544 /* No intermediate register is needed, but we have a special reload
545 pattern, which we assume for now needs a scratch register. */
546 return scratch_reload_class (icode);
547 }
548
549 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
550 three operands, verify that operand 2 is an output operand, and return
551 its register class.
552 ??? We'd like to be able to handle any pattern with at least 2 operands,
553 and zero or more scratch registers, but that needs more infrastructure. */
554 enum reg_class
555 scratch_reload_class (enum insn_code icode)
556 {
557 const char *scratch_constraint;
558 char scratch_letter;
559 enum reg_class rclass;
560
561 gcc_assert (insn_data[(int) icode].n_operands == 3);
562 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
563 gcc_assert (*scratch_constraint == '=');
564 scratch_constraint++;
565 if (*scratch_constraint == '&')
566 scratch_constraint++;
567 scratch_letter = *scratch_constraint;
568 if (scratch_letter == 'r')
569 return GENERAL_REGS;
570 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
571 scratch_constraint);
572 gcc_assert (rclass != NO_REGS);
573 return rclass;
574 }
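/* For example (a hypothetical pattern, not from a real port): if operand 2
   of a reload_insi expander has the constraint "=&r", the '=' and '&' are
   skipped above and the remaining 'r' yields GENERAL_REGS; any other class
   letter is looked up through REG_CLASS_FROM_CONSTRAINT.  */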
575 \f
576 #ifdef SECONDARY_MEMORY_NEEDED
577
578 /* Return a memory location that will be used to copy X in mode MODE.
579 If we haven't already made a location for this mode in this insn,
580 call find_reloads_address on the location being returned. */
581
582 rtx
583 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
584 int opnum, enum reload_type type)
585 {
586 rtx loc;
587 int mem_valid;
588
589 /* By default, if MODE is narrower than a word, widen it to a word.
590 This is required because most machines that require these memory
591 locations do not support short loads and stores from all registers
592 (e.g., FP registers). */
593
594 #ifdef SECONDARY_MEMORY_NEEDED_MODE
595 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
596 #else
597 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
598 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
599 #endif
600
601 /* If we already have made a MEM for this operand in MODE, return it. */
602 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
603 return secondary_memlocs_elim[(int) mode][opnum];
604
605 /* If this is the first time we've tried to get a MEM for this mode,
606 allocate a new one. `something_changed' in reload will get set
607 by noticing that the frame size has changed. */
608
609 if (secondary_memlocs[(int) mode] == 0)
610 {
611 #ifdef SECONDARY_MEMORY_NEEDED_RTX
612 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
613 #else
614 secondary_memlocs[(int) mode]
615 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
616 #endif
617 }
618
619 /* Get a version of the address doing any eliminations needed. If that
620 didn't give us a new MEM, make a new one if it isn't valid. */
621
622 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
623 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
624 MEM_ADDR_SPACE (loc));
625
626 if (! mem_valid && loc == secondary_memlocs[(int) mode])
627 loc = copy_rtx (loc);
628
629 /* The only time the call below will do anything is if the stack
630 offset is too large. In that case IND_LEVELS doesn't matter, so we
631 can just pass a zero. Adjust the type to be the address of the
632 corresponding object. If the address was valid, save the eliminated
633 address. If it wasn't valid, we need to make a reload each time, so
634 don't save it. */
635
636 if (! mem_valid)
637 {
638 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
639 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
640 : RELOAD_OTHER);
641
642 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
643 opnum, type, 0, 0);
644 }
645
646 secondary_memlocs_elim[(int) mode][opnum] = loc;
647 if (secondary_memlocs_elim_used <= (int)mode)
648 secondary_memlocs_elim_used = (int)mode + 1;
649 return loc;
650 }
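/* A concrete (hypothetical) example of the widening above: with 32-bit
   words and no SECONDARY_MEMORY_NEEDED_MODE, a QImode or HImode secondary
   memory copy is done in the word-sized integer mode, so a single word-mode
   stack slot ends up being shared by all the narrow integer modes.  */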
651
652 /* Clear any secondary memory locations we've made. */
653
654 void
655 clear_secondary_mem (void)
656 {
657 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
658 }
659 #endif /* SECONDARY_MEMORY_NEEDED */
660 \f
661
662 /* Find the largest class which has at least one register valid in
663 mode INNER, and in which, for every such register, that register number
664 plus N is also valid in OUTER (if in range) and is cheap to move
665 into DEST_REGNO. Such a class must exist. */
666
667 static enum reg_class
668 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
669 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
670 unsigned int dest_regno ATTRIBUTE_UNUSED)
671 {
672 int best_cost = -1;
673 int rclass;
674 int regno;
675 enum reg_class best_class = NO_REGS;
676 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
677 unsigned int best_size = 0;
678 int cost;
679
680 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
681 {
682 int bad = 0;
683 int good = 0;
684 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
685 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
686 {
687 if (HARD_REGNO_MODE_OK (regno, inner))
688 {
689 good = 1;
690 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
691 || ! HARD_REGNO_MODE_OK (regno + n, outer))
692 bad = 1;
693 }
694 }
695
696 if (bad || !good)
697 continue;
698 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
699
700 if ((reg_class_size[rclass] > best_size
701 && (best_cost < 0 || best_cost >= cost))
702 || best_cost > cost)
703 {
704 best_class = (enum reg_class) rclass;
705 best_size = reg_class_size[rclass];
706 best_cost = register_move_cost (outer, (enum reg_class) rclass,
707 dest_class);
708 }
709 }
710
711 gcc_assert (best_size != 0);
712
713 return best_class;
714 }
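/* A worked example of the search above, assuming a 32-bit target where a
   DImode value occupies two consecutive hard registers: when push_reload
   reloads the inner register of something like (subreg:SI (reg:DI R) 4), it
   calls this function with OUTER = SImode, INNER = DImode and N equal to
   the register-number offset of the word that the subreg addresses.  The
   class returned is then (roughly) the largest one in which every register
   that can hold a DImode value also has register + N in the class and
   usable in SImode, preferring classes that are cheap to copy into the
   class of DEST_REGNO.  */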
715 \f
716 /* Return the number of a previously made reload that can be combined with
717 a new one, or n_reloads if none of the existing reloads can be used.
718 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
719 push_reload; they determine the kind of the new reload that we try to
720 combine. P_IN points to the corresponding value of IN, which can be
721 modified by this function.
722 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
723
724 static int
725 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
726 enum reload_type type, int opnum, int dont_share)
727 {
728 rtx in = *p_in;
729 int i;
730 /* We can't merge two reloads if the output of either one is
731 earlyclobbered. */
732
733 if (earlyclobber_operand_p (out))
734 return n_reloads;
735
736 /* We can use an existing reload if the class is right
737 and at least one of IN and OUT is a match
738 and the other is at worst neutral.
739 (A zero compared against anything is neutral.)
740
741 For targets with small register classes, don't use existing reloads
742 unless they are for the same thing since that can cause us to need
743 more reload registers than we otherwise would. */
744
745 for (i = 0; i < n_reloads; i++)
746 if ((reg_class_subset_p (rclass, rld[i].rclass)
747 || reg_class_subset_p (rld[i].rclass, rclass))
748 /* If the existing reload has a register, it must fit our class. */
749 && (rld[i].reg_rtx == 0
750 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
751 true_regnum (rld[i].reg_rtx)))
752 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
753 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
754 || (out != 0 && MATCHES (rld[i].out, out)
755 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
756 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
757 && (small_register_class_p (rclass)
758 || targetm.small_register_classes_for_mode_p (VOIDmode))
759 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
760 return i;
761
762 /* Reloading a plain reg for input can match a reload to postincrement
763 that reg, since the postincrement's value is the right value.
764 Likewise, it can match a preincrement reload, since we regard
765 the preincrementation as happening before any ref in this insn
766 to that register. */
767 for (i = 0; i < n_reloads; i++)
768 if ((reg_class_subset_p (rclass, rld[i].rclass)
769 || reg_class_subset_p (rld[i].rclass, rclass))
770 /* If the existing reload has a register, it must fit our
771 class. */
772 && (rld[i].reg_rtx == 0
773 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
774 true_regnum (rld[i].reg_rtx)))
775 && out == 0 && rld[i].out == 0 && rld[i].in != 0
776 && ((REG_P (in)
777 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
778 && MATCHES (XEXP (rld[i].in, 0), in))
779 || (REG_P (rld[i].in)
780 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
781 && MATCHES (XEXP (in, 0), rld[i].in)))
782 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
783 && (small_register_class_p (rclass)
784 || targetm.small_register_classes_for_mode_p (VOIDmode))
785 && MERGABLE_RELOADS (type, rld[i].when_needed,
786 opnum, rld[i].opnum))
787 {
788 /* Make sure reload_in ultimately has the increment,
789 not the plain register. */
790 if (REG_P (in))
791 *p_in = rld[i].in;
792 return i;
793 }
794 return n_reloads;
795 }
796
797 /* Return nonzero if X is a SUBREG which will require reloading of its
798 SUBREG_REG expression. */
799
800 static int
801 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
802 {
803 rtx inner;
804
805 /* Only SUBREGs are problematical. */
806 if (GET_CODE (x) != SUBREG)
807 return 0;
808
809 inner = SUBREG_REG (x);
810
811 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
812 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
813 return 1;
814
815 /* If INNER is not a hard register, then INNER will not need to
816 be reloaded. */
817 if (!REG_P (inner)
818 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
819 return 0;
820
821 /* If INNER is not ok for MODE, then INNER will need reloading. */
822 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
823 return 1;
824
825 /* If the outer part is a word or smaller, INNER larger than a
826 word and the number of regs for INNER is not the same as the
827 number of words in INNER, then INNER will need reloading. */
828 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
829 && output
830 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
831 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
832 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
833 }
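/* For instance (using a hypothetical floating-point hard register FR):
   (subreg:SI (reg:DF FR) 0) requires its inner register to be reloaded when
   SImode is not valid for the hard register the subreg addresses; likewise,
   a word-sized or narrower output subreg of a multi-word hard register
   requires it when that register occupies a different number of hard
   registers than the value has words.  */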
834
835 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
836 requiring an extra reload register. The caller has already found that
837 IN contains some reference to REGNO, so check that we can produce the
838 new value in a single step. E.g. if we have
839 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
840 instruction that adds one to a register, this should succeed.
841 However, if we have something like
842 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
843 needs to be loaded into a register first, we need a separate reload
844 register.
845 Such PLUS reloads are generated by find_reloads_address_part.
846 The out-of-range PLUS expressions are usually introduced in the instruction
847 patterns by register elimination and substituting pseudos without a home
848 by their function-invariant equivalences. */
849 static int
850 can_reload_into (rtx in, int regno, enum machine_mode mode)
851 {
852 rtx dst, test_insn;
853 int r = 0;
854 struct recog_data save_recog_data;
855
856 /* For matching constraints, we often get notional input reloads where
857 we want to use the original register as the reload register. I.e.
858 technically this is a non-optional input-output reload, but IN is
859 already a valid register, and has been chosen as the reload register.
860 Speed this up, since it trivially works. */
861 if (REG_P (in))
862 return 1;
863
864 /* To test MEMs properly, we'd have to take into account all the reloads
865 that are already scheduled, which can become quite complicated.
866 And since we've already handled address reloads for this MEM, it
867 should always succeed anyway. */
868 if (MEM_P (in))
869 return 1;
870
871 /* If we can make a simple SET insn that does the job, everything should
872 be fine. */
873 dst = gen_rtx_REG (mode, regno);
874 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
875 save_recog_data = recog_data;
876 if (recog_memoized (test_insn) >= 0)
877 {
878 extract_insn (test_insn);
879 r = constrain_operands (1);
880 }
881 recog_data = save_recog_data;
882 return r;
883 }
884
885 /* Record one reload that needs to be performed.
886 IN is an rtx saying where the data are to be found before this instruction.
887 OUT says where they must be stored after the instruction.
888 (IN is zero for data not read, and OUT is zero for data not written.)
889 INLOC and OUTLOC point to the places in the instructions where
890 IN and OUT were found.
891 If IN and OUT are both nonzero, it means the same register must be used
892 to reload both IN and OUT.
893
894 RCLASS is a register class required for the reloaded data.
895 INMODE is the machine mode that the instruction requires
896 for the reg that replaces IN and OUTMODE is likewise for OUT.
897
898 If IN is zero, then OUT's location and mode should be passed as
899 INLOC and INMODE.
900
901 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
902
903 OPTIONAL nonzero means this reload does not need to be performed:
904 it can be discarded if that is more convenient.
905
906 OPNUM and TYPE say what the purpose of this reload is.
907
908 The return value is the reload-number for this reload.
909
910 If both IN and OUT are nonzero, in some rare cases we might
911 want to make two separate reloads. (Actually we never do this now.)
912 Therefore, the reload-number for OUT is stored in
913 output_reloadnum when we return; the return value applies to IN.
914 Usually (presently always), when IN and OUT are nonzero,
915 the two reload-numbers are equal, but the caller should be careful to
916 distinguish them. */
917
918 int
919 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
920 enum reg_class rclass, enum machine_mode inmode,
921 enum machine_mode outmode, int strict_low, int optional,
922 int opnum, enum reload_type type)
923 {
924 int i;
925 int dont_share = 0;
926 int dont_remove_subreg = 0;
927 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
928 int secondary_in_reload = -1, secondary_out_reload = -1;
929 enum insn_code secondary_in_icode = CODE_FOR_nothing;
930 enum insn_code secondary_out_icode = CODE_FOR_nothing;
931
932 /* INMODE and/or OUTMODE could be VOIDmode if no mode
933 has been specified for the operand. In that case,
934 use the operand's mode as the mode to reload. */
935 if (inmode == VOIDmode && in != 0)
936 inmode = GET_MODE (in);
937 if (outmode == VOIDmode && out != 0)
938 outmode = GET_MODE (out);
939
940 /* If find_reloads and friends have so far failed to replace a pseudo
941 with its reg_equiv_constant, something went wrong
942 beforehand.
943 Note that it can't simply be done here if we missed it earlier
944 since the constant might need to be pushed into the literal pool
945 and the resulting memref would probably need further
946 reloading. */
947 if (in != 0 && REG_P (in))
948 {
949 int regno = REGNO (in);
950
951 gcc_assert (regno < FIRST_PSEUDO_REGISTER
952 || reg_renumber[regno] >= 0
953 || reg_equiv_constant[regno] == NULL_RTX);
954 }
955
956 /* reg_equiv_constant only contains constants which are obviously
957 not appropriate as a destination. So if we would need to replace
958 the destination pseudo with a constant, we are in real
959 trouble. */
960 if (out != 0 && REG_P (out))
961 {
962 int regno = REGNO (out);
963
964 gcc_assert (regno < FIRST_PSEUDO_REGISTER
965 || reg_renumber[regno] >= 0
966 || reg_equiv_constant[regno] == NULL_RTX);
967 }
968
969 /* If we have a read-write operand with an address side-effect,
970 change either IN or OUT so the side-effect happens only once. */
971 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
972 switch (GET_CODE (XEXP (in, 0)))
973 {
974 case POST_INC: case POST_DEC: case POST_MODIFY:
975 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
976 break;
977
978 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
979 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
980 break;
981
982 default:
983 break;
984 }
985
986 /* If we are reloading a (SUBREG constant ...), really reload just the
987 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
988 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
989 a pseudo and hence will become a MEM) with M1 wider than M2 and the
990 register is a pseudo, also reload the inside expression.
991 For machines that extend byte loads, do this for any SUBREG of a pseudo
992 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
993 M2 is an integral mode that gets extended when loaded.
994 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
995 either M1 is not valid for R or M2 is wider than a word but we only
996 need one word to store an M2-sized quantity in R.
997 (However, if OUT is nonzero, we need to reload the reg *and*
998 the subreg, so do nothing here, and let the following statement handle it.)
999
1000 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1001 we can't handle it here because CONST_INT does not indicate a mode.
1002
1003 Similarly, we must reload the inside expression if we have a
1004 STRICT_LOW_PART (presumably, in == out in this case).
1005
1006 Also reload the inner expression if it does not require a secondary
1007 reload but the SUBREG does.
1008
1009 Finally, reload the inner expression if it is a register that is in
1010 the class whose registers cannot be referenced in a different size
1011 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1012 cannot reload just the inside since we might end up with the wrong
1013 register class. But if it is inside a STRICT_LOW_PART, we have
1014 no choice, so we hope we do get the right register class there. */
1015
1016 if (in != 0 && GET_CODE (in) == SUBREG
1017 && (subreg_lowpart_p (in) || strict_low)
1018 #ifdef CANNOT_CHANGE_MODE_CLASS
1019 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1020 #endif
1021 && (CONSTANT_P (SUBREG_REG (in))
1022 || GET_CODE (SUBREG_REG (in)) == PLUS
1023 || strict_low
1024 || (((REG_P (SUBREG_REG (in))
1025 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1026 || MEM_P (SUBREG_REG (in)))
1027 && ((GET_MODE_SIZE (inmode)
1028 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1029 #ifdef LOAD_EXTEND_OP
1030 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1031 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1032 <= UNITS_PER_WORD)
1033 && (GET_MODE_SIZE (inmode)
1034 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1035 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1036 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1037 #endif
1038 #ifdef WORD_REGISTER_OPERATIONS
1039 || ((GET_MODE_SIZE (inmode)
1040 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1041 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1042 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1043 / UNITS_PER_WORD)))
1044 #endif
1045 ))
1046 || (REG_P (SUBREG_REG (in))
1047 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1048 /* The case where out is nonzero
1049 is handled differently in the following statement. */
1050 && (out == 0 || subreg_lowpart_p (in))
1051 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1052 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1053 > UNITS_PER_WORD)
1054 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1055 / UNITS_PER_WORD)
1056 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1057 [GET_MODE (SUBREG_REG (in))]))
1058 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1059 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1060 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1061 SUBREG_REG (in))
1062 == NO_REGS))
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 || (REG_P (SUBREG_REG (in))
1065 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1066 && REG_CANNOT_CHANGE_MODE_P
1067 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1068 #endif
1069 ))
1070 {
1071 in_subreg_loc = inloc;
1072 inloc = &SUBREG_REG (in);
1073 in = *inloc;
1074 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1075 if (MEM_P (in))
1076 /* This is supposed to happen only for paradoxical subregs made by
1077 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1078 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1079 #endif
1080 inmode = GET_MODE (in);
1081 }
1082
1083 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1084 either M1 is not valid for R or M2 is wider than a word but we only
1085 need one word to store an M2-sized quantity in R.
1086
1087 However, we must reload the inner reg *as well as* the subreg in
1088 that case. */
1089
1090 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1091 code above. This can happen if SUBREG_BYTE != 0. */
1092
1093 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1094 {
1095 enum reg_class in_class = rclass;
1096
1097 if (REG_P (SUBREG_REG (in)))
1098 in_class
1099 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1100 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1101 GET_MODE (SUBREG_REG (in)),
1102 SUBREG_BYTE (in),
1103 GET_MODE (in)),
1104 REGNO (SUBREG_REG (in)));
1105
1106 /* This relies on the fact that emit_reload_insns outputs the
1107 instructions for input reloads of type RELOAD_OTHER in the same
1108 order as the reloads. Thus if the outer reload is also of type
1109 RELOAD_OTHER, we are guaranteed that this inner reload will be
1110 output before the outer reload. */
1111 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1112 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1113 dont_remove_subreg = 1;
1114 }
1115
1116 /* Similarly for paradoxical and problematical SUBREGs on the output.
1117 Note that there is no reason we need worry about the previous value
1118 of SUBREG_REG (out); even if wider than out,
1119 storing in a subreg is entitled to clobber it all
1120 (except in the case of STRICT_LOW_PART,
1121 and in that case the constraint should label it input-output.) */
1122 if (out != 0 && GET_CODE (out) == SUBREG
1123 && (subreg_lowpart_p (out) || strict_low)
1124 #ifdef CANNOT_CHANGE_MODE_CLASS
1125 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1126 #endif
1127 && (CONSTANT_P (SUBREG_REG (out))
1128 || strict_low
1129 || (((REG_P (SUBREG_REG (out))
1130 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1131 || MEM_P (SUBREG_REG (out)))
1132 && ((GET_MODE_SIZE (outmode)
1133 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1134 #ifdef WORD_REGISTER_OPERATIONS
1135 || ((GET_MODE_SIZE (outmode)
1136 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1137 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1138 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1139 / UNITS_PER_WORD)))
1140 #endif
1141 ))
1142 || (REG_P (SUBREG_REG (out))
1143 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1144 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1145 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1146 > UNITS_PER_WORD)
1147 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1148 / UNITS_PER_WORD)
1149 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1150 [GET_MODE (SUBREG_REG (out))]))
1151 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1152 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1153 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1154 SUBREG_REG (out))
1155 == NO_REGS))
1156 #ifdef CANNOT_CHANGE_MODE_CLASS
1157 || (REG_P (SUBREG_REG (out))
1158 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1159 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1160 GET_MODE (SUBREG_REG (out)),
1161 outmode))
1162 #endif
1163 ))
1164 {
1165 out_subreg_loc = outloc;
1166 outloc = &SUBREG_REG (out);
1167 out = *outloc;
1168 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1169 gcc_assert (!MEM_P (out)
1170 || GET_MODE_SIZE (GET_MODE (out))
1171 <= GET_MODE_SIZE (outmode));
1172 #endif
1173 outmode = GET_MODE (out);
1174 }
1175
1176 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1177 either M1 is not valid for R or M2 is wider than a word but we only
1178 need one word to store an M2-sized quantity in R.
1179
1180 However, we must reload the inner reg *as well as* the subreg in
1181 that case. In this case, the inner reg is an in-out reload. */
1182
1183 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1184 {
1185 /* This relies on the fact that emit_reload_insns outputs the
1186 instructions for output reloads of type RELOAD_OTHER in reverse
1187 order of the reloads. Thus if the outer reload is also of type
1188 RELOAD_OTHER, we are guaranteed that this inner reload will be
1189 output after the outer reload. */
1190 dont_remove_subreg = 1;
1191 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1192 &SUBREG_REG (out),
1193 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1194 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1195 GET_MODE (SUBREG_REG (out)),
1196 SUBREG_BYTE (out),
1197 GET_MODE (out)),
1198 REGNO (SUBREG_REG (out))),
1199 VOIDmode, VOIDmode, 0, 0,
1200 opnum, RELOAD_OTHER);
1201 }
1202
1203 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1204 if (in != 0 && out != 0 && MEM_P (out)
1205 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1206 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1207 dont_share = 1;
1208
1209 /* If IN is a SUBREG of a hard register, make a new REG. This
1210 simplifies some of the cases below. */
1211
1212 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1213 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1214 && ! dont_remove_subreg)
1215 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1216
1217 /* Similarly for OUT. */
1218 if (out != 0 && GET_CODE (out) == SUBREG
1219 && REG_P (SUBREG_REG (out))
1220 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1221 && ! dont_remove_subreg)
1222 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1223
1224 /* Narrow down the class of register wanted if that is
1225 desirable on this machine for efficiency. */
1226 {
1227 enum reg_class preferred_class = rclass;
1228
1229 if (in != 0)
1230 preferred_class = (enum reg_class) targetm.preferred_reload_class (in, rclass);
1231
1232 /* Output reloads may need analogous treatment, different in detail. */
1233 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1234 if (out != 0)
1235 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1236 #endif
1237
1238 /* Discard what the target said if we cannot do it. */
1239 if (preferred_class != NO_REGS
1240 || (optional && type == RELOAD_FOR_OUTPUT))
1241 rclass = preferred_class;
1242 }
1243
1244 /* Make sure we use a class that can handle the actual pseudo
1245 inside any subreg. For example, on the 386, QImode regs
1246 can appear within SImode subregs. Although GENERAL_REGS
1247 can handle SImode, QImode needs a smaller class. */
1248 #ifdef LIMIT_RELOAD_CLASS
1249 if (in_subreg_loc)
1250 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1251 else if (in != 0 && GET_CODE (in) == SUBREG)
1252 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1253
1254 if (out_subreg_loc)
1255 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1256 if (out != 0 && GET_CODE (out) == SUBREG)
1257 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1258 #endif
1259
1260 /* Verify that this class is at least possible for the mode that
1261 is specified. */
1262 if (this_insn_is_asm)
1263 {
1264 enum machine_mode mode;
1265 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1266 mode = inmode;
1267 else
1268 mode = outmode;
1269 if (mode == VOIDmode)
1270 {
1271 error_for_asm (this_insn, "cannot reload integer constant "
1272 "operand in %<asm%>");
1273 mode = word_mode;
1274 if (in != 0)
1275 inmode = word_mode;
1276 if (out != 0)
1277 outmode = word_mode;
1278 }
1279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1280 if (HARD_REGNO_MODE_OK (i, mode)
1281 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1282 break;
1283 if (i == FIRST_PSEUDO_REGISTER)
1284 {
1285 error_for_asm (this_insn, "impossible register constraint "
1286 "in %<asm%>");
1287 /* Avoid further trouble with this insn. */
1288 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1289 /* We used to continue here setting class to ALL_REGS, but it triggers
1290 a sanity check on i386 for:
1291 void foo(long double d)
1292 {
1293 asm("" :: "a" (d));
1294 }
1295 Returning zero here ought to be safe as we take care in
1296 find_reloads to not process the reloads when the instruction was
1297 replaced by USE. */
1298
1299 return 0;
1300 }
1301 }
1302
1303 /* Optional output reloads are always OK even if we have no register class,
1304 since the function of these reloads is only to have spill_reg_store etc.
1305 set, so that the storing insn can be deleted later. */
1306 gcc_assert (rclass != NO_REGS
1307 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1308
1309 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1310
1311 if (i == n_reloads)
1312 {
1313 /* See if we need a secondary reload register to move between CLASS
1314 and IN or CLASS and OUT. Get the icode and push any required reloads
1315 needed for each of them if so. */
1316
1317 if (in != 0)
1318 secondary_in_reload
1319 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1320 &secondary_in_icode, NULL);
1321 if (out != 0 && GET_CODE (out) != SCRATCH)
1322 secondary_out_reload
1323 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1324 type, &secondary_out_icode, NULL);
1325
1326 /* We found no existing reload suitable for re-use.
1327 So add an additional reload. */
1328
1329 #ifdef SECONDARY_MEMORY_NEEDED
1330 /* If a memory location is needed for the copy, make one. */
1331 if (in != 0
1332 && (REG_P (in)
1333 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1334 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1335 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1336 rclass, inmode))
1337 get_secondary_mem (in, inmode, opnum, type);
1338 #endif
1339
1340 i = n_reloads;
1341 rld[i].in = in;
1342 rld[i].out = out;
1343 rld[i].rclass = rclass;
1344 rld[i].inmode = inmode;
1345 rld[i].outmode = outmode;
1346 rld[i].reg_rtx = 0;
1347 rld[i].optional = optional;
1348 rld[i].inc = 0;
1349 rld[i].nocombine = 0;
1350 rld[i].in_reg = inloc ? *inloc : 0;
1351 rld[i].out_reg = outloc ? *outloc : 0;
1352 rld[i].opnum = opnum;
1353 rld[i].when_needed = type;
1354 rld[i].secondary_in_reload = secondary_in_reload;
1355 rld[i].secondary_out_reload = secondary_out_reload;
1356 rld[i].secondary_in_icode = secondary_in_icode;
1357 rld[i].secondary_out_icode = secondary_out_icode;
1358 rld[i].secondary_p = 0;
1359
1360 n_reloads++;
1361
1362 #ifdef SECONDARY_MEMORY_NEEDED
1363 if (out != 0
1364 && (REG_P (out)
1365 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1366 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1367 && SECONDARY_MEMORY_NEEDED (rclass,
1368 REGNO_REG_CLASS (reg_or_subregno (out)),
1369 outmode))
1370 get_secondary_mem (out, outmode, opnum, type);
1371 #endif
1372 }
1373 else
1374 {
1375 /* We are reusing an existing reload,
1376 but we may have additional information for it.
1377 For example, we may now have both IN and OUT
1378 while the old one may have just one of them. */
1379
1380 /* The modes can be different. If they are, we want to reload in
1381 the larger mode, so that the value is valid for both modes. */
1382 if (inmode != VOIDmode
1383 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1384 rld[i].inmode = inmode;
1385 if (outmode != VOIDmode
1386 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1387 rld[i].outmode = outmode;
1388 if (in != 0)
1389 {
1390 rtx in_reg = inloc ? *inloc : 0;
1391 /* If we merge reloads for two distinct rtl expressions that
1392 are identical in content, there might be duplicate address
1393 reloads. Remove the extra set now, so that if we later find
1394 that we can inherit this reload, we can get rid of the
1395 address reloads altogether.
1396
1397 Do not do this if both reloads are optional since the result
1398 would be an optional reload which could potentially leave
1399 unresolved address replacements.
1400
1401 It is not sufficient to call transfer_replacements since
1402 choose_reload_regs will remove the replacements for address
1403 reloads of inherited reloads which results in the same
1404 problem. */
1405 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1406 && ! (rld[i].optional && optional))
1407 {
1408 /* We must keep the address reload with the lower operand
1409 number alive. */
1410 if (opnum > rld[i].opnum)
1411 {
1412 remove_address_replacements (in);
1413 in = rld[i].in;
1414 in_reg = rld[i].in_reg;
1415 }
1416 else
1417 remove_address_replacements (rld[i].in);
1418 }
1419 /* When emitting reloads we don't look only at the in-
1420 and outmode, but also directly at the operands (in and out).
1421 So we can't simply overwrite them with whatever we have found
1422 for this (to-be-merged) reload, we have to "merge" that too.
1423 Reusing another reload already verified that we deal with the
1424 same operands, just possibly in different modes. So we
1425 overwrite the operands only when the new mode is larger.
1426 See also PR33613. */
1427 if (!rld[i].in
1428 || GET_MODE_SIZE (GET_MODE (in))
1429 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1430 rld[i].in = in;
1431 if (!rld[i].in_reg
1432 || (in_reg
1433 && GET_MODE_SIZE (GET_MODE (in_reg))
1434 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1435 rld[i].in_reg = in_reg;
1436 }
1437 if (out != 0)
1438 {
1439 if (!rld[i].out
1440 || (out
1441 && GET_MODE_SIZE (GET_MODE (out))
1442 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1443 rld[i].out = out;
1444 if (outloc
1445 && (!rld[i].out_reg
1446 || GET_MODE_SIZE (GET_MODE (*outloc))
1447 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1448 rld[i].out_reg = *outloc;
1449 }
1450 if (reg_class_subset_p (rclass, rld[i].rclass))
1451 rld[i].rclass = rclass;
1452 rld[i].optional &= optional;
1453 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1454 opnum, rld[i].opnum))
1455 rld[i].when_needed = RELOAD_OTHER;
1456 rld[i].opnum = MIN (rld[i].opnum, opnum);
1457 }
1458
1459 /* If the ostensible rtx being reloaded differs from the rtx found
1460 in the location to substitute, this reload is not safe to combine
1461 because we cannot reliably tell whether it appears in the insn. */
1462
1463 if (in != 0 && in != *inloc)
1464 rld[i].nocombine = 1;
1465
1466 #if 0
1467 /* This was replaced by changes in find_reloads_address_1 and the new
1468 function inc_for_reload, which go with a new meaning of reload_inc. */
1469
1470 /* If this is an IN/OUT reload in an insn that sets the CC,
1471 it must be for an autoincrement. It doesn't work to store
1472 the incremented value after the insn because that would clobber the CC.
1473 So we must reload the value being incremented, increment it,
1474 store it back, then decrement it again. */
1475 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1476 {
1477 out = 0;
1478 rld[i].out = 0;
1479 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1480 /* If we did not find a nonzero amount-to-increment-by,
1481 that contradicts the belief that IN is being incremented
1482 in an address in this insn. */
1483 gcc_assert (rld[i].inc != 0);
1484 }
1485 #endif
1486
1487 /* If we will replace IN and OUT with the reload-reg,
1488 record where they are located so that substitution need
1489 not do a tree walk. */
1490
1491 if (replace_reloads)
1492 {
1493 if (inloc != 0)
1494 {
1495 struct replacement *r = &replacements[n_replacements++];
1496 r->what = i;
1497 r->subreg_loc = in_subreg_loc;
1498 r->where = inloc;
1499 r->mode = inmode;
1500 }
1501 if (outloc != 0 && outloc != inloc)
1502 {
1503 struct replacement *r = &replacements[n_replacements++];
1504 r->what = i;
1505 r->where = outloc;
1506 r->subreg_loc = out_subreg_loc;
1507 r->mode = outmode;
1508 }
1509 }
1510
1511 /* If this reload is just being introduced and it has both
1512 an incoming quantity and an outgoing quantity that are
1513 supposed to be made to match, see if either one of the two
1514 can serve as the place to reload into.
1515
1516 If one of them is acceptable, set rld[i].reg_rtx
1517 to that one. */
1518
1519 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1520 {
1521 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1522 inmode, outmode,
1523 rld[i].rclass, i,
1524 earlyclobber_operand_p (out));
1525
1526 /* If the outgoing register already contains the same value
1527 as the incoming one, we can dispense with loading it.
1528 The easiest way to tell the caller that is to give a phony
1529 value for the incoming operand (same as outgoing one). */
1530 if (rld[i].reg_rtx == out
1531 && (REG_P (in) || CONSTANT_P (in))
1532 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1533 static_reload_reg_p, i, inmode))
1534 rld[i].in = out;
1535 }
1536
1537 /* If this is an input reload and the operand contains a register that
1538 dies in this insn and is used nowhere else, see if it is the right class
1539 to be used for this reload. Use it if so. (This occurs most commonly
1540 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1541 this if it is also an output reload that mentions the register unless
1542 the output is a SUBREG that clobbers an entire register.
1543
1544 Note that the operand might be one of the spill regs, if it is a
1545 pseudo reg and we are in a block where spilling has not taken place.
1546 But if there is no spilling in this block, that is OK.
1547 An explicitly used hard reg cannot be a spill reg. */
1548
1549 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1550 {
1551 rtx note;
1552 int regno;
1553 enum machine_mode rel_mode = inmode;
1554
1555 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1556 rel_mode = outmode;
1557
1558 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1559 if (REG_NOTE_KIND (note) == REG_DEAD
1560 && REG_P (XEXP (note, 0))
1561 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1562 && reg_mentioned_p (XEXP (note, 0), in)
1563 /* Check that a former pseudo is valid; see find_dummy_reload. */
1564 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1565 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1566 ORIGINAL_REGNO (XEXP (note, 0)))
1567 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1568 && ! refers_to_regno_for_reload_p (regno,
1569 end_hard_regno (rel_mode,
1570 regno),
1571 PATTERN (this_insn), inloc)
1572 /* If this is also an output reload, IN cannot be used as
1573 the reload register if it is set in this insn unless IN
1574 is also OUT. */
1575 && (out == 0 || in == out
1576 || ! hard_reg_set_here_p (regno,
1577 end_hard_regno (rel_mode, regno),
1578 PATTERN (this_insn)))
1579 /* ??? Why is this code so different from the previous?
1580 Is there any simple coherent way to describe the two together?
1581 What's going on here? */
1582 && (in != out
1583 || (GET_CODE (in) == SUBREG
1584 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1585 / UNITS_PER_WORD)
1586 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1587 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1588 /* Make sure the operand fits in the reg that dies. */
1589 && (GET_MODE_SIZE (rel_mode)
1590 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1591 && HARD_REGNO_MODE_OK (regno, inmode)
1592 && HARD_REGNO_MODE_OK (regno, outmode))
1593 {
1594 unsigned int offs;
1595 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1596 hard_regno_nregs[regno][outmode]);
1597
1598 for (offs = 0; offs < nregs; offs++)
1599 if (fixed_regs[regno + offs]
1600 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1601 regno + offs))
1602 break;
1603
1604 if (offs == nregs
1605 && (! (refers_to_regno_for_reload_p
1606 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1607 || can_reload_into (in, regno, inmode)))
1608 {
1609 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1610 break;
1611 }
1612 }
1613 }
1614
1615 if (out)
1616 output_reloadnum = i;
1617
1618 return i;
1619 }
1620
1621 /* Record an additional place we must replace a value
1622 for which we have already recorded a reload.
1623 RELOADNUM is the value returned by push_reload
1624 when the reload was recorded.
1625 This is used in insn patterns that use match_dup. */
1626
1627 static void
1628 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1629 {
1630 if (replace_reloads)
1631 {
1632 struct replacement *r = &replacements[n_replacements++];
1633 r->what = reloadnum;
1634 r->where = loc;
1635 r->subreg_loc = 0;
1636 r->mode = mode;
1637 }
1638 }
1639
1640 /* Duplicate any replacement we have recorded to apply at
1641 location ORIG_LOC to also be performed at DUP_LOC.
1642 This is used in insn patterns that use match_dup. */
1643
1644 static void
1645 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1646 {
1647 int i, n = n_replacements;
1648
1649 for (i = 0; i < n; i++)
1650 {
1651 struct replacement *r = &replacements[i];
1652 if (r->where == orig_loc)
1653 push_replacement (dup_loc, r->what, r->mode);
1654 }
1655 }
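
/* Illustrative sketch, not part of reload.c: how a match_dup copy of an
   operand picks up the replacements of the original.  OP_LOC, DUP_LOC and
   RELOADNUM are hypothetical, and replace_reloads is assumed nonzero.  */
#if 0
static void
match_dup_example (rtx *op_loc, rtx *dup_loc, int reloadnum)
{
  /* The operand itself gets a replacement record for its reload.  */
  push_replacement (op_loc, reloadnum, GET_MODE (*op_loc));

  /* The match_dup copy must be patched with the same reload register,
     so every replacement recorded at OP_LOC is repeated for DUP_LOC.  */
  dup_replacements (dup_loc, op_loc);
}
#endif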
1656 \f
1657 /* Transfer all replacements that used to be in reload FROM to be in
1658 reload TO. */
1659
1660 void
1661 transfer_replacements (int to, int from)
1662 {
1663 int i;
1664
1665 for (i = 0; i < n_replacements; i++)
1666 if (replacements[i].what == from)
1667 replacements[i].what = to;
1668 }
1669 \f
1670 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1671 or a subpart of it. If we have any replacements registered for IN_RTX,
1672 cancel the reloads that were supposed to load them.
1673 Return nonzero if we canceled any reloads. */
1674 int
1675 remove_address_replacements (rtx in_rtx)
1676 {
1677 int i, j;
1678 char reload_flags[MAX_RELOADS];
1679 int something_changed = 0;
1680
1681 memset (reload_flags, 0, sizeof reload_flags);
1682 for (i = 0, j = 0; i < n_replacements; i++)
1683 {
1684 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1685 reload_flags[replacements[i].what] |= 1;
1686 else
1687 {
1688 replacements[j++] = replacements[i];
1689 reload_flags[replacements[i].what] |= 2;
1690 }
1691 }
1692 /* Note that the following store must be done before the recursive calls. */
1693 n_replacements = j;
1694
1695 for (i = n_reloads - 1; i >= 0; i--)
1696 {
1697 if (reload_flags[i] == 1)
1698 {
1699 deallocate_reload_reg (i);
1700 remove_address_replacements (rld[i].in);
1701 rld[i].in = 0;
1702 something_changed = 1;
1703 }
1704 }
1705 return something_changed;
1706 }
1707 \f
1708 /* If there is only one output reload, and it is not for an earlyclobber
1709 operand, try to combine it with a (logically unrelated) input reload
1710 to reduce the number of reload registers needed.
1711
1712 This is safe if the input reload does not appear in
1713 the value being output-reloaded, because this implies
1714 it is not needed any more once the original insn completes.
1715
1716 If that doesn't work, see if we can use any of the registers that
1717 die in this insn as a reload register. We can if it is of the right
1718 class and does not appear in the value being output-reloaded. */
1719
1720 static void
1721 combine_reloads (void)
1722 {
1723 int i, regno;
1724 int output_reload = -1;
1725 int secondary_out = -1;
1726 rtx note;
1727
1728 /* Find the output reload; return unless there is exactly one
1729 and that one is mandatory. */
1730
1731 for (i = 0; i < n_reloads; i++)
1732 if (rld[i].out != 0)
1733 {
1734 if (output_reload >= 0)
1735 return;
1736 output_reload = i;
1737 }
1738
1739 if (output_reload < 0 || rld[output_reload].optional)
1740 return;
1741
1742 /* An input-output reload isn't combinable. */
1743
1744 if (rld[output_reload].in != 0)
1745 return;
1746
1747 /* If this reload is for an earlyclobber operand, we can't do anything. */
1748 if (earlyclobber_operand_p (rld[output_reload].out))
1749 return;
1750
1751 /* If there is a reload for part of the address of this operand, we would
1752 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1753 its life to the point where doing this combine would not lower the
1754 number of spill registers needed. */
1755 for (i = 0; i < n_reloads; i++)
1756 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1757 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1758 && rld[i].opnum == rld[output_reload].opnum)
1759 return;
1760
1761 /* Check each input reload; can we combine it? */
1762
1763 for (i = 0; i < n_reloads; i++)
1764 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1765 /* Life span of this reload must not extend past main insn. */
1766 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1767 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1768 && rld[i].when_needed != RELOAD_OTHER
1769 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1770 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1771 rld[output_reload].outmode))
1772 && rld[i].inc == 0
1773 && rld[i].reg_rtx == 0
1774 #ifdef SECONDARY_MEMORY_NEEDED
1775 /* Don't combine two reloads with different secondary
1776 memory locations. */
1777 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1778 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1779 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1780 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1781 #endif
1782 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1783 ? (rld[i].rclass == rld[output_reload].rclass)
1784 : (reg_class_subset_p (rld[i].rclass,
1785 rld[output_reload].rclass)
1786 || reg_class_subset_p (rld[output_reload].rclass,
1787 rld[i].rclass)))
1788 && (MATCHES (rld[i].in, rld[output_reload].out)
1789 /* Args reversed because the first arg seems to be
1790 the one that we imagine being modified
1791 while the second is the one that might be affected. */
1792 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1793 rld[i].in)
1794 /* However, if the input is a register that appears inside
1795 the output, then we also can't share.
1796 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1797 If the same reload reg is used for both reg 69 and the
1798 result to be stored in memory, then that result
1799 will clobber the address of the memory ref. */
1800 && ! (REG_P (rld[i].in)
1801 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1802 rld[output_reload].out))))
1803 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1804 rld[i].when_needed != RELOAD_FOR_INPUT)
1805 && (reg_class_size[(int) rld[i].rclass]
1806 || targetm.small_register_classes_for_mode_p (VOIDmode))
1807 /* We will allow making things slightly worse by combining an
1808 input and an output, but no worse than that. */
1809 && (rld[i].when_needed == RELOAD_FOR_INPUT
1810 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1811 {
1812 int j;
1813
1814 /* We have found a reload to combine with! */
1815 rld[i].out = rld[output_reload].out;
1816 rld[i].out_reg = rld[output_reload].out_reg;
1817 rld[i].outmode = rld[output_reload].outmode;
1818 /* Mark the old output reload as inoperative. */
1819 rld[output_reload].out = 0;
1820 /* The combined reload is needed for the entire insn. */
1821 rld[i].when_needed = RELOAD_OTHER;
1822 /* If the output reload had a secondary reload, copy it. */
1823 if (rld[output_reload].secondary_out_reload != -1)
1824 {
1825 rld[i].secondary_out_reload
1826 = rld[output_reload].secondary_out_reload;
1827 rld[i].secondary_out_icode
1828 = rld[output_reload].secondary_out_icode;
1829 }
1830
1831 #ifdef SECONDARY_MEMORY_NEEDED
1832 /* Copy any secondary MEM. */
1833 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1834 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1835 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1836 #endif
1837 /* If required, minimize the register class. */
1838 if (reg_class_subset_p (rld[output_reload].rclass,
1839 rld[i].rclass))
1840 rld[i].rclass = rld[output_reload].rclass;
1841
1842 /* Transfer all replacements from the old reload to the combined. */
1843 for (j = 0; j < n_replacements; j++)
1844 if (replacements[j].what == output_reload)
1845 replacements[j].what = i;
1846
1847 return;
1848 }
1849
1850 /* If this insn has only one operand that is modified or written (assumed
1851 to be the first), it must be the one corresponding to this reload. It
1852 is safe to use anything that dies in this insn for that output provided
1853 that it does not occur in the output (we already know it isn't an
1854 earlyclobber). If this is an asm insn, give up. */
1855
1856 if (INSN_CODE (this_insn) == -1)
1857 return;
1858
1859 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1860 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1861 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1862 return;
1863
1864 /* See if some hard register that dies in this insn and is not used in
1865 the output is the right class. Only works if the register we pick
1866 up can fully hold our output reload. */
1867 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1868 if (REG_NOTE_KIND (note) == REG_DEAD
1869 && REG_P (XEXP (note, 0))
1870 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1871 rld[output_reload].out)
1872 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1873 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1874 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1875 regno)
1876 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1877 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1878 /* Ensure that a secondary or tertiary reload for this output
1879 won't want this register. */
1880 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1881 || (!(TEST_HARD_REG_BIT
1882 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1883 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1884 || !(TEST_HARD_REG_BIT
1885 (reg_class_contents[(int) rld[secondary_out].rclass],
1886 regno)))))
1887 && !fixed_regs[regno]
1888 /* Check that a former pseudo is valid; see find_dummy_reload. */
1889 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1890 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1891 ORIGINAL_REGNO (XEXP (note, 0)))
1892 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1893 {
1894 rld[output_reload].reg_rtx
1895 = gen_rtx_REG (rld[output_reload].outmode, regno);
1896 return;
1897 }
1898 }
1899 \f
1900 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1901 See if one of IN and OUT is a register that may be used;
1902 this is desirable since a spill-register won't be needed.
1903 If so, return the register rtx that proves acceptable.
1904
1905 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1906 RCLASS is the register class required for the reload.
1907
1908 If FOR_REAL is >= 0, it is the number of the reload,
1909 and in some cases when it can be discovered that OUT doesn't need
1910 to be computed, clear out rld[FOR_REAL].out.
1911
1912 If FOR_REAL is -1, this should not be done, because this call
1913 is just to see if a register can be found, not to find and install it.
1914
1915 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1916 puts an additional constraint on being able to use IN for OUT since
1917 IN must not appear elsewhere in the insn (it is assumed that IN itself
1918 is safe from the earlyclobber). */
1919
1920 static rtx
1921 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1922 enum machine_mode inmode, enum machine_mode outmode,
1923 enum reg_class rclass, int for_real, int earlyclobber)
1924 {
1925 rtx in = real_in;
1926 rtx out = real_out;
1927 int in_offset = 0;
1928 int out_offset = 0;
1929 rtx value = 0;
1930
1931 /* If operands exceed a word, we can't use either of them
1932 unless they have the same size. */
1933 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1934 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1935 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1936 return 0;
1937
1938 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1939 respectively refers to a hard register. */
1940
1941 /* Find the inside of any subregs. */
1942 while (GET_CODE (out) == SUBREG)
1943 {
1944 if (REG_P (SUBREG_REG (out))
1945 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1946 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1947 GET_MODE (SUBREG_REG (out)),
1948 SUBREG_BYTE (out),
1949 GET_MODE (out));
1950 out = SUBREG_REG (out);
1951 }
1952 while (GET_CODE (in) == SUBREG)
1953 {
1954 if (REG_P (SUBREG_REG (in))
1955 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1956 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1957 GET_MODE (SUBREG_REG (in)),
1958 SUBREG_BYTE (in),
1959 GET_MODE (in));
1960 in = SUBREG_REG (in);
1961 }
1962
1963 /* Narrow down the reg class, the same way push_reload will;
1964 otherwise we might find a dummy now, but push_reload won't. */
1965 {
1966 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1967 if (preferred_class != NO_REGS)
1968 rclass = (enum reg_class) preferred_class;
1969 }
1970
1971 /* See if OUT will do. */
1972 if (REG_P (out)
1973 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1974 {
1975 unsigned int regno = REGNO (out) + out_offset;
1976 unsigned int nwords = hard_regno_nregs[regno][outmode];
1977 rtx saved_rtx;
1978
1979 /* When we consider whether the insn uses OUT,
1980 ignore references within IN. They don't prevent us
1981 from copying IN into OUT, because those refs would
1982 move into the insn that reloads IN.
1983
1984 However, we only ignore IN in its role as this reload.
1985 If the insn uses IN elsewhere and it contains OUT,
1986 that counts. We can't be sure it's the "same" operand
1987 so it might not go through this reload. */
1988 saved_rtx = *inloc;
1989 *inloc = const0_rtx;
1990
1991 if (regno < FIRST_PSEUDO_REGISTER
1992 && HARD_REGNO_MODE_OK (regno, outmode)
1993 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1994 PATTERN (this_insn), outloc))
1995 {
1996 unsigned int i;
1997
1998 for (i = 0; i < nwords; i++)
1999 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2000 regno + i))
2001 break;
2002
2003 if (i == nwords)
2004 {
2005 if (REG_P (real_out))
2006 value = real_out;
2007 else
2008 value = gen_rtx_REG (outmode, regno);
2009 }
2010 }
2011
2012 *inloc = saved_rtx;
2013 }
2014
2015 /* Consider using IN if OUT was not acceptable
2016 or if OUT dies in this insn (like the quotient in a divmod insn).
2017 We can't use IN unless it dies in this insn,
2018 which means we must know accurately which hard regs are live.
2019 Also, the result can't go in IN if IN is used within OUT,
2020 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2021 if (hard_regs_live_known
2022 && REG_P (in)
2023 && REGNO (in) < FIRST_PSEUDO_REGISTER
2024 && (value == 0
2025 || find_reg_note (this_insn, REG_UNUSED, real_out))
2026 && find_reg_note (this_insn, REG_DEAD, real_in)
2027 && !fixed_regs[REGNO (in)]
2028 && HARD_REGNO_MODE_OK (REGNO (in),
2029 /* The only case where out and real_out might
2030 have different modes is where real_out
2031 is a subreg, and in that case, out
2032 has a real mode. */
2033 (GET_MODE (out) != VOIDmode
2034 ? GET_MODE (out) : outmode))
2035 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2036 /* However only do this if we can be sure that this input
2037 operand doesn't correspond with an uninitialized pseudo.
2038 global can assign some hardreg to it that is the same as
2039 the one assigned to a different, also live pseudo (as it
2040 can ignore the conflict). We must never introduce writes
2041 to such hardregs, as they would clobber the other live
2042 pseudo. See PR 20973. */
2043 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2044 ORIGINAL_REGNO (in))
2045 /* Similarly, only do this if we can be sure that the death
2046 note is still valid. global can assign some hardreg to
2047 the pseudo referenced in the note and simultaneously a
2048 subword of this hardreg to a different, also live pseudo,
2049 because only another subword of the hardreg is actually
2050 used in the insn. This cannot happen if the pseudo has
2051 been assigned exactly one hardreg. See PR 33732. */
2052 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2053 {
2054 unsigned int regno = REGNO (in) + in_offset;
2055 unsigned int nwords = hard_regno_nregs[regno][inmode];
2056
2057 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2058 && ! hard_reg_set_here_p (regno, regno + nwords,
2059 PATTERN (this_insn))
2060 && (! earlyclobber
2061 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2062 PATTERN (this_insn), inloc)))
2063 {
2064 unsigned int i;
2065
2066 for (i = 0; i < nwords; i++)
2067 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2068 regno + i))
2069 break;
2070
2071 if (i == nwords)
2072 {
2073 /* If we were going to use OUT as the reload reg
2074 and changed our mind, it means OUT is a dummy that
2075 dies here. So don't bother copying value to it. */
2076 if (for_real >= 0 && value == real_out)
2077 rld[for_real].out = 0;
2078 if (REG_P (real_in))
2079 value = real_in;
2080 else
2081 value = gen_rtx_REG (inmode, regno);
2082 }
2083 }
2084 }
2085
2086 return value;
2087 }
2088 \f
2089 /* This page contains subroutines used mainly for determining
2090 whether the IN or an OUT of a reload can serve as the
2091 reload register. */
2092
2093 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2094
2095 int
2096 earlyclobber_operand_p (rtx x)
2097 {
2098 int i;
2099
2100 for (i = 0; i < n_earlyclobbers; i++)
2101 if (reload_earlyclobbers[i] == x)
2102 return 1;
2103
2104 return 0;
2105 }
2106
2107 /* Return 1 if expression X alters a hard reg in the range
2108 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2109 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2110 X should be the body of an instruction. */
2111
2112 static int
2113 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2114 {
2115 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2116 {
2117 rtx op0 = SET_DEST (x);
2118
2119 while (GET_CODE (op0) == SUBREG)
2120 op0 = SUBREG_REG (op0);
2121 if (REG_P (op0))
2122 {
2123 unsigned int r = REGNO (op0);
2124
2125 /* See if this reg overlaps range under consideration. */
2126 if (r < end_regno
2127 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2128 return 1;
2129 }
2130 }
2131 else if (GET_CODE (x) == PARALLEL)
2132 {
2133 int i = XVECLEN (x, 0) - 1;
2134
2135 for (; i >= 0; i--)
2136 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2137 return 1;
2138 }
2139
2140 return 0;
2141 }
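
/* Illustrative sketch, not part of reload.c: only SET and CLOBBER
   destinations count as "set here"; a register that is merely read does
   not.  The register numbers and modes are arbitrary.  */
#if 0
static void
hard_reg_set_here_p_example (void)
{
  rtx body = gen_rtx_SET (VOIDmode,
                          gen_rtx_REG (SImode, 2),
                          gen_rtx_REG (SImode, 5));

  /* Hard register 2 is written by BODY ...  */
  gcc_assert (hard_reg_set_here_p (2, 3, body));
  /* ... but hard register 5 is only read, so it is not "set here".  */
  gcc_assert (! hard_reg_set_here_p (5, 6, body));
}
#endif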
2142
2143 /* Return 1 if ADDR is a valid memory address for mode MODE
2144 in address space AS, and check that each pseudo reg has the
2145 proper kind of hard reg. */
2146
2147 int
2148 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2149 rtx addr, addr_space_t as)
2150 {
2151 #ifdef GO_IF_LEGITIMATE_ADDRESS
2152 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2153 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2154 return 0;
2155
2156 win:
2157 return 1;
2158 #else
2159 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2160 #endif
2161 }
2162 \f
2163 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2164 if they are the same hard reg, and has special hacks for
2165 autoincrement and autodecrement.
2166 This is specifically intended for find_reloads to use
2167 in determining whether two operands match.
2168 X is the operand whose number is the lower of the two.
2169
2170 The value is 2 if Y contains a pre-increment that matches
2171 a non-incrementing address in X. */
2172
2173 /* ??? To be completely correct, we should arrange to pass
2174 for X the output operand and for Y the input operand.
2175 For now, we assume that the output operand has the lower number
2176 because that is natural in (SET output (... input ...)). */
2177
2178 int
2179 operands_match_p (rtx x, rtx y)
2180 {
2181 int i;
2182 RTX_CODE code = GET_CODE (x);
2183 const char *fmt;
2184 int success_2;
2185
2186 if (x == y)
2187 return 1;
2188 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2189 && (REG_P (y) || (GET_CODE (y) == SUBREG
2190 && REG_P (SUBREG_REG (y)))))
2191 {
2192 int j;
2193
2194 if (code == SUBREG)
2195 {
2196 i = REGNO (SUBREG_REG (x));
2197 if (i >= FIRST_PSEUDO_REGISTER)
2198 goto slow;
2199 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2200 GET_MODE (SUBREG_REG (x)),
2201 SUBREG_BYTE (x),
2202 GET_MODE (x));
2203 }
2204 else
2205 i = REGNO (x);
2206
2207 if (GET_CODE (y) == SUBREG)
2208 {
2209 j = REGNO (SUBREG_REG (y));
2210 if (j >= FIRST_PSEUDO_REGISTER)
2211 goto slow;
2212 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2213 GET_MODE (SUBREG_REG (y)),
2214 SUBREG_BYTE (y),
2215 GET_MODE (y));
2216 }
2217 else
2218 j = REGNO (y);
2219
2220 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2221 multiple hard register group of scalar integer registers, so that
2222 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2223 register. */
2224 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2225 && SCALAR_INT_MODE_P (GET_MODE (x))
2226 && i < FIRST_PSEUDO_REGISTER)
2227 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2228 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2229 && SCALAR_INT_MODE_P (GET_MODE (y))
2230 && j < FIRST_PSEUDO_REGISTER)
2231 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2232
2233 return i == j;
2234 }
2235 /* If two operands must match, because they are really a single
2236 operand of an assembler insn, then two postincrements are invalid
2237 because the assembler insn would increment only once.
2238 On the other hand, a postincrement matches ordinary indexing
2239 if the postincrement is the output operand. */
2240 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2241 return operands_match_p (XEXP (x, 0), y);
2242 /* Two preincrements are invalid
2243 because the assembler insn would increment only once.
2244 On the other hand, a preincrement matches ordinary indexing
2245 if the preincrement is the input operand.
2246 In this case, return 2, since some callers need to do special
2247 things when this happens. */
2248 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2249 || GET_CODE (y) == PRE_MODIFY)
2250 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2251
2252 slow:
2253
2254 /* Now we have disposed of all the cases in which different rtx codes
2255 can match. */
2256 if (code != GET_CODE (y))
2257 return 0;
2258
2259 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2260 if (GET_MODE (x) != GET_MODE (y))
2261 return 0;
2262
2263 /* MEMs referring to different address spaces are not equivalent. */
2264 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2265 return 0;
2266
2267 switch (code)
2268 {
2269 case CONST_INT:
2270 case CONST_DOUBLE:
2271 case CONST_FIXED:
2272 return 0;
2273
2274 case LABEL_REF:
2275 return XEXP (x, 0) == XEXP (y, 0);
2276 case SYMBOL_REF:
2277 return XSTR (x, 0) == XSTR (y, 0);
2278
2279 default:
2280 break;
2281 }
2282
2283 /* Compare the elements. If any pair of corresponding elements
2284 fails to match, return 0 for the whole thing. */
2285
2286 success_2 = 0;
2287 fmt = GET_RTX_FORMAT (code);
2288 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2289 {
2290 int val, j;
2291 switch (fmt[i])
2292 {
2293 case 'w':
2294 if (XWINT (x, i) != XWINT (y, i))
2295 return 0;
2296 break;
2297
2298 case 'i':
2299 if (XINT (x, i) != XINT (y, i))
2300 return 0;
2301 break;
2302
2303 case 'e':
2304 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2305 if (val == 0)
2306 return 0;
2307 /* If any subexpression returns 2,
2308 we should return 2 if we are successful. */
2309 if (val == 2)
2310 success_2 = 1;
2311 break;
2312
2313 case '0':
2314 break;
2315
2316 case 'E':
2317 if (XVECLEN (x, i) != XVECLEN (y, i))
2318 return 0;
2319 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2320 {
2321 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2322 if (val == 0)
2323 return 0;
2324 if (val == 2)
2325 success_2 = 1;
2326 }
2327 break;
2328
2329 /* It is believed that rtx's at this level will never
2330 contain anything but integers and other rtx's,
2331 except for within LABEL_REFs and SYMBOL_REFs. */
2332 default:
2333 gcc_unreachable ();
2334 }
2335 }
2336 return 1 + success_2;
2337 }
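
/* Illustrative sketch, not part of reload.c: the three possible results of
   operands_match_p.  The address register number 3 and the modes are
   arbitrary choices for the example.  */
#if 0
static void
operands_match_p_example (void)
{
  rtx addr = gen_rtx_REG (Pmode, 3);
  rtx mem = gen_rtx_MEM (SImode, addr);
  rtx inc = gen_rtx_MEM (SImode, gen_rtx_PRE_INC (Pmode, addr));

  /* Identical rtx: an exact match.  */
  gcc_assert (operands_match_p (addr, addr) == 1);
  /* A register and a memory reference through it do not match.  */
  gcc_assert (operands_match_p (addr, mem) == 0);
  /* Y pre-increments the address that X uses unmodified: the special
     return value 2.  */
  gcc_assert (operands_match_p (mem, inc) == 2);
}
#endif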
2338 \f
2339 /* Describe the range of registers or memory referenced by X.
2340 If X is a register, set REG_FLAG and put the first register
2341 number into START and the last plus one into END.
2342 If X is a memory reference, put a base address into BASE
2343 and a range of integer offsets into START and END.
2344 If X is pushing on the stack, we can assume it causes no trouble,
2345 so we set the SAFE field. */
2346
2347 static struct decomposition
2348 decompose (rtx x)
2349 {
2350 struct decomposition val;
2351 int all_const = 0;
2352
2353 memset (&val, 0, sizeof (val));
2354
2355 switch (GET_CODE (x))
2356 {
2357 case MEM:
2358 {
2359 rtx base = NULL_RTX, offset = 0;
2360 rtx addr = XEXP (x, 0);
2361
2362 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2363 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2364 {
2365 val.base = XEXP (addr, 0);
2366 val.start = -GET_MODE_SIZE (GET_MODE (x));
2367 val.end = GET_MODE_SIZE (GET_MODE (x));
2368 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2369 return val;
2370 }
2371
2372 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2373 {
2374 if (GET_CODE (XEXP (addr, 1)) == PLUS
2375 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2376 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2377 {
2378 val.base = XEXP (addr, 0);
2379 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2380 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2381 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2382 return val;
2383 }
2384 }
2385
2386 if (GET_CODE (addr) == CONST)
2387 {
2388 addr = XEXP (addr, 0);
2389 all_const = 1;
2390 }
2391 if (GET_CODE (addr) == PLUS)
2392 {
2393 if (CONSTANT_P (XEXP (addr, 0)))
2394 {
2395 base = XEXP (addr, 1);
2396 offset = XEXP (addr, 0);
2397 }
2398 else if (CONSTANT_P (XEXP (addr, 1)))
2399 {
2400 base = XEXP (addr, 0);
2401 offset = XEXP (addr, 1);
2402 }
2403 }
2404
2405 if (offset == 0)
2406 {
2407 base = addr;
2408 offset = const0_rtx;
2409 }
2410 if (GET_CODE (offset) == CONST)
2411 offset = XEXP (offset, 0);
2412 if (GET_CODE (offset) == PLUS)
2413 {
2414 if (CONST_INT_P (XEXP (offset, 0)))
2415 {
2416 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2417 offset = XEXP (offset, 0);
2418 }
2419 else if (CONST_INT_P (XEXP (offset, 1)))
2420 {
2421 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2422 offset = XEXP (offset, 1);
2423 }
2424 else
2425 {
2426 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2427 offset = const0_rtx;
2428 }
2429 }
2430 else if (!CONST_INT_P (offset))
2431 {
2432 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2433 offset = const0_rtx;
2434 }
2435
2436 if (all_const && GET_CODE (base) == PLUS)
2437 base = gen_rtx_CONST (GET_MODE (base), base);
2438
2439 gcc_assert (CONST_INT_P (offset));
2440
2441 val.start = INTVAL (offset);
2442 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2443 val.base = base;
2444 }
2445 break;
2446
2447 case REG:
2448 val.reg_flag = 1;
2449 val.start = true_regnum (x);
2450 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2451 {
2452 /* A pseudo with no hard reg. */
2453 val.start = REGNO (x);
2454 val.end = val.start + 1;
2455 }
2456 else
2457 /* A hard reg. */
2458 val.end = end_hard_regno (GET_MODE (x), val.start);
2459 break;
2460
2461 case SUBREG:
2462 if (!REG_P (SUBREG_REG (x)))
2463 /* This could be more precise, but it's good enough. */
2464 return decompose (SUBREG_REG (x));
2465 val.reg_flag = 1;
2466 val.start = true_regnum (x);
2467 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2468 return decompose (SUBREG_REG (x));
2469 else
2470 /* A hard reg. */
2471 val.end = val.start + subreg_nregs (x);
2472 break;
2473
2474 case SCRATCH:
2475 /* This hasn't been assigned yet, so it can't conflict yet. */
2476 val.safe = 1;
2477 break;
2478
2479 default:
2480 gcc_assert (CONSTANT_P (x));
2481 val.safe = 1;
2482 break;
2483 }
2484 return val;
2485 }
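
/* Illustrative sketch, not part of reload.c: the three kinds of answers
   decompose can give.  The hard register number 1, the modes, and the
   offset 8 are arbitrary choices for the example.  */
#if 0
static void
decompose_example (void)
{
  /* A hard register: REG_FLAG is set and [START, END) is the range of
     hard register numbers it occupies.  */
  struct decomposition dreg = decompose (gen_rtx_REG (SImode, 1));

  /* A push onto the stack: the SAFE field is set, so it is assumed
     never to conflict with anything.  */
  struct decomposition dpush
    = decompose (gen_rtx_MEM (SImode,
                              gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx)));

  /* An ordinary memory reference: BASE plus the byte range [START, END).  */
  struct decomposition dmem
    = decompose (gen_rtx_MEM (SImode,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            GEN_INT (8))));

  gcc_assert (dreg.reg_flag
              && dpush.safe
              && dmem.start == 8
              && dmem.end == 8 + GET_MODE_SIZE (SImode));
}
#endif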
2486
2487 /* Return 1 if altering Y will not modify the value of X.
2488 Y is also described by YDATA, which should be decompose (Y). */
2489
2490 static int
2491 immune_p (rtx x, rtx y, struct decomposition ydata)
2492 {
2493 struct decomposition xdata;
2494
2495 if (ydata.reg_flag)
2496 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2497 if (ydata.safe)
2498 return 1;
2499
2500 gcc_assert (MEM_P (y));
2501 /* If Y is memory and X is not, Y can't affect X. */
2502 if (!MEM_P (x))
2503 return 1;
2504
2505 xdata = decompose (x);
2506
2507 if (! rtx_equal_p (xdata.base, ydata.base))
2508 {
2509 /* If bases are distinct symbolic constants, there is no overlap. */
2510 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2511 return 1;
2512 /* Constants and stack slots never overlap. */
2513 if (CONSTANT_P (xdata.base)
2514 && (ydata.base == frame_pointer_rtx
2515 || ydata.base == hard_frame_pointer_rtx
2516 || ydata.base == stack_pointer_rtx))
2517 return 1;
2518 if (CONSTANT_P (ydata.base)
2519 && (xdata.base == frame_pointer_rtx
2520 || xdata.base == hard_frame_pointer_rtx
2521 || xdata.base == stack_pointer_rtx))
2522 return 1;
2523 /* If either base is variable, we don't know anything. */
2524 return 0;
2525 }
2526
2527 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2528 }
2529
2530 /* Similar, but calls decompose. */
2531
2532 int
2533 safe_from_earlyclobber (rtx op, rtx clobber)
2534 {
2535 struct decomposition early_data;
2536
2537 early_data = decompose (clobber);
2538 return immune_p (op, clobber, early_data);
2539 }
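
/* Illustrative sketch, not part of reload.c: an operand is safe from an
   earlyclobber exactly when the byte (or register) ranges computed by
   decompose do not overlap.  The stack offsets and modes are arbitrary;
   SImode is assumed to be 4 bytes wide here.  */
#if 0
static void
safe_from_earlyclobber_example (void)
{
  rtx slot8 = gen_rtx_MEM (SImode,
                           gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                         GEN_INT (8)));
  rtx slot12 = gen_rtx_MEM (SImode,
                            gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                          GEN_INT (12)));

  /* The ranges [8,12) and [12,16) share a base but do not overlap, so
     clobbering one slot cannot change the other.  */
  gcc_assert (safe_from_earlyclobber (slot8, slot12));

  /* A DImode reference at offset 8 covers [8,16), which does overlap
     [12,16), so it is not immune.  */
  gcc_assert (! safe_from_earlyclobber (gen_rtx_MEM (DImode,
                                                     XEXP (slot8, 0)),
                                        slot12));
}
#endif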
2540 \f
2541 /* Main entry point of this file: search the body of INSN
2542 for values that need reloading and record them with push_reload.
2543 REPLACE nonzero means record also where the values occur
2544 so that subst_reloads can be used.
2545
2546 IND_LEVELS says how many levels of indirection are supported by this
2547 machine; a value of zero means that a memory reference is not a valid
2548 memory address.
2549
2550 LIVE_KNOWN says we have valid information about which hard
2551 regs are live at each point in the program; this is true when
2552 we are called from global_alloc but false when stupid register
2553 allocation has been done.
2554
2555 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2556 which is nonnegative if the reg has been commandeered for reloading into.
2557 It is copied into STATIC_RELOAD_REG_P and referenced from there
2558 by various subroutines.
2559
2560 Return TRUE if some operands need to be changed, because of swapping
2561 commutative operands, reg_equiv_address substitution, or whatever. */
2562
2563 int
2564 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2565 short *reload_reg_p)
2566 {
2567 int insn_code_number;
2568 int i, j;
2569 int noperands;
2570 /* These start out as the constraints for the insn
2571 and they are chewed up as we consider alternatives. */
2572 const char *constraints[MAX_RECOG_OPERANDS];
2573 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2574 a register. */
2575 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2576 char pref_or_nothing[MAX_RECOG_OPERANDS];
2577 /* Nonzero for a MEM operand whose entire address needs a reload.
2578 May be -1 to indicate the entire address may or may not need a reload. */
2579 int address_reloaded[MAX_RECOG_OPERANDS];
2580 /* Nonzero for an address operand that needs to be completely reloaded.
2581 May be -1 to indicate the entire operand may or may not need a reload. */
2582 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2583 /* Value of enum reload_type to use for operand. */
2584 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2585 /* Value of enum reload_type to use within address of operand. */
2586 enum reload_type address_type[MAX_RECOG_OPERANDS];
2587 /* Save the usage of each operand. */
2588 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2589 int no_input_reloads = 0, no_output_reloads = 0;
2590 int n_alternatives;
2591 enum reg_class this_alternative[MAX_RECOG_OPERANDS];
2592 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2593 char this_alternative_win[MAX_RECOG_OPERANDS];
2594 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2595 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2596 int this_alternative_matches[MAX_RECOG_OPERANDS];
2597 int swapped;
2598 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2599 int this_alternative_number;
2600 int goal_alternative_number = 0;
2601 int operand_reloadnum[MAX_RECOG_OPERANDS];
2602 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2603 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2604 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2605 char goal_alternative_win[MAX_RECOG_OPERANDS];
2606 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2607 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2608 int goal_alternative_swapped;
2609 int best;
2610 int commutative;
2611 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2612 rtx substed_operand[MAX_RECOG_OPERANDS];
2613 rtx body = PATTERN (insn);
2614 rtx set = single_set (insn);
2615 int goal_earlyclobber = 0, this_earlyclobber;
2616 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2617 int retval = 0;
2618
2619 this_insn = insn;
2620 n_reloads = 0;
2621 n_replacements = 0;
2622 n_earlyclobbers = 0;
2623 replace_reloads = replace;
2624 hard_regs_live_known = live_known;
2625 static_reload_reg_p = reload_reg_p;
2626
2627 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2628 neither are insns that SET cc0. Insns that use CC0 are not allowed
2629 to have any input reloads. */
2630 if (JUMP_P (insn) || CALL_P (insn))
2631 no_output_reloads = 1;
2632
2633 #ifdef HAVE_cc0
2634 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2635 no_input_reloads = 1;
2636 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2637 no_output_reloads = 1;
2638 #endif
2639
2640 #ifdef SECONDARY_MEMORY_NEEDED
2641 /* The eliminated forms of any secondary memory locations are per-insn, so
2642 clear them out here. */
2643
2644 if (secondary_memlocs_elim_used)
2645 {
2646 memset (secondary_memlocs_elim, 0,
2647 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2648 secondary_memlocs_elim_used = 0;
2649 }
2650 #endif
2651
2652 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2653 is cheap to move between them. If it is not, there may not be an insn
2654 to do the copy, so we may need a reload. */
2655 if (GET_CODE (body) == SET
2656 && REG_P (SET_DEST (body))
2657 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2658 && REG_P (SET_SRC (body))
2659 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2660 && register_move_cost (GET_MODE (SET_SRC (body)),
2661 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2662 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2663 return 0;
2664
2665 extract_insn (insn);
2666
2667 noperands = reload_n_operands = recog_data.n_operands;
2668 n_alternatives = recog_data.n_alternatives;
2669
2670 /* Just return "no reloads" if insn has no operands with constraints. */
2671 if (noperands == 0 || n_alternatives == 0)
2672 return 0;
2673
2674 insn_code_number = INSN_CODE (insn);
2675 this_insn_is_asm = insn_code_number < 0;
2676
2677 memcpy (operand_mode, recog_data.operand_mode,
2678 noperands * sizeof (enum machine_mode));
2679 memcpy (constraints, recog_data.constraints,
2680 noperands * sizeof (const char *));
2681
2682 commutative = -1;
2683
2684 /* If we will need to know, later, whether some pair of operands
2685 are the same, we must compare them now and save the result.
2686 Reloading the base and index registers will clobber them
2687 and afterward they will fail to match. */
2688
2689 for (i = 0; i < noperands; i++)
2690 {
2691 const char *p;
2692 int c;
2693 char *end;
2694
2695 substed_operand[i] = recog_data.operand[i];
2696 p = constraints[i];
2697
2698 modified[i] = RELOAD_READ;
2699
2700 /* Scan this operand's constraint to see if it is an output operand,
2701 an in-out operand, is commutative, or should match another. */
2702
2703 while ((c = *p))
2704 {
2705 p += CONSTRAINT_LEN (c, p);
2706 switch (c)
2707 {
2708 case '=':
2709 modified[i] = RELOAD_WRITE;
2710 break;
2711 case '+':
2712 modified[i] = RELOAD_READ_WRITE;
2713 break;
2714 case '%':
2715 {
2716 /* The last operand should not be marked commutative. */
2717 gcc_assert (i != noperands - 1);
2718
2719 /* We currently only support one commutative pair of
2720 operands. Some existing asm code currently uses more
2721 than one pair. Previously, that would usually work,
2722 but sometimes it would crash the compiler. We
2723 continue supporting that case as well as we can by
2724 silently ignoring all but the first pair. In the
2725 future we may handle it correctly. */
2726 if (commutative < 0)
2727 commutative = i;
2728 else
2729 gcc_assert (this_insn_is_asm);
2730 }
2731 break;
2732 /* Use of ISDIGIT is tempting here, but it may get expensive because
2733 of locale support we don't want. */
2734 case '0': case '1': case '2': case '3': case '4':
2735 case '5': case '6': case '7': case '8': case '9':
2736 {
2737 c = strtoul (p - 1, &end, 10);
2738 p = end;
2739
2740 operands_match[c][i]
2741 = operands_match_p (recog_data.operand[c],
2742 recog_data.operand[i]);
2743
2744 /* An operand may not match itself. */
2745 gcc_assert (c != i);
2746
2747 /* If C can be commuted with C+1, and C might need to match I,
2748 then C+1 might also need to match I. */
2749 if (commutative >= 0)
2750 {
2751 if (c == commutative || c == commutative + 1)
2752 {
2753 int other = c + (c == commutative ? 1 : -1);
2754 operands_match[other][i]
2755 = operands_match_p (recog_data.operand[other],
2756 recog_data.operand[i]);
2757 }
2758 if (i == commutative || i == commutative + 1)
2759 {
2760 int other = i + (i == commutative ? 1 : -1);
2761 operands_match[c][other]
2762 = operands_match_p (recog_data.operand[c],
2763 recog_data.operand[other]);
2764 }
2765 /* Note that C is supposed to be less than I.
2766 No need to consider altering both C and I because in
2767 that case we would alter one into the other. */
2768 }
2769 }
2770 }
2771 }
2772 }
2773
2774 /* Examine each operand that is a memory reference or memory address
2775 and reload parts of the addresses into index registers.
2776 Also here any references to pseudo regs that didn't get hard regs
2777 but are equivalent to constants get replaced in the insn itself
2778 with those constants. Nobody will ever see them again.
2779
2780 Finally, set up the preferred classes of each operand. */
2781
2782 for (i = 0; i < noperands; i++)
2783 {
2784 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2785
2786 address_reloaded[i] = 0;
2787 address_operand_reloaded[i] = 0;
2788 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2789 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2790 : RELOAD_OTHER);
2791 address_type[i]
2792 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2793 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2794 : RELOAD_OTHER);
2795
2796 if (*constraints[i] == 0)
2797 /* Ignore things like match_operator operands. */
2798 ;
2799 else if (constraints[i][0] == 'p'
2800 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2801 {
2802 address_operand_reloaded[i]
2803 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2804 recog_data.operand[i],
2805 recog_data.operand_loc[i],
2806 i, operand_type[i], ind_levels, insn);
2807
2808 /* If we now have a simple operand where we used to have a
2809 PLUS or MULT, re-recognize and try again. */
2810 if ((OBJECT_P (*recog_data.operand_loc[i])
2811 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2812 && (GET_CODE (recog_data.operand[i]) == MULT
2813 || GET_CODE (recog_data.operand[i]) == PLUS))
2814 {
2815 INSN_CODE (insn) = -1;
2816 retval = find_reloads (insn, replace, ind_levels, live_known,
2817 reload_reg_p);
2818 return retval;
2819 }
2820
2821 recog_data.operand[i] = *recog_data.operand_loc[i];
2822 substed_operand[i] = recog_data.operand[i];
2823
2824 /* Address operands are reloaded in their existing mode,
2825 no matter what is specified in the machine description. */
2826 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2827 }
2828 else if (code == MEM)
2829 {
2830 address_reloaded[i]
2831 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2832 recog_data.operand_loc[i],
2833 XEXP (recog_data.operand[i], 0),
2834 &XEXP (recog_data.operand[i], 0),
2835 i, address_type[i], ind_levels, insn);
2836 recog_data.operand[i] = *recog_data.operand_loc[i];
2837 substed_operand[i] = recog_data.operand[i];
2838 }
2839 else if (code == SUBREG)
2840 {
2841 rtx reg = SUBREG_REG (recog_data.operand[i]);
2842 rtx op
2843 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2844 ind_levels,
2845 set != 0
2846 && &SET_DEST (set) == recog_data.operand_loc[i],
2847 insn,
2848 &address_reloaded[i]);
2849
2850 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2851 that didn't get a hard register, emit a USE with a REG_EQUAL
2852 note in front so that we might inherit a previous, possibly
2853 wider reload. */
2854
2855 if (replace
2856 && MEM_P (op)
2857 && REG_P (reg)
2858 && (GET_MODE_SIZE (GET_MODE (reg))
2859 >= GET_MODE_SIZE (GET_MODE (op)))
2860 && reg_equiv_constant[REGNO (reg)] == 0)
2861 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2862 insn),
2863 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2864
2865 substed_operand[i] = recog_data.operand[i] = op;
2866 }
2867 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2868 /* We can get a PLUS as an "operand" as a result of register
2869 elimination. See eliminate_regs and gen_reload. We handle
2870 a unary operator by reloading the operand. */
2871 substed_operand[i] = recog_data.operand[i]
2872 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2873 ind_levels, 0, insn,
2874 &address_reloaded[i]);
2875 else if (code == REG)
2876 {
2877 /* This is equivalent to calling find_reloads_toplev.
2878 The code is duplicated for speed.
2879 When we find a pseudo always equivalent to a constant,
2880 we replace it by the constant. We must be sure, however,
2881 that we don't try to replace it in the insn in which it
2882 is being set. */
2883 int regno = REGNO (recog_data.operand[i]);
2884 if (reg_equiv_constant[regno] != 0
2885 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2886 {
2887 /* Record the existing mode so that the check whether constants are
2888 allowed will work when operand_mode isn't specified. */
2889
2890 if (operand_mode[i] == VOIDmode)
2891 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2892
2893 substed_operand[i] = recog_data.operand[i]
2894 = reg_equiv_constant[regno];
2895 }
2896 if (reg_equiv_memory_loc[regno] != 0
2897 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2898 /* We need not give a valid is_set_dest argument since the case
2899 of a constant equivalence was checked above. */
2900 substed_operand[i] = recog_data.operand[i]
2901 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2902 ind_levels, 0, insn,
2903 &address_reloaded[i]);
2904 }
2905 /* If the operand is still a register (we didn't replace it with an
2906 equivalent), get the preferred class to reload it into. */
2907 code = GET_CODE (recog_data.operand[i]);
2908 preferred_class[i]
2909 = ((code == REG && REGNO (recog_data.operand[i])
2910 >= FIRST_PSEUDO_REGISTER)
2911 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2912 : NO_REGS);
2913 pref_or_nothing[i]
2914 = (code == REG
2915 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2916 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2917 }
2918
2919 /* If this is simply a copy from operand 1 to operand 0, merge the
2920 preferred classes for the operands. */
2921 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2922 && recog_data.operand[1] == SET_SRC (set))
2923 {
2924 preferred_class[0] = preferred_class[1]
2925 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2926 pref_or_nothing[0] |= pref_or_nothing[1];
2927 pref_or_nothing[1] |= pref_or_nothing[0];
2928 }
2929
2930 /* Now see what we need for pseudo-regs that didn't get hard regs
2931 or got the wrong kind of hard reg. For this, we must consider
2932 all the operands together against the register constraints. */
2933
2934 best = MAX_RECOG_OPERANDS * 2 + 600;
2935
2936 swapped = 0;
2937 goal_alternative_swapped = 0;
2938 try_swapped:
2939
2940 /* The constraints are made of several alternatives.
2941 Each operand's constraint looks like foo,bar,... with commas
2942 separating the alternatives. The first alternatives for all
2943 operands go together, the second alternatives go together, etc.
2944
2945 First loop over alternatives. */
2946
2947 for (this_alternative_number = 0;
2948 this_alternative_number < n_alternatives;
2949 this_alternative_number++)
2950 {
2951 /* Loop over operands for one constraint alternative. */
2952 /* LOSERS counts those that don't fit this alternative
2953 and would require loading. */
2954 int losers = 0;
2955 /* BAD is set to 1 if some operand can't fit this alternative
2956 even after reloading. */
2957 int bad = 0;
2958 /* REJECT is a count of how undesirable this alternative says it is
2959 if any reloading is required. If the alternative matches exactly
2960 then REJECT is ignored, but otherwise it gets this much
2961 counted against it in addition to the reloading needed. Each
2962 ? counts three times here since we want the disparaging caused by
2963 a bad register class to only count 1/3 as much. */
2964 int reject = 0;
2965
2966 if (!recog_data.alternative_enabled_p[this_alternative_number])
2967 {
2968 int i;
2969
2970 for (i = 0; i < recog_data.n_operands; i++)
2971 constraints[i] = skip_alternative (constraints[i]);
2972
2973 continue;
2974 }
2975
2976 this_earlyclobber = 0;
2977
2978 for (i = 0; i < noperands; i++)
2979 {
2980 const char *p = constraints[i];
2981 char *end;
2982 int len;
2983 int win = 0;
2984 int did_match = 0;
2985 /* 0 => this operand can be reloaded somehow for this alternative. */
2986 int badop = 1;
2987 /* 0 => this operand can be reloaded if the alternative allows regs. */
2988 int winreg = 0;
2989 int c;
2990 int m;
2991 rtx operand = recog_data.operand[i];
2992 int offset = 0;
2993 /* Nonzero means this is a MEM that must be reloaded into a reg
2994 regardless of what the constraint says. */
2995 int force_reload = 0;
2996 int offmemok = 0;
2997 /* Nonzero if a constant forced into memory would be OK for this
2998 operand. */
2999 int constmemok = 0;
3000 int earlyclobber = 0;
3001
3002 /* If the predicate accepts a unary operator, it means that
3003 we need to reload the operand, but do not do this for
3004 match_operator and friends. */
3005 if (UNARY_P (operand) && *p != 0)
3006 operand = XEXP (operand, 0);
3007
3008 /* If the operand is a SUBREG, extract
3009 the REG or MEM (or maybe even a constant) within.
3010 (Constants can occur as a result of reg_equiv_constant.) */
3011
3012 while (GET_CODE (operand) == SUBREG)
3013 {
3014 /* Offset only matters when operand is a REG and
3015 it is a hard reg. This is because it is passed
3016 to reg_fits_class_p if it is a REG and all pseudos
3017 return 0 from that function. */
3018 if (REG_P (SUBREG_REG (operand))
3019 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3020 {
3021 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3022 GET_MODE (SUBREG_REG (operand)),
3023 SUBREG_BYTE (operand),
3024 GET_MODE (operand)) < 0)
3025 force_reload = 1;
3026 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3027 GET_MODE (SUBREG_REG (operand)),
3028 SUBREG_BYTE (operand),
3029 GET_MODE (operand));
3030 }
3031 operand = SUBREG_REG (operand);
3032 /* Force reload if this is a constant or PLUS or if there may
3033 be a problem accessing OPERAND in the outer mode. */
3034 if (CONSTANT_P (operand)
3035 || GET_CODE (operand) == PLUS
3036 /* We must force a reload of paradoxical SUBREGs
3037 of a MEM because the alignment of the inner value
3038 may not be enough to do the outer reference. On
3039 big-endian machines, it may also reference outside
3040 the object.
3041
3042 On machines that extend byte operations and we have a
3043 SUBREG where both the inner and outer modes are no wider
3044 than a word and the inner mode is narrower, is integral,
3045 and gets extended when loaded from memory, combine.c has
3046 made assumptions about the behavior of the machine in such
3047 register accesses. If the data is, in fact, in memory we
3048 must always load using the size assumed to be in the
3049 register and let the insn do the different-sized
3050 accesses.
3051
3052 This is doubly true if WORD_REGISTER_OPERATIONS. In
3053 this case eliminate_regs has left non-paradoxical
3054 subregs for push_reload to see. Make sure it does
3055 by forcing the reload.
3056
3057 ??? When is it right at this stage to have a subreg
3058 of a mem that is _not_ to be handled specially? IMO
3059 those should have been reduced to just a mem. */
3060 || ((MEM_P (operand)
3061 || (REG_P (operand)
3062 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3063 #ifndef WORD_REGISTER_OPERATIONS
3064 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3065 < BIGGEST_ALIGNMENT)
3066 && (GET_MODE_SIZE (operand_mode[i])
3067 > GET_MODE_SIZE (GET_MODE (operand))))
3068 || BYTES_BIG_ENDIAN
3069 #ifdef LOAD_EXTEND_OP
3070 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3071 && (GET_MODE_SIZE (GET_MODE (operand))
3072 <= UNITS_PER_WORD)
3073 && (GET_MODE_SIZE (operand_mode[i])
3074 > GET_MODE_SIZE (GET_MODE (operand)))
3075 && INTEGRAL_MODE_P (GET_MODE (operand))
3076 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3077 #endif
3078 )
3079 #endif
3080 )
3081 )
3082 force_reload = 1;
3083 }
3084
3085 this_alternative[i] = NO_REGS;
3086 this_alternative_win[i] = 0;
3087 this_alternative_match_win[i] = 0;
3088 this_alternative_offmemok[i] = 0;
3089 this_alternative_earlyclobber[i] = 0;
3090 this_alternative_matches[i] = -1;
3091
3092 /* An empty constraint or empty alternative
3093 allows anything which matched the pattern. */
3094 if (*p == 0 || *p == ',')
3095 win = 1, badop = 0;
3096
3097 /* Scan this alternative's specs for this operand;
3098 set WIN if the operand fits any letter in this alternative.
3099 Otherwise, clear BADOP if this operand could
3100 fit some letter after reloads,
3101 or set WINREG if this operand could fit after reloads
3102 provided the constraint allows some registers. */
3103
3104 do
3105 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3106 {
3107 case '\0':
3108 len = 0;
3109 break;
3110 case ',':
3111 c = '\0';
3112 break;
3113
3114 case '=': case '+': case '*':
3115 break;
3116
3117 case '%':
3118 /* We only support one commutative marker, the first
3119 one. We already set commutative above. */
3120 break;
3121
3122 case '?':
3123 reject += 6;
3124 break;
3125
3126 case '!':
3127 reject = 600;
3128 break;
3129
3130 case '#':
3131 /* Ignore rest of this alternative as far as
3132 reloading is concerned. */
3133 do
3134 p++;
3135 while (*p && *p != ',');
3136 len = 0;
3137 break;
3138
3139 case '0': case '1': case '2': case '3': case '4':
3140 case '5': case '6': case '7': case '8': case '9':
3141 m = strtoul (p, &end, 10);
3142 p = end;
3143 len = 0;
3144
3145 this_alternative_matches[i] = m;
3146 /* We are supposed to match a previous operand.
3147 If we do, we win if that one did.
3148 If we do not, count both of the operands as losers.
3149 (This is too conservative, since most of the time
3150 only a single reload insn will be needed to make
3151 the two operands win. As a result, this alternative
3152 may be rejected when it is actually desirable.) */
3153 if ((swapped && (m != commutative || i != commutative + 1))
3154 /* If we are matching as if two operands were swapped,
3155 also pretend that operands_match had been computed
3156 with swapped.
3157 But if I is the second of those and M is the first,
3158 don't exchange them, because operands_match is valid
3159 only on one side of its diagonal. */
3160 ? (operands_match
3161 [(m == commutative || m == commutative + 1)
3162 ? 2 * commutative + 1 - m : m]
3163 [(i == commutative || i == commutative + 1)
3164 ? 2 * commutative + 1 - i : i])
3165 : operands_match[m][i])
3166 {
3167 /* If we are matching a non-offsettable address where an
3168 offsettable address was expected, then we must reject
3169 this combination, because we can't reload it. */
3170 if (this_alternative_offmemok[m]
3171 && MEM_P (recog_data.operand[m])
3172 && this_alternative[m] == NO_REGS
3173 && ! this_alternative_win[m])
3174 bad = 1;
3175
3176 did_match = this_alternative_win[m];
3177 }
3178 else
3179 {
3180 /* Operands don't match. */
3181 rtx value;
3182 int loc1, loc2;
3183 /* Retroactively mark the operand we had to match
3184 as a loser, if it wasn't already. */
3185 if (this_alternative_win[m])
3186 losers++;
3187 this_alternative_win[m] = 0;
3188 if (this_alternative[m] == NO_REGS)
3189 bad = 1;
3190 /* But count the pair only once in the total badness of
3191 this alternative, if the pair can be a dummy reload.
3192 The pointers in operand_loc are not swapped; swap
3193 them by hand if necessary. */
3194 if (swapped && i == commutative)
3195 loc1 = commutative + 1;
3196 else if (swapped && i == commutative + 1)
3197 loc1 = commutative;
3198 else
3199 loc1 = i;
3200 if (swapped && m == commutative)
3201 loc2 = commutative + 1;
3202 else if (swapped && m == commutative + 1)
3203 loc2 = commutative;
3204 else
3205 loc2 = m;
3206 value
3207 = find_dummy_reload (recog_data.operand[i],
3208 recog_data.operand[m],
3209 recog_data.operand_loc[loc1],
3210 recog_data.operand_loc[loc2],
3211 operand_mode[i], operand_mode[m],
3212 this_alternative[m], -1,
3213 this_alternative_earlyclobber[m]);
3214
3215 if (value != 0)
3216 losers--;
3217 }
3218 /* This can be fixed with reloads if the operand
3219 we are supposed to match can be fixed with reloads. */
3220 badop = 0;
3221 this_alternative[i] = this_alternative[m];
3222
3223 /* If we have to reload this operand and some previous
3224 operand also had to match the same thing as this
3225 operand, we don't know how to do that. So reject this
3226 alternative. */
3227 if (! did_match || force_reload)
3228 for (j = 0; j < i; j++)
3229 if (this_alternative_matches[j]
3230 == this_alternative_matches[i])
3231 badop = 1;
3232 break;
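/* Illustrative example: for a two-address add whose operand 1 constraint
   is "0", we get this_alternative_matches[1] == 0 here; if operands 0 and
   1 are not already identical, a single (possibly dummy) reload is usually
   enough to make them match, which is what find_dummy_reload checked.  */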
3233
3234 case 'p':
3235 /* All necessary reloads for an address_operand
3236 were handled in find_reloads_address. */
3237 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3238 SCRATCH);
3239 win = 1;
3240 badop = 0;
3241 break;
3242
3243 case TARGET_MEM_CONSTRAINT:
3244 if (force_reload)
3245 break;
3246 if (MEM_P (operand)
3247 || (REG_P (operand)
3248 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3249 && reg_renumber[REGNO (operand)] < 0))
3250 win = 1;
3251 if (CONST_POOL_OK_P (operand))
3252 badop = 0;
3253 constmemok = 1;
3254 break;
3255
3256 case '<':
3257 if (MEM_P (operand)
3258 && ! address_reloaded[i]
3259 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3260 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3261 win = 1;
3262 break;
3263
3264 case '>':
3265 if (MEM_P (operand)
3266 && ! address_reloaded[i]
3267 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3268 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3269 win = 1;
3270 break;
3271
3272 /* Memory operand whose address is not offsettable. */
3273 case 'V':
3274 if (force_reload)
3275 break;
3276 if (MEM_P (operand)
3277 && ! (ind_levels ? offsettable_memref_p (operand)
3278 : offsettable_nonstrict_memref_p (operand))
3279 /* Certain mem addresses will become offsettable
3280 after they themselves are reloaded. This is important;
3281 we don't want our own handling of unoffsettables
3282 to override the handling of reg_equiv_address. */
3283 && !(REG_P (XEXP (operand, 0))
3284 && (ind_levels == 0
3285 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3286 win = 1;
3287 break;
3288
3289 /* Memory operand whose address is offsettable. */
3290 case 'o':
3291 if (force_reload)
3292 break;
3293 if ((MEM_P (operand)
3294 /* If IND_LEVELS, find_reloads_address won't reload a
3295 pseudo that didn't get a hard reg, so we have to
3296 reject that case. */
3297 && ((ind_levels ? offsettable_memref_p (operand)
3298 : offsettable_nonstrict_memref_p (operand))
3299 /* A reloaded address is offsettable because it is now
3300 just a simple register indirect. */
3301 || address_reloaded[i] == 1))
3302 || (REG_P (operand)
3303 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3304 && reg_renumber[REGNO (operand)] < 0
3305 /* If reg_equiv_address is nonzero, we will be
3306 loading it into a register; hence it will be
3307 offsettable, but we cannot say that reg_equiv_mem
3308 is offsettable without checking. */
3309 && ((reg_equiv_mem[REGNO (operand)] != 0
3310 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3311 || (reg_equiv_address[REGNO (operand)] != 0))))
3312 win = 1;
3313 if (CONST_POOL_OK_P (operand)
3314 || MEM_P (operand))
3315 badop = 0;
3316 constmemok = 1;
3317 offmemok = 1;
3318 break;
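/* For instance, (mem:SI (plus:SI (reg:SI sp) (const_int 16))) is normally
   offsettable, while an autoincrement address such as
   (mem:SI (post_inc:SI (reg:SI r1))) is not; the latter would satisfy the
   '>' case above instead (illustrative only).  */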
3319
3320 case '&':
3321 /* Output operand that is stored before the need for the
3322 input operands (and their index registers) is over. */
3323 earlyclobber = 1, this_earlyclobber = 1;
3324 break;
3325
3326 case 'E':
3327 case 'F':
3328 if (GET_CODE (operand) == CONST_DOUBLE
3329 || (GET_CODE (operand) == CONST_VECTOR
3330 && (GET_MODE_CLASS (GET_MODE (operand))
3331 == MODE_VECTOR_FLOAT)))
3332 win = 1;
3333 break;
3334
3335 case 'G':
3336 case 'H':
3337 if (GET_CODE (operand) == CONST_DOUBLE
3338 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3339 win = 1;
3340 break;
3341
3342 case 's':
3343 if (CONST_INT_P (operand)
3344 || (GET_CODE (operand) == CONST_DOUBLE
3345 && GET_MODE (operand) == VOIDmode))
3346 break;
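/* Fall through: for 's', a constant that is not a plain integer is then
   tested by the 'i' code below.  */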
3347 case 'i':
3348 if (CONSTANT_P (operand)
3349 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3350 win = 1;
3351 break;
3352
3353 case 'n':
3354 if (CONST_INT_P (operand)
3355 || (GET_CODE (operand) == CONST_DOUBLE
3356 && GET_MODE (operand) == VOIDmode))
3357 win = 1;
3358 break;
3359
3360 case 'I':
3361 case 'J':
3362 case 'K':
3363 case 'L':
3364 case 'M':
3365 case 'N':
3366 case 'O':
3367 case 'P':
3368 if (CONST_INT_P (operand)
3369 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3370 win = 1;
3371 break;
3372
3373 case 'X':
3374 force_reload = 0;
3375 win = 1;
3376 break;
3377
3378 case 'g':
3379 if (! force_reload
3380 /* A PLUS is never a valid operand, but reload can make
3381 it from a register when eliminating registers. */
3382 && GET_CODE (operand) != PLUS
3383 /* A SCRATCH is not a valid operand. */
3384 && GET_CODE (operand) != SCRATCH
3385 && (! CONSTANT_P (operand)
3386 || ! flag_pic
3387 || LEGITIMATE_PIC_OPERAND_P (operand))
3388 && (GENERAL_REGS == ALL_REGS
3389 || !REG_P (operand)
3390 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3391 && reg_renumber[REGNO (operand)] < 0)))
3392 win = 1;
3393 /* Drop through into 'r' case. */
3394
3395 case 'r':
3396 this_alternative[i]
3397 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3398 goto reg;
3399
3400 default:
3401 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3402 {
3403 #ifdef EXTRA_CONSTRAINT_STR
3404 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3405 {
3406 if (force_reload)
3407 break;
3408 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3409 win = 1;
3410 /* If the address was already reloaded,
3411 we win as well. */
3412 else if (MEM_P (operand)
3413 && address_reloaded[i] == 1)
3414 win = 1;
3415 /* Likewise if the address will be reloaded because
3416 reg_equiv_address is nonzero. For reg_equiv_mem
3417 we have to check. */
3418 else if (REG_P (operand)
3419 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3420 && reg_renumber[REGNO (operand)] < 0
3421 && ((reg_equiv_mem[REGNO (operand)] != 0
3422 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3423 || (reg_equiv_address[REGNO (operand)] != 0)))
3424 win = 1;
3425
3426 /* If we didn't already win, we can reload
3427 constants via force_const_mem, and other
3428 MEMs by reloading the address like for 'o'. */
3429 if (CONST_POOL_OK_P (operand)
3430 || MEM_P (operand))
3431 badop = 0;
3432 constmemok = 1;
3433 offmemok = 1;
3434 break;
3435 }
3436 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3437 {
3438 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3439 win = 1;
3440
3441 /* If we didn't already win, we can reload
3442 the address into a base register. */
3443 this_alternative[i] = base_reg_class (VOIDmode,
3444 ADDRESS,
3445 SCRATCH);
3446 badop = 0;
3447 break;
3448 }
3449
3450 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3451 win = 1;
3452 #endif
3453 break;
3454 }
3455
3456 this_alternative[i]
3457 = (reg_class_subunion
3458 [this_alternative[i]]
3459 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3460 reg:
3461 if (GET_MODE (operand) == BLKmode)
3462 break;
3463 winreg = 1;
3464 if (REG_P (operand)
3465 && reg_fits_class_p (operand, this_alternative[i],
3466 offset, GET_MODE (recog_data.operand[i])))
3467 win = 1;
3468 break;
3469 }
3470 while ((p += len), c);
3471
3472 constraints[i] = p;
3473
3474 /* If this operand could be handled with a reg,
3475 and some reg is allowed, then this operand can be handled. */
3476 if (winreg && this_alternative[i] != NO_REGS
3477 && (win || !class_only_fixed_regs[this_alternative[i]]))
3478 badop = 0;
3479
3480 /* Record which operands fit this alternative. */
3481 this_alternative_earlyclobber[i] = earlyclobber;
3482 if (win && ! force_reload)
3483 this_alternative_win[i] = 1;
3484 else if (did_match && ! force_reload)
3485 this_alternative_match_win[i] = 1;
3486 else
3487 {
3488 int const_to_mem = 0;
3489
3490 this_alternative_offmemok[i] = offmemok;
3491 losers++;
3492 if (badop)
3493 bad = 1;
3494 /* Alternative loses if it has no regs for a reg operand. */
3495 if (REG_P (operand)
3496 && this_alternative[i] == NO_REGS
3497 && this_alternative_matches[i] < 0)
3498 bad = 1;
3499
3500 /* If this is a constant that is reloaded into the desired
3501 class by copying it to memory first, count that as another
3502 reload. This is consistent with other code and is
3503 required to avoid choosing another alternative when
3504 the constant is moved into memory by this function on
3505 an early reload pass. Note that the test here is
3506 precisely the same as in the code below that calls
3507 force_const_mem. */
3508 if (CONST_POOL_OK_P (operand)
3509 && ((targetm.preferred_reload_class (operand,
3510 this_alternative[i])
3511 == NO_REGS)
3512 || no_input_reloads)
3513 && operand_mode[i] != VOIDmode)
3514 {
3515 const_to_mem = 1;
3516 if (this_alternative[i] != NO_REGS)
3517 losers++;
3518 }
3519
3520 /* Alternative loses if it requires a type of reload not
3521 permitted for this insn. We can always reload SCRATCH
3522 and objects with a REG_UNUSED note. */
3523 if (GET_CODE (operand) != SCRATCH
3524 && modified[i] != RELOAD_READ && no_output_reloads
3525 && ! find_reg_note (insn, REG_UNUSED, operand))
3526 bad = 1;
3527 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3528 && ! const_to_mem)
3529 bad = 1;
3530
3531 /* If we can't reload this value at all, reject this
3532 alternative. Note that we could also lose due to
3533 LIMIT_RELOAD_CLASS, but we don't check that
3534 here. */
3535
3536 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3537 {
3538 if (targetm.preferred_reload_class (operand, this_alternative[i])
3539 == NO_REGS)
3540 reject = 600;
3541
3542 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3543 if (operand_type[i] == RELOAD_FOR_OUTPUT
3544 && (PREFERRED_OUTPUT_RELOAD_CLASS (operand,
3545 this_alternative[i])
3546 == NO_REGS))
3547 reject = 600;
3548 #endif
3549 }
3550
3551 /* We prefer to reload pseudos over reloading other things,
3552 since such reloads may be able to be eliminated later.
3553 If we are reloading a SCRATCH, we won't be generating any
3554 insns, just using a register, so it is also preferred.
3555 So bump REJECT in other cases. Don't do this in the
3556 case where we are forcing a constant into memory and
3557 it will then win, since we don't want a different
3558 alternative to match in that case. */
3559 if (! (REG_P (operand)
3560 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3561 && GET_CODE (operand) != SCRATCH
3562 && ! (const_to_mem && constmemok))
3563 reject += 2;
3564
3565 /* Input reloads can be inherited more often than output
3566 reloads can be removed, so penalize output reloads. */
3567 if (operand_type[i] != RELOAD_FOR_INPUT
3568 && GET_CODE (operand) != SCRATCH)
3569 reject++;
3570 }
3571
3572 /* If this operand is a pseudo register that didn't get a hard
3573 reg and this alternative accepts some register, see if the
3574 class that we want is a subset of the preferred class for this
3575 register. If not, but it intersects that class, use the
3576 preferred class instead. If it does not intersect the preferred
3577 class, show that usage of this alternative should be discouraged;
3578 it will be discouraged more still if the register is `preferred
3579 or nothing'. We do this because it increases the chance of
3580 reusing our spill register in a later insn and avoiding a pair
3581 of memory stores and loads.
3582
3583 Don't bother with this if this alternative will accept this
3584 operand.
3585
3586 Don't do this for a multiword operand, since it is only a
3587 small win and has the risk of requiring more spill registers,
3588 which could cause a large loss.
3589
3590 Don't do this if the preferred class has only one register
3591 because we might otherwise exhaust the class. */
3592
3593 if (! win && ! did_match
3594 && this_alternative[i] != NO_REGS
3595 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3596 && reg_class_size [(int) preferred_class[i]] > 0
3597 && ! small_register_class_p (preferred_class[i]))
3598 {
3599 if (! reg_class_subset_p (this_alternative[i],
3600 preferred_class[i]))
3601 {
3602 /* Since we don't have a way of forming the intersection,
3603 we just do something special if the preferred class
3604 is a subset of the class we have; that's the most
3605 common case anyway. */
3606 if (reg_class_subset_p (preferred_class[i],
3607 this_alternative[i]))
3608 this_alternative[i] = preferred_class[i];
3609 else
3610 reject += (2 + 2 * pref_or_nothing[i]);
3611 }
3612 }
3613 }
3614
3615 /* Now see if any output operands that are marked "earlyclobber"
3616 in this alternative conflict with any input operands
3617 or any memory addresses. */
3618
3619 for (i = 0; i < noperands; i++)
3620 if (this_alternative_earlyclobber[i]
3621 && (this_alternative_win[i] || this_alternative_match_win[i]))
3622 {
3623 struct decomposition early_data;
3624
3625 early_data = decompose (recog_data.operand[i]);
3626
3627 gcc_assert (modified[i] != RELOAD_READ);
3628
3629 if (this_alternative[i] == NO_REGS)
3630 {
3631 this_alternative_earlyclobber[i] = 0;
3632 gcc_assert (this_insn_is_asm);
3633 error_for_asm (this_insn,
3634 "%<&%> constraint used with no register class");
3635 }
3636
3637 for (j = 0; j < noperands; j++)
3638 /* Is this an input operand or a memory ref? */
3639 if ((MEM_P (recog_data.operand[j])
3640 || modified[j] != RELOAD_WRITE)
3641 && j != i
3642 /* Ignore things like match_operator operands. */
3643 && !recog_data.is_operator[j]
3644 /* Don't count an input operand that is constrained to match
3645 the early clobber operand. */
3646 && ! (this_alternative_matches[j] == i
3647 && rtx_equal_p (recog_data.operand[i],
3648 recog_data.operand[j]))
3649 /* Is it altered by storing the earlyclobber operand? */
3650 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3651 early_data))
3652 {
3653 /* If the output is in a non-empty few-regs class,
3654 it's costly to reload it, so reload the input instead. */
3655 if (small_register_class_p (this_alternative[i])
3656 && (REG_P (recog_data.operand[j])
3657 || GET_CODE (recog_data.operand[j]) == SUBREG))
3658 {
3659 losers++;
3660 this_alternative_win[j] = 0;
3661 this_alternative_match_win[j] = 0;
3662 }
3663 else
3664 break;
3665 }
3666 /* If an earlyclobber operand conflicts with something,
3667 it must be reloaded, so request this and count the cost. */
3668 if (j != noperands)
3669 {
3670 losers++;
3671 this_alternative_win[i] = 0;
3672 this_alternative_match_win[j] = 0;
3673 for (j = 0; j < noperands; j++)
3674 if (this_alternative_matches[j] == i
3675 && this_alternative_match_win[j])
3676 {
3677 this_alternative_win[j] = 0;
3678 this_alternative_match_win[j] = 0;
3679 losers++;
3680 }
3681 }
3682 }
3683
3684 /* If one alternative accepts all the operands, no reload required,
3685 choose that alternative; don't consider the remaining ones. */
3686 if (losers == 0)
3687 {
3688 /* Unswap these so that they are never swapped at `finish'. */
3689 if (commutative >= 0)
3690 {
3691 recog_data.operand[commutative] = substed_operand[commutative];
3692 recog_data.operand[commutative + 1]
3693 = substed_operand[commutative + 1];
3694 }
3695 for (i = 0; i < noperands; i++)
3696 {
3697 goal_alternative_win[i] = this_alternative_win[i];
3698 goal_alternative_match_win[i] = this_alternative_match_win[i];
3699 goal_alternative[i] = (reg_class_t) this_alternative[i];
3700 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3701 goal_alternative_matches[i] = this_alternative_matches[i];
3702 goal_alternative_earlyclobber[i]
3703 = this_alternative_earlyclobber[i];
3704 }
3705 goal_alternative_number = this_alternative_number;
3706 goal_alternative_swapped = swapped;
3707 goal_earlyclobber = this_earlyclobber;
3708 goto finish;
3709 }
3710
3711 /* REJECT, set by the ! and ? constraint characters and when a register
3712 would be reloaded into a non-preferred class, discourages the use of
3713 this alternative for a reload goal. REJECT is incremented by six
3714 for each ? and two for each non-preferred class. */
3715 losers = losers * 6 + reject;
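/* Worked example (illustrative): with this weighting, an alternative that
   needs one reload and carries one '?' scores 1*6 + 6 == 12, the same as a
   clean alternative needing two reloads, so a '?' costs about as much as
   an extra reload; '!' (reject = 600) effectively loses to any alternative
   that can be fixed with a reasonable number of reloads.  */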
3716
3717 /* If this alternative can be made to work by reloading,
3718 and it needs less reloading than the others checked so far,
3719 record it as the chosen goal for reloading. */
3720 if (! bad)
3721 {
3722 if (best > losers)
3723 {
3724 for (i = 0; i < noperands; i++)
3725 {
3726 goal_alternative[i] = (reg_class_t) this_alternative[i];
3727 goal_alternative_win[i] = this_alternative_win[i];
3728 goal_alternative_match_win[i]
3729 = this_alternative_match_win[i];
3730 goal_alternative_offmemok[i]
3731 = this_alternative_offmemok[i];
3732 goal_alternative_matches[i] = this_alternative_matches[i];
3733 goal_alternative_earlyclobber[i]
3734 = this_alternative_earlyclobber[i];
3735 }
3736 goal_alternative_swapped = swapped;
3737 best = losers;
3738 goal_alternative_number = this_alternative_number;
3739 goal_earlyclobber = this_earlyclobber;
3740 }
3741 }
3742 }
3743
3744 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3745 then we need to try each alternative twice,
3746 the second time matching those two operands
3747 as if we had exchanged them.
3748 To do this, really exchange them in operands.
3749
3750 If we have just tried the alternatives the second time,
3751 return operands to normal and drop through. */
3752
3753 if (commutative >= 0)
3754 {
3755 swapped = !swapped;
3756 if (swapped)
3757 {
3758 enum reg_class tclass;
3759 int t;
3760
3761 recog_data.operand[commutative] = substed_operand[commutative + 1];
3762 recog_data.operand[commutative + 1] = substed_operand[commutative];
3763 /* Swap the duplicates too. */
3764 for (i = 0; i < recog_data.n_dups; i++)
3765 if (recog_data.dup_num[i] == commutative
3766 || recog_data.dup_num[i] == commutative + 1)
3767 *recog_data.dup_loc[i]
3768 = recog_data.operand[(int) recog_data.dup_num[i]];
3769
3770 tclass = preferred_class[commutative];
3771 preferred_class[commutative] = preferred_class[commutative + 1];
3772 preferred_class[commutative + 1] = tclass;
3773
3774 t = pref_or_nothing[commutative];
3775 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3776 pref_or_nothing[commutative + 1] = t;
3777
3778 t = address_reloaded[commutative];
3779 address_reloaded[commutative] = address_reloaded[commutative + 1];
3780 address_reloaded[commutative + 1] = t;
3781
3782 memcpy (constraints, recog_data.constraints,
3783 noperands * sizeof (const char *));
3784 goto try_swapped;
3785 }
3786 else
3787 {
3788 recog_data.operand[commutative] = substed_operand[commutative];
3789 recog_data.operand[commutative + 1]
3790 = substed_operand[commutative + 1];
3791 /* Unswap the duplicates too. */
3792 for (i = 0; i < recog_data.n_dups; i++)
3793 if (recog_data.dup_num[i] == commutative
3794 || recog_data.dup_num[i] == commutative + 1)
3795 *recog_data.dup_loc[i]
3796 = recog_data.operand[(int) recog_data.dup_num[i]];
3797 }
3798 }
3799
3800 /* The operands don't meet the constraints.
3801 goal_alternative describes the alternative
3802 that we could reach by reloading the fewest operands.
3803 Reload so as to fit it. */
3804
3805 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3806 {
3807 /* No alternative works with reloads?? */
3808 if (insn_code_number >= 0)
3809 fatal_insn ("unable to generate reloads for:", insn);
3810 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3811 /* Avoid further trouble with this insn. */
3812 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3813 n_reloads = 0;
3814 return 0;
3815 }
3816
3817 /* Jump to `finish' from above if all operands are valid already.
3818 In that case, goal_alternative_win is all 1. */
3819 finish:
3820
3821 /* Right now, for any pair of operands I and J that are required to match,
3822 with I < J,
3823 goal_alternative_matches[J] is I.
3824 Set up goal_alternative_matched as the inverse function:
3825 goal_alternative_matched[I] = J. */
3826
3827 for (i = 0; i < noperands; i++)
3828 goal_alternative_matched[i] = -1;
3829
3830 for (i = 0; i < noperands; i++)
3831 if (! goal_alternative_win[i]
3832 && goal_alternative_matches[i] >= 0)
3833 goal_alternative_matched[goal_alternative_matches[i]] = i;
3834
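/* Illustrative example: if operand 2 was constrained to match operand 0
   and operand 2 still needs a reload, goal_alternative_matches[2] == 0 and
   the loop above records the inverse, goal_alternative_matched[0] == 2.  */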
3835 for (i = 0; i < noperands; i++)
3836 goal_alternative_win[i] |= goal_alternative_match_win[i];
3837
3838 /* If the best alternative is with operands 1 and 2 swapped,
3839 consider them swapped before reporting the reloads. Update the
3840 operand numbers of any reloads already pushed. */
3841
3842 if (goal_alternative_swapped)
3843 {
3844 rtx tem;
3845
3846 tem = substed_operand[commutative];
3847 substed_operand[commutative] = substed_operand[commutative + 1];
3848 substed_operand[commutative + 1] = tem;
3849 tem = recog_data.operand[commutative];
3850 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3851 recog_data.operand[commutative + 1] = tem;
3852 tem = *recog_data.operand_loc[commutative];
3853 *recog_data.operand_loc[commutative]
3854 = *recog_data.operand_loc[commutative + 1];
3855 *recog_data.operand_loc[commutative + 1] = tem;
3856
3857 for (i = 0; i < n_reloads; i++)
3858 {
3859 if (rld[i].opnum == commutative)
3860 rld[i].opnum = commutative + 1;
3861 else if (rld[i].opnum == commutative + 1)
3862 rld[i].opnum = commutative;
3863 }
3864 }
3865
3866 for (i = 0; i < noperands; i++)
3867 {
3868 operand_reloadnum[i] = -1;
3869
3870 /* If this is an earlyclobber operand, we need to widen the scope.
3871 The reload must remain valid from the start of the insn being
3872 reloaded until after the operand is stored into its destination.
3873 We approximate this with RELOAD_OTHER even though we know that we
3874 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3875
3876 One special case that is worth checking is when we have an
3877 output that is earlyclobber but isn't used past the insn (typically
3878 a SCRATCH). In this case, we need only have the reload live
3879 through the insn itself, but not for any of our input or output
3880 reloads.
3881 But we must not accidentally narrow the scope of an existing
3882 RELOAD_OTHER reload - leave these alone.
3883
3884 In any case, anything needed to address this operand can remain
3885 however it was previously categorized. */
3886
3887 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3888 operand_type[i]
3889 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3890 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3891 }
3892
3893 /* Any constants that aren't allowed and can't be reloaded
3894 into registers are here changed into memory references. */
3895 for (i = 0; i < noperands; i++)
3896 if (! goal_alternative_win[i])
3897 {
3898 rtx op = recog_data.operand[i];
3899 rtx subreg = NULL_RTX;
3900 rtx plus = NULL_RTX;
3901 enum machine_mode mode = operand_mode[i];
3902
3903 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3904 push_reload so we have to let them pass here. */
3905 if (GET_CODE (op) == SUBREG)
3906 {
3907 subreg = op;
3908 op = SUBREG_REG (op);
3909 mode = GET_MODE (op);
3910 }
3911
3912 if (GET_CODE (op) == PLUS)
3913 {
3914 plus = op;
3915 op = XEXP (op, 1);
3916 }
3917
3918 if (CONST_POOL_OK_P (op)
3919 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3920 == NO_REGS)
3921 || no_input_reloads)
3922 && mode != VOIDmode)
3923 {
3924 int this_address_reloaded;
3925 rtx tem = force_const_mem (mode, op);
3926
3927 /* If we stripped a SUBREG or a PLUS above add it back. */
3928 if (plus != NULL_RTX)
3929 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3930
3931 if (subreg != NULL_RTX)
3932 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3933
3934 this_address_reloaded = 0;
3935 substed_operand[i] = recog_data.operand[i]
3936 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3937 0, insn, &this_address_reloaded);
3938
3939 /* If the alternative accepts constant pool refs directly
3940 there will be no reload needed at all. */
3941 if (plus == NULL_RTX
3942 && subreg == NULL_RTX
3943 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3944 ? substed_operand[i]
3945 : NULL,
3946 recog_data.constraints[i],
3947 goal_alternative_number))
3948 goal_alternative_win[i] = 1;
3949 }
3950 }
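/* Sketch of a typical case (illustrative only): an SImode operand
   (const_int 305419896) whose alternative gives it preferred reload class
   NO_REGS is rewritten by force_const_mem into a constant-pool reference
   such as (mem/u/c:SI (symbol_ref:SI ("*.LC0"))), and find_reloads_toplev
   then reloads that address if necessary.  */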
3951
3952 /* Record the values of the earlyclobber operands for the caller. */
3953 if (goal_earlyclobber)
3954 for (i = 0; i < noperands; i++)
3955 if (goal_alternative_earlyclobber[i])
3956 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3957
3958 /* Now record reloads for all the operands that need them. */
3959 for (i = 0; i < noperands; i++)
3960 if (! goal_alternative_win[i])
3961 {
3962 /* Operands that match previous ones have already been handled. */
3963 if (goal_alternative_matches[i] >= 0)
3964 ;
3965 /* Handle an operand with a nonoffsettable address
3966 appearing where an offsettable address will do
3967 by reloading the address into a base register.
3968
3969 ??? We can also do this when the operand is a register and
3970 reg_equiv_mem is not offsettable, but this is a bit tricky,
3971 so we don't bother with it. It may not be worth doing. */
3972 else if (goal_alternative_matched[i] == -1
3973 && goal_alternative_offmemok[i]
3974 && MEM_P (recog_data.operand[i]))
3975 {
3976 /* If the address to be reloaded is a VOIDmode constant,
3977 use the default address mode as the mode of the reload register,
3978 as would have been done by find_reloads_address. */
3979 enum machine_mode address_mode;
3980 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3981 if (address_mode == VOIDmode)
3982 {
3983 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3984 address_mode = targetm.addr_space.address_mode (as);
3985 }
3986
3987 operand_reloadnum[i]
3988 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3989 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3990 base_reg_class (VOIDmode, MEM, SCRATCH),
3991 address_mode,
3992 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3993 rld[operand_reloadnum[i]].inc
3994 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3995
3996 /* If this operand is an output, we will have made any
3997 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3998 now we are treating part of the operand as an input, so
3999 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4000
4001 if (modified[i] == RELOAD_WRITE)
4002 {
4003 for (j = 0; j < n_reloads; j++)
4004 {
4005 if (rld[j].opnum == i)
4006 {
4007 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4008 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4009 else if (rld[j].when_needed
4010 == RELOAD_FOR_OUTADDR_ADDRESS)
4011 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4012 }
4013 }
4014 }
4015 }
4016 else if (goal_alternative_matched[i] == -1)
4017 {
4018 operand_reloadnum[i]
4019 = push_reload ((modified[i] != RELOAD_WRITE
4020 ? recog_data.operand[i] : 0),
4021 (modified[i] != RELOAD_READ
4022 ? recog_data.operand[i] : 0),
4023 (modified[i] != RELOAD_WRITE
4024 ? recog_data.operand_loc[i] : 0),
4025 (modified[i] != RELOAD_READ
4026 ? recog_data.operand_loc[i] : 0),
4027 (enum reg_class) goal_alternative[i],
4028 (modified[i] == RELOAD_WRITE
4029 ? VOIDmode : operand_mode[i]),
4030 (modified[i] == RELOAD_READ
4031 ? VOIDmode : operand_mode[i]),
4032 (insn_code_number < 0 ? 0
4033 : insn_data[insn_code_number].operand[i].strict_low),
4034 0, i, operand_type[i]);
4035 }
4036 /* In a matching pair of operands, one must be input only
4037 and the other must be output only.
4038 Pass the input operand as IN and the other as OUT. */
4039 else if (modified[i] == RELOAD_READ
4040 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4041 {
4042 operand_reloadnum[i]
4043 = push_reload (recog_data.operand[i],
4044 recog_data.operand[goal_alternative_matched[i]],
4045 recog_data.operand_loc[i],
4046 recog_data.operand_loc[goal_alternative_matched[i]],
4047 (enum reg_class) goal_alternative[i],
4048 operand_mode[i],
4049 operand_mode[goal_alternative_matched[i]],
4050 0, 0, i, RELOAD_OTHER);
4051 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4052 }
4053 else if (modified[i] == RELOAD_WRITE
4054 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4055 {
4056 operand_reloadnum[goal_alternative_matched[i]]
4057 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4058 recog_data.operand[i],
4059 recog_data.operand_loc[goal_alternative_matched[i]],
4060 recog_data.operand_loc[i],
4061 (enum reg_class) goal_alternative[i],
4062 operand_mode[goal_alternative_matched[i]],
4063 operand_mode[i],
4064 0, 0, i, RELOAD_OTHER);
4065 operand_reloadnum[i] = output_reloadnum;
4066 }
4067 else
4068 {
4069 gcc_assert (insn_code_number < 0);
4070 error_for_asm (insn, "inconsistent operand constraints "
4071 "in an %<asm%>");
4072 /* Avoid further trouble with this insn. */
4073 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4074 n_reloads = 0;
4075 return 0;
4076 }
4077 }
4078 else if (goal_alternative_matched[i] < 0
4079 && goal_alternative_matches[i] < 0
4080 && address_operand_reloaded[i] != 1
4081 && optimize)
4082 {
4083 /* For each non-matching operand that's a MEM or a pseudo-register
4084 that didn't get a hard register, make an optional reload.
4085 This may get done even if the insn needs no reloads otherwise. */
4086
4087 rtx operand = recog_data.operand[i];
4088
4089 while (GET_CODE (operand) == SUBREG)
4090 operand = SUBREG_REG (operand);
4091 if ((MEM_P (operand)
4092 || (REG_P (operand)
4093 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4094 /* If this is only for an output, the optional reload would not
4095 actually cause us to use a register now, just note that
4096 something is stored here. */
4097 && (goal_alternative[i] != NO_REGS
4098 || modified[i] == RELOAD_WRITE)
4099 && ! no_input_reloads
4100 /* An optional output reload might allow us to delete INSN later.
4101 We mustn't make in-out reloads on insns that are not permitted
4102 to have output reloads.
4103 If this is an asm, we can't delete it; we must not even call
4104 push_reload for an optional output reload in this case,
4105 because we can't be sure that the constraint allows a register,
4106 and push_reload verifies the constraints for asms. */
4107 && (modified[i] == RELOAD_READ
4108 || (! no_output_reloads && ! this_insn_is_asm)))
4109 operand_reloadnum[i]
4110 = push_reload ((modified[i] != RELOAD_WRITE
4111 ? recog_data.operand[i] : 0),
4112 (modified[i] != RELOAD_READ
4113 ? recog_data.operand[i] : 0),
4114 (modified[i] != RELOAD_WRITE
4115 ? recog_data.operand_loc[i] : 0),
4116 (modified[i] != RELOAD_READ
4117 ? recog_data.operand_loc[i] : 0),
4118 (enum reg_class) goal_alternative[i],
4119 (modified[i] == RELOAD_WRITE
4120 ? VOIDmode : operand_mode[i]),
4121 (modified[i] == RELOAD_READ
4122 ? VOIDmode : operand_mode[i]),
4123 (insn_code_number < 0 ? 0
4124 : insn_data[insn_code_number].operand[i].strict_low),
4125 1, i, operand_type[i]);
4126 /* If a memory reference remains (either as a MEM or a pseudo that
4127 did not get a hard register), yet we can't make an optional
4128 reload, check if this is actually a pseudo register reference;
4129 we then need to emit a USE and/or a CLOBBER so that reload
4130 inheritance will do the right thing. */
4131 else if (replace
4132 && (MEM_P (operand)
4133 || (REG_P (operand)
4134 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4135 && reg_renumber [REGNO (operand)] < 0)))
4136 {
4137 operand = *recog_data.operand_loc[i];
4138
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if (REG_P (operand))
4142 {
4143 if (modified[i] != RELOAD_WRITE)
4144 /* We mark the USE with QImode so that we recognize
4145 it as one that can be safely deleted at the end
4146 of reload. */
4147 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4148 insn), QImode);
4149 if (modified[i] != RELOAD_READ)
4150 emit_insn_after (gen_clobber (operand), insn);
4151 }
4152 }
4153 }
4154 else if (goal_alternative_matches[i] >= 0
4155 && goal_alternative_win[goal_alternative_matches[i]]
4156 && modified[i] == RELOAD_READ
4157 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4158 && ! no_input_reloads && ! no_output_reloads
4159 && optimize)
4160 {
4161 /* Similarly, make an optional reload for a pair of matching
4162 objects that are in MEM or a pseudo that didn't get a hard reg. */
4163
4164 rtx operand = recog_data.operand[i];
4165
4166 while (GET_CODE (operand) == SUBREG)
4167 operand = SUBREG_REG (operand);
4168 if ((MEM_P (operand)
4169 || (REG_P (operand)
4170 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4171 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4172 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4173 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4174 recog_data.operand[i],
4175 recog_data.operand_loc[goal_alternative_matches[i]],
4176 recog_data.operand_loc[i],
4177 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4178 operand_mode[goal_alternative_matches[i]],
4179 operand_mode[i],
4180 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4181 }
4182
4183 /* Perform whatever substitutions on the operands we are supposed
4184 to make due to commutativity or replacement of registers
4185 with equivalent constants or memory slots. */
4186
4187 for (i = 0; i < noperands; i++)
4188 {
4189 /* We only do this on the last pass through reload, because it is
4190 possible for some data (like reg_equiv_address) to be changed during
4191 later passes. Moreover, we lose the opportunity to get a useful
4192 reload_{in,out}_reg when we do these replacements. */
4193
4194 if (replace)
4195 {
4196 rtx substitution = substed_operand[i];
4197
4198 *recog_data.operand_loc[i] = substitution;
4199
4200 /* If we're replacing an operand with a LABEL_REF, we need to
4201 make sure that there's a REG_LABEL_OPERAND note attached to
4202 this instruction. */
4203 if (GET_CODE (substitution) == LABEL_REF
4204 && !find_reg_note (insn, REG_LABEL_OPERAND,
4205 XEXP (substitution, 0))
4206 /* For a JUMP_P, if it was a branch target it must have
4207 already been recorded as such. */
4208 && (!JUMP_P (insn)
4209 || !label_is_jump_target_p (XEXP (substitution, 0),
4210 insn)))
4211 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4212 }
4213 else
4214 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4215 }
4216
4217 /* If this insn pattern contains any MATCH_DUP's, make sure that
4218 they will be substituted if the operands they match are substituted.
4219 Also do now any substitutions we already did on the operands.
4220
4221 Don't do this if we aren't making replacements because we might be
4222 propagating things allocated by frame pointer elimination into places
4223 it doesn't expect. */
4224
4225 if (insn_code_number >= 0 && replace)
4226 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4227 {
4228 int opno = recog_data.dup_num[i];
4229 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4230 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4231 }
4232
4233 #if 0
4234 /* This loses because reloading of prior insns can invalidate the equivalence
4235 (or at least find_equiv_reg isn't smart enough to find it any more),
4236 causing this insn to need more reload regs than it needed before.
4237 It may be too late to make the reload regs available.
4238 Now this optimization is done safely in choose_reload_regs. */
4239
4240 /* For each reload of a reg into some other class of reg,
4241 search for an existing equivalent reg (same value now) in the right class.
4242 We can use it as long as we don't need to change its contents. */
4243 for (i = 0; i < n_reloads; i++)
4244 if (rld[i].reg_rtx == 0
4245 && rld[i].in != 0
4246 && REG_P (rld[i].in)
4247 && rld[i].out == 0)
4248 {
4249 rld[i].reg_rtx
4250 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4251 static_reload_reg_p, 0, rld[i].inmode);
4252 /* Prevent generation of insn to load the value
4253 because the one we found already has the value. */
4254 if (rld[i].reg_rtx)
4255 rld[i].in = rld[i].reg_rtx;
4256 }
4257 #endif
4258
4259 /* If we detected an error and replaced the asm instruction with a USE, forget
4260 about the reloads. */
4261 if (GET_CODE (PATTERN (insn)) == USE
4262 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4263 n_reloads = 0;
4264
4265 /* Perhaps an output reload can be combined with another
4266 to reduce needs by one. */
4267 if (!goal_earlyclobber)
4268 combine_reloads ();
4269
4270 /* If we have a pair of reloads for parts of an address, they reload
4271 the same object, the operands themselves were not reloaded, and they
4272 are for two operands that are supposed to match, then merge the reloads
4273 and change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4274
4275 for (i = 0; i < n_reloads; i++)
4276 {
4277 int k;
4278
4279 for (j = i + 1; j < n_reloads; j++)
4280 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4281 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4282 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4283 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4284 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4285 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4286 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4287 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4288 && rtx_equal_p (rld[i].in, rld[j].in)
4289 && (operand_reloadnum[rld[i].opnum] < 0
4290 || rld[operand_reloadnum[rld[i].opnum]].optional)
4291 && (operand_reloadnum[rld[j].opnum] < 0
4292 || rld[operand_reloadnum[rld[j].opnum]].optional)
4293 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4294 || (goal_alternative_matches[rld[j].opnum]
4295 == rld[i].opnum)))
4296 {
4297 for (k = 0; k < n_replacements; k++)
4298 if (replacements[k].what == j)
4299 replacements[k].what = i;
4300
4301 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4302 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4303 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4304 else
4305 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4306 rld[j].in = 0;
4307 }
4308 }
4309
4310 /* Scan all the reloads and update their type.
4311 If a reload is for the address of an operand and we didn't reload
4312 that operand, change the type. Similarly, change the operand number
4313 of a reload when two operands match. If a reload is optional, treat it
4314 as though the operand isn't reloaded.
4315
4316 ??? This latter case is somewhat odd because if we do the optional
4317 reload, it means the object is hanging around. Thus we need only
4318 do the address reload if the optional reload was NOT done.
4319
4320 Change secondary reloads to be the address type of their operand, not
4321 the normal type.
4322
4323 If an operand's reload is now RELOAD_OTHER, change any
4324 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4325 RELOAD_FOR_OTHER_ADDRESS. */
4326
4327 for (i = 0; i < n_reloads; i++)
4328 {
4329 if (rld[i].secondary_p
4330 && rld[i].when_needed == operand_type[rld[i].opnum])
4331 rld[i].when_needed = address_type[rld[i].opnum];
4332
4333 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4334 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4335 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4336 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4337 && (operand_reloadnum[rld[i].opnum] < 0
4338 || rld[operand_reloadnum[rld[i].opnum]].optional))
4339 {
4340 /* If we have a secondary reload to go along with this reload,
4341 change its type to RELOAD_FOR_OPADDR_ADDR. */
4342
4343 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4344 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4345 && rld[i].secondary_in_reload != -1)
4346 {
4347 int secondary_in_reload = rld[i].secondary_in_reload;
4348
4349 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4350
4351 /* If there's a tertiary reload we have to change it also. */
4352 if (secondary_in_reload > 0
4353 && rld[secondary_in_reload].secondary_in_reload != -1)
4354 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4355 = RELOAD_FOR_OPADDR_ADDR;
4356 }
4357
4358 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4360 && rld[i].secondary_out_reload != -1)
4361 {
4362 int secondary_out_reload = rld[i].secondary_out_reload;
4363
4364 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4365
4366 /* If there's a tertiary reload we have to change it also. */
4367 if (secondary_out_reload
4368 && rld[secondary_out_reload].secondary_out_reload != -1)
4369 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4370 = RELOAD_FOR_OPADDR_ADDR;
4371 }
4372
4373 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4374 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4375 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4376 else
4377 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4378 }
4379
4380 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4381 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4382 && operand_reloadnum[rld[i].opnum] >= 0
4383 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4384 == RELOAD_OTHER))
4385 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4386
4387 if (goal_alternative_matches[rld[i].opnum] >= 0)
4388 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4389 }
4390
4391 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4392 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4393 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4394
4395 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4396 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4397 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4398 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4399 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4400 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4401 This is complicated by the fact that a single operand can have more
4402 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4403 choose_reload_regs without affecting code quality, and cases that
4404 actually fail are extremely rare, so it turns out to be better to fix
4405 the problem here by not generating cases that choose_reload_regs will
4406 fail for. */
4407 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4408 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4409 a single operand.
4410 We can reduce register pressure by exploiting the fact that a
4411 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4412 does not conflict with any of them, if it is only used for the first of
4413 the RELOAD_FOR_X_ADDRESS reloads. */
4414 {
4415 int first_op_addr_num = -2;
4416 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4417 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4418 int need_change = 0;
4419 /* We use first_op_addr_num and the contents of the above arrays
4420 first as flags - -2 means no instance encountered, -1 means exactly
4421 one instance encountered.
4422 If more than one instance has been encountered, we store the reload
4423 number of the first reload of the kind in question; reload numbers
4424 are known to be non-negative. */
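/* Illustrative trace of the flag trick: the first instance of a kind takes
   its counter from -2 to -1, so nothing is recorded; the second and any
   later instance make the ++ test succeed, and since the loop runs from
   high reload numbers to low, the counter ends up holding the number of
   the first (lowest-numbered) reload of that kind, with need_change set.  */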
4425 for (i = 0; i < noperands; i++)
4426 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4427 for (i = n_reloads - 1; i >= 0; i--)
4428 {
4429 switch (rld[i].when_needed)
4430 {
4431 case RELOAD_FOR_OPERAND_ADDRESS:
4432 if (++first_op_addr_num >= 0)
4433 {
4434 first_op_addr_num = i;
4435 need_change = 1;
4436 }
4437 break;
4438 case RELOAD_FOR_INPUT_ADDRESS:
4439 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4440 {
4441 first_inpaddr_num[rld[i].opnum] = i;
4442 need_change = 1;
4443 }
4444 break;
4445 case RELOAD_FOR_OUTPUT_ADDRESS:
4446 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4447 {
4448 first_outpaddr_num[rld[i].opnum] = i;
4449 need_change = 1;
4450 }
4451 break;
4452 default:
4453 break;
4454 }
4455 }
4456
4457 if (need_change)
4458 {
4459 for (i = 0; i < n_reloads; i++)
4460 {
4461 int first_num;
4462 enum reload_type type;
4463
4464 switch (rld[i].when_needed)
4465 {
4466 case RELOAD_FOR_OPADDR_ADDR:
4467 first_num = first_op_addr_num;
4468 type = RELOAD_FOR_OPERAND_ADDRESS;
4469 break;
4470 case RELOAD_FOR_INPADDR_ADDRESS:
4471 first_num = first_inpaddr_num[rld[i].opnum];
4472 type = RELOAD_FOR_INPUT_ADDRESS;
4473 break;
4474 case RELOAD_FOR_OUTADDR_ADDRESS:
4475 first_num = first_outpaddr_num[rld[i].opnum];
4476 type = RELOAD_FOR_OUTPUT_ADDRESS;
4477 break;
4478 default:
4479 continue;
4480 }
4481 if (first_num < 0)
4482 continue;
4483 else if (i > first_num)
4484 rld[i].when_needed = type;
4485 else
4486 {
4487 /* Check if the only TYPE reload that uses reload I is
4488 reload FIRST_NUM. */
4489 for (j = n_reloads - 1; j > first_num; j--)
4490 {
4491 if (rld[j].when_needed == type
4492 && (rld[i].secondary_p
4493 ? rld[j].secondary_in_reload == i
4494 : reg_mentioned_p (rld[i].in, rld[j].in)))
4495 {
4496 rld[i].when_needed = type;
4497 break;
4498 }
4499 }
4500 }
4501 }
4502 }
4503 }
4504
4505 /* See if we have any reloads that are now allowed to be merged
4506 because we've changed when the reload is needed to
4507 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4508 check for the most common cases. */
4509
4510 for (i = 0; i < n_reloads; i++)
4511 if (rld[i].in != 0 && rld[i].out == 0
4512 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4513 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4514 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4515 for (j = 0; j < n_reloads; j++)
4516 if (i != j && rld[j].in != 0 && rld[j].out == 0
4517 && rld[j].when_needed == rld[i].when_needed
4518 && MATCHES (rld[i].in, rld[j].in)
4519 && rld[i].rclass == rld[j].rclass
4520 && !rld[i].nocombine && !rld[j].nocombine
4521 && rld[i].reg_rtx == rld[j].reg_rtx)
4522 {
4523 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4524 transfer_replacements (i, j);
4525 rld[j].in = 0;
4526 }
4527
4528 #ifdef HAVE_cc0
4529 /* If we made any reloads for addresses, see if they violate a
4530 "no input reloads" requirement for this insn. But loads that we
4531 do after the insn (such as for output addresses) are fine. */
4532 if (no_input_reloads)
4533 for (i = 0; i < n_reloads; i++)
4534 gcc_assert (rld[i].in == 0
4535 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4536 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4537 #endif
4538
4539 /* Compute reload_mode and reload_nregs. */
4540 for (i = 0; i < n_reloads; i++)
4541 {
4542 rld[i].mode
4543 = (rld[i].inmode == VOIDmode
4544 || (GET_MODE_SIZE (rld[i].outmode)
4545 > GET_MODE_SIZE (rld[i].inmode)))
4546 ? rld[i].outmode : rld[i].inmode;
4547
4548 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4549 }
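/* For example (illustrative, 32-bit target): a reload with inmode HImode
   and outmode SImode gets rld[i].mode == SImode, and CLASS_MAX_NREGS of
   that mode in GENERAL_REGS is typically 1, while a DImode reload would
   typically need 2 registers.  */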
4550
4551 /* Special case a simple move with an input reload and a
4552 destination of a hard reg: if the hard reg is ok, use it. */
4553 for (i = 0; i < n_reloads; i++)
4554 if (rld[i].when_needed == RELOAD_FOR_INPUT
4555 && GET_CODE (PATTERN (insn)) == SET
4556 && REG_P (SET_DEST (PATTERN (insn)))
4557 && (SET_SRC (PATTERN (insn)) == rld[i].in
4558 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4559 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4560 {
4561 rtx dest = SET_DEST (PATTERN (insn));
4562 unsigned int regno = REGNO (dest);
4563
4564 if (regno < FIRST_PSEUDO_REGISTER
4565 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4566 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4567 {
4568 int nr = hard_regno_nregs[regno][rld[i].mode];
4569 int ok = 1, nri;
4570
4571 for (nri = 1; nri < nr; nri ++)
4572 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4573 ok = 0;
4574
4575 if (ok)
4576 rld[i].reg_rtx = dest;
4577 }
4578 }
4579
4580 return retval;
4581 }
4582
4583 /* Return true if alternative number ALTNUM in constraint-string
4584 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4585 MEM gives the reference if it didn't need any reloads, otherwise it
4586 is null. */
4587
4588 static bool
4589 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4590 const char *constraint, int altnum)
4591 {
4592 int c;
4593
4594 /* Skip alternatives before the one requested. */
4595 while (altnum > 0)
4596 {
4597 while (*constraint++ != ',');
4598 altnum--;
4599 }
4600 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4601 If one of them is present, this alternative accepts the result of
4602 passing a constant-pool reference through find_reloads_toplev.
4603
4604 The same is true of extra memory constraints if the address
4605 was reloaded into a register. However, the target may elect
4606 to disallow the original constant address, forcing it to be
4607 reloaded into a register instead. */
4608 for (; (c = *constraint) && c != ',' && c != '#';
4609 constraint += CONSTRAINT_LEN (c, constraint))
4610 {
4611 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4612 return true;
4613 #ifdef EXTRA_CONSTRAINT_STR
4614 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4615 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4616 return true;
4617 #endif
4618 }
4619 return false;
4620 }
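/* Example (illustrative): with CONSTRAINT == "r,o,r" and ALTNUM == 1 the
   code above first skips past the leading "r," and then finds 'o' in the
   requested alternative, so it returns true; for ALTNUM == 0 or 2 it only
   sees 'r' and returns false.  */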
4621 \f
4622 /* Scan X for memory references and scan the addresses for reloading.
4623 Also checks for references to "constant" regs that we want to eliminate
4624 and replaces them with the values they stand for.
4625 We may alter X destructively if it contains a reference to such.
4626 If X is just a constant reg, we return the equivalent value
4627 instead of X.
4628
4629 IND_LEVELS says how many levels of indirect addressing this machine
4630 supports.
4631
4632 OPNUM and TYPE identify the purpose of the reload.
4633
4634 IS_SET_DEST is true if X is the destination of a SET, which is not
4635 appropriate to be replaced by a constant.
4636
4637 INSN, if nonzero, is the insn in which we do the reload. It is used
4638 to determine if we may generate output reloads, and where to put USEs
4639 for pseudos that we have to replace with stack slots.
4640
4641 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4642 result of find_reloads_address. */
4643
4644 static rtx
4645 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4646 int ind_levels, int is_set_dest, rtx insn,
4647 int *address_reloaded)
4648 {
4649 RTX_CODE code = GET_CODE (x);
4650
4651 const char *fmt = GET_RTX_FORMAT (code);
4652 int i;
4653 int copied;
4654
4655 if (code == REG)
4656 {
4657 /* This code is duplicated for speed in find_reloads. */
4658 int regno = REGNO (x);
4659 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4660 x = reg_equiv_constant[regno];
4661 #if 0
4662 /* This creates (subreg (mem...)) which would cause an unnecessary
4663 reload of the mem. */
4664 else if (reg_equiv_mem[regno] != 0)
4665 x = reg_equiv_mem[regno];
4666 #endif
4667 else if (reg_equiv_memory_loc[regno]
4668 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4669 {
4670 rtx mem = make_memloc (x, regno);
4671 if (reg_equiv_address[regno]
4672 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4673 {
4674 /* If this is not a toplevel operand, find_reloads doesn't see
4675 this substitution. We have to emit a USE of the pseudo so
4676 that delete_output_reload can see it. */
4677 if (replace_reloads && recog_data.operand[opnum] != x)
4678 /* We mark the USE with QImode so that we recognize it
4679 as one that can be safely deleted at the end of
4680 reload. */
4681 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4682 QImode);
4683 x = mem;
4684 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4685 opnum, type, ind_levels, insn);
4686 if (!rtx_equal_p (x, mem))
4687 push_reg_equiv_alt_mem (regno, x);
4688 if (address_reloaded)
4689 *address_reloaded = i;
4690 }
4691 }
4692 return x;
4693 }
4694 if (code == MEM)
4695 {
4696 rtx tem = x;
4697
4698 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4699 opnum, type, ind_levels, insn);
4700 if (address_reloaded)
4701 *address_reloaded = i;
4702
4703 return tem;
4704 }
4705
4706 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4707 {
4708 /* Check for SUBREG containing a REG that's equivalent to a
4709 constant. If the constant has a known value, truncate it
4710 right now. Similarly if we are extracting a single word of a
4711 multi-word constant. If the constant is symbolic, allow it
4712 to be substituted normally. push_reload will strip the
4713 subreg later. The constant must not be VOIDmode, because we
4714 will lose the mode of the register (this should never happen
4715 because one of the cases above should handle it). */
4716
4717 int regno = REGNO (SUBREG_REG (x));
4718 rtx tem;
4719
4720 if (regno >= FIRST_PSEUDO_REGISTER
4721 && reg_renumber[regno] < 0
4722 && reg_equiv_constant[regno] != 0)
4723 {
4724 tem =
4725 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4726 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4727 gcc_assert (tem);
4728 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4729 {
4730 tem = force_const_mem (GET_MODE (x), tem);
4731 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4732 &XEXP (tem, 0), opnum, type,
4733 ind_levels, insn);
4734 if (address_reloaded)
4735 *address_reloaded = i;
4736 }
4737 return tem;
4738 }
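/* For instance (illustrative, little-endian): if pseudo 70 is equivalent
   to (const_int 300), then (subreg:QI (reg:SI 70) 0) is simplified right
   here to (const_int 44), the low byte, instead of being reloaded.  */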
4739
4740 /* If the subreg contains a reg that will be converted to a mem,
4741 convert the subreg to a narrower memref now.
4742 Otherwise, we would get (subreg (mem ...) ...),
4743 which would force reload of the mem.
4744
4745 We also need to do this if there is an equivalent MEM that is
4746 not offsettable. In that case, alter_subreg would produce an
4747 invalid address on big-endian machines.
4748
4749 For machines that extend byte loads, we must not reload using
4750 a wider mode if we have a paradoxical SUBREG. find_reloads will
4751 force a reload in that case. So we should not do anything here. */
4752
4753 if (regno >= FIRST_PSEUDO_REGISTER
4754 #ifdef LOAD_EXTEND_OP
4755 && (GET_MODE_SIZE (GET_MODE (x))
4756 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4757 #endif
4758 && (reg_equiv_address[regno] != 0
4759 || (reg_equiv_mem[regno] != 0
4760 && (! strict_memory_address_addr_space_p
4761 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
4762 MEM_ADDR_SPACE (reg_equiv_mem[regno]))
4763 || ! offsettable_memref_p (reg_equiv_mem[regno])
4764 || num_not_at_initial_offset))))
4765 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4766 insn);
4767 }
4768
4769 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4770 {
4771 if (fmt[i] == 'e')
4772 {
4773 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4774 ind_levels, is_set_dest, insn,
4775 address_reloaded);
4776 /* If we have replaced a reg with its equivalent memory loc -
4777 that can still be handled here e.g. if it's in a paradoxical
4778 subreg - we must make the change in a copy, rather than using
4779 a destructive change. This way, find_reloads can still elect
4780 not to do the change. */
4781 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4782 {
4783 x = shallow_copy_rtx (x);
4784 copied = 1;
4785 }
4786 XEXP (x, i) = new_part;
4787 }
4788 }
4789 return x;
4790 }
4791
4792 /* Return a mem ref for the memory equivalent of reg REGNO.
4793 This mem ref is not shared with anything. */
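/* Sketch of the effect (hypothetical offsets): if pseudo REGNO's equivalence
is (mem:SI (plus (reg fp) (const_int -16))) and the frame pointer has been
eliminated, the returned MEM uses the freshly re-eliminated address, e.g.
(mem:SI (plus (reg sp) (const_int 24))), adjusted to the mode of AD.  */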
4794
4795 static rtx
4796 make_memloc (rtx ad, int regno)
4797 {
4798 /* We must rerun eliminate_regs, in case the elimination
4799 offsets have changed. */
4800 rtx tem
4801 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], VOIDmode, NULL_RTX),
4802 0);
4803
4804 /* If TEM might contain a pseudo, we must copy it to avoid
4805 modifying it when we do the substitution for the reload. */
4806 if (rtx_varies_p (tem, 0))
4807 tem = copy_rtx (tem);
4808
4809 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4810 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4811
4812 /* Copy the result if it's still the same as the equivalence, to avoid
4813 modifying it when we do the substitution for the reload. */
4814 if (tem == reg_equiv_memory_loc[regno])
4815 tem = copy_rtx (tem);
4816 return tem;
4817 }
4818
4819 /* Returns true if AD could be turned into a valid memory reference
4820 to mode MODE in address space AS by reloading the part pointed to
4821 by PART into a register. */
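/* Hypothetical example: for AD = (plus (reg fp) (mem:SI ...)) with PART
pointing at the inner MEM, this asks whether (plus (reg fp) (reg NEW))
would be a valid MODE address, where NEW is a fresh (unallocated) register
number; *PART is restored before returning.  */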
4822
4823 static int
4824 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4825 addr_space_t as, rtx *part)
4826 {
4827 int retv;
4828 rtx tem = *part;
4829 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4830
4831 *part = reg;
4832 retv = memory_address_addr_space_p (mode, ad, as);
4833 *part = tem;
4834
4835 return retv;
4836 }
4837
4838 /* Record all reloads needed for handling memory address AD
4839 which appears in *LOC in a memory reference to mode MODE
4840 which itself is found in location *MEMREFLOC.
4841 Note that we take shortcuts assuming that no multi-reg machine mode
4842 occurs as part of an address.
4843
4844 OPNUM and TYPE specify the purpose of this reload.
4845
4846 IND_LEVELS says how many levels of indirect addressing this machine
4847 supports.
4848
4849 INSN, if nonzero, is the insn in which we do the reload. It is used
4850 to determine if we may generate output reloads, and where to put USEs
4851 for pseudos that we have to replace with stack slots.
4852
4853 Value is one if this address is reloaded or replaced as a whole; it is
4854 zero if the top level of this address was not reloaded or replaced, and
4855 it is -1 if it may or may not have been reloaded or replaced.
4856
4857 Note that there is no verification that the address will be valid after
4858 this routine does its work. Instead, we rely on the fact that the address
4859 was valid when reload started. So we need only undo things that reload
4860 could have broken. These are wrong register types, pseudos not allocated
4861 to a hard register, and frame pointer elimination. */
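/* Hedged illustration of the return value: if AD is a pseudo equivalent only
to a constant, the whole address is reloaded and 1 is returned; if AD is
already strictly valid, or only an inner piece (say the index of a PLUS)
needs work, 0 is returned; -1 can only come from LEGITIMIZE_RELOAD_ADDRESS,
where we cannot tell how much of the address was replaced.  */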
4862
4863 static int
4864 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4865 rtx *loc, int opnum, enum reload_type type,
4866 int ind_levels, rtx insn)
4867 {
4868 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4869 : ADDR_SPACE_GENERIC;
4870 int regno;
4871 int removed_and = 0;
4872 int op_index;
4873 rtx tem;
4874
4875 /* If the address is a register, see if it is a legitimate address and
4876 reload if not. We first handle the cases where we need not reload
4877 or where we must reload in a non-standard way. */
4878
4879 if (REG_P (ad))
4880 {
4881 regno = REGNO (ad);
4882
4883 if (reg_equiv_constant[regno] != 0)
4884 {
4885 find_reloads_address_part (reg_equiv_constant[regno], loc,
4886 base_reg_class (mode, MEM, SCRATCH),
4887 GET_MODE (ad), opnum, type, ind_levels);
4888 return 1;
4889 }
4890
4891 tem = reg_equiv_memory_loc[regno];
4892 if (tem != 0)
4893 {
4894 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4895 {
4896 tem = make_memloc (ad, regno);
4897 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4898 XEXP (tem, 0),
4899 MEM_ADDR_SPACE (tem)))
4900 {
4901 rtx orig = tem;
4902
4903 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4904 &XEXP (tem, 0), opnum,
4905 ADDR_TYPE (type), ind_levels, insn);
4906 if (!rtx_equal_p (tem, orig))
4907 push_reg_equiv_alt_mem (regno, tem);
4908 }
4909 /* We can avoid a reload if the register's equivalent memory
4910 expression is valid as an indirect memory address.
4911 But not all addresses are valid in a mem used as an indirect
4912 address: only reg or reg+constant. */
4913
4914 if (ind_levels > 0
4915 && strict_memory_address_addr_space_p (mode, tem, as)
4916 && (REG_P (XEXP (tem, 0))
4917 || (GET_CODE (XEXP (tem, 0)) == PLUS
4918 && REG_P (XEXP (XEXP (tem, 0), 0))
4919 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4920 {
4921 /* If TEM is not the same as what we'll be replacing the
4922 pseudo with after reload, put a USE in front of INSN
4923 in the final reload pass. */
4924 if (replace_reloads
4925 && num_not_at_initial_offset
4926 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4927 {
4928 *loc = tem;
4929 /* We mark the USE with QImode so that we
4930 recognize it as one that can be safely
4931 deleted at the end of reload. */
4932 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4933 insn), QImode);
4934
4935 /* This doesn't really count as replacing the address
4936 as a whole, since it is still a memory access. */
4937 }
4938 return 0;
4939 }
4940 ad = tem;
4941 }
4942 }
4943
4944 /* The only remaining case where we can avoid a reload is if this is a
4945 hard register that is valid as a base register and which is not the
4946 subject of a CLOBBER in this insn. */
4947
4948 else if (regno < FIRST_PSEUDO_REGISTER
4949 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4950 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4951 return 0;
4952
4953 /* If we do not have one of the cases above, we must do the reload. */
4954 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4955 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4956 return 1;
4957 }
4958
4959 if (strict_memory_address_addr_space_p (mode, ad, as))
4960 {
4961 /* The address appears valid, so reloads are not needed.
4962 But the address may contain an eliminable register.
4963 This can happen because a machine with indirect addressing
4964 may consider a pseudo register by itself a valid address even when
4965 it has failed to get a hard reg.
4966 So do a tree-walk to find and eliminate all such regs. */
4967
4968 /* But first quickly dispose of a common case. */
4969 if (GET_CODE (ad) == PLUS
4970 && CONST_INT_P (XEXP (ad, 1))
4971 && REG_P (XEXP (ad, 0))
4972 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4973 return 0;
4974
4975 subst_reg_equivs_changed = 0;
4976 *loc = subst_reg_equivs (ad, insn);
4977
4978 if (! subst_reg_equivs_changed)
4979 return 0;
4980
4981 /* Check result for validity after substitution. */
4982 if (strict_memory_address_addr_space_p (mode, ad, as))
4983 return 0;
4984 }
4985
4986 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4987 do
4988 {
4989 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4990 {
4991 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4992 ind_levels, win);
4993 }
4994 break;
4995 win:
4996 *memrefloc = copy_rtx (*memrefloc);
4997 XEXP (*memrefloc, 0) = ad;
4998 move_replacements (&ad, &XEXP (*memrefloc, 0));
4999 return -1;
5000 }
5001 while (0);
5002 #endif
5003
5004 /* The address is not valid. We have to figure out why. First see if
5005 we have an outer AND and remove it if so. Then analyze what's inside. */
5006
5007 if (GET_CODE (ad) == AND)
5008 {
5009 removed_and = 1;
5010 loc = &XEXP (ad, 0);
5011 ad = *loc;
5012 }
5013
5014 /* One possibility for why the address is invalid is that it is itself
5015 a MEM. This can happen when the frame pointer is being eliminated, a
5016 pseudo is not allocated to a hard register, and the offset between the
5017 frame and stack pointers is not its initial value. In that case the
5018 pseudo will have been replaced by a MEM referring to the
5019 stack pointer. */
5020 if (MEM_P (ad))
5021 {
5022 /* First ensure that the address in this MEM is valid. Then, unless
5023 indirect addresses are valid, reload the MEM into a register. */
5024 tem = ad;
5025 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5026 opnum, ADDR_TYPE (type),
5027 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5028
5029 /* If tem was changed, then we must create a new memory reference to
5030 hold it and store it back into memrefloc. */
5031 if (tem != ad && memrefloc)
5032 {
5033 *memrefloc = copy_rtx (*memrefloc);
5034 copy_replacements (tem, XEXP (*memrefloc, 0));
5035 loc = &XEXP (*memrefloc, 0);
5036 if (removed_and)
5037 loc = &XEXP (*loc, 0);
5038 }
5039
5040 /* Check cases similar to those for indirect addresses above, except
5041 that we can allow pseudos and a MEM since they should have been
5042 taken care of above. */
5043
5044 if (ind_levels == 0
5045 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5046 || MEM_P (XEXP (tem, 0))
5047 || ! (REG_P (XEXP (tem, 0))
5048 || (GET_CODE (XEXP (tem, 0)) == PLUS
5049 && REG_P (XEXP (XEXP (tem, 0), 0))
5050 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5051 {
5052 /* Must use TEM here, not AD, since it is the one that will
5053 have any subexpressions reloaded, if needed. */
5054 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5055 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5056 VOIDmode, 0,
5057 0, opnum, type);
5058 return ! removed_and;
5059 }
5060 else
5061 return 0;
5062 }
5063
5064 /* If we have the address of a stack slot but it's not valid because the
5065 displacement is too large, compute the sum in a register.
5066 Handle all base registers here, not just fp/ap/sp, because on some
5067 targets (namely SH) we can also get too large displacements from
5068 big-endian corrections. */
5069 else if (GET_CODE (ad) == PLUS
5070 && REG_P (XEXP (ad, 0))
5071 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5072 && CONST_INT_P (XEXP (ad, 1))
5073 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5074 CONST_INT))
5075
5076 {
5077 /* Unshare the MEM rtx so we can safely alter it. */
5078 if (memrefloc)
5079 {
5080 *memrefloc = copy_rtx (*memrefloc);
5081 loc = &XEXP (*memrefloc, 0);
5082 if (removed_and)
5083 loc = &XEXP (*loc, 0);
5084 }
5085
5086 if (double_reg_address_ok)
5087 {
5088 /* Unshare the sum as well. */
5089 *loc = ad = copy_rtx (ad);
5090
5091 /* Reload the displacement into an index reg.
5092 We assume the frame pointer or arg pointer is a base reg. */
5093 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5094 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5095 type, ind_levels);
5096 return 0;
5097 }
5098 else
5099 {
5100 /* If the sum of two regs is not necessarily valid,
5101 reload the sum into a base reg.
5102 That will at least work. */
5103 find_reloads_address_part (ad, loc,
5104 base_reg_class (mode, MEM, SCRATCH),
5105 GET_MODE (ad), opnum, type, ind_levels);
5106 }
5107 return ! removed_and;
5108 }
5109
5110 /* If we have an indexed stack slot, there are three possible reasons why
5111 it might be invalid: The index might need to be reloaded, the address
5112 might have been made by frame pointer elimination and hence have a
5113 constant out of range, or both reasons might apply.
5114
5115 We can easily check for an index needing reload, but even if that is the
5116 case, we might also have an invalid constant. To avoid making the
5117 conservative assumption and requiring two reloads, we see if this address
5118 is valid when not interpreted strictly. If it is, the only problem is
5119 that the index needs a reload and find_reloads_address_1 will take care
5120 of it.
5121
5122 Handle all base registers here, not just fp/ap/sp, because on some
5123 targets (namely SPARC) we can also get invalid addresses from preventive
5124 subreg big-endian corrections made by find_reloads_toplev. We
5125 can also get expressions involving LO_SUM (rather than PLUS) from
5126 find_reloads_subreg_address.
5127
5128 If we decide to do something, it must be that `double_reg_address_ok'
5129 is true. We generate a reload of the base register + constant and
5130 rework the sum so that the reload register will be added to the index.
5131 This is safe because we know the address isn't shared.
5132
5133 We check for the base register as both the first and second operand of
5134 the innermost PLUS and/or LO_SUM. */
5135
5136 for (op_index = 0; op_index < 2; ++op_index)
5137 {
5138 rtx operand, addend;
5139 enum rtx_code inner_code;
5140
5141 if (GET_CODE (ad) != PLUS)
5142 continue;
5143
5144 inner_code = GET_CODE (XEXP (ad, 0));
5145 if (!(GET_CODE (ad) == PLUS
5146 && CONST_INT_P (XEXP (ad, 1))
5147 && (inner_code == PLUS || inner_code == LO_SUM)))
5148 continue;
5149
5150 operand = XEXP (XEXP (ad, 0), op_index);
5151 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5152 continue;
5153
5154 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5155
5156 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5157 GET_CODE (addend))
5158 || operand == frame_pointer_rtx
5159 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5160 || operand == hard_frame_pointer_rtx
5161 #endif
5162 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5163 || operand == arg_pointer_rtx
5164 #endif
5165 || operand == stack_pointer_rtx)
5166 && ! maybe_memory_address_addr_space_p
5167 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5168 {
5169 rtx offset_reg;
5170 enum reg_class cls;
5171
5172 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5173
5174 /* Form the adjusted address. */
5175 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5176 ad = gen_rtx_PLUS (GET_MODE (ad),
5177 op_index == 0 ? offset_reg : addend,
5178 op_index == 0 ? addend : offset_reg);
5179 else
5180 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5181 op_index == 0 ? offset_reg : addend,
5182 op_index == 0 ? addend : offset_reg);
5183 *loc = ad;
5184
5185 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5186 find_reloads_address_part (XEXP (ad, op_index),
5187 &XEXP (ad, op_index), cls,
5188 GET_MODE (ad), opnum, type, ind_levels);
5189 find_reloads_address_1 (mode,
5190 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5191 GET_CODE (XEXP (ad, op_index)),
5192 &XEXP (ad, 1 - op_index), opnum,
5193 type, 0, insn);
5194
5195 return 0;
5196 }
5197 }
5198
5199 /* See if address becomes valid when an eliminable register
5200 in a sum is replaced. */
5201
5202 tem = ad;
5203 if (GET_CODE (ad) == PLUS)
5204 tem = subst_indexed_address (ad);
5205 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5206 {
5207 /* Ok, we win that way. Replace any additional eliminable
5208 registers. */
5209
5210 subst_reg_equivs_changed = 0;
5211 tem = subst_reg_equivs (tem, insn);
5212
5213 /* Make sure that didn't make the address invalid again. */
5214
5215 if (! subst_reg_equivs_changed
5216 || strict_memory_address_addr_space_p (mode, tem, as))
5217 {
5218 *loc = tem;
5219 return 0;
5220 }
5221 }
5222
5223 /* If constants aren't valid addresses, reload the constant address
5224 into a register. */
5225 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5226 {
5227 enum machine_mode address_mode = GET_MODE (ad);
5228 if (address_mode == VOIDmode)
5229 address_mode = targetm.addr_space.address_mode (as);
5230
5231 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5232 Unshare it so we can safely alter it. */
5233 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5234 && CONSTANT_POOL_ADDRESS_P (ad))
5235 {
5236 *memrefloc = copy_rtx (*memrefloc);
5237 loc = &XEXP (*memrefloc, 0);
5238 if (removed_and)
5239 loc = &XEXP (*loc, 0);
5240 }
5241
5242 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5243 address_mode, opnum, type, ind_levels);
5244 return ! removed_and;
5245 }
5246
5247 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5248 ind_levels, insn);
5249 }
5250 \f
5251 /* Find all pseudo regs appearing in AD
5252 that are eliminable in favor of equivalent values
5253 and do not have hard regs; replace them by their equivalents.
5254 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5255 front of it for pseudos that we have to replace with stack slots. */
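/* Example (hypothetical register number): for AD = (plus (reg 70)
(const_int 8)) where pseudo 70 is equivalent to (symbol_ref "x") and got no
hard register, the result is (plus (symbol_ref "x") (const_int 8)).  A pseudo
equivalent to a stack slot whose elimination offset has changed is replaced
by a fresh MEM from make_memloc instead.  */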
5256
5257 static rtx
5258 subst_reg_equivs (rtx ad, rtx insn)
5259 {
5260 RTX_CODE code = GET_CODE (ad);
5261 int i;
5262 const char *fmt;
5263
5264 switch (code)
5265 {
5266 case HIGH:
5267 case CONST_INT:
5268 case CONST:
5269 case CONST_DOUBLE:
5270 case CONST_FIXED:
5271 case CONST_VECTOR:
5272 case SYMBOL_REF:
5273 case LABEL_REF:
5274 case PC:
5275 case CC0:
5276 return ad;
5277
5278 case REG:
5279 {
5280 int regno = REGNO (ad);
5281
5282 if (reg_equiv_constant[regno] != 0)
5283 {
5284 subst_reg_equivs_changed = 1;
5285 return reg_equiv_constant[regno];
5286 }
5287 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5288 {
5289 rtx mem = make_memloc (ad, regno);
5290 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5291 {
5292 subst_reg_equivs_changed = 1;
5293 /* We mark the USE with QImode so that we recognize it
5294 as one that can be safely deleted at the end of
5295 reload. */
5296 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5297 QImode);
5298 return mem;
5299 }
5300 }
5301 }
5302 return ad;
5303
5304 case PLUS:
5305 /* Quickly dispose of a common case. */
5306 if (XEXP (ad, 0) == frame_pointer_rtx
5307 && CONST_INT_P (XEXP (ad, 1)))
5308 return ad;
5309 break;
5310
5311 default:
5312 break;
5313 }
5314
5315 fmt = GET_RTX_FORMAT (code);
5316 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5317 if (fmt[i] == 'e')
5318 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5319 return ad;
5320 }
5321 \f
5322 /* Compute the sum of X and Y, making canonicalizations assumed in an
5323 address, namely: sum constant integers, surround the sum of two
5324 constants with a CONST, put the constant as the second operand, and
5325 group the constant on the outermost sum.
5326
5327 This routine assumes both inputs are already in canonical form. */
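/* Two hypothetical examples: form_sum (Pmode, (plus (reg R) (const_int 4)),
(const_int 8)) yields (plus (reg R) (const_int 12)), and
form_sum (Pmode, (symbol_ref "x"), (const_int 4)) yields
(const (plus (symbol_ref "x") (const_int 4))).  */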
5328
5329 rtx
5330 form_sum (enum machine_mode mode, rtx x, rtx y)
5331 {
5332 rtx tem;
5333
5334 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5335 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5336
5337 if (CONST_INT_P (x))
5338 return plus_constant (y, INTVAL (x));
5339 else if (CONST_INT_P (y))
5340 return plus_constant (x, INTVAL (y));
5341 else if (CONSTANT_P (x))
5342 tem = x, x = y, y = tem;
5343
5344 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5345 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5346
5347 /* Note that if the operands of Y are specified in the opposite
5348 order in the recursive calls below, infinite recursion will occur. */
5349 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5350 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5351
5352 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5353 constant will have been placed second. */
5354 if (CONSTANT_P (x) && CONSTANT_P (y))
5355 {
5356 if (GET_CODE (x) == CONST)
5357 x = XEXP (x, 0);
5358 if (GET_CODE (y) == CONST)
5359 y = XEXP (y, 0);
5360
5361 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5362 }
5363
5364 return gen_rtx_PLUS (mode, x, y);
5365 }
5366 \f
5367 /* If ADDR is a sum containing a pseudo register that should be
5368 replaced with a constant (from reg_equiv_constant),
5369 return the result of doing so, and also apply the associative
5370 law so that the result is more likely to be a valid address.
5371 (But it is not guaranteed to be one.)
5372
5373 Note that at most one register is replaced, even if more are
5374 replaceable. Also, we try to put the result into a canonical form
5375 so it is more likely to be a valid address.
5376
5377 In all other cases, return ADDR. */
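/* Hypothetical example: if pseudo 70 did not get a hard register and is
equivalent to (const_int 16), then (plus (plus (reg 70) (reg R))
(const_int 4)) is rewritten, via form_sum, to (plus (reg R) (const_int 20)).  */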
5378
5379 static rtx
5380 subst_indexed_address (rtx addr)
5381 {
5382 rtx op0 = 0, op1 = 0, op2 = 0;
5383 rtx tem;
5384 int regno;
5385
5386 if (GET_CODE (addr) == PLUS)
5387 {
5388 /* Try to find a register to replace. */
5389 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5390 if (REG_P (op0)
5391 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5392 && reg_renumber[regno] < 0
5393 && reg_equiv_constant[regno] != 0)
5394 op0 = reg_equiv_constant[regno];
5395 else if (REG_P (op1)
5396 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5397 && reg_renumber[regno] < 0
5398 && reg_equiv_constant[regno] != 0)
5399 op1 = reg_equiv_constant[regno];
5400 else if (GET_CODE (op0) == PLUS
5401 && (tem = subst_indexed_address (op0)) != op0)
5402 op0 = tem;
5403 else if (GET_CODE (op1) == PLUS
5404 && (tem = subst_indexed_address (op1)) != op1)
5405 op1 = tem;
5406 else
5407 return addr;
5408
5409 /* Pick out up to three things to add. */
5410 if (GET_CODE (op1) == PLUS)
5411 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5412 else if (GET_CODE (op0) == PLUS)
5413 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5414
5415 /* Compute the sum. */
5416 if (op2 != 0)
5417 op1 = form_sum (GET_MODE (addr), op1, op2);
5418 if (op1 != 0)
5419 op0 = form_sum (GET_MODE (addr), op0, op1);
5420
5421 return op0;
5422 }
5423 return addr;
5424 }
5425 \f
5426 /* Update the REG_INC notes for an insn. It updates all REG_INC
5427 notes for the instruction which refer to REGNO, making them refer
5428 to the reload number.
5429
5430 INSN is the insn for which any REG_INC notes need updating.
5431
5432 REGNO is the register number which has been reloaded.
5433
5434 RELOADNUM is the reload number. */
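/* For instance (hypothetical numbers), if INSN carries a note
(expr_list:REG_INC (reg 70) ...) and pseudo 70 was handled by reload
RELOADNUM, a replacement is queued so that the note ends up naming the
reload register instead.  */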
5435
5436 static void
5437 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5438 int reloadnum ATTRIBUTE_UNUSED)
5439 {
5440 #ifdef AUTO_INC_DEC
5441 rtx link;
5442
5443 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5444 if (REG_NOTE_KIND (link) == REG_INC
5445 && (int) REGNO (XEXP (link, 0)) == regno)
5446 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5447 #endif
5448 }
5449 \f
5450 /* Record the pseudo registers we must reload into hard registers in a
5451 subexpression of a would-be memory address, X referring to a value
5452 in mode MODE. (This function is not called if the address we find
5453 is strictly valid.)
5454
5455 CONTEXT = 1 means we are considering regs as index regs,
5456 = 0 means we are considering them as base regs.
5457 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5458 or an autoinc code.
5459 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5460 is the code of the index part of the address. Otherwise, pass SCRATCH
5461 for this argument.
5462 OPNUM and TYPE specify the purpose of any reloads made.
5463
5464 IND_LEVELS says how many levels of indirect addressing are
5465 supported at this point in the address.
5466
5467 INSN, if nonzero, is the insn in which we do the reload. It is used
5468 to determine if we may generate output reloads.
5469
5470 We return nonzero if X, as a whole, is reloaded or replaced. */
5471
5472 /* Note that we take shortcuts assuming that no multi-reg machine mode
5473 occurs as part of an address.
5474 Also, this is not fully machine-customizable; it works for machines
5475 such as VAXen and 68000's and 32000's, but other possible machines
5476 could have addressing modes that this does not handle right.
5477 If you add push_reload calls here, you need to make sure gen_reload
5478 handles those cases gracefully. */
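/* Hedged example of the CONTEXT/INDEX_CODE convention: in the address
(plus (reg A) (mult (reg B) (const_int 4))), REG A is checked as a base
register (CONTEXT 0, OUTER_CODE PLUS, INDEX_CODE MULT), while REG B, inside
the MULT, is checked as an index register (CONTEXT 1).  */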
5479
5480 static int
5481 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5482 enum rtx_code outer_code, enum rtx_code index_code,
5483 rtx *loc, int opnum, enum reload_type type,
5484 int ind_levels, rtx insn)
5485 {
5486 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5487 ((CONTEXT) == 0 \
5488 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5489 : REGNO_OK_FOR_INDEX_P (REGNO))
5490
5491 enum reg_class context_reg_class;
5492 RTX_CODE code = GET_CODE (x);
5493
5494 if (context == 1)
5495 context_reg_class = INDEX_REG_CLASS;
5496 else
5497 context_reg_class = base_reg_class (mode, outer_code, index_code);
5498
5499 switch (code)
5500 {
5501 case PLUS:
5502 {
5503 rtx orig_op0 = XEXP (x, 0);
5504 rtx orig_op1 = XEXP (x, 1);
5505 RTX_CODE code0 = GET_CODE (orig_op0);
5506 RTX_CODE code1 = GET_CODE (orig_op1);
5507 rtx op0 = orig_op0;
5508 rtx op1 = orig_op1;
5509
5510 if (GET_CODE (op0) == SUBREG)
5511 {
5512 op0 = SUBREG_REG (op0);
5513 code0 = GET_CODE (op0);
5514 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5515 op0 = gen_rtx_REG (word_mode,
5516 (REGNO (op0) +
5517 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5518 GET_MODE (SUBREG_REG (orig_op0)),
5519 SUBREG_BYTE (orig_op0),
5520 GET_MODE (orig_op0))));
5521 }
5522
5523 if (GET_CODE (op1) == SUBREG)
5524 {
5525 op1 = SUBREG_REG (op1);
5526 code1 = GET_CODE (op1);
5527 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5528 /* ??? Why is this given op1's mode and above for
5529 ??? op0 SUBREGs we use word_mode? */
5530 op1 = gen_rtx_REG (GET_MODE (op1),
5531 (REGNO (op1) +
5532 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5533 GET_MODE (SUBREG_REG (orig_op1)),
5534 SUBREG_BYTE (orig_op1),
5535 GET_MODE (orig_op1))));
5536 }
5537 /* A PLUS in the index register may be created only as a result of
5538 register rematerialization for expressions like &localvar*4. Reload it.
5539 It may be possible to combine the displacement on the outer level,
5540 but it is probably not worthwhile to do so. */
5541 if (context == 1)
5542 {
5543 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5544 opnum, ADDR_TYPE (type), ind_levels, insn);
5545 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5546 context_reg_class,
5547 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5548 return 1;
5549 }
5550
5551 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5552 || code0 == ZERO_EXTEND || code1 == MEM)
5553 {
5554 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5555 &XEXP (x, 0), opnum, type, ind_levels,
5556 insn);
5557 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5558 &XEXP (x, 1), opnum, type, ind_levels,
5559 insn);
5560 }
5561
5562 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5563 || code1 == ZERO_EXTEND || code0 == MEM)
5564 {
5565 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5566 &XEXP (x, 0), opnum, type, ind_levels,
5567 insn);
5568 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5569 &XEXP (x, 1), opnum, type, ind_levels,
5570 insn);
5571 }
5572
5573 else if (code0 == CONST_INT || code0 == CONST
5574 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5575 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5576 &XEXP (x, 1), opnum, type, ind_levels,
5577 insn);
5578
5579 else if (code1 == CONST_INT || code1 == CONST
5580 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5581 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5582 &XEXP (x, 0), opnum, type, ind_levels,
5583 insn);
5584
5585 else if (code0 == REG && code1 == REG)
5586 {
5587 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5588 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5589 return 0;
5590 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5591 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5592 return 0;
5593 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5594 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5595 &XEXP (x, 1), opnum, type, ind_levels,
5596 insn);
5597 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5598 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5599 &XEXP (x, 0), opnum, type, ind_levels,
5600 insn);
5601 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5602 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5603 &XEXP (x, 0), opnum, type, ind_levels,
5604 insn);
5605 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5606 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5607 &XEXP (x, 1), opnum, type, ind_levels,
5608 insn);
5609 else
5610 {
5611 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5612 &XEXP (x, 0), opnum, type, ind_levels,
5613 insn);
5614 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5615 &XEXP (x, 1), opnum, type, ind_levels,
5616 insn);
5617 }
5618 }
5619
5620 else if (code0 == REG)
5621 {
5622 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5623 &XEXP (x, 0), opnum, type, ind_levels,
5624 insn);
5625 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5626 &XEXP (x, 1), opnum, type, ind_levels,
5627 insn);
5628 }
5629
5630 else if (code1 == REG)
5631 {
5632 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5633 &XEXP (x, 1), opnum, type, ind_levels,
5634 insn);
5635 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5636 &XEXP (x, 0), opnum, type, ind_levels,
5637 insn);
5638 }
5639 }
5640
5641 return 0;
5642
5643 case POST_MODIFY:
5644 case PRE_MODIFY:
5645 {
5646 rtx op0 = XEXP (x, 0);
5647 rtx op1 = XEXP (x, 1);
5648 enum rtx_code index_code;
5649 int regno;
5650 int reloadnum;
5651
5652 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5653 return 0;
5654
5655 /* Currently, we only support {PRE,POST}_MODIFY constructs
5656 where a base register is {inc,dec}remented by the contents
5657 of another register or by a constant value. Thus, these
5658 operands must match. */
5659 gcc_assert (op0 == XEXP (op1, 0));
5660
5661 /* Require index register (or constant). Let's just handle the
5662 register case in the meantime... If the target allows
5663 auto-modify by a constant then we could try replacing a pseudo
5664 register with its equivalent constant where applicable.
5665
5666 We also handle the case where the register was eliminated
5667 resulting in a PLUS subexpression.
5668
5669 If we later decide to reload the whole PRE_MODIFY or
5670 POST_MODIFY, inc_for_reload might clobber the reload register
5671 before reading the index. The index register might therefore
5672 need to live longer than a TYPE reload normally would, so be
5673 conservative and class it as RELOAD_OTHER. */
5674 if ((REG_P (XEXP (op1, 1))
5675 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5676 || GET_CODE (XEXP (op1, 1)) == PLUS)
5677 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5678 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5679 ind_levels, insn);
5680
5681 gcc_assert (REG_P (XEXP (op1, 0)));
5682
5683 regno = REGNO (XEXP (op1, 0));
5684 index_code = GET_CODE (XEXP (op1, 1));
5685
5686 /* A register that is incremented cannot be constant! */
5687 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5688 || reg_equiv_constant[regno] == 0);
5689
5690 /* Handle a register that is equivalent to a memory location
5691 which cannot be addressed directly. */
5692 if (reg_equiv_memory_loc[regno] != 0
5693 && (reg_equiv_address[regno] != 0
5694 || num_not_at_initial_offset))
5695 {
5696 rtx tem = make_memloc (XEXP (x, 0), regno);
5697
5698 if (reg_equiv_address[regno]
5699 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5700 {
5701 rtx orig = tem;
5702
5703 /* First reload the memory location's address.
5704 We can't use ADDR_TYPE (type) here, because we need to
5705 write back the value after reading it, hence we actually
5706 need two registers. */
5707 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5708 &XEXP (tem, 0), opnum,
5709 RELOAD_OTHER,
5710 ind_levels, insn);
5711
5712 if (!rtx_equal_p (tem, orig))
5713 push_reg_equiv_alt_mem (regno, tem);
5714
5715 /* Then reload the memory location into a base
5716 register. */
5717 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5718 &XEXP (op1, 0),
5719 base_reg_class (mode, code,
5720 index_code),
5721 GET_MODE (x), GET_MODE (x), 0,
5722 0, opnum, RELOAD_OTHER);
5723
5724 update_auto_inc_notes (this_insn, regno, reloadnum);
5725 return 0;
5726 }
5727 }
5728
5729 if (reg_renumber[regno] >= 0)
5730 regno = reg_renumber[regno];
5731
5732 /* We require a base register here... */
5733 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5734 {
5735 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5736 &XEXP (op1, 0), &XEXP (x, 0),
5737 base_reg_class (mode, code, index_code),
5738 GET_MODE (x), GET_MODE (x), 0, 0,
5739 opnum, RELOAD_OTHER);
5740
5741 update_auto_inc_notes (this_insn, regno, reloadnum);
5742 return 0;
5743 }
5744 }
5745 return 0;
5746
5747 case POST_INC:
5748 case POST_DEC:
5749 case PRE_INC:
5750 case PRE_DEC:
5751 if (REG_P (XEXP (x, 0)))
5752 {
5753 int regno = REGNO (XEXP (x, 0));
5754 int value = 0;
5755 rtx x_orig = x;
5756
5757 /* A register that is incremented cannot be constant! */
5758 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5759 || reg_equiv_constant[regno] == 0);
5760
5761 /* Handle a register that is equivalent to a memory location
5762 which cannot be addressed directly. */
5763 if (reg_equiv_memory_loc[regno] != 0
5764 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5765 {
5766 rtx tem = make_memloc (XEXP (x, 0), regno);
5767 if (reg_equiv_address[regno]
5768 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5769 {
5770 rtx orig = tem;
5771
5772 /* First reload the memory location's address.
5773 We can't use ADDR_TYPE (type) here, because we need to
5774 write back the value after reading it, hence we actually
5775 need two registers. */
5776 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5777 &XEXP (tem, 0), opnum, type,
5778 ind_levels, insn);
5779 if (!rtx_equal_p (tem, orig))
5780 push_reg_equiv_alt_mem (regno, tem);
5781 /* Put this inside a new increment-expression. */
5782 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5783 /* Proceed to reload that, as if it contained a register. */
5784 }
5785 }
5786
5787 /* If we have a hard register that is ok in this incdec context,
5788 don't make a reload. If the register isn't nice enough for
5789 autoincdec, we can reload it. But if an autoincrement of a
5790 register that we have just verified as playing nice still isn't
5791 "valid" in the outer context, it must be that no autoincrement is "valid".
5792 If that is true and something made an autoincrement anyway,
5793 this must be a special context where one is allowed.
5794 (For example, a "push" instruction.)
5795 We can't improve this address, so leave it alone. */
5796
5797 /* Otherwise, reload the autoincrement into a suitable hard reg
5798 and record how much to increment by. */
5799
5800 if (reg_renumber[regno] >= 0)
5801 regno = reg_renumber[regno];
5802 if (regno >= FIRST_PSEUDO_REGISTER
5803 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5804 index_code))
5805 {
5806 int reloadnum;
5807
5808 /* If we can output the register afterwards, do so; this
5809 saves the extra update.
5810 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5811 CALL_INSN - and it does not set CC0.
5812 But don't do this if we cannot directly address the
5813 memory location, since this will make it harder to
5814 reuse address reloads, and increases register pressure.
5815 Also don't do this if we can probably update x directly. */
5816 rtx equiv = (MEM_P (XEXP (x, 0))
5817 ? XEXP (x, 0)
5818 : reg_equiv_mem[regno]);
5819 int icode = (int) optab_handler (add_optab, GET_MODE (x));
5820 if (insn && NONJUMP_INSN_P (insn) && equiv
5821 && memory_operand (equiv, GET_MODE (equiv))
5822 #ifdef HAVE_cc0
5823 && ! sets_cc0_p (PATTERN (insn))
5824 #endif
5825 && ! (icode != CODE_FOR_nothing
5826 && ((*insn_data[icode].operand[0].predicate)
5827 (equiv, GET_MODE (x)))
5828 && ((*insn_data[icode].operand[1].predicate)
5829 (equiv, GET_MODE (x)))))
5830 {
5831 /* We use the original pseudo for loc, so that
5832 emit_reload_insns() knows which pseudo this
5833 reload refers to and updates the pseudo rtx, not
5834 its equivalent memory location, as well as the
5835 corresponding entry in reg_last_reload_reg. */
5836 loc = &XEXP (x_orig, 0);
5837 x = XEXP (x, 0);
5838 reloadnum
5839 = push_reload (x, x, loc, loc,
5840 context_reg_class,
5841 GET_MODE (x), GET_MODE (x), 0, 0,
5842 opnum, RELOAD_OTHER);
5843 }
5844 else
5845 {
5846 reloadnum
5847 = push_reload (x, x, loc, (rtx*) 0,
5848 context_reg_class,
5849 GET_MODE (x), GET_MODE (x), 0, 0,
5850 opnum, type);
5851 rld[reloadnum].inc
5852 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5853
5854 value = 1;
5855 }
5856
5857 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5858 reloadnum);
5859 }
5860 return value;
5861 }
5862 return 0;
5863
5864 case TRUNCATE:
5865 case SIGN_EXTEND:
5866 case ZERO_EXTEND:
5867 /* Look for parts to reload in the inner expression and reload them
5868 too, in addition to this operation. Reloading all inner parts in
5869 addition to this one shouldn't be necessary, but at this point,
5870 we don't know if we can possibly omit any part that *can* be
5871 reloaded. Targets that are better off reloading just either part
5872 (or perhaps even a different part of an outer expression), should
5873 define LEGITIMIZE_RELOAD_ADDRESS. */
5874 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5875 context, code, SCRATCH, &XEXP (x, 0), opnum,
5876 type, ind_levels, insn);
5877 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5878 context_reg_class,
5879 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5880 return 1;
5881
5882 case MEM:
5883 /* This is probably the result of a substitution, by eliminate_regs, of
5884 an equivalent address for a pseudo that was not allocated to a hard
5885 register. Verify that the specified address is valid and reload it
5886 into a register.
5887
5888 Since we know we are going to reload this item, don't decrement for
5889 the indirection level.
5890
5891 Note that this is actually conservative: it would be slightly more
5892 efficient to use the value of SPILL_INDIRECT_LEVELS from
5893 reload1.c here. */
5894
5895 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5896 opnum, ADDR_TYPE (type), ind_levels, insn);
5897 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5898 context_reg_class,
5899 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5900 return 1;
5901
5902 case REG:
5903 {
5904 int regno = REGNO (x);
5905
5906 if (reg_equiv_constant[regno] != 0)
5907 {
5908 find_reloads_address_part (reg_equiv_constant[regno], loc,
5909 context_reg_class,
5910 GET_MODE (x), opnum, type, ind_levels);
5911 return 1;
5912 }
5913
5914 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5915 that feeds this insn. */
5916 if (reg_equiv_mem[regno] != 0)
5917 {
5918 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5919 context_reg_class,
5920 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5921 return 1;
5922 }
5923 #endif
5924
5925 if (reg_equiv_memory_loc[regno]
5926 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5927 {
5928 rtx tem = make_memloc (x, regno);
5929 if (reg_equiv_address[regno] != 0
5930 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5931 {
5932 x = tem;
5933 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5934 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5935 ind_levels, insn);
5936 if (!rtx_equal_p (x, tem))
5937 push_reg_equiv_alt_mem (regno, x);
5938 }
5939 }
5940
5941 if (reg_renumber[regno] >= 0)
5942 regno = reg_renumber[regno];
5943
5944 if (regno >= FIRST_PSEUDO_REGISTER
5945 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5946 index_code))
5947 {
5948 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5949 context_reg_class,
5950 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5951 return 1;
5952 }
5953
5954 /* If a register appearing in an address is the subject of a CLOBBER
5955 in this insn, reload it into some other register to be safe.
5956 The CLOBBER is supposed to make the register unavailable
5957 from before this insn to after it. */
5958 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5959 {
5960 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5961 context_reg_class,
5962 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5963 return 1;
5964 }
5965 }
5966 return 0;
5967
5968 case SUBREG:
5969 if (REG_P (SUBREG_REG (x)))
5970 {
5971 /* If this is a SUBREG of a hard register and the resulting register
5972 is of the wrong class, reload the whole SUBREG. This avoids
5973 needless copies if SUBREG_REG is multi-word. */
5974 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5975 {
5976 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5977
5978 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5979 index_code))
5980 {
5981 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5982 context_reg_class,
5983 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5984 return 1;
5985 }
5986 }
5987 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5988 is larger than the class size, then reload the whole SUBREG. */
5989 else
5990 {
5991 enum reg_class rclass = context_reg_class;
5992 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5993 > reg_class_size[rclass])
5994 {
5995 x = find_reloads_subreg_address (x, 0, opnum,
5996 ADDR_TYPE (type),
5997 ind_levels, insn);
5998 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
5999 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6000 return 1;
6001 }
6002 }
6003 }
6004 break;
6005
6006 default:
6007 break;
6008 }
6009
6010 {
6011 const char *fmt = GET_RTX_FORMAT (code);
6012 int i;
6013
6014 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6015 {
6016 if (fmt[i] == 'e')
6017 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6018 we get here. */
6019 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6020 &XEXP (x, i), opnum, type, ind_levels, insn);
6021 }
6022 }
6023
6024 #undef REG_OK_FOR_CONTEXT
6025 return 0;
6026 }
6027 \f
6028 /* X, which is found at *LOC, is a part of an address that needs to be
6029 reloaded into a register of class RCLASS. If X is a constant, or if
6030 X is a PLUS that contains a constant, check that the constant is a
6031 legitimate operand and that we are supposed to be able to load
6032 it into the register.
6033
6034 If not, force the constant into memory and reload the MEM instead.
6035
6036 MODE is the mode to use, in case X is an integer constant.
6037
6038 OPNUM and TYPE describe the purpose of any reloads made.
6039
6040 IND_LEVELS says how many levels of indirect addressing this machine
6041 supports. */
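/* Sketch of the intent on a hypothetical target: if X is a constant that is
not a legitimate immediate, or one whose preferred reload class is NO_REGS,
it is forced into the constant pool and the resulting
(mem (symbol_ref ...)) is reloaded into RCLASS instead.  */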
6042
6043 static void
6044 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6045 enum machine_mode mode, int opnum,
6046 enum reload_type type, int ind_levels)
6047 {
6048 if (CONSTANT_P (x)
6049 && (! LEGITIMATE_CONSTANT_P (x)
6050 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6051 {
6052 x = force_const_mem (mode, x);
6053 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6054 opnum, type, ind_levels, 0);
6055 }
6056
6057 else if (GET_CODE (x) == PLUS
6058 && CONSTANT_P (XEXP (x, 1))
6059 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6060 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6061 == NO_REGS))
6062 {
6063 rtx tem;
6064
6065 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6066 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6067 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6068 opnum, type, ind_levels, 0);
6069 }
6070
6071 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6072 mode, VOIDmode, 0, 0, opnum, type);
6073 }
6074 \f
6075 /* X, a subreg of a pseudo, is a part of an address that needs to be
6076 reloaded.
6077
6078 If the pseudo is equivalent to a memory location that cannot be directly
6079 addressed, make the necessary address reloads.
6080
6081 If address reloads have been necessary, or if the address is changed
6082 by register elimination, return the rtx of the memory location;
6083 otherwise, return X.
6084
6085 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6086 memory location.
6087
6088 OPNUM and TYPE identify the purpose of the reload.
6089
6090 IND_LEVELS says how many levels of indirect addressing are
6091 supported at this point in the address.
6092
6093 INSN, if nonzero, is the insn in which we do the reload. It is used
6094 to determine where to put USEs for pseudos that we have to replace with
6095 stack slots. */
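/* Worked example (little-endian, hypothetical slot): for
X = (subreg:HI (reg:SI 70) 2) where pseudo 70's stack slot is
(mem:SI (plus (reg fp) (const_int -12))), the replacement is
(mem:HI (plus (reg fp) (const_int -10))): the SUBREG_BYTE is folded into
the address and the MEM is narrowed to HImode.  */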
6096
6097 static rtx
6098 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6099 enum reload_type type, int ind_levels, rtx insn)
6100 {
6101 int regno = REGNO (SUBREG_REG (x));
6102
6103 if (reg_equiv_memory_loc[regno])
6104 {
6105 /* If the address is not directly addressable, or if the address is not
6106 offsettable, then it must be replaced. */
6107 if (! force_replace
6108 && (reg_equiv_address[regno]
6109 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6110 force_replace = 1;
6111
6112 if (force_replace || num_not_at_initial_offset)
6113 {
6114 rtx tem = make_memloc (SUBREG_REG (x), regno);
6115
6116 /* If the address changes because of register elimination, then
6117 it must be replaced. */
6118 if (force_replace
6119 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6120 {
6121 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6122 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6123 int offset;
6124 rtx orig = tem;
6125 int reloaded;
6126
6127 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6128 hold the correct (negative) byte offset. */
6129 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6130 offset = inner_size - outer_size;
6131 else
6132 offset = SUBREG_BYTE (x);
6133
6134 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6135 PUT_MODE (tem, GET_MODE (x));
6136 if (MEM_OFFSET (tem))
6137 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6138 if (MEM_SIZE (tem)
6139 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6140 set_mem_size (tem, GEN_INT (outer_size));
6141
6142 /* If this was a paradoxical subreg that we replaced, the
6143 resulting memory must be sufficiently aligned to allow
6144 us to widen the mode of the memory. */
6145 if (outer_size > inner_size)
6146 {
6147 rtx base;
6148
6149 base = XEXP (tem, 0);
6150 if (GET_CODE (base) == PLUS)
6151 {
6152 if (CONST_INT_P (XEXP (base, 1))
6153 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6154 return x;
6155 base = XEXP (base, 0);
6156 }
6157 if (!REG_P (base)
6158 || (REGNO_POINTER_ALIGN (REGNO (base))
6159 < outer_size * BITS_PER_UNIT))
6160 return x;
6161 }
6162
6163 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6164 XEXP (tem, 0), &XEXP (tem, 0),
6165 opnum, type, ind_levels, insn);
6166 /* ??? Do we need to handle nonzero offsets somehow? */
6167 if (!offset && !rtx_equal_p (tem, orig))
6168 push_reg_equiv_alt_mem (regno, tem);
6169
6170 /* For some processors an address may be valid in the
6171 original mode but not in a smaller mode. For
6172 example, ARM accepts a scaled index register in
6173 SImode but not in HImode. Note that this is only
6174 a problem if the address in reg_equiv_mem is already
6175 invalid in the new mode; other cases would be fixed
6176 by find_reloads_address as usual.
6177
6178 ??? We attempt to handle such cases here by doing an
6179 additional reload of the full address after the
6180 usual processing by find_reloads_address. Note that
6181 this may not work in the general case, but it seems
6182 to cover the cases where this situation currently
6183 occurs. A more general fix might be to reload the
6184 *value* instead of the address, but this would not
6185 be expected by the callers of this routine as-is.
6186
6187 If find_reloads_address already completely replaced
6188 the address, there is nothing further to do. */
6189 if (reloaded == 0
6190 && reg_equiv_mem[regno] != 0
6191 && !strict_memory_address_addr_space_p
6192 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
6193 MEM_ADDR_SPACE (reg_equiv_mem[regno])))
6194 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6195 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6196 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6197 opnum, type);
6198
6199 /* If this is not a toplevel operand, find_reloads doesn't see
6200 this substitution. We have to emit a USE of the pseudo so
6201 that delete_output_reload can see it. */
6202 if (replace_reloads && recog_data.operand[opnum] != x)
6203 /* We mark the USE with QImode so that we recognize it
6204 as one that can be safely deleted at the end of
6205 reload. */
6206 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6207 SUBREG_REG (x)),
6208 insn), QImode);
6209 x = tem;
6210 }
6211 }
6212 }
6213 return x;
6214 }
6215 \f
6216 /* Substitute into the current INSN the registers into which we have reloaded
6217 the things that need reloading. The array `replacements'
6218 contains the locations of all pointers that must be changed
6219 and says what to replace them with. */
6222
6223 void
6224 subst_reloads (rtx insn)
6225 {
6226 int i;
6227
6228 for (i = 0; i < n_replacements; i++)
6229 {
6230 struct replacement *r = &replacements[i];
6231 rtx reloadreg = rld[r->what].reg_rtx;
6232 if (reloadreg)
6233 {
6234 #ifdef DEBUG_RELOAD
6235 /* This checking takes a very long time on some platforms
6236 causing the gcc.c-torture/compile/limits-fnargs.c test
6237 to time out during testing. See PR 31850.
6238
6239 Internal consistency test. Check that we don't modify
6240 anything in the equivalence arrays. Whenever something from
6241 those arrays needs to be reloaded, it must be unshared before
6242 being substituted into; the equivalence must not be modified.
6243 Otherwise, if the equivalence is used after that, it will
6244 have been modified, and the thing substituted (probably a
6245 register) is likely overwritten and not a usable equivalence. */
6246 int check_regno;
6247
6248 for (check_regno = 0; check_regno < max_regno; check_regno++)
6249 {
6250 #define CHECK_MODF(ARRAY) \
6251 gcc_assert (!ARRAY[check_regno] \
6252 || !loc_mentioned_in_p (r->where, \
6253 ARRAY[check_regno]))
6254
6255 CHECK_MODF (reg_equiv_constant);
6256 CHECK_MODF (reg_equiv_memory_loc);
6257 CHECK_MODF (reg_equiv_address);
6258 CHECK_MODF (reg_equiv_mem);
6259 #undef CHECK_MODF
6260 }
6261 #endif /* DEBUG_RELOAD */
6262
6263 /* If we're replacing a LABEL_REF with a register, there must
6264 already be an indication (to e.g. flow) which label this
6265 register refers to. */
6266 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6267 || !JUMP_P (insn)
6268 || find_reg_note (insn,
6269 REG_LABEL_OPERAND,
6270 XEXP (*r->where, 0))
6271 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6272
6273 /* Encapsulate RELOADREG so its machine mode matches what
6274 used to be there. Note that gen_lowpart_common will
6275 do the wrong thing if RELOADREG is multi-word. RELOADREG
6276 will always be a REG here. */
6277 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6278 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6279
6280 /* If we are putting this into a SUBREG and RELOADREG is a
6281 SUBREG, we would be making nested SUBREGs, so we have to fix
6282 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6283
6284 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6285 {
6286 if (GET_MODE (*r->subreg_loc)
6287 == GET_MODE (SUBREG_REG (reloadreg)))
6288 *r->subreg_loc = SUBREG_REG (reloadreg);
6289 else
6290 {
6291 int final_offset =
6292 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6293
6294 /* When working with SUBREGs the rule is that the byte
6295 offset must be a multiple of the SUBREG's mode size. */
6296 final_offset = (final_offset /
6297 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6298 final_offset = (final_offset *
6299 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6300
6301 *r->where = SUBREG_REG (reloadreg);
6302 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6303 }
6304 }
6305 else
6306 *r->where = reloadreg;
6307 }
6308 /* If reload got no reg and isn't optional, something's wrong. */
6309 else
6310 gcc_assert (rld[r->what].optional);
6311 }
6312 }
6313 \f
6314 /* Make a copy of any replacements being done into X and move those
6315 copies to locations in Y, a copy of X. */
6316
6317 void
6318 copy_replacements (rtx x, rtx y)
6319 {
6320 /* We can't support X being a SUBREG because we might then need to know its
6321 location if something inside it was replaced. */
6322 gcc_assert (GET_CODE (x) != SUBREG);
6323
6324 copy_replacements_1 (&x, &y, n_replacements);
6325 }
6326
6327 static void
6328 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6329 {
6330 int i, j;
6331 rtx x, y;
6332 struct replacement *r;
6333 enum rtx_code code;
6334 const char *fmt;
6335
6336 for (j = 0; j < orig_replacements; j++)
6337 {
6338 if (replacements[j].subreg_loc == px)
6339 {
6340 r = &replacements[n_replacements++];
6341 r->where = replacements[j].where;
6342 r->subreg_loc = py;
6343 r->what = replacements[j].what;
6344 r->mode = replacements[j].mode;
6345 }
6346 else if (replacements[j].where == px)
6347 {
6348 r = &replacements[n_replacements++];
6349 r->where = py;
6350 r->subreg_loc = 0;
6351 r->what = replacements[j].what;
6352 r->mode = replacements[j].mode;
6353 }
6354 }
6355
6356 x = *px;
6357 y = *py;
6358 code = GET_CODE (x);
6359 fmt = GET_RTX_FORMAT (code);
6360
6361 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6362 {
6363 if (fmt[i] == 'e')
6364 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6365 else if (fmt[i] == 'E')
6366 for (j = XVECLEN (x, i); --j >= 0; )
6367 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6368 orig_replacements);
6369 }
6370 }
6371
6372 /* Change any replacements being done to *X to be done to *Y. */
6373
6374 void
6375 move_replacements (rtx *x, rtx *y)
6376 {
6377 int i;
6378
6379 for (i = 0; i < n_replacements; i++)
6380 if (replacements[i].subreg_loc == x)
6381 replacements[i].subreg_loc = y;
6382 else if (replacements[i].where == x)
6383 {
6384 replacements[i].where = y;
6385 replacements[i].subreg_loc = 0;
6386 }
6387 }
6388 \f
6389 /* If LOC was scheduled to be replaced by something, return the replacement.
6390 Otherwise, return *LOC. */
6391
6392 rtx
6393 find_replacement (rtx *loc)
6394 {
6395 struct replacement *r;
6396
6397 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6398 {
6399 rtx reloadreg = rld[r->what].reg_rtx;
6400
6401 if (reloadreg && r->where == loc)
6402 {
6403 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6404 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6405
6406 return reloadreg;
6407 }
6408 else if (reloadreg && r->subreg_loc == loc)
6409 {
6410 /* RELOADREG must be either a REG or a SUBREG.
6411
6412 ??? Is it actually still ever a SUBREG? If so, why? */
6413
6414 if (REG_P (reloadreg))
6415 return gen_rtx_REG (GET_MODE (*loc),
6416 (REGNO (reloadreg) +
6417 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6418 GET_MODE (SUBREG_REG (*loc)),
6419 SUBREG_BYTE (*loc),
6420 GET_MODE (*loc))));
6421 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6422 return reloadreg;
6423 else
6424 {
6425 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6426
6427 /* When working with SUBREGs the rule is that the byte
6428 offset must be a multiple of the SUBREG's mode size. */
6429 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6430 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6431 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6432 final_offset);
6433 }
6434 }
6435 }
6436
6437 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6438 what's inside and make a new rtl if so. */
6439 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6440 || GET_CODE (*loc) == MULT)
6441 {
6442 rtx x = find_replacement (&XEXP (*loc, 0));
6443 rtx y = find_replacement (&XEXP (*loc, 1));
6444
6445 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6446 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6447 }
6448
6449 return *loc;
6450 }
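
/* A minimal usage sketch, assuming ADDR_LOC is a hypothetical pointer to a
   location inside the current insn for which a replacement may have been
   recorded: find_replacement lets address-rewriting code see the operand as
   it will look once subst_reloads has run.  */
#if 0
  rtx effective = find_replacement (addr_loc);
  /* EFFECTIVE is either *ADDR_LOC itself or the reload register that will
     be substituted there.  */
#endif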
6451 \f
6452 /* Return nonzero if register in range [REGNO, ENDREGNO)
6453 appears either explicitly or implicitly in X
6454 other than being stored into (except for earlyclobber operands).
6455
6456 References contained within the substructure at LOC do not count.
6457 LOC may be zero, meaning don't ignore anything.
6458
6459 This is similar to refers_to_regno_p in rtlanal.c except that we
6460 look at equivalences for pseudos that didn't get hard registers. */
6461
6462 static int
6463 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6464 rtx x, rtx *loc)
6465 {
6466 int i;
6467 unsigned int r;
6468 RTX_CODE code;
6469 const char *fmt;
6470
6471 if (x == 0)
6472 return 0;
6473
6474 repeat:
6475 code = GET_CODE (x);
6476
6477 switch (code)
6478 {
6479 case REG:
6480 r = REGNO (x);
6481
6482 /* If this is a pseudo, a hard register must not have been allocated.
6483 X must therefore either be a constant or be in memory. */
6484 if (r >= FIRST_PSEUDO_REGISTER)
6485 {
6486 if (reg_equiv_memory_loc[r])
6487 return refers_to_regno_for_reload_p (regno, endregno,
6488 reg_equiv_memory_loc[r],
6489 (rtx*) 0);
6490
6491 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6492 return 0;
6493 }
6494
6495 return (endregno > r
6496 && regno < r + (r < FIRST_PSEUDO_REGISTER
6497 ? hard_regno_nregs[r][GET_MODE (x)]
6498 : 1));
6499
6500 case SUBREG:
6501 /* If this is a SUBREG of a hard reg, we can see exactly which
6502 registers are being modified. Otherwise, handle normally. */
6503 if (REG_P (SUBREG_REG (x))
6504 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6505 {
6506 unsigned int inner_regno = subreg_regno (x);
6507 unsigned int inner_endregno
6508 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6509 ? subreg_nregs (x) : 1);
6510
6511 return endregno > inner_regno && regno < inner_endregno;
6512 }
6513 break;
6514
6515 case CLOBBER:
6516 case SET:
6517 if (&SET_DEST (x) != loc
6518 /* Note setting a SUBREG counts as referring to the REG it is in for
6519 a pseudo but not for hard registers since we can
6520 treat each word individually. */
6521 && ((GET_CODE (SET_DEST (x)) == SUBREG
6522 && loc != &SUBREG_REG (SET_DEST (x))
6523 && REG_P (SUBREG_REG (SET_DEST (x)))
6524 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6525 && refers_to_regno_for_reload_p (regno, endregno,
6526 SUBREG_REG (SET_DEST (x)),
6527 loc))
6528 /* If the output is an earlyclobber operand, this is
6529 a conflict. */
6530 || ((!REG_P (SET_DEST (x))
6531 || earlyclobber_operand_p (SET_DEST (x)))
6532 && refers_to_regno_for_reload_p (regno, endregno,
6533 SET_DEST (x), loc))))
6534 return 1;
6535
6536 if (code == CLOBBER || loc == &SET_SRC (x))
6537 return 0;
6538 x = SET_SRC (x);
6539 goto repeat;
6540
6541 default:
6542 break;
6543 }
6544
6545 /* X does not match, so try its subexpressions. */
6546
6547 fmt = GET_RTX_FORMAT (code);
6548 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6549 {
6550 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6551 {
6552 if (i == 0)
6553 {
6554 x = XEXP (x, 0);
6555 goto repeat;
6556 }
6557 else
6558 if (refers_to_regno_for_reload_p (regno, endregno,
6559 XEXP (x, i), loc))
6560 return 1;
6561 }
6562 else if (fmt[i] == 'E')
6563 {
6564 int j;
6565 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6566 if (loc != &XVECEXP (x, i, j)
6567 && refers_to_regno_for_reload_p (regno, endregno,
6568 XVECEXP (x, i, j), loc))
6569 return 1;
6570 }
6571 }
6572 return 0;
6573 }
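
/* For illustration of the REG case above: the ranges are half-open, so with
   REGNO == 2 and ENDREGNO == 4, a two-register hard value starting at
   register 3 overlaps (4 > 3 and 2 < 3 + 2), while a single register 4
   does not (4 > 4 is false).  */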
6574
6575 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6576 we check if any register number in X conflicts with the relevant register
6577 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6578 contains a MEM (we don't bother checking for memory addresses that can't
6579    conflict because we expect this to be a rare case).
6580
6581 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6582 that we look at equivalences for pseudos that didn't get hard registers. */
6583
6584 int
6585 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6586 {
6587 int regno, endregno;
6588
6589 /* Overly conservative. */
6590 if (GET_CODE (x) == STRICT_LOW_PART
6591 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6592 x = XEXP (x, 0);
6593
6594   /* If either argument is a constant, then modifying X cannot affect IN.  */
6595 if (CONSTANT_P (x) || CONSTANT_P (in))
6596 return 0;
6597 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6598 return refers_to_mem_for_reload_p (in);
6599 else if (GET_CODE (x) == SUBREG)
6600 {
6601 regno = REGNO (SUBREG_REG (x));
6602 if (regno < FIRST_PSEUDO_REGISTER)
6603 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6604 GET_MODE (SUBREG_REG (x)),
6605 SUBREG_BYTE (x),
6606 GET_MODE (x));
6607 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6608 ? subreg_nregs (x) : 1);
6609
6610 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6611 }
6612 else if (REG_P (x))
6613 {
6614 regno = REGNO (x);
6615
6616 /* If this is a pseudo, it must not have been assigned a hard register.
6617 Therefore, it must either be in memory or be a constant. */
6618
6619 if (regno >= FIRST_PSEUDO_REGISTER)
6620 {
6621 if (reg_equiv_memory_loc[regno])
6622 return refers_to_mem_for_reload_p (in);
6623 gcc_assert (reg_equiv_constant[regno]);
6624 return 0;
6625 }
6626
6627 endregno = END_HARD_REGNO (x);
6628
6629 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6630 }
6631 else if (MEM_P (x))
6632 return refers_to_mem_for_reload_p (in);
6633 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6634 || GET_CODE (x) == CC0)
6635 return reg_mentioned_p (x, in);
6636 else
6637 {
6638 gcc_assert (GET_CODE (x) == PLUS);
6639
6640 /* We actually want to know if X is mentioned somewhere inside IN.
6641 We must not say that (plus (sp) (const_int 124)) is in
6642 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6643 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6644 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6645 while (MEM_P (in))
6646 in = XEXP (in, 0);
6647 if (REG_P (in))
6648 return 0;
6649 else if (GET_CODE (in) == PLUS)
6650 return (rtx_equal_p (x, in)
6651 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6652 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6653 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6654 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6655 }
6656
6657 gcc_unreachable ();
6658 }
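
/* For illustration of the PLUS case above, with
   X == (plus (reg sp) (const_int 124)): the function returns 1 for
   IN == (mem (plus (reg sp) (const_int 124))), but 0 for
   IN == (plus (reg sp) (const_int 64)) and 0 for IN == (reg sp), which is
   what the comment about RELOAD_FOR_OUTPUT_ADDRESS reloads relies on.  */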
6659
6660 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6661 registers. */
6662
6663 static int
6664 refers_to_mem_for_reload_p (rtx x)
6665 {
6666 const char *fmt;
6667 int i;
6668
6669 if (MEM_P (x))
6670 return 1;
6671
6672 if (REG_P (x))
6673 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6674 && reg_equiv_memory_loc[REGNO (x)]);
6675
6676 fmt = GET_RTX_FORMAT (GET_CODE (x));
6677 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6678 if (fmt[i] == 'e'
6679 && (MEM_P (XEXP (x, i))
6680 || refers_to_mem_for_reload_p (XEXP (x, i))))
6681 return 1;
6682
6683 return 0;
6684 }
6685 \f
6686 /* Check the insns before INSN to see if there is a suitable register
6687 containing the same value as GOAL.
6688 If OTHER is -1, look for a register in class RCLASS.
6689 Otherwise, just see if register number OTHER shares GOAL's value.
6690
6691 Return an rtx for the register found, or zero if none is found.
6692
6693 If RELOAD_REG_P is (short *)1,
6694 we reject any hard reg that appears in reload_reg_rtx
6695 because such a hard reg is also needed coming into this insn.
6696
6697 If RELOAD_REG_P is any other nonzero value,
6698 it is a vector indexed by hard reg number
6699 and we reject any hard reg whose element in the vector is nonnegative
6700 as well as any that appears in reload_reg_rtx.
6701
6702 If GOAL is zero, then GOALREG is a register number; we look
6703 for an equivalent for that register.
6704
6705 MODE is the machine mode of the value we want an equivalence for.
6706    If GOAL is nonzero and its mode is not VOIDmode, it must have mode MODE.
6707
6708 This function is used by jump.c as well as in the reload pass.
6709
6710 If GOAL is the sum of the stack pointer and a constant, we treat it
6711 as if it were a constant except that sp is required to be unchanging. */
6712
6713 rtx
6714 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6715 short *reload_reg_p, int goalreg, enum machine_mode mode)
6716 {
6717 rtx p = insn;
6718 rtx goaltry, valtry, value, where;
6719 rtx pat;
6720 int regno = -1;
6721 int valueno;
6722 int goal_mem = 0;
6723 int goal_const = 0;
6724 int goal_mem_addr_varies = 0;
6725 int need_stable_sp = 0;
6726 int nregs;
6727 int valuenregs;
6728 int num = 0;
6729
6730 if (goal == 0)
6731 regno = goalreg;
6732 else if (REG_P (goal))
6733 regno = REGNO (goal);
6734 else if (MEM_P (goal))
6735 {
6736 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6737 if (MEM_VOLATILE_P (goal))
6738 return 0;
6739 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6740 return 0;
6741 /* An address with side effects must be reexecuted. */
6742 switch (code)
6743 {
6744 case POST_INC:
6745 case PRE_INC:
6746 case POST_DEC:
6747 case PRE_DEC:
6748 case POST_MODIFY:
6749 case PRE_MODIFY:
6750 return 0;
6751 default:
6752 break;
6753 }
6754 goal_mem = 1;
6755 }
6756 else if (CONSTANT_P (goal))
6757 goal_const = 1;
6758 else if (GET_CODE (goal) == PLUS
6759 && XEXP (goal, 0) == stack_pointer_rtx
6760 && CONSTANT_P (XEXP (goal, 1)))
6761 goal_const = need_stable_sp = 1;
6762 else if (GET_CODE (goal) == PLUS
6763 && XEXP (goal, 0) == frame_pointer_rtx
6764 && CONSTANT_P (XEXP (goal, 1)))
6765 goal_const = 1;
6766 else
6767 return 0;
6768
6769 num = 0;
6770 /* Scan insns back from INSN, looking for one that copies
6771 a value into or out of GOAL.
6772 Stop and give up if we reach a label. */
6773
6774 while (1)
6775 {
6776 p = PREV_INSN (p);
6777 if (p && DEBUG_INSN_P (p))
6778 continue;
6779 num++;
6780 if (p == 0 || LABEL_P (p)
6781 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6782 return 0;
6783
6784 if (NONJUMP_INSN_P (p)
6785 /* If we don't want spill regs ... */
6786 && (! (reload_reg_p != 0
6787 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6788 /* ... then ignore insns introduced by reload; they aren't
6789 useful and can cause results in reload_as_needed to be
6790 different from what they were when calculating the need for
6791 spills. If we notice an input-reload insn here, we will
6792 reject it below, but it might hide a usable equivalent.
6793 That makes bad code. It may even fail: perhaps no reg was
6794 spilled for this insn because it was assumed we would find
6795 that equivalent. */
6796 || INSN_UID (p) < reload_first_uid))
6797 {
6798 rtx tem;
6799 pat = single_set (p);
6800
6801 /* First check for something that sets some reg equal to GOAL. */
6802 if (pat != 0
6803 && ((regno >= 0
6804 && true_regnum (SET_SRC (pat)) == regno
6805 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6806 ||
6807 (regno >= 0
6808 && true_regnum (SET_DEST (pat)) == regno
6809 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6810 ||
6811 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6812 /* When looking for stack pointer + const,
6813 make sure we don't use a stack adjust. */
6814 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6815 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6816 || (goal_mem
6817 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6818 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6819 || (goal_mem
6820 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6821 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6822 /* If we are looking for a constant,
6823 and something equivalent to that constant was copied
6824 into a reg, we can use that reg. */
6825 || (goal_const && REG_NOTES (p) != 0
6826 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6827 && ((rtx_equal_p (XEXP (tem, 0), goal)
6828 && (valueno
6829 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6830 || (REG_P (SET_DEST (pat))
6831 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6832 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6833 && CONST_INT_P (goal)
6834 && 0 != (goaltry
6835 = operand_subword (XEXP (tem, 0), 0, 0,
6836 VOIDmode))
6837 && rtx_equal_p (goal, goaltry)
6838 && (valtry
6839 = operand_subword (SET_DEST (pat), 0, 0,
6840 VOIDmode))
6841 && (valueno = true_regnum (valtry)) >= 0)))
6842 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6843 NULL_RTX))
6844 && REG_P (SET_DEST (pat))
6845 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6846 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6847 && CONST_INT_P (goal)
6848 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6849 VOIDmode))
6850 && rtx_equal_p (goal, goaltry)
6851 && (valtry
6852 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6853 && (valueno = true_regnum (valtry)) >= 0)))
6854 {
6855 if (other >= 0)
6856 {
6857 if (valueno != other)
6858 continue;
6859 }
6860 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6861 continue;
6862 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6863 mode, valueno))
6864 continue;
6865 value = valtry;
6866 where = p;
6867 break;
6868 }
6869 }
6870 }
6871
6872 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6873 (or copying VALUE into GOAL, if GOAL is also a register).
6874 Now verify that VALUE is really valid. */
6875
6876 /* VALUENO is the register number of VALUE; a hard register. */
6877
6878 /* Don't try to re-use something that is killed in this insn. We want
6879 to be able to trust REG_UNUSED notes. */
6880 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6881 return 0;
6882
6883 /* If we propose to get the value from the stack pointer or if GOAL is
6884 a MEM based on the stack pointer, we need a stable SP. */
6885 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6886 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6887 goal)))
6888 need_stable_sp = 1;
6889
6890 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6891 if (GET_MODE (value) != mode)
6892 return 0;
6893
6894 /* Reject VALUE if it was loaded from GOAL
6895 and is also a register that appears in the address of GOAL. */
6896
6897 if (goal_mem && value == SET_DEST (single_set (where))
6898 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6899 goal, (rtx*) 0))
6900 return 0;
6901
6902 /* Reject registers that overlap GOAL. */
6903
6904 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6905 nregs = hard_regno_nregs[regno][mode];
6906 else
6907 nregs = 1;
6908 valuenregs = hard_regno_nregs[valueno][mode];
6909
6910 if (!goal_mem && !goal_const
6911 && regno + nregs > valueno && regno < valueno + valuenregs)
6912 return 0;
6913
6914 /* Reject VALUE if it is one of the regs reserved for reloads.
6915 Reload1 knows how to reuse them anyway, and it would get
6916 confused if we allocated one without its knowledge.
6917 (Now that insns introduced by reload are ignored above,
6918 this case shouldn't happen, but I'm not positive.) */
6919
6920 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6921 {
6922 int i;
6923 for (i = 0; i < valuenregs; ++i)
6924 if (reload_reg_p[valueno + i] >= 0)
6925 return 0;
6926 }
6927
6928 /* Reject VALUE if it is a register being used for an input reload
6929 even if it is not one of those reserved. */
6930
6931 if (reload_reg_p != 0)
6932 {
6933 int i;
6934 for (i = 0; i < n_reloads; i++)
6935 if (rld[i].reg_rtx != 0 && rld[i].in)
6936 {
6937 int regno1 = REGNO (rld[i].reg_rtx);
6938 int nregs1 = hard_regno_nregs[regno1]
6939 [GET_MODE (rld[i].reg_rtx)];
6940 if (regno1 < valueno + valuenregs
6941 && regno1 + nregs1 > valueno)
6942 return 0;
6943 }
6944 }
6945
6946 if (goal_mem)
6947     /* We must treat the frame pointer as varying here, since it can
6948        vary, for example in a nonlocal goto as generated by expand_goto.  */
6949 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6950
6951 /* Now verify that the values of GOAL and VALUE remain unaltered
6952 until INSN is reached. */
6953
6954 p = insn;
6955 while (1)
6956 {
6957 p = PREV_INSN (p);
6958 if (p == where)
6959 return value;
6960
6961 /* Don't trust the conversion past a function call
6962 if either of the two is in a call-clobbered register, or memory. */
6963 if (CALL_P (p))
6964 {
6965 int i;
6966
6967 if (goal_mem || need_stable_sp)
6968 return 0;
6969
6970 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6971 for (i = 0; i < nregs; ++i)
6972 if (call_used_regs[regno + i]
6973 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6974 return 0;
6975
6976 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6977 for (i = 0; i < valuenregs; ++i)
6978 if (call_used_regs[valueno + i]
6979 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6980 return 0;
6981 }
6982
6983 if (INSN_P (p))
6984 {
6985 pat = PATTERN (p);
6986
6987 /* Watch out for unspec_volatile, and volatile asms. */
6988 if (volatile_insn_p (pat))
6989 return 0;
6990
6991 /* If this insn P stores in either GOAL or VALUE, return 0.
6992 If GOAL is a memory ref and this insn writes memory, return 0.
6993 If GOAL is a memory ref and its address is not constant,
6994 and this insn P changes a register used in GOAL, return 0. */
6995
6996 if (GET_CODE (pat) == COND_EXEC)
6997 pat = COND_EXEC_CODE (pat);
6998 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6999 {
7000 rtx dest = SET_DEST (pat);
7001 while (GET_CODE (dest) == SUBREG
7002 || GET_CODE (dest) == ZERO_EXTRACT
7003 || GET_CODE (dest) == STRICT_LOW_PART)
7004 dest = XEXP (dest, 0);
7005 if (REG_P (dest))
7006 {
7007 int xregno = REGNO (dest);
7008 int xnregs;
7009 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7010 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7011 else
7012 xnregs = 1;
7013 if (xregno < regno + nregs && xregno + xnregs > regno)
7014 return 0;
7015 if (xregno < valueno + valuenregs
7016 && xregno + xnregs > valueno)
7017 return 0;
7018 if (goal_mem_addr_varies
7019 && reg_overlap_mentioned_for_reload_p (dest, goal))
7020 return 0;
7021 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7022 return 0;
7023 }
7024 else if (goal_mem && MEM_P (dest)
7025 && ! push_operand (dest, GET_MODE (dest)))
7026 return 0;
7027 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7028 && reg_equiv_memory_loc[regno] != 0)
7029 return 0;
7030 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7031 return 0;
7032 }
7033 else if (GET_CODE (pat) == PARALLEL)
7034 {
7035 int i;
7036 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7037 {
7038 rtx v1 = XVECEXP (pat, 0, i);
7039 if (GET_CODE (v1) == COND_EXEC)
7040 v1 = COND_EXEC_CODE (v1);
7041 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7042 {
7043 rtx dest = SET_DEST (v1);
7044 while (GET_CODE (dest) == SUBREG
7045 || GET_CODE (dest) == ZERO_EXTRACT
7046 || GET_CODE (dest) == STRICT_LOW_PART)
7047 dest = XEXP (dest, 0);
7048 if (REG_P (dest))
7049 {
7050 int xregno = REGNO (dest);
7051 int xnregs;
7052 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7053 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7054 else
7055 xnregs = 1;
7056 if (xregno < regno + nregs
7057 && xregno + xnregs > regno)
7058 return 0;
7059 if (xregno < valueno + valuenregs
7060 && xregno + xnregs > valueno)
7061 return 0;
7062 if (goal_mem_addr_varies
7063 && reg_overlap_mentioned_for_reload_p (dest,
7064 goal))
7065 return 0;
7066 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7067 return 0;
7068 }
7069 else if (goal_mem && MEM_P (dest)
7070 && ! push_operand (dest, GET_MODE (dest)))
7071 return 0;
7072 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7073 && reg_equiv_memory_loc[regno] != 0)
7074 return 0;
7075 else if (need_stable_sp
7076 && push_operand (dest, GET_MODE (dest)))
7077 return 0;
7078 }
7079 }
7080 }
7081
7082 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7083 {
7084 rtx link;
7085
7086 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7087 link = XEXP (link, 1))
7088 {
7089 pat = XEXP (link, 0);
7090 if (GET_CODE (pat) == CLOBBER)
7091 {
7092 rtx dest = SET_DEST (pat);
7093
7094 if (REG_P (dest))
7095 {
7096 int xregno = REGNO (dest);
7097 int xnregs
7098 = hard_regno_nregs[xregno][GET_MODE (dest)];
7099
7100 if (xregno < regno + nregs
7101 && xregno + xnregs > regno)
7102 return 0;
7103 else if (xregno < valueno + valuenregs
7104 && xregno + xnregs > valueno)
7105 return 0;
7106 else if (goal_mem_addr_varies
7107 && reg_overlap_mentioned_for_reload_p (dest,
7108 goal))
7109 return 0;
7110 }
7111
7112 else if (goal_mem && MEM_P (dest)
7113 && ! push_operand (dest, GET_MODE (dest)))
7114 return 0;
7115 else if (need_stable_sp
7116 && push_operand (dest, GET_MODE (dest)))
7117 return 0;
7118 }
7119 }
7120 }
7121
7122 #ifdef AUTO_INC_DEC
7123 /* If this insn auto-increments or auto-decrements
7124 either regno or valueno, return 0 now.
7125 If GOAL is a memory ref and its address is not constant,
7126 and this insn P increments a register used in GOAL, return 0. */
7127 {
7128 rtx link;
7129
7130 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7131 if (REG_NOTE_KIND (link) == REG_INC
7132 && REG_P (XEXP (link, 0)))
7133 {
7134 int incno = REGNO (XEXP (link, 0));
7135 if (incno < regno + nregs && incno >= regno)
7136 return 0;
7137 if (incno < valueno + valuenregs && incno >= valueno)
7138 return 0;
7139 if (goal_mem_addr_varies
7140 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7141 goal))
7142 return 0;
7143 }
7144 }
7145 #endif
7146 }
7147 }
7148 }
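
/* A minimal usage sketch, with GOAL, INSN, RCLASS and MODE assumed to be
   suitable values in scope: a typical query asks whether some hard register
   in class RCLASS already holds the value of GOAL just before INSN.  */
#if 0
  rtx equiv = find_equiv_reg (goal, insn, rclass, -1, NULL, 0, mode);

  /* If EQUIV is nonzero, it is a hard register rtx of mode MODE, in class
     RCLASS, whose contents equal GOAL just before INSN, so GOAL need not
     be reloaded into a fresh register.  */
#endif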
7149 \f
7150 /* Find a place where INCED appears in an increment or decrement operator
7151 within X, and return the amount INCED is incremented or decremented by.
7152 The value is always positive. */
7153
7154 static int
7155 find_inc_amount (rtx x, rtx inced)
7156 {
7157 enum rtx_code code = GET_CODE (x);
7158 const char *fmt;
7159 int i;
7160
7161 if (code == MEM)
7162 {
7163 rtx addr = XEXP (x, 0);
7164 if ((GET_CODE (addr) == PRE_DEC
7165 || GET_CODE (addr) == POST_DEC
7166 || GET_CODE (addr) == PRE_INC
7167 || GET_CODE (addr) == POST_INC)
7168 && XEXP (addr, 0) == inced)
7169 return GET_MODE_SIZE (GET_MODE (x));
7170 else if ((GET_CODE (addr) == PRE_MODIFY
7171 || GET_CODE (addr) == POST_MODIFY)
7172 && GET_CODE (XEXP (addr, 1)) == PLUS
7173 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7174 && XEXP (addr, 0) == inced
7175 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7176 {
7177 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7178 return i < 0 ? -i : i;
7179 }
7180 }
7181
7182 fmt = GET_RTX_FORMAT (code);
7183 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7184 {
7185 if (fmt[i] == 'e')
7186 {
7187 int tem = find_inc_amount (XEXP (x, i), inced);
7188 if (tem != 0)
7189 return tem;
7190 }
7191 if (fmt[i] == 'E')
7192 {
7193 int j;
7194 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7195 {
7196 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7197 if (tem != 0)
7198 return tem;
7199 }
7200 }
7201 }
7202
7203 return 0;
7204 }
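
/* For illustration: if INCED is (reg R), then for
   X == (mem:SI (post_inc (reg R))) the function returns 4, the size of
   SImode, while for X == (mem:SI (pre_modify (reg R)
   (plus (reg R) (const_int -12)))) it returns 12, the absolute value of
   the constant adjustment.  */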
7205 \f
7206 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7207    of a REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7208
7209 #ifdef AUTO_INC_DEC
7210 static int
7211 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7212 rtx insn)
7213 {
7214 rtx link;
7215
7216 gcc_assert (insn);
7217
7218 if (! INSN_P (insn))
7219 return 0;
7220
7221 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7222 if (REG_NOTE_KIND (link) == REG_INC)
7223 {
7224 unsigned int test = (int) REGNO (XEXP (link, 0));
7225 if (test >= regno && test < endregno)
7226 return 1;
7227 }
7228 return 0;
7229 }
7230 #else
7231
7232 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7233
7234 #endif
7235
7236 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7237 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7238 REG_INC. REGNO must refer to a hard register. */
7239
7240 int
7241 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7242 int sets)
7243 {
7244 unsigned int nregs, endregno;
7245
7246 /* regno must be a hard register. */
7247 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7248
7249 nregs = hard_regno_nregs[regno][mode];
7250 endregno = regno + nregs;
7251
7252 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7253 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7254 && REG_P (XEXP (PATTERN (insn), 0)))
7255 {
7256 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7257
7258 return test >= regno && test < endregno;
7259 }
7260
7261 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7262 return 1;
7263
7264 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7265 {
7266 int i = XVECLEN (PATTERN (insn), 0) - 1;
7267
7268 for (; i >= 0; i--)
7269 {
7270 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7271 if ((GET_CODE (elt) == CLOBBER
7272 	       || (sets == 1 && GET_CODE (elt) == SET))
7273 && REG_P (XEXP (elt, 0)))
7274 {
7275 unsigned int test = REGNO (XEXP (elt, 0));
7276
7277 if (test >= regno && test < endregno)
7278 return 1;
7279 }
7280 if (sets == 2
7281 && reg_inc_found_and_valid_p (regno, endregno, elt))
7282 return 1;
7283 }
7284 }
7285
7286 return 0;
7287 }
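
/* For illustration: for an insn whose pattern is (clobber (reg:SI 1)),
   regno_clobbered_p (1, insn, SImode, 0) returns 1; for a pattern
   (set (reg:SI 1) (reg:SI 2)) it returns 1 only when SETS is 1; and with
   SETS == 2, registers mentioned in a REG_INC note on the insn are also
   reported (on targets where AUTO_INC_DEC is defined).  */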
7288
7289 /* Find the low part, with mode MODE, of the hard register RELOADREG.  */
7290 rtx
7291 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7292 {
7293 int regno;
7294
7295 if (GET_MODE (reloadreg) == mode)
7296 return reloadreg;
7297
7298 regno = REGNO (reloadreg);
7299
7300 if (WORDS_BIG_ENDIAN)
7301 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7302 - (int) hard_regno_nregs[regno][mode];
7303
7304 return gen_rtx_REG (mode, regno);
7305 }
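
/* For illustration, assuming word-sized hard registers of 4 bytes on a
   WORDS_BIG_ENDIAN target: if RELOADREG is (reg:DI 4), occupying hard
   registers 4 and 5, then reload_adjust_reg_for_mode (reloadreg, SImode)
   returns (reg:SI 5), since the low SImode part lives in the second
   register of the pair; on a target where WORDS_BIG_ENDIAN is zero it
   returns (reg:SI 4).  */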
7306
7307 static const char *const reload_when_needed_name[] =
7308 {
7309 "RELOAD_FOR_INPUT",
7310 "RELOAD_FOR_OUTPUT",
7311 "RELOAD_FOR_INSN",
7312 "RELOAD_FOR_INPUT_ADDRESS",
7313 "RELOAD_FOR_INPADDR_ADDRESS",
7314 "RELOAD_FOR_OUTPUT_ADDRESS",
7315 "RELOAD_FOR_OUTADDR_ADDRESS",
7316 "RELOAD_FOR_OPERAND_ADDRESS",
7317 "RELOAD_FOR_OPADDR_ADDR",
7318 "RELOAD_OTHER",
7319 "RELOAD_FOR_OTHER_ADDRESS"
7320 };
7321
7322 /* These functions are used to print the variables set by 'find_reloads'.  */
7323
7324 DEBUG_FUNCTION void
7325 debug_reload_to_stream (FILE *f)
7326 {
7327 int r;
7328 const char *prefix;
7329
7330 if (! f)
7331 f = stderr;
7332 for (r = 0; r < n_reloads; r++)
7333 {
7334 fprintf (f, "Reload %d: ", r);
7335
7336 if (rld[r].in != 0)
7337 {
7338 fprintf (f, "reload_in (%s) = ",
7339 GET_MODE_NAME (rld[r].inmode));
7340 print_inline_rtx (f, rld[r].in, 24);
7341 fprintf (f, "\n\t");
7342 }
7343
7344 if (rld[r].out != 0)
7345 {
7346 fprintf (f, "reload_out (%s) = ",
7347 GET_MODE_NAME (rld[r].outmode));
7348 print_inline_rtx (f, rld[r].out, 24);
7349 fprintf (f, "\n\t");
7350 }
7351
7352 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7353
7354 fprintf (f, "%s (opnum = %d)",
7355 reload_when_needed_name[(int) rld[r].when_needed],
7356 rld[r].opnum);
7357
7358 if (rld[r].optional)
7359 fprintf (f, ", optional");
7360
7361 if (rld[r].nongroup)
7362 fprintf (f, ", nongroup");
7363
7364 if (rld[r].inc != 0)
7365 fprintf (f, ", inc by %d", rld[r].inc);
7366
7367 if (rld[r].nocombine)
7368 fprintf (f, ", can't combine");
7369
7370 if (rld[r].secondary_p)
7371 fprintf (f, ", secondary_reload_p");
7372
7373 if (rld[r].in_reg != 0)
7374 {
7375 fprintf (f, "\n\treload_in_reg: ");
7376 print_inline_rtx (f, rld[r].in_reg, 24);
7377 }
7378
7379 if (rld[r].out_reg != 0)
7380 {
7381 fprintf (f, "\n\treload_out_reg: ");
7382 print_inline_rtx (f, rld[r].out_reg, 24);
7383 }
7384
7385 if (rld[r].reg_rtx != 0)
7386 {
7387 fprintf (f, "\n\treload_reg_rtx: ");
7388 print_inline_rtx (f, rld[r].reg_rtx, 24);
7389 }
7390
7391 prefix = "\n\t";
7392 if (rld[r].secondary_in_reload != -1)
7393 {
7394 fprintf (f, "%ssecondary_in_reload = %d",
7395 prefix, rld[r].secondary_in_reload);
7396 prefix = ", ";
7397 }
7398
7399 if (rld[r].secondary_out_reload != -1)
7400 fprintf (f, "%ssecondary_out_reload = %d\n",
7401 prefix, rld[r].secondary_out_reload);
7402
7403 prefix = "\n\t";
7404 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7405 {
7406 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7407 insn_data[rld[r].secondary_in_icode].name);
7408 prefix = ", ";
7409 }
7410
7411 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7412 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7413 insn_data[rld[r].secondary_out_icode].name);
7414
7415 fprintf (f, "\n");
7416 }
7417 }
7418
7419 DEBUG_FUNCTION void
7420 debug_reload (void)
7421 {
7422 debug_reload_to_stream (stderr);
7423 }
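
/* Both functions above are intended to be called by hand from a debugger
   while stepping through reload; for example, from gdb:

       (gdb) call debug_reload ()

   prints the reloads currently recorded in 'rld' to stderr, in the same
   format that debug_reload_to_stream writes to an arbitrary stream.  */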