gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 	 2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
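     As an illustration only (reload1.c drives the real loop; the argument
     names in this sketch are made up, not the actual parameter names), the
     intended calling sequence looks roughly like:

	init_reload ();
	for each INSN that may need reloads:
	  find_reloads (INSN, replace, ind_levels, live_known, spill_regs);
	  ... choose a hard reg for each reload R, filling rld[R].reg_rtx,
	      and emit the needed load/store insns around INSN ...
	  subst_reloads (INSN);
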
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
114
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
122
123 /* True if RCLASS is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
125
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
128 {
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
132 }
133
134 \f
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
139
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
144
145 int reload_n_operands;
146
147 /* Replacing reloads.
148
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
153
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
156
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
159 {
160 rtx *where; /* Location to store in */
161 int what; /* which reload this is for */
162 enum machine_mode mode; /* mode it must have */
163 };
164
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
166
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
169
170 /* Used to track what is modified by an operand. */
171 struct decomposition
172 {
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
178 };
179
180 #ifdef SECONDARY_MEMORY_NEEDED
181
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
184
185 We keep two versions, before and after register elimination. The one
186 	 after register elimination is recorded separately for each operand.  This
187 	 is done in case the address is not valid, to be sure that we separately
188 reload each. */
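/* (SECONDARY_MEMORY_NEEDED is defined by targets on which values of some
   modes cannot be copied directly between certain pairs of register classes
   and must go through memory instead.  The arrays below cache one such
   memory location, normally a stack slot, per mode; see get_secondary_mem.)  */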
189
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
194
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
198
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
201
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
206
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
212
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
215
216 /* On return from push_reload, holds the reload-number for the OUT
217 	 operand, which can be different from that for the input operand.  */
218 static int output_reloadnum;
219
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
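/* Under MATCHES, two REGs are compared purely by register number; any other
   pair of rtxes must either be pointer-identical or structurally equal
   (rtx_equal_p) with X free of side effects, so two distinct expressions
   containing autoincrements, for example, never match.  */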
225
226 /* Indicates if two reload purposes are for similar enough things that we
227 can merge their reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
236
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
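/* For example, two RELOAD_FOR_INPUT reloads may always be merged, even for
   different operands, and the merged reload stays RELOAD_FOR_INPUT; two
   RELOAD_FOR_INPUT_ADDRESS reloads are mergeable only when they are for the
   same operand; and merging reloads of two different types always yields
   RELOAD_OTHER.  */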
244
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
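/* That is, an address reload for the address of a RELOAD_FOR_INPUT_ADDRESS
   reload is classified as RELOAD_FOR_INPADDR_ADDRESS, likewise for output
   addresses, and every other type is reused unchanged for its own address
   reloads.  */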
253
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
260 static void push_replacement (rtx *, int, enum machine_mode);
261 static void dup_replacements (rtx *, rtx *);
262 static void combine_reloads (void);
263 static int find_reusable_reload (rtx *, rtx, enum reg_class,
264 enum reload_type, int, int);
265 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
266 enum machine_mode, reg_class_t, int, int);
267 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
268 static struct decomposition decompose (rtx);
269 static int immune_p (rtx, rtx, struct decomposition);
270 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
271 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
272 int *);
273 static rtx make_memloc (rtx, int);
274 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
275 addr_space_t, rtx *);
276 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
277 int, enum reload_type, int, rtx);
278 static rtx subst_reg_equivs (rtx, rtx);
279 static rtx subst_indexed_address (rtx);
280 static void update_auto_inc_notes (rtx, int, int);
281 static int find_reloads_address_1 (enum machine_mode, rtx, int,
282 enum rtx_code, enum rtx_code, rtx *,
283 int, enum reload_type,int, rtx);
284 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
285 enum machine_mode, int,
286 enum reload_type, int);
287 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
288 int, rtx, int *);
289 static void copy_replacements_1 (rtx *, rtx *, int);
290 static int find_inc_amount (rtx, rtx);
291 static int refers_to_mem_for_reload_p (rtx);
292 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
293 rtx, rtx *);
294
295 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
296 list yet. */
297
298 static void
299 push_reg_equiv_alt_mem (int regno, rtx mem)
300 {
301 rtx it;
302
303 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
304 if (rtx_equal_p (XEXP (it, 0), mem))
305 return;
306
307 reg_equiv_alt_mem_list (regno)
308 = alloc_EXPR_LIST (REG_EQUIV, mem,
309 reg_equiv_alt_mem_list (regno));
310 }
311 \f
312 /* Determine if any secondary reloads are needed for loading (if IN_P is
313 nonzero) or storing (if IN_P is zero) X to or from a reload register of
314 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
315 are needed, push them.
316
317 Return the reload number of the secondary reload we made, or -1 if
318 we didn't need one. *PICODE is set to the insn_code to use if we do
319 need a secondary reload. */
320
321 static int
322 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
323 enum reg_class reload_class,
324 enum machine_mode reload_mode, enum reload_type type,
325 enum insn_code *picode, secondary_reload_info *prev_sri)
326 {
327 enum reg_class rclass = NO_REGS;
328 enum reg_class scratch_class;
329 enum machine_mode mode = reload_mode;
330 enum insn_code icode = CODE_FOR_nothing;
331 enum insn_code t_icode = CODE_FOR_nothing;
332 enum reload_type secondary_type;
333 int s_reload, t_reload = -1;
334 const char *scratch_constraint;
335 char letter;
336 secondary_reload_info sri;
337
338 if (type == RELOAD_FOR_INPUT_ADDRESS
339 || type == RELOAD_FOR_OUTPUT_ADDRESS
340 || type == RELOAD_FOR_INPADDR_ADDRESS
341 || type == RELOAD_FOR_OUTADDR_ADDRESS)
342 secondary_type = type;
343 else
344 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
345
346 *picode = CODE_FOR_nothing;
347
348 /* If X is a paradoxical SUBREG, use the inner value to determine both the
349 mode and object being reloaded. */
350 if (GET_CODE (x) == SUBREG
351 && (GET_MODE_SIZE (GET_MODE (x))
352 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
353 {
354 x = SUBREG_REG (x);
355 reload_mode = GET_MODE (x);
356 }
357
358 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
359 is still a pseudo-register by now, it *must* have an equivalent MEM
360 but we don't want to assume that), use that equivalent when seeing if
361 a secondary reload is needed since whether or not a reload is needed
362 might be sensitive to the form of the MEM. */
363
364 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
365 && reg_equiv_mem (REGNO (x)))
366 x = reg_equiv_mem (REGNO (x));
367
368 sri.icode = CODE_FOR_nothing;
369 sri.prev_sri = prev_sri;
370 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
371 reload_mode, &sri);
372 icode = (enum insn_code) sri.icode;
373
374 /* If we don't need any secondary registers, done. */
375 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
376 return -1;
377
378 if (rclass != NO_REGS)
379 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
380 reload_mode, type, &t_icode, &sri);
381
382 /* If we will be using an insn, the secondary reload is for a
383 scratch register. */
384
385 if (icode != CODE_FOR_nothing)
386 {
387 /* If IN_P is nonzero, the reload register will be the output in
388 operand 0. If IN_P is zero, the reload register will be the input
389 in operand 1. Outputs should have an initial "=", which we must
390 skip. */
391
392 /* ??? It would be useful to be able to handle only two, or more than
393 three, operands, but for now we can only handle the case of having
394 exactly three: output, input and one temp/scratch. */
395 gcc_assert (insn_data[(int) icode].n_operands == 3);
396
397 /* ??? We currently have no way to represent a reload that needs
398 an icode to reload from an intermediate tertiary reload register.
399 We should probably have a new field in struct reload to tag a
400 chain of scratch operand reloads onto. */
401 gcc_assert (rclass == NO_REGS);
402
403 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
404 gcc_assert (*scratch_constraint == '=');
405 scratch_constraint++;
406 if (*scratch_constraint == '&')
407 scratch_constraint++;
408 letter = *scratch_constraint;
409 scratch_class = (letter == 'r' ? GENERAL_REGS
410 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
411 scratch_constraint));
412
413 rclass = scratch_class;
414 mode = insn_data[(int) icode].operand[2].mode;
415 }
416
417 /* This case isn't valid, so fail. Reload is allowed to use the same
418 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
419 in the case of a secondary register, we actually need two different
420 registers for correct code. We fail here to prevent the possibility of
421 silently generating incorrect code later.
422
423 The convention is that secondary input reloads are valid only if the
424 secondary_class is different from class. If you have such a case, you
425 	 cannot use secondary reloads; you must work around the problem some
426 other way.
427
428 Allow this when a reload_in/out pattern is being used. I.e. assume
429 that the generated code handles this case. */
430
431 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
432 || t_icode != CODE_FOR_nothing);
433
434 /* See if we can reuse an existing secondary reload. */
435 for (s_reload = 0; s_reload < n_reloads; s_reload++)
436 if (rld[s_reload].secondary_p
437 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
438 || reg_class_subset_p (rld[s_reload].rclass, rclass))
439 && ((in_p && rld[s_reload].inmode == mode)
440 || (! in_p && rld[s_reload].outmode == mode))
441 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
442 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
443 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
444 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
445 && (small_register_class_p (rclass)
446 || targetm.small_register_classes_for_mode_p (VOIDmode))
447 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 {
450 if (in_p)
451 rld[s_reload].inmode = mode;
452 if (! in_p)
453 rld[s_reload].outmode = mode;
454
455 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
456 rld[s_reload].rclass = rclass;
457
458 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
459 rld[s_reload].optional &= optional;
460 rld[s_reload].secondary_p = 1;
461 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
462 opnum, rld[s_reload].opnum))
463 rld[s_reload].when_needed = RELOAD_OTHER;
464
465 break;
466 }
467
468 if (s_reload == n_reloads)
469 {
470 #ifdef SECONDARY_MEMORY_NEEDED
471 /* If we need a memory location to copy between the two reload regs,
472 set it up now. Note that we do the input case before making
473 the reload and the output case after. This is due to the
474 way reloads are output. */
475
476 if (in_p && icode == CODE_FOR_nothing
477 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
478 {
479 get_secondary_mem (x, reload_mode, opnum, type);
480
481 /* We may have just added new reloads. Make sure we add
482 the new reload at the end. */
483 s_reload = n_reloads;
484 }
485 #endif
486
487 /* We need to make a new secondary reload for this register class. */
488 rld[s_reload].in = rld[s_reload].out = 0;
489 rld[s_reload].rclass = rclass;
490
491 rld[s_reload].inmode = in_p ? mode : VOIDmode;
492 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
493 rld[s_reload].reg_rtx = 0;
494 rld[s_reload].optional = optional;
495 rld[s_reload].inc = 0;
496 /* Maybe we could combine these, but it seems too tricky. */
497 rld[s_reload].nocombine = 1;
498 rld[s_reload].in_reg = 0;
499 rld[s_reload].out_reg = 0;
500 rld[s_reload].opnum = opnum;
501 rld[s_reload].when_needed = secondary_type;
502 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
503 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
504 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
505 rld[s_reload].secondary_out_icode
506 = ! in_p ? t_icode : CODE_FOR_nothing;
507 rld[s_reload].secondary_p = 1;
508
509 n_reloads++;
510
511 #ifdef SECONDARY_MEMORY_NEEDED
512 if (! in_p && icode == CODE_FOR_nothing
513 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
514 get_secondary_mem (x, mode, opnum, type);
515 #endif
516 }
517
518 *picode = icode;
519 return s_reload;
520 }
521
522 /* If a secondary reload is needed, return its class. If both an intermediate
523 	 register and a scratch register are needed, we return the class of the
524 intermediate register. */
525 reg_class_t
526 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
527 rtx x)
528 {
529 enum insn_code icode;
530 secondary_reload_info sri;
531
532 sri.icode = CODE_FOR_nothing;
533 sri.prev_sri = NULL;
534 rclass
535 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
536 icode = (enum insn_code) sri.icode;
537
538 /* If there are no secondary reloads at all, we return NO_REGS.
539 If an intermediate register is needed, we return its class. */
540 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
541 return rclass;
542
543 /* No intermediate register is needed, but we have a special reload
544 pattern, which we assume for now needs a scratch register. */
545 return scratch_reload_class (icode);
546 }
547
548 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
549 three operands, verify that operand 2 is an output operand, and return
550 its register class.
551 ??? We'd like to be able to handle any pattern with at least 2 operands,
552 for zero or more scratch registers, but that needs more infrastructure. */
553 enum reg_class
554 scratch_reload_class (enum insn_code icode)
555 {
556 const char *scratch_constraint;
557 char scratch_letter;
558 enum reg_class rclass;
559
560 gcc_assert (insn_data[(int) icode].n_operands == 3);
561 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
562 gcc_assert (*scratch_constraint == '=');
563 scratch_constraint++;
564 if (*scratch_constraint == '&')
565 scratch_constraint++;
566 scratch_letter = *scratch_constraint;
567 if (scratch_letter == 'r')
568 return GENERAL_REGS;
569 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
570 scratch_constraint);
571 gcc_assert (rclass != NO_REGS);
572 return rclass;
573 }
574 \f
575 #ifdef SECONDARY_MEMORY_NEEDED
576
577 /* Return a memory location that will be used to copy X in mode MODE.
578 If we haven't already made a location for this mode in this insn,
579 call find_reloads_address on the location being returned. */
580
581 rtx
582 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
583 int opnum, enum reload_type type)
584 {
585 rtx loc;
586 int mem_valid;
587
588 /* By default, if MODE is narrower than a word, widen it to a word.
589 This is required because most machines that require these memory
590 	 locations do not support short loads and stores from all registers
591 (e.g., FP registers). */
592
593 #ifdef SECONDARY_MEMORY_NEEDED_MODE
594 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
595 #else
596 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
597 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
598 #endif
599
600 /* If we already have made a MEM for this operand in MODE, return it. */
601 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
602 return secondary_memlocs_elim[(int) mode][opnum];
603
604 /* If this is the first time we've tried to get a MEM for this mode,
605 allocate a new one. `something_changed' in reload will get set
606 by noticing that the frame size has changed. */
607
608 if (secondary_memlocs[(int) mode] == 0)
609 {
610 #ifdef SECONDARY_MEMORY_NEEDED_RTX
611 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
612 #else
613 secondary_memlocs[(int) mode]
614 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
615 #endif
616 }
617
618 /* Get a version of the address doing any eliminations needed. If that
619 didn't give us a new MEM, make a new one if it isn't valid. */
620
621 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
622 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
623 MEM_ADDR_SPACE (loc));
624
625 if (! mem_valid && loc == secondary_memlocs[(int) mode])
626 loc = copy_rtx (loc);
627
628 /* The only time the call below will do anything is if the stack
629 offset is too large. In that case IND_LEVELS doesn't matter, so we
630 can just pass a zero. Adjust the type to be the address of the
631 corresponding object. If the address was valid, save the eliminated
632 address. If it wasn't valid, we need to make a reload each time, so
633 don't save it. */
634
635 if (! mem_valid)
636 {
637 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
638 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
639 : RELOAD_OTHER);
640
641 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
642 opnum, type, 0, 0);
643 }
644
645 secondary_memlocs_elim[(int) mode][opnum] = loc;
646 if (secondary_memlocs_elim_used <= (int)mode)
647 secondary_memlocs_elim_used = (int)mode + 1;
648 return loc;
649 }
650
651 /* Clear any secondary memory locations we've made. */
652
653 void
654 clear_secondary_mem (void)
655 {
656 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
657 }
658 #endif /* SECONDARY_MEMORY_NEEDED */
659 \f
660
661 /* Find the largest class which has at least one register valid in
662 mode INNER, and which for every such register, that register number
663 plus N is also valid in OUTER (if in range) and is cheap to move
664 	 into DEST_REGNO.  Such a class must exist. */
665
666 static enum reg_class
667 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
668 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
669 unsigned int dest_regno ATTRIBUTE_UNUSED)
670 {
671 int best_cost = -1;
672 int rclass;
673 int regno;
674 enum reg_class best_class = NO_REGS;
675 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
676 unsigned int best_size = 0;
677 int cost;
678
679 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
680 {
681 int bad = 0;
682 int good = 0;
683 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
684 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
685 {
686 if (HARD_REGNO_MODE_OK (regno, inner))
687 {
688 good = 1;
689 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
690 || ! HARD_REGNO_MODE_OK (regno + n, outer))
691 bad = 1;
692 }
693 }
694
695 if (bad || !good)
696 continue;
697 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
698
699 if ((reg_class_size[rclass] > best_size
700 && (best_cost < 0 || best_cost >= cost))
701 || best_cost > cost)
702 {
703 best_class = (enum reg_class) rclass;
704 best_size = reg_class_size[rclass];
705 best_cost = register_move_cost (outer, (enum reg_class) rclass,
706 dest_class);
707 }
708 }
709
710 gcc_assert (best_size != 0);
711
712 return best_class;
713 }
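/* For instance, when push_reload must reload the inner register of a
   (subreg:OUTER (reg:INNER hard-reg) ...), it calls find_valid_class with
   N set to the hard register offset implied by the subreg (from
   subreg_regno_offset), so the class chosen must supply registers that are
   valid for the inner value and, offset by N, for the subreg itself.  */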
714 \f
715 /* Return the number of a previously made reload that can be combined with
716 a new one, or n_reloads if none of the existing reloads can be used.
717 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
718 	 push_reload; they determine the kind of the new reload that we try to
719 combine. P_IN points to the corresponding value of IN, which can be
720 modified by this function.
721 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
722
723 static int
724 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
725 enum reload_type type, int opnum, int dont_share)
726 {
727 rtx in = *p_in;
728 int i;
729 /* We can't merge two reloads if the output of either one is
730 earlyclobbered. */
731
732 if (earlyclobber_operand_p (out))
733 return n_reloads;
734
735 /* We can use an existing reload if the class is right
736 and at least one of IN and OUT is a match
737 and the other is at worst neutral.
738 (A zero compared against anything is neutral.)
739
740 For targets with small register classes, don't use existing reloads
741 unless they are for the same thing since that can cause us to need
742 more reload registers than we otherwise would. */
743
744 for (i = 0; i < n_reloads; i++)
745 if ((reg_class_subset_p (rclass, rld[i].rclass)
746 || reg_class_subset_p (rld[i].rclass, rclass))
747 /* If the existing reload has a register, it must fit our class. */
748 && (rld[i].reg_rtx == 0
749 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
750 true_regnum (rld[i].reg_rtx)))
751 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
752 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
753 || (out != 0 && MATCHES (rld[i].out, out)
754 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
755 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
756 && (small_register_class_p (rclass)
757 || targetm.small_register_classes_for_mode_p (VOIDmode))
758 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
759 return i;
760
761 /* Reloading a plain reg for input can match a reload to postincrement
762 that reg, since the postincrement's value is the right value.
763 Likewise, it can match a preincrement reload, since we regard
764 the preincrementation as happening before any ref in this insn
765 to that register. */
766 for (i = 0; i < n_reloads; i++)
767 if ((reg_class_subset_p (rclass, rld[i].rclass)
768 || reg_class_subset_p (rld[i].rclass, rclass))
769 /* If the existing reload has a register, it must fit our
770 class. */
771 && (rld[i].reg_rtx == 0
772 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
773 true_regnum (rld[i].reg_rtx)))
774 && out == 0 && rld[i].out == 0 && rld[i].in != 0
775 && ((REG_P (in)
776 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
777 && MATCHES (XEXP (rld[i].in, 0), in))
778 || (REG_P (rld[i].in)
779 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
780 && MATCHES (XEXP (in, 0), rld[i].in)))
781 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
782 && (small_register_class_p (rclass)
783 || targetm.small_register_classes_for_mode_p (VOIDmode))
784 && MERGABLE_RELOADS (type, rld[i].when_needed,
785 opnum, rld[i].opnum))
786 {
787 /* Make sure reload_in ultimately has the increment,
788 not the plain register. */
789 if (REG_P (in))
790 *p_in = rld[i].in;
791 return i;
792 }
793 return n_reloads;
794 }
795
796 /* Return nonzero if X is a SUBREG which will require reloading of its
797 SUBREG_REG expression. */
798
799 static int
800 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
801 {
802 rtx inner;
803
804 /* Only SUBREGs are problematical. */
805 if (GET_CODE (x) != SUBREG)
806 return 0;
807
808 inner = SUBREG_REG (x);
809
810 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
811 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
812 return 1;
813
814 /* If INNER is not a hard register, then INNER will not need to
815 be reloaded. */
816 if (!REG_P (inner)
817 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
818 return 0;
819
820 /* If INNER is not ok for MODE, then INNER will need reloading. */
821 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
822 return 1;
823
824 	 /* If the outer part is a word or smaller, INNER is larger than a
825 	 word, and the number of regs for INNER is not the same as the
826 number of words in INNER, then INNER will need reloading. */
827 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
828 && output
829 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
830 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
831 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
832 }
833
834 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
835 requiring an extra reload register. The caller has already found that
836 IN contains some reference to REGNO, so check that we can produce the
837 new value in a single step. E.g. if we have
838 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
839 instruction that adds one to a register, this should succeed.
840 However, if we have something like
841 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
842 needs to be loaded into a register first, we need a separate reload
843 register.
844 	 Such PLUS reloads are generated by find_reloads_address_part.
845 The out-of-range PLUS expressions are usually introduced in the instruction
846 patterns by register elimination and substituting pseudos without a home
847 by their function-invariant equivalences. */
848 static int
849 can_reload_into (rtx in, int regno, enum machine_mode mode)
850 {
851 rtx dst, test_insn;
852 int r = 0;
853 struct recog_data save_recog_data;
854
855 /* For matching constraints, we often get notional input reloads where
856 we want to use the original register as the reload register. I.e.
857 technically this is a non-optional input-output reload, but IN is
858 already a valid register, and has been chosen as the reload register.
859 Speed this up, since it trivially works. */
860 if (REG_P (in))
861 return 1;
862
863 /* To test MEMs properly, we'd have to take into account all the reloads
864 that are already scheduled, which can become quite complicated.
865 And since we've already handled address reloads for this MEM, it
866 should always succeed anyway. */
867 if (MEM_P (in))
868 return 1;
869
870 /* If we can make a simple SET insn that does the job, everything should
871 be fine. */
872 dst = gen_rtx_REG (mode, regno);
873 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
874 save_recog_data = recog_data;
875 if (recog_memoized (test_insn) >= 0)
876 {
877 extract_insn (test_insn);
878 r = constrain_operands (1);
879 }
880 recog_data = save_recog_data;
881 return r;
882 }
883
884 /* Record one reload that needs to be performed.
885 IN is an rtx saying where the data are to be found before this instruction.
886 OUT says where they must be stored after the instruction.
887 (IN is zero for data not read, and OUT is zero for data not written.)
888 INLOC and OUTLOC point to the places in the instructions where
889 IN and OUT were found.
890 If IN and OUT are both nonzero, it means the same register must be used
891 to reload both IN and OUT.
892
893 RCLASS is a register class required for the reloaded data.
894 INMODE is the machine mode that the instruction requires
895 for the reg that replaces IN and OUTMODE is likewise for OUT.
896
897 If IN is zero, then OUT's location and mode should be passed as
898 INLOC and INMODE.
899
900 	 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
901
902 OPTIONAL nonzero means this reload does not need to be performed:
903 it can be discarded if that is more convenient.
904
905 OPNUM and TYPE say what the purpose of this reload is.
906
907 The return value is the reload-number for this reload.
908
909 If both IN and OUT are nonzero, in some rare cases we might
910 want to make two separate reloads. (Actually we never do this now.)
911 Therefore, the reload-number for OUT is stored in
912 output_reloadnum when we return; the return value applies to IN.
913 Usually (presently always), when IN and OUT are nonzero,
914 the two reload-numbers are equal, but the caller should be careful to
915 distinguish them. */
916
917 int
918 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
919 enum reg_class rclass, enum machine_mode inmode,
920 enum machine_mode outmode, int strict_low, int optional,
921 int opnum, enum reload_type type)
922 {
923 int i;
924 int dont_share = 0;
925 int dont_remove_subreg = 0;
926 #ifdef LIMIT_RELOAD_CLASS
927 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
928 #endif
929 int secondary_in_reload = -1, secondary_out_reload = -1;
930 enum insn_code secondary_in_icode = CODE_FOR_nothing;
931 enum insn_code secondary_out_icode = CODE_FOR_nothing;
932
933 /* INMODE and/or OUTMODE could be VOIDmode if no mode
934 has been specified for the operand. In that case,
935 use the operand's mode as the mode to reload. */
936 if (inmode == VOIDmode && in != 0)
937 inmode = GET_MODE (in);
938 if (outmode == VOIDmode && out != 0)
939 outmode = GET_MODE (out);
940
941 	/* If find_reloads and friends have so far failed to replace a pseudo
942 	 with its reg_equiv_constant, something went wrong
943 beforehand.
944 Note that it can't simply be done here if we missed it earlier
945 since the constant might need to be pushed into the literal pool
946 and the resulting memref would probably need further
947 reloading. */
948 if (in != 0 && REG_P (in))
949 {
950 int regno = REGNO (in);
951
952 gcc_assert (regno < FIRST_PSEUDO_REGISTER
953 || reg_renumber[regno] >= 0
954 || reg_equiv_constant (regno) == NULL_RTX);
955 }
956
957 /* reg_equiv_constant only contains constants which are obviously
958 	 not appropriate as a destination.  So if we would need to replace
959 	 the destination pseudo with a constant, we are in real
960 trouble. */
961 if (out != 0 && REG_P (out))
962 {
963 int regno = REGNO (out);
964
965 gcc_assert (regno < FIRST_PSEUDO_REGISTER
966 || reg_renumber[regno] >= 0
967 || reg_equiv_constant (regno) == NULL_RTX);
968 }
969
970 /* If we have a read-write operand with an address side-effect,
971 change either IN or OUT so the side-effect happens only once. */
972 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
973 switch (GET_CODE (XEXP (in, 0)))
974 {
975 case POST_INC: case POST_DEC: case POST_MODIFY:
976 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
977 break;
978
979 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
980 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
981 break;
982
983 default:
984 break;
985 }
986
987 /* If we are reloading a (SUBREG constant ...), really reload just the
988 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
989 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
990 a pseudo and hence will become a MEM) with M1 wider than M2 and the
991 register is a pseudo, also reload the inside expression.
992 For machines that extend byte loads, do this for any SUBREG of a pseudo
993 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
994 M2 is an integral mode that gets extended when loaded.
995 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
996 either M1 is not valid for R or M2 is wider than a word but we only
997 need one word to store an M2-sized quantity in R.
998 (However, if OUT is nonzero, we need to reload the reg *and*
999 	 the subreg, so do nothing here, and let the following statement handle it.)
1000
1001 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1002 we can't handle it here because CONST_INT does not indicate a mode.
1003
1004 Similarly, we must reload the inside expression if we have a
1005 STRICT_LOW_PART (presumably, in == out in this case).
1006
1007 Also reload the inner expression if it does not require a secondary
1008 reload but the SUBREG does.
1009
1010 Finally, reload the inner expression if it is a register that is in
1011 the class whose registers cannot be referenced in a different size
1012 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1013 cannot reload just the inside since we might end up with the wrong
1014 register class. But if it is inside a STRICT_LOW_PART, we have
1015 no choice, so we hope we do get the right register class there. */
1016
1017 if (in != 0 && GET_CODE (in) == SUBREG
1018 && (subreg_lowpart_p (in) || strict_low)
1019 #ifdef CANNOT_CHANGE_MODE_CLASS
1020 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1021 #endif
1022 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1023 && (CONSTANT_P (SUBREG_REG (in))
1024 || GET_CODE (SUBREG_REG (in)) == PLUS
1025 || strict_low
1026 || (((REG_P (SUBREG_REG (in))
1027 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1028 || MEM_P (SUBREG_REG (in)))
1029 && ((GET_MODE_SIZE (inmode)
1030 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1031 #ifdef LOAD_EXTEND_OP
1032 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1033 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1034 <= UNITS_PER_WORD)
1035 && (GET_MODE_SIZE (inmode)
1036 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1037 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1038 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1039 #endif
1040 #ifdef WORD_REGISTER_OPERATIONS
1041 || ((GET_MODE_SIZE (inmode)
1042 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1043 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1044 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1045 / UNITS_PER_WORD)))
1046 #endif
1047 ))
1048 || (REG_P (SUBREG_REG (in))
1049 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1050 /* The case where out is nonzero
1051 is handled differently in the following statement. */
1052 && (out == 0 || subreg_lowpart_p (in))
1053 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1054 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1055 > UNITS_PER_WORD)
1056 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1057 / UNITS_PER_WORD)
1058 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1059 [GET_MODE (SUBREG_REG (in))]))
1060 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1061 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1062 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1063 SUBREG_REG (in))
1064 == NO_REGS))
1065 #ifdef CANNOT_CHANGE_MODE_CLASS
1066 || (REG_P (SUBREG_REG (in))
1067 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1068 && REG_CANNOT_CHANGE_MODE_P
1069 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1070 #endif
1071 ))
1072 {
1073 #ifdef LIMIT_RELOAD_CLASS
1074 in_subreg_loc = inloc;
1075 #endif
1076 inloc = &SUBREG_REG (in);
1077 in = *inloc;
1078 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1079 if (MEM_P (in))
1080 /* This is supposed to happen only for paradoxical subregs made by
1081 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1082 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1083 #endif
1084 inmode = GET_MODE (in);
1085 }
1086
1087 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1088 either M1 is not valid for R or M2 is wider than a word but we only
1089 need one word to store an M2-sized quantity in R.
1090
1091 However, we must reload the inner reg *as well as* the subreg in
1092 that case. */
1093
1094 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1095 code above. This can happen if SUBREG_BYTE != 0. */
1096
1097 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1098 {
1099 enum reg_class in_class = rclass;
1100
1101 if (REG_P (SUBREG_REG (in)))
1102 in_class
1103 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1104 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1105 GET_MODE (SUBREG_REG (in)),
1106 SUBREG_BYTE (in),
1107 GET_MODE (in)),
1108 REGNO (SUBREG_REG (in)));
1109
1110 /* This relies on the fact that emit_reload_insns outputs the
1111 instructions for input reloads of type RELOAD_OTHER in the same
1112 order as the reloads. Thus if the outer reload is also of type
1113 RELOAD_OTHER, we are guaranteed that this inner reload will be
1114 output before the outer reload. */
1115 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1116 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1117 dont_remove_subreg = 1;
1118 }
1119
1120 /* Similarly for paradoxical and problematical SUBREGs on the output.
1121 Note that there is no reason we need worry about the previous value
1122 of SUBREG_REG (out); even if wider than out,
1123 storing in a subreg is entitled to clobber it all
1124 (except in the case of STRICT_LOW_PART,
1125 and in that case the constraint should label it input-output.) */
1126 if (out != 0 && GET_CODE (out) == SUBREG
1127 && (subreg_lowpart_p (out) || strict_low)
1128 #ifdef CANNOT_CHANGE_MODE_CLASS
1129 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1130 #endif
1131 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1132 && (CONSTANT_P (SUBREG_REG (out))
1133 || strict_low
1134 || (((REG_P (SUBREG_REG (out))
1135 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1136 || MEM_P (SUBREG_REG (out)))
1137 && ((GET_MODE_SIZE (outmode)
1138 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1139 #ifdef WORD_REGISTER_OPERATIONS
1140 || ((GET_MODE_SIZE (outmode)
1141 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1142 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1143 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1144 / UNITS_PER_WORD)))
1145 #endif
1146 ))
1147 || (REG_P (SUBREG_REG (out))
1148 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1149 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1150 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1151 > UNITS_PER_WORD)
1152 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1153 / UNITS_PER_WORD)
1154 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1155 [GET_MODE (SUBREG_REG (out))]))
1156 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1157 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1158 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1159 SUBREG_REG (out))
1160 == NO_REGS))
1161 #ifdef CANNOT_CHANGE_MODE_CLASS
1162 || (REG_P (SUBREG_REG (out))
1163 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1164 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1165 GET_MODE (SUBREG_REG (out)),
1166 outmode))
1167 #endif
1168 ))
1169 {
1170 #ifdef LIMIT_RELOAD_CLASS
1171 out_subreg_loc = outloc;
1172 #endif
1173 outloc = &SUBREG_REG (out);
1174 out = *outloc;
1175 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1176 gcc_assert (!MEM_P (out)
1177 || GET_MODE_SIZE (GET_MODE (out))
1178 <= GET_MODE_SIZE (outmode));
1179 #endif
1180 outmode = GET_MODE (out);
1181 }
1182
1183 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1184 either M1 is not valid for R or M2 is wider than a word but we only
1185 need one word to store an M2-sized quantity in R.
1186
1187 However, we must reload the inner reg *as well as* the subreg in
1188 that case. In this case, the inner reg is an in-out reload. */
1189
1190 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1191 {
1192 /* This relies on the fact that emit_reload_insns outputs the
1193 instructions for output reloads of type RELOAD_OTHER in reverse
1194 order of the reloads. Thus if the outer reload is also of type
1195 RELOAD_OTHER, we are guaranteed that this inner reload will be
1196 output after the outer reload. */
1197 dont_remove_subreg = 1;
1198 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1199 &SUBREG_REG (out),
1200 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1201 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1202 GET_MODE (SUBREG_REG (out)),
1203 SUBREG_BYTE (out),
1204 GET_MODE (out)),
1205 REGNO (SUBREG_REG (out))),
1206 VOIDmode, VOIDmode, 0, 0,
1207 opnum, RELOAD_OTHER);
1208 }
1209
1210 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1211 if (in != 0 && out != 0 && MEM_P (out)
1212 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1213 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1214 dont_share = 1;
1215
1216 /* If IN is a SUBREG of a hard register, make a new REG. This
1217 simplifies some of the cases below. */
1218
1219 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1220 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1221 && ! dont_remove_subreg)
1222 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1223
1224 /* Similarly for OUT. */
1225 if (out != 0 && GET_CODE (out) == SUBREG
1226 && REG_P (SUBREG_REG (out))
1227 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1228 && ! dont_remove_subreg)
1229 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1230
1231 /* Narrow down the class of register wanted if that is
1232 desirable on this machine for efficiency. */
1233 {
1234 reg_class_t preferred_class = rclass;
1235
1236 if (in != 0)
1237 preferred_class = targetm.preferred_reload_class (in, rclass);
1238
1239 /* Output reloads may need analogous treatment, different in detail. */
1240 if (out != 0)
1241 preferred_class
1242 = targetm.preferred_output_reload_class (out, preferred_class);
1243
1244 /* Discard what the target said if we cannot do it. */
1245 if (preferred_class != NO_REGS
1246 || (optional && type == RELOAD_FOR_OUTPUT))
1247 rclass = (enum reg_class) preferred_class;
1248 }
1249
1250 /* Make sure we use a class that can handle the actual pseudo
1251 inside any subreg. For example, on the 386, QImode regs
1252 can appear within SImode subregs. Although GENERAL_REGS
1253 can handle SImode, QImode needs a smaller class. */
1254 #ifdef LIMIT_RELOAD_CLASS
1255 if (in_subreg_loc)
1256 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1257 else if (in != 0 && GET_CODE (in) == SUBREG)
1258 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1259
1260 if (out_subreg_loc)
1261 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1262 if (out != 0 && GET_CODE (out) == SUBREG)
1263 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1264 #endif
1265
1266 /* Verify that this class is at least possible for the mode that
1267 is specified. */
1268 if (this_insn_is_asm)
1269 {
1270 enum machine_mode mode;
1271 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1272 mode = inmode;
1273 else
1274 mode = outmode;
1275 if (mode == VOIDmode)
1276 {
1277 error_for_asm (this_insn, "cannot reload integer constant "
1278 "operand in %<asm%>");
1279 mode = word_mode;
1280 if (in != 0)
1281 inmode = word_mode;
1282 if (out != 0)
1283 outmode = word_mode;
1284 }
1285 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1286 if (HARD_REGNO_MODE_OK (i, mode)
1287 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1288 break;
1289 if (i == FIRST_PSEUDO_REGISTER)
1290 {
1291 error_for_asm (this_insn, "impossible register constraint "
1292 "in %<asm%>");
1293 /* Avoid further trouble with this insn. */
1294 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1295 /* We used to continue here setting class to ALL_REGS, but it triggers
1296 sanity check on i386 for:
1297 void foo(long double d)
1298 {
1299 asm("" :: "a" (d));
1300 }
1301 Returning zero here ought to be safe as we take care in
1302 find_reloads to not process the reloads when instruction was
1303 replaced by USE. */
1304
1305 return 0;
1306 }
1307 }
1308
1309 /* Optional output reloads are always OK even if we have no register class,
1310 since the function of these reloads is only to have spill_reg_store etc.
1311 set, so that the storing insn can be deleted later. */
1312 gcc_assert (rclass != NO_REGS
1313 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1314
1315 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1316
1317 if (i == n_reloads)
1318 {
1319 /* See if we need a secondary reload register to move between CLASS
1320 and IN or CLASS and OUT. Get the icode and push any required reloads
1321 needed for each of them if so. */
1322
1323 if (in != 0)
1324 secondary_in_reload
1325 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1326 &secondary_in_icode, NULL);
1327 if (out != 0 && GET_CODE (out) != SCRATCH)
1328 secondary_out_reload
1329 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1330 type, &secondary_out_icode, NULL);
1331
1332 /* We found no existing reload suitable for re-use.
1333 So add an additional reload. */
1334
1335 #ifdef SECONDARY_MEMORY_NEEDED
1336 /* If a memory location is needed for the copy, make one. */
1337 if (in != 0
1338 && (REG_P (in)
1339 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1340 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1341 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1342 rclass, inmode))
1343 get_secondary_mem (in, inmode, opnum, type);
1344 #endif
1345
1346 i = n_reloads;
1347 rld[i].in = in;
1348 rld[i].out = out;
1349 rld[i].rclass = rclass;
1350 rld[i].inmode = inmode;
1351 rld[i].outmode = outmode;
1352 rld[i].reg_rtx = 0;
1353 rld[i].optional = optional;
1354 rld[i].inc = 0;
1355 rld[i].nocombine = 0;
1356 rld[i].in_reg = inloc ? *inloc : 0;
1357 rld[i].out_reg = outloc ? *outloc : 0;
1358 rld[i].opnum = opnum;
1359 rld[i].when_needed = type;
1360 rld[i].secondary_in_reload = secondary_in_reload;
1361 rld[i].secondary_out_reload = secondary_out_reload;
1362 rld[i].secondary_in_icode = secondary_in_icode;
1363 rld[i].secondary_out_icode = secondary_out_icode;
1364 rld[i].secondary_p = 0;
1365
1366 n_reloads++;
1367
1368 #ifdef SECONDARY_MEMORY_NEEDED
1369 if (out != 0
1370 && (REG_P (out)
1371 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1372 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1373 && SECONDARY_MEMORY_NEEDED (rclass,
1374 REGNO_REG_CLASS (reg_or_subregno (out)),
1375 outmode))
1376 get_secondary_mem (out, outmode, opnum, type);
1377 #endif
1378 }
1379 else
1380 {
1381 /* We are reusing an existing reload,
1382 but we may have additional information for it.
1383 For example, we may now have both IN and OUT
1384 while the old one may have just one of them. */
1385
1386 /* The modes can be different. If they are, we want to reload in
1387 the larger mode, so that the value is valid for both modes. */
1388 if (inmode != VOIDmode
1389 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1390 rld[i].inmode = inmode;
1391 if (outmode != VOIDmode
1392 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1393 rld[i].outmode = outmode;
1394 if (in != 0)
1395 {
1396 rtx in_reg = inloc ? *inloc : 0;
1397 /* If we merge reloads for two distinct rtl expressions that
1398 are identical in content, there might be duplicate address
1399 reloads. Remove the extra set now, so that if we later find
1400 that we can inherit this reload, we can get rid of the
1401 address reloads altogether.
1402
1403 Do not do this if both reloads are optional since the result
1404 would be an optional reload which could potentially leave
1405 unresolved address replacements.
1406
1407 It is not sufficient to call transfer_replacements since
1408 choose_reload_regs will remove the replacements for address
1409 reloads of inherited reloads which results in the same
1410 problem. */
1411 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1412 && ! (rld[i].optional && optional))
1413 {
1414 /* We must keep the address reload with the lower operand
1415 number alive. */
1416 if (opnum > rld[i].opnum)
1417 {
1418 remove_address_replacements (in);
1419 in = rld[i].in;
1420 in_reg = rld[i].in_reg;
1421 }
1422 else
1423 remove_address_replacements (rld[i].in);
1424 }
1425 /* When emitting reloads we don't necessarily look at the in-
1426 and outmode, but also directly at the operands (in and out).
1427 So we can't simply overwrite them with whatever we have found
1428 	 for this (to-be-merged) reload; we have to "merge" that too.
1429 Reusing another reload already verified that we deal with the
1430 same operands, just possibly in different modes. So we
1431 overwrite the operands only when the new mode is larger.
1432 See also PR33613. */
1433 if (!rld[i].in
1434 || GET_MODE_SIZE (GET_MODE (in))
1435 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1436 rld[i].in = in;
1437 if (!rld[i].in_reg
1438 || (in_reg
1439 && GET_MODE_SIZE (GET_MODE (in_reg))
1440 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1441 rld[i].in_reg = in_reg;
1442 }
1443 if (out != 0)
1444 {
1445 if (!rld[i].out
1446 || (out
1447 && GET_MODE_SIZE (GET_MODE (out))
1448 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1449 rld[i].out = out;
1450 if (outloc
1451 && (!rld[i].out_reg
1452 || GET_MODE_SIZE (GET_MODE (*outloc))
1453 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1454 rld[i].out_reg = *outloc;
1455 }
1456 if (reg_class_subset_p (rclass, rld[i].rclass))
1457 rld[i].rclass = rclass;
1458 rld[i].optional &= optional;
1459 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1460 opnum, rld[i].opnum))
1461 rld[i].when_needed = RELOAD_OTHER;
1462 rld[i].opnum = MIN (rld[i].opnum, opnum);
1463 }
1464
1465 /* If the ostensible rtx being reloaded differs from the rtx found
1466 in the location to substitute, this reload is not safe to combine
1467 because we cannot reliably tell whether it appears in the insn. */
1468
1469 if (in != 0 && in != *inloc)
1470 rld[i].nocombine = 1;
1471
1472 #if 0
1473 /* This was replaced by changes in find_reloads_address_1 and the new
1474 function inc_for_reload, which go with a new meaning of reload_inc. */
1475
1476 /* If this is an IN/OUT reload in an insn that sets the CC,
1477 it must be for an autoincrement. It doesn't work to store
1478 the incremented value after the insn because that would clobber the CC.
1479 So we must reload the value being incremented,
1480 increment it, store it back, then decrement it again. */
1481 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1482 {
1483 out = 0;
1484 rld[i].out = 0;
1485 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1486 /* If we did not find a nonzero amount-to-increment-by,
1487 that contradicts the belief that IN is being incremented
1488 in an address in this insn. */
1489 gcc_assert (rld[i].inc != 0);
1490 }
1491 #endif
1492
1493 /* If we will replace IN and OUT with the reload-reg,
1494 record where they are located so that substitution need
1495 not do a tree walk. */
1496
1497 if (replace_reloads)
1498 {
1499 if (inloc != 0)
1500 {
1501 struct replacement *r = &replacements[n_replacements++];
1502 r->what = i;
1503 r->where = inloc;
1504 r->mode = inmode;
1505 }
1506 if (outloc != 0 && outloc != inloc)
1507 {
1508 struct replacement *r = &replacements[n_replacements++];
1509 r->what = i;
1510 r->where = outloc;
1511 r->mode = outmode;
1512 }
1513 }
1514
1515 /* If this reload is just being introduced and it has both
1516 an incoming quantity and an outgoing quantity that are
1517 supposed to be made to match, see if either one of the two
1518 can serve as the place to reload into.
1519
1520 If one of them is acceptable, set rld[i].reg_rtx
1521 to that one. */
1522
1523 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1524 {
1525 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1526 inmode, outmode,
1527 rld[i].rclass, i,
1528 earlyclobber_operand_p (out));
1529
1530 /* If the outgoing register already contains the same value
1531 as the incoming one, we can dispense with loading it.
1532 The easiest way to tell the caller that is to give a phony
1533 value for the incoming operand (same as outgoing one). */
1534 if (rld[i].reg_rtx == out
1535 && (REG_P (in) || CONSTANT_P (in))
1536 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1537 static_reload_reg_p, i, inmode))
1538 rld[i].in = out;
1539 }
1540
1541 /* If this is an input reload and the operand contains a register that
1542 dies in this insn and is used nowhere else, see if it is the right class
1543 to be used for this reload. Use it if so. (This occurs most commonly
1544 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1545 this if it is also an output reload that mentions the register unless
1546 the output is a SUBREG that clobbers an entire register.
1547
1548 Note that the operand might be one of the spill regs, if it is a
1549 pseudo reg and we are in a block where spilling has not taken place.
1550 But if there is no spilling in this block, that is OK.
1551 An explicitly used hard reg cannot be a spill reg. */
1552
1553 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1554 {
1555 rtx note;
1556 int regno;
1557 enum machine_mode rel_mode = inmode;
1558
1559 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1560 rel_mode = outmode;
1561
1562 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1563 if (REG_NOTE_KIND (note) == REG_DEAD
1564 && REG_P (XEXP (note, 0))
1565 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1566 && reg_mentioned_p (XEXP (note, 0), in)
1567 /* Check that a former pseudo is valid; see find_dummy_reload. */
1568 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1569 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1570 ORIGINAL_REGNO (XEXP (note, 0)))
1571 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1572 && ! refers_to_regno_for_reload_p (regno,
1573 end_hard_regno (rel_mode,
1574 regno),
1575 PATTERN (this_insn), inloc)
1576 /* If this is also an output reload, IN cannot be used as
1577 the reload register if it is set in this insn unless IN
1578 is also OUT. */
1579 && (out == 0 || in == out
1580 || ! hard_reg_set_here_p (regno,
1581 end_hard_regno (rel_mode, regno),
1582 PATTERN (this_insn)))
1583 /* ??? Why is this code so different from the previous?
1584 Is there any simple coherent way to describe the two together?
1585 What's going on here? */
1586 && (in != out
1587 || (GET_CODE (in) == SUBREG
1588 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1589 / UNITS_PER_WORD)
1590 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1591 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1592 /* Make sure the operand fits in the reg that dies. */
1593 && (GET_MODE_SIZE (rel_mode)
1594 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1595 && HARD_REGNO_MODE_OK (regno, inmode)
1596 && HARD_REGNO_MODE_OK (regno, outmode))
1597 {
1598 unsigned int offs;
1599 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1600 hard_regno_nregs[regno][outmode]);
1601
1602 for (offs = 0; offs < nregs; offs++)
1603 if (fixed_regs[regno + offs]
1604 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1605 regno + offs))
1606 break;
1607
1608 if (offs == nregs
1609 && (! (refers_to_regno_for_reload_p
1610 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1611 || can_reload_into (in, regno, inmode)))
1612 {
1613 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1614 break;
1615 }
1616 }
1617 }
1618
1619 if (out)
1620 output_reloadnum = i;
1621
1622 return i;
1623 }
1624
1625 /* Record an additional place we must replace a value
1626 for which we have already recorded a reload.
1627 RELOADNUM is the value returned by push_reload
1628 when the reload was recorded.
1629 This is used in insn patterns that use match_dup. */
1630
1631 static void
1632 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1633 {
1634 if (replace_reloads)
1635 {
1636 struct replacement *r = &replacements[n_replacements++];
1637 r->what = reloadnum;
1638 r->where = loc;
1639 r->mode = mode;
1640 }
1641 }
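/* For example (hypothetical pattern): when an insn pattern contains
   (match_dup 0), the rtx matched by operand 0 appears in two distinct
   locations of the insn.  find_reloads pushes a single reload for the
   operand and then records the duplicate location with push_replacement
   (via dup_replacements), so that subst_reloads later substitutes the
   chosen reload register at both places.  */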
1642
1643 /* Duplicate any replacement we have recorded to apply at
1644 location ORIG_LOC to also be performed at DUP_LOC.
1645 This is used in insn patterns that use match_dup. */
1646
1647 static void
1648 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1649 {
1650 int i, n = n_replacements;
1651
1652 for (i = 0; i < n; i++)
1653 {
1654 struct replacement *r = &replacements[i];
1655 if (r->where == orig_loc)
1656 push_replacement (dup_loc, r->what, r->mode);
1657 }
1658 }
1659 \f
1660 /* Transfer all replacements that used to be in reload FROM to be in
1661 reload TO. */
1662
1663 void
1664 transfer_replacements (int to, int from)
1665 {
1666 int i;
1667
1668 for (i = 0; i < n_replacements; i++)
1669 if (replacements[i].what == from)
1670 replacements[i].what = to;
1671 }
1672 \f
1673 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1674 or a subpart of it. If we have any replacements registered for IN_RTX,
1675 cancel the reloads that were supposed to load them.
1676 Return nonzero if we canceled any reloads. */
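/* For instance (hypothetical rtx): if IN_RTX is
   (mem:SI (plus:SI (reg:SI 100) (const_int 4))) and an address reload had
   been pushed to put pseudo 100 into a base register, inheriting the whole
   MEM value makes that address reload pointless; its replacements are
   dropped and its reload register is deallocated here.  */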
1677 int
1678 remove_address_replacements (rtx in_rtx)
1679 {
1680 int i, j;
1681 char reload_flags[MAX_RELOADS];
1682 int something_changed = 0;
1683
1684 memset (reload_flags, 0, sizeof reload_flags);
1685 for (i = 0, j = 0; i < n_replacements; i++)
1686 {
1687 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1688 reload_flags[replacements[i].what] |= 1;
1689 else
1690 {
1691 replacements[j++] = replacements[i];
1692 reload_flags[replacements[i].what] |= 2;
1693 }
1694 }
1695 /* Note that the following store must be done before the recursive calls. */
1696 n_replacements = j;
1697
1698 for (i = n_reloads - 1; i >= 0; i--)
1699 {
1700 if (reload_flags[i] == 1)
1701 {
1702 deallocate_reload_reg (i);
1703 remove_address_replacements (rld[i].in);
1704 rld[i].in = 0;
1705 something_changed = 1;
1706 }
1707 }
1708 return something_changed;
1709 }
1710 \f
1711 /* If there is only one output reload, and it is not for an earlyclobber
1712 operand, try to combine it with a (logically unrelated) input reload
1713 to reduce the number of reload registers needed.
1714
1715 This is safe if the input reload does not appear in
1716 the value being output-reloaded, because this implies
1717 it is not needed any more once the original insn completes.
1718
1719 If that doesn't work, see if we can use any of the registers that
1720 die in this insn as a reload register. We can if it is of the right
1721 class and does not appear in the value being output-reloaded. */
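/* A hypothetical example: for (set (reg:SI 100) (abs:SI (reg:SI 101)))
   where neither pseudo received a hard register, the input reload of
   pseudo 101 and the output reload of pseudo 100 can share one reload
   register R: the insn then computes (set R (abs R)), with R loaded from
   101's stack slot before the insn and stored into 100's slot after it.  */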
1722
1723 static void
1724 combine_reloads (void)
1725 {
1726 int i, regno;
1727 int output_reload = -1;
1728 int secondary_out = -1;
1729 rtx note;
1730
1731 /* Find the output reload; return unless there is exactly one
1732 and that one is mandatory. */
1733
1734 for (i = 0; i < n_reloads; i++)
1735 if (rld[i].out != 0)
1736 {
1737 if (output_reload >= 0)
1738 return;
1739 output_reload = i;
1740 }
1741
1742 if (output_reload < 0 || rld[output_reload].optional)
1743 return;
1744
1745 /* An input-output reload isn't combinable. */
1746
1747 if (rld[output_reload].in != 0)
1748 return;
1749
1750 /* If this reload is for an earlyclobber operand, we can't do anything. */
1751 if (earlyclobber_operand_p (rld[output_reload].out))
1752 return;
1753
1754 /* If there is a reload for part of the address of this operand, we would
1755 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1756 its life to the point where doing this combine would not lower the
1757 number of spill registers needed. */
1758 for (i = 0; i < n_reloads; i++)
1759 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1760 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1761 && rld[i].opnum == rld[output_reload].opnum)
1762 return;
1763
1764 /* Check each input reload; can we combine it? */
1765
1766 for (i = 0; i < n_reloads; i++)
1767 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1768 /* Life span of this reload must not extend past main insn. */
1769 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1770 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1771 && rld[i].when_needed != RELOAD_OTHER
1772 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1773 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1774 rld[output_reload].outmode))
1775 && rld[i].inc == 0
1776 && rld[i].reg_rtx == 0
1777 #ifdef SECONDARY_MEMORY_NEEDED
1778 /* Don't combine two reloads with different secondary
1779 memory locations. */
1780 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1781 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1782 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1783 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1784 #endif
1785 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1786 ? (rld[i].rclass == rld[output_reload].rclass)
1787 : (reg_class_subset_p (rld[i].rclass,
1788 rld[output_reload].rclass)
1789 || reg_class_subset_p (rld[output_reload].rclass,
1790 rld[i].rclass)))
1791 && (MATCHES (rld[i].in, rld[output_reload].out)
1792 /* Args reversed because the first arg seems to be
1793 the one that we imagine being modified
1794 while the second is the one that might be affected. */
1795 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1796 rld[i].in)
1797 /* However, if the input is a register that appears inside
1798 the output, then we also can't share.
1799 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1800 If the same reload reg is used for both reg 69 and the
1801 result to be stored in memory, then that result
1802 will clobber the address of the memory ref. */
1803 && ! (REG_P (rld[i].in)
1804 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1805 rld[output_reload].out))))
1806 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1807 rld[i].when_needed != RELOAD_FOR_INPUT)
1808 && (reg_class_size[(int) rld[i].rclass]
1809 || targetm.small_register_classes_for_mode_p (VOIDmode))
1810 /* We will allow making things slightly worse by combining an
1811 input and an output, but no worse than that. */
1812 && (rld[i].when_needed == RELOAD_FOR_INPUT
1813 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1814 {
1815 int j;
1816
1817 /* We have found a reload to combine with! */
1818 rld[i].out = rld[output_reload].out;
1819 rld[i].out_reg = rld[output_reload].out_reg;
1820 rld[i].outmode = rld[output_reload].outmode;
1821 /* Mark the old output reload as inoperative. */
1822 rld[output_reload].out = 0;
1823 /* The combined reload is needed for the entire insn. */
1824 rld[i].when_needed = RELOAD_OTHER;
1825 /* If the output reload had a secondary reload, copy it. */
1826 if (rld[output_reload].secondary_out_reload != -1)
1827 {
1828 rld[i].secondary_out_reload
1829 = rld[output_reload].secondary_out_reload;
1830 rld[i].secondary_out_icode
1831 = rld[output_reload].secondary_out_icode;
1832 }
1833
1834 #ifdef SECONDARY_MEMORY_NEEDED
1835 /* Copy any secondary MEM. */
1836 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1837 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1838 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1839 #endif
1840 /* If required, minimize the register class. */
1841 if (reg_class_subset_p (rld[output_reload].rclass,
1842 rld[i].rclass))
1843 rld[i].rclass = rld[output_reload].rclass;
1844
1845 /* Transfer all replacements from the old reload to the combined. */
1846 for (j = 0; j < n_replacements; j++)
1847 if (replacements[j].what == output_reload)
1848 replacements[j].what = i;
1849
1850 return;
1851 }
1852
1853 /* If this insn has only one operand that is modified or written (assumed
1854 to be the first), it must be the one corresponding to this reload. It
1855 is safe to use anything that dies in this insn for that output provided
1856 that it does not occur in the output (we already know it isn't an
1857 earlyclobber). If this is an asm insn, give up. */
1858
1859 if (INSN_CODE (this_insn) == -1)
1860 return;
1861
1862 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1863 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1864 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1865 return;
1866
1867 /* See if some hard register that dies in this insn and is not used in
1868 the output is the right class. Only works if the register we pick
1869 up can fully hold our output reload. */
1870 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1871 if (REG_NOTE_KIND (note) == REG_DEAD
1872 && REG_P (XEXP (note, 0))
1873 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1874 rld[output_reload].out)
1875 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1876 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1877 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1878 regno)
1879 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1880 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1881 /* Ensure that a secondary or tertiary reload for this output
1882 won't want this register. */
1883 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1884 || (!(TEST_HARD_REG_BIT
1885 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1886 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1887 || !(TEST_HARD_REG_BIT
1888 (reg_class_contents[(int) rld[secondary_out].rclass],
1889 regno)))))
1890 && !fixed_regs[regno]
1891 /* Check that a former pseudo is valid; see find_dummy_reload. */
1892 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1893 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1894 ORIGINAL_REGNO (XEXP (note, 0)))
1895 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1896 {
1897 rld[output_reload].reg_rtx
1898 = gen_rtx_REG (rld[output_reload].outmode, regno);
1899 return;
1900 }
1901 }
1902 \f
1903 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1904 See if one of IN and OUT is a register that may be used;
1905 this is desirable since a spill-register won't be needed.
1906 If so, return the register rtx that proves acceptable.
1907
1908 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1909 RCLASS is the register class required for the reload.
1910
1911 If FOR_REAL is >= 0, it is the number of the reload,
1912 and in some cases when it can be discovered that OUT doesn't need
1913 to be computed, clear out rld[FOR_REAL].out.
1914
1915 If FOR_REAL is -1, this should not be done, because this call
1916 is just to see if a register can be found, not to find and install it.
1917
1918 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1919 puts an additional constraint on being able to use IN for OUT since
1920 IN must not appear elsewhere in the insn (it is assumed that IN itself
1921 is safe from the earlyclobber). */
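/* For illustration (hypothetical hard registers): if OUT is hard reg 3,
   reg 3 is in RCLASS, can hold OUTMODE, and is mentioned nowhere in the
   insn except in the OUT position, then reg 3 itself can serve as the
   reload register.  Failing that, if IN is hard reg 4, reg 4 dies in this
   insn, is not set by it, and does not appear within OUT, the reloaded
   value may simply stay in reg 4.  */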
1922
1923 static rtx
1924 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1925 enum machine_mode inmode, enum machine_mode outmode,
1926 reg_class_t rclass, int for_real, int earlyclobber)
1927 {
1928 rtx in = real_in;
1929 rtx out = real_out;
1930 int in_offset = 0;
1931 int out_offset = 0;
1932 rtx value = 0;
1933
1934 /* If operands exceed a word, we can't use either of them
1935 unless they have the same size. */
1936 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1937 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1938 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1939 return 0;
1940
1941 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1942 respectively refers to a hard register. */
1943
1944 /* Find the inside of any subregs. */
1945 while (GET_CODE (out) == SUBREG)
1946 {
1947 if (REG_P (SUBREG_REG (out))
1948 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1949 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1950 GET_MODE (SUBREG_REG (out)),
1951 SUBREG_BYTE (out),
1952 GET_MODE (out));
1953 out = SUBREG_REG (out);
1954 }
1955 while (GET_CODE (in) == SUBREG)
1956 {
1957 if (REG_P (SUBREG_REG (in))
1958 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1959 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1960 GET_MODE (SUBREG_REG (in)),
1961 SUBREG_BYTE (in),
1962 GET_MODE (in));
1963 in = SUBREG_REG (in);
1964 }
1965
1966 /* Narrow down the reg class, the same way push_reload will;
1967 otherwise we might find a dummy now, but push_reload won't. */
1968 {
1969 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1970 if (preferred_class != NO_REGS)
1971 rclass = (enum reg_class) preferred_class;
1972 }
1973
1974 /* See if OUT will do. */
1975 if (REG_P (out)
1976 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1977 {
1978 unsigned int regno = REGNO (out) + out_offset;
1979 unsigned int nwords = hard_regno_nregs[regno][outmode];
1980 rtx saved_rtx;
1981
1982 /* When we consider whether the insn uses OUT,
1983 ignore references within IN. They don't prevent us
1984 from copying IN into OUT, because those refs would
1985 move into the insn that reloads IN.
1986
1987 However, we only ignore IN in its role as this reload.
1988 If the insn uses IN elsewhere and it contains OUT,
1989 that counts. We can't be sure it's the "same" operand
1990 so it might not go through this reload. */
1991 saved_rtx = *inloc;
1992 *inloc = const0_rtx;
1993
1994 if (regno < FIRST_PSEUDO_REGISTER
1995 && HARD_REGNO_MODE_OK (regno, outmode)
1996 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1997 PATTERN (this_insn), outloc))
1998 {
1999 unsigned int i;
2000
2001 for (i = 0; i < nwords; i++)
2002 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2003 regno + i))
2004 break;
2005
2006 if (i == nwords)
2007 {
2008 if (REG_P (real_out))
2009 value = real_out;
2010 else
2011 value = gen_rtx_REG (outmode, regno);
2012 }
2013 }
2014
2015 *inloc = saved_rtx;
2016 }
2017
2018 /* Consider using IN if OUT was not acceptable
2019 or if OUT dies in this insn (like the quotient in a divmod insn).
2020 We can't use IN unless it dies in this insn,
2021 which means we must know accurately which hard regs are live.
2022 Also, the result can't go in IN if IN is used within OUT,
2023 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2024 if (hard_regs_live_known
2025 && REG_P (in)
2026 && REGNO (in) < FIRST_PSEUDO_REGISTER
2027 && (value == 0
2028 || find_reg_note (this_insn, REG_UNUSED, real_out))
2029 && find_reg_note (this_insn, REG_DEAD, real_in)
2030 && !fixed_regs[REGNO (in)]
2031 && HARD_REGNO_MODE_OK (REGNO (in),
2032 /* The only case where out and real_out might
2033 have different modes is where real_out
2034 is a subreg, and in that case, out
2035 has a real mode. */
2036 (GET_MODE (out) != VOIDmode
2037 ? GET_MODE (out) : outmode))
2038 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2039 /* However only do this if we can be sure that this input
2040 operand doesn't correspond with an uninitialized pseudo.
2041 global can assign some hardreg to it that is the same as
2042 the one assigned to a different, also live pseudo (as it
2043 can ignore the conflict). We must never introduce writes
2044 to such hardregs, as they would clobber the other live
2045 pseudo. See PR 20973. */
2046 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2047 ORIGINAL_REGNO (in))
2048 /* Similarly, only do this if we can be sure that the death
2049 note is still valid. global can assign some hardreg to
2050 the pseudo referenced in the note and simultaneously a
2051 subword of this hardreg to a different, also live pseudo,
2052 because only another subword of the hardreg is actually
2053 used in the insn. This cannot happen if the pseudo has
2054 been assigned exactly one hardreg. See PR 33732. */
2055 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2056 {
2057 unsigned int regno = REGNO (in) + in_offset;
2058 unsigned int nwords = hard_regno_nregs[regno][inmode];
2059
2060 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2061 && ! hard_reg_set_here_p (regno, regno + nwords,
2062 PATTERN (this_insn))
2063 && (! earlyclobber
2064 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2065 PATTERN (this_insn), inloc)))
2066 {
2067 unsigned int i;
2068
2069 for (i = 0; i < nwords; i++)
2070 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2071 regno + i))
2072 break;
2073
2074 if (i == nwords)
2075 {
2076 /* If we were going to use OUT as the reload reg
2077 and changed our mind, it means OUT is a dummy that
2078 dies here. So don't bother copying value to it. */
2079 if (for_real >= 0 && value == real_out)
2080 rld[for_real].out = 0;
2081 if (REG_P (real_in))
2082 value = real_in;
2083 else
2084 value = gen_rtx_REG (inmode, regno);
2085 }
2086 }
2087 }
2088
2089 return value;
2090 }
2091 \f
2092 /* This page contains subroutines used mainly for determining
2093 whether the IN or an OUT of a reload can serve as the
2094 reload register. */
2095
2096 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2097
2098 int
2099 earlyclobber_operand_p (rtx x)
2100 {
2101 int i;
2102
2103 for (i = 0; i < n_earlyclobbers; i++)
2104 if (reload_earlyclobbers[i] == x)
2105 return 1;
2106
2107 return 0;
2108 }
2109
2110 /* Return 1 if expression X alters a hard reg in the range
2111 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2112 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2113 X should be the body of an instruction. */
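/* For example (hypothetical registers): with
   X = (parallel [(set (reg:SI 2) (const_int 0)) (clobber (reg:SI 3))]),
   hard regs 2 and 3 are both altered, so asking about the range [3, 4)
   returns 1.  */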
2114
2115 static int
2116 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2117 {
2118 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2119 {
2120 rtx op0 = SET_DEST (x);
2121
2122 while (GET_CODE (op0) == SUBREG)
2123 op0 = SUBREG_REG (op0);
2124 if (REG_P (op0))
2125 {
2126 unsigned int r = REGNO (op0);
2127
2128 /* See if this reg overlaps the range under consideration. */
2129 if (r < end_regno
2130 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2131 return 1;
2132 }
2133 }
2134 else if (GET_CODE (x) == PARALLEL)
2135 {
2136 int i = XVECLEN (x, 0) - 1;
2137
2138 for (; i >= 0; i--)
2139 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2140 return 1;
2141 }
2142
2143 return 0;
2144 }
2145
2146 /* Return 1 if ADDR is a valid memory address for mode MODE
2147 in address space AS, and check that each pseudo reg has the
2148 proper kind of hard reg. */
2149
2150 int
2151 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2152 rtx addr, addr_space_t as)
2153 {
2154 #ifdef GO_IF_LEGITIMATE_ADDRESS
2155 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2156 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2157 return 0;
2158
2159 win:
2160 return 1;
2161 #else
2162 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2163 #endif
2164 }
2165 \f
2166 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2167 if they are the same hard reg, and has special hacks for
2168 autoincrement and autodecrement.
2169 This is specifically intended for find_reloads to use
2170 in determining whether two operands match.
2171 X is the operand whose number is the lower of the two.
2172
2173 The value is 2 if Y contains a pre-increment that matches
2174 a non-incrementing address in X. */
2175
2176 /* ??? To be completely correct, we should arrange to pass
2177 for X the output operand and for Y the input operand.
2178 For now, we assume that the output operand has the lower number
2179 because that is natural in (SET output (... input ...)). */
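/* For instance (hypothetical operands): matching
   X = (mem:SI (reg:SI 2)) against Y = (mem:SI (pre_inc:SI (reg:SI 2)))
   returns 2, because the PRE_INC address in Y matches the plain,
   non-incrementing address in X.  */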
2180
2181 int
2182 operands_match_p (rtx x, rtx y)
2183 {
2184 int i;
2185 RTX_CODE code = GET_CODE (x);
2186 const char *fmt;
2187 int success_2;
2188
2189 if (x == y)
2190 return 1;
2191 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2192 && (REG_P (y) || (GET_CODE (y) == SUBREG
2193 && REG_P (SUBREG_REG (y)))))
2194 {
2195 int j;
2196
2197 if (code == SUBREG)
2198 {
2199 i = REGNO (SUBREG_REG (x));
2200 if (i >= FIRST_PSEUDO_REGISTER)
2201 goto slow;
2202 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2203 GET_MODE (SUBREG_REG (x)),
2204 SUBREG_BYTE (x),
2205 GET_MODE (x));
2206 }
2207 else
2208 i = REGNO (x);
2209
2210 if (GET_CODE (y) == SUBREG)
2211 {
2212 j = REGNO (SUBREG_REG (y));
2213 if (j >= FIRST_PSEUDO_REGISTER)
2214 goto slow;
2215 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2216 GET_MODE (SUBREG_REG (y)),
2217 SUBREG_BYTE (y),
2218 GET_MODE (y));
2219 }
2220 else
2221 j = REGNO (y);
2222
2223 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2224 multiple hard register group of scalar integer registers, so that
2225 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2226 register. */
2227 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2228 && SCALAR_INT_MODE_P (GET_MODE (x))
2229 && i < FIRST_PSEUDO_REGISTER)
2230 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2231 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2232 && SCALAR_INT_MODE_P (GET_MODE (y))
2233 && j < FIRST_PSEUDO_REGISTER)
2234 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2235
2236 return i == j;
2237 }
2238 /* If two operands must match, because they are really a single
2239 operand of an assembler insn, then two postincrements are invalid
2240 because the assembler insn would increment only once.
2241 On the other hand, a postincrement matches ordinary indexing
2242 if the postincrement is the output operand. */
2243 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2244 return operands_match_p (XEXP (x, 0), y);
2245 /* Two preincrements are invalid
2246 because the assembler insn would increment only once.
2247 On the other hand, a preincrement matches ordinary indexing
2248 if the preincrement is the input operand.
2249 In this case, return 2, since some callers need to do special
2250 things when this happens. */
2251 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2252 || GET_CODE (y) == PRE_MODIFY)
2253 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2254
2255 slow:
2256
2257 /* Now we have disposed of all the cases in which different rtx codes
2258 can match. */
2259 if (code != GET_CODE (y))
2260 return 0;
2261
2262 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2263 if (GET_MODE (x) != GET_MODE (y))
2264 return 0;
2265
2266 /* MEMs referring to different address spaces are not equivalent. */
2267 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2268 return 0;
2269
2270 switch (code)
2271 {
2272 case CONST_INT:
2273 case CONST_DOUBLE:
2274 case CONST_FIXED:
2275 return 0;
2276
2277 case LABEL_REF:
2278 return XEXP (x, 0) == XEXP (y, 0);
2279 case SYMBOL_REF:
2280 return XSTR (x, 0) == XSTR (y, 0);
2281
2282 default:
2283 break;
2284 }
2285
2286 /* Compare the elements. If any pair of corresponding elements
2287 fails to match, return 0 for the whole thing. */
2288
2289 success_2 = 0;
2290 fmt = GET_RTX_FORMAT (code);
2291 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2292 {
2293 int val, j;
2294 switch (fmt[i])
2295 {
2296 case 'w':
2297 if (XWINT (x, i) != XWINT (y, i))
2298 return 0;
2299 break;
2300
2301 case 'i':
2302 if (XINT (x, i) != XINT (y, i))
2303 return 0;
2304 break;
2305
2306 case 'e':
2307 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2308 if (val == 0)
2309 return 0;
2310 /* If any subexpression returns 2,
2311 we should return 2 if we are successful. */
2312 if (val == 2)
2313 success_2 = 1;
2314 break;
2315
2316 case '0':
2317 break;
2318
2319 case 'E':
2320 if (XVECLEN (x, i) != XVECLEN (y, i))
2321 return 0;
2322 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2323 {
2324 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2325 if (val == 0)
2326 return 0;
2327 if (val == 2)
2328 success_2 = 1;
2329 }
2330 break;
2331
2332 /* It is believed that rtx's at this level will never
2333 contain anything but integers and other rtx's,
2334 except for within LABEL_REFs and SYMBOL_REFs. */
2335 default:
2336 gcc_unreachable ();
2337 }
2338 }
2339 return 1 + success_2;
2340 }
2341 \f
2342 /* Describe the range of registers or memory referenced by X.
2343 If X is a register, set REG_FLAG and put the first register
2344 number into START and the last plus one into END.
2345 If X is a memory reference, put a base address into BASE
2346 and a range of integer offsets into START and END.
2347 If X is pushing on the stack, we can assume it causes no trouble,
2348 so we set the SAFE field. */
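/* E.g. (hypothetical, assuming a 4-byte SImode and frame pointer reg 6):
   decomposing (mem:SI (plus:SI (reg:SI 6) (const_int 8))) yields
   BASE = (reg:SI 6), START = 8 and END = 12, i.e. the four bytes at
   offsets 8..11 from that base.  */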
2349
2350 static struct decomposition
2351 decompose (rtx x)
2352 {
2353 struct decomposition val;
2354 int all_const = 0;
2355
2356 memset (&val, 0, sizeof (val));
2357
2358 switch (GET_CODE (x))
2359 {
2360 case MEM:
2361 {
2362 rtx base = NULL_RTX, offset = 0;
2363 rtx addr = XEXP (x, 0);
2364
2365 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2366 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2367 {
2368 val.base = XEXP (addr, 0);
2369 val.start = -GET_MODE_SIZE (GET_MODE (x));
2370 val.end = GET_MODE_SIZE (GET_MODE (x));
2371 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2372 return val;
2373 }
2374
2375 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2376 {
2377 if (GET_CODE (XEXP (addr, 1)) == PLUS
2378 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2379 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2380 {
2381 val.base = XEXP (addr, 0);
2382 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2383 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2384 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2385 return val;
2386 }
2387 }
2388
2389 if (GET_CODE (addr) == CONST)
2390 {
2391 addr = XEXP (addr, 0);
2392 all_const = 1;
2393 }
2394 if (GET_CODE (addr) == PLUS)
2395 {
2396 if (CONSTANT_P (XEXP (addr, 0)))
2397 {
2398 base = XEXP (addr, 1);
2399 offset = XEXP (addr, 0);
2400 }
2401 else if (CONSTANT_P (XEXP (addr, 1)))
2402 {
2403 base = XEXP (addr, 0);
2404 offset = XEXP (addr, 1);
2405 }
2406 }
2407
2408 if (offset == 0)
2409 {
2410 base = addr;
2411 offset = const0_rtx;
2412 }
2413 if (GET_CODE (offset) == CONST)
2414 offset = XEXP (offset, 0);
2415 if (GET_CODE (offset) == PLUS)
2416 {
2417 if (CONST_INT_P (XEXP (offset, 0)))
2418 {
2419 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2420 offset = XEXP (offset, 0);
2421 }
2422 else if (CONST_INT_P (XEXP (offset, 1)))
2423 {
2424 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2425 offset = XEXP (offset, 1);
2426 }
2427 else
2428 {
2429 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2430 offset = const0_rtx;
2431 }
2432 }
2433 else if (!CONST_INT_P (offset))
2434 {
2435 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2436 offset = const0_rtx;
2437 }
2438
2439 if (all_const && GET_CODE (base) == PLUS)
2440 base = gen_rtx_CONST (GET_MODE (base), base);
2441
2442 gcc_assert (CONST_INT_P (offset));
2443
2444 val.start = INTVAL (offset);
2445 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2446 val.base = base;
2447 }
2448 break;
2449
2450 case REG:
2451 val.reg_flag = 1;
2452 val.start = true_regnum (x);
2453 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2454 {
2455 /* A pseudo with no hard reg. */
2456 val.start = REGNO (x);
2457 val.end = val.start + 1;
2458 }
2459 else
2460 /* A hard reg. */
2461 val.end = end_hard_regno (GET_MODE (x), val.start);
2462 break;
2463
2464 case SUBREG:
2465 if (!REG_P (SUBREG_REG (x)))
2466 /* This could be more precise, but it's good enough. */
2467 return decompose (SUBREG_REG (x));
2468 val.reg_flag = 1;
2469 val.start = true_regnum (x);
2470 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2471 return decompose (SUBREG_REG (x));
2472 else
2473 /* A hard reg. */
2474 val.end = val.start + subreg_nregs (x);
2475 break;
2476
2477 case SCRATCH:
2478 /* This hasn't been assigned yet, so it can't conflict yet. */
2479 val.safe = 1;
2480 break;
2481
2482 default:
2483 gcc_assert (CONSTANT_P (x));
2484 val.safe = 1;
2485 break;
2486 }
2487 return val;
2488 }
2489
2490 /* Return 1 if altering Y will not modify the value of X.
2491 Y is also described by YDATA, which should be decompose (Y). */
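/* Continuing the hypothetical example above: two SImode stack slots
   (mem:SI (plus:SI (reg:SI 6) (const_int -8))) and
   (mem:SI (plus:SI (reg:SI 6) (const_int -4))) decompose to the byte
   ranges [-8, -4) and [-4, 0) off the same base, so altering either one
   leaves the other unchanged and immune_p returns 1.  */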
2492
2493 static int
2494 immune_p (rtx x, rtx y, struct decomposition ydata)
2495 {
2496 struct decomposition xdata;
2497
2498 if (ydata.reg_flag)
2499 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2500 if (ydata.safe)
2501 return 1;
2502
2503 gcc_assert (MEM_P (y));
2504 /* If Y is memory and X is not, Y can't affect X. */
2505 if (!MEM_P (x))
2506 return 1;
2507
2508 xdata = decompose (x);
2509
2510 if (! rtx_equal_p (xdata.base, ydata.base))
2511 {
2512 /* If bases are distinct symbolic constants, there is no overlap. */
2513 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2514 return 1;
2515 /* Constants and stack slots never overlap. */
2516 if (CONSTANT_P (xdata.base)
2517 && (ydata.base == frame_pointer_rtx
2518 || ydata.base == hard_frame_pointer_rtx
2519 || ydata.base == stack_pointer_rtx))
2520 return 1;
2521 if (CONSTANT_P (ydata.base)
2522 && (xdata.base == frame_pointer_rtx
2523 || xdata.base == hard_frame_pointer_rtx
2524 || xdata.base == stack_pointer_rtx))
2525 return 1;
2526 /* If either base is variable, we don't know anything. */
2527 return 0;
2528 }
2529
2530 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2531 }
2532
2533 /* Similar, but calls decompose. */
2534
2535 int
2536 safe_from_earlyclobber (rtx op, rtx clobber)
2537 {
2538 struct decomposition early_data;
2539
2540 early_data = decompose (clobber);
2541 return immune_p (op, clobber, early_data);
2542 }
2543 \f
2544 /* Main entry point of this file: search the body of INSN
2545 for values that need reloading and record them with push_reload.
2546 REPLACE nonzero means record also where the values occur
2547 so that subst_reloads can be used.
2548
2549 IND_LEVELS says how many levels of indirection are supported by this
2550 machine; a value of zero means that a memory reference is not a valid
2551 memory address.
2552
2553 LIVE_KNOWN says we have valid information about which hard
2554 regs are live at each point in the program; this is true when
2555 we are called from global_alloc but false when stupid register
2556 allocation has been done.
2557
2558 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2559 an element is nonnegative if that reg has been commandeered for reloading into.
2560 It is copied into STATIC_RELOAD_REG_P and referenced from there
2561 by various subroutines.
2562
2563 Return TRUE if some operands need to be changed, because of swapping
2564 commutative operands, reg_equiv_address substitution, or whatever. */
2565
2566 int
2567 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2568 short *reload_reg_p)
2569 {
2570 int insn_code_number;
2571 int i, j;
2572 int noperands;
2573 /* These start out as the constraints for the insn
2574 and they are chewed up as we consider alternatives. */
2575 const char *constraints[MAX_RECOG_OPERANDS];
2576 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2577 a register. */
2578 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2579 char pref_or_nothing[MAX_RECOG_OPERANDS];
2580 /* Nonzero for a MEM operand whose entire address needs a reload.
2581 May be -1 to indicate the entire address may or may not need a reload. */
2582 int address_reloaded[MAX_RECOG_OPERANDS];
2583 /* Nonzero for an address operand that needs to be completely reloaded.
2584 May be -1 to indicate the entire operand may or may not need a reload. */
2585 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2586 /* Value of enum reload_type to use for operand. */
2587 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2588 /* Value of enum reload_type to use within address of operand. */
2589 enum reload_type address_type[MAX_RECOG_OPERANDS];
2590 /* Save the usage of each operand. */
2591 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2592 int no_input_reloads = 0, no_output_reloads = 0;
2593 int n_alternatives;
2594 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2595 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2596 char this_alternative_win[MAX_RECOG_OPERANDS];
2597 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2598 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2599 int this_alternative_matches[MAX_RECOG_OPERANDS];
2600 int swapped;
2601 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2602 int this_alternative_number;
2603 int goal_alternative_number = 0;
2604 int operand_reloadnum[MAX_RECOG_OPERANDS];
2605 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2606 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2607 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2608 char goal_alternative_win[MAX_RECOG_OPERANDS];
2609 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2610 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2611 int goal_alternative_swapped;
2612 int best;
2613 int commutative;
2614 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2615 rtx substed_operand[MAX_RECOG_OPERANDS];
2616 rtx body = PATTERN (insn);
2617 rtx set = single_set (insn);
2618 int goal_earlyclobber = 0, this_earlyclobber;
2619 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2620 int retval = 0;
2621
2622 this_insn = insn;
2623 n_reloads = 0;
2624 n_replacements = 0;
2625 n_earlyclobbers = 0;
2626 replace_reloads = replace;
2627 hard_regs_live_known = live_known;
2628 static_reload_reg_p = reload_reg_p;
2629
2630 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2631 neither are insns that SET cc0. Insns that use CC0 are not allowed
2632 to have any input reloads. */
2633 if (JUMP_P (insn) || CALL_P (insn))
2634 no_output_reloads = 1;
2635
2636 #ifdef HAVE_cc0
2637 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2638 no_input_reloads = 1;
2639 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2640 no_output_reloads = 1;
2641 #endif
2642
2643 #ifdef SECONDARY_MEMORY_NEEDED
2644 /* The eliminated forms of any secondary memory locations are per-insn, so
2645 clear them out here. */
2646
2647 if (secondary_memlocs_elim_used)
2648 {
2649 memset (secondary_memlocs_elim, 0,
2650 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2651 secondary_memlocs_elim_used = 0;
2652 }
2653 #endif
2654
2655 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2656 is cheap to move between them. If it is not, there may not be an insn
2657 to do the copy, so we may need a reload. */
2658 if (GET_CODE (body) == SET
2659 && REG_P (SET_DEST (body))
2660 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2661 && REG_P (SET_SRC (body))
2662 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2663 && register_move_cost (GET_MODE (SET_SRC (body)),
2664 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2665 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2666 return 0;
2667
2668 extract_insn (insn);
2669
2670 noperands = reload_n_operands = recog_data.n_operands;
2671 n_alternatives = recog_data.n_alternatives;
2672
2673 /* Just return "no reloads" if insn has no operands with constraints. */
2674 if (noperands == 0 || n_alternatives == 0)
2675 return 0;
2676
2677 insn_code_number = INSN_CODE (insn);
2678 this_insn_is_asm = insn_code_number < 0;
2679
2680 memcpy (operand_mode, recog_data.operand_mode,
2681 noperands * sizeof (enum machine_mode));
2682 memcpy (constraints, recog_data.constraints,
2683 noperands * sizeof (const char *));
2684
2685 commutative = -1;
2686
2687 /* If we will need to know, later, whether some pair of operands
2688 are the same, we must compare them now and save the result.
2689 Reloading the base and index registers will clobber them
2690 and afterward they will fail to match. */
2691
2692 for (i = 0; i < noperands; i++)
2693 {
2694 const char *p;
2695 int c;
2696 char *end;
2697
2698 substed_operand[i] = recog_data.operand[i];
2699 p = constraints[i];
2700
2701 modified[i] = RELOAD_READ;
2702
2703 /* Scan this operand's constraint to see if it is an output operand,
2704 an in-out operand, is commutative, or should match another. */
2705
2706 while ((c = *p))
2707 {
2708 p += CONSTRAINT_LEN (c, p);
2709 switch (c)
2710 {
2711 case '=':
2712 modified[i] = RELOAD_WRITE;
2713 break;
2714 case '+':
2715 modified[i] = RELOAD_READ_WRITE;
2716 break;
2717 case '%':
2718 {
2719 /* The last operand should not be marked commutative. */
2720 gcc_assert (i != noperands - 1);
2721
2722 /* We currently only support one commutative pair of
2723 operands. Some existing asm code currently uses more
2724 than one pair. Previously, that would usually work,
2725 but sometimes it would crash the compiler. We
2726 continue supporting that case as well as we can by
2727 silently ignoring all but the first pair. In the
2728 future we may handle it correctly. */
2729 if (commutative < 0)
2730 commutative = i;
2731 else
2732 gcc_assert (this_insn_is_asm);
2733 }
2734 break;
2735 /* Use of ISDIGIT is tempting here, but it may get expensive because
2736 of locale support we don't want. */
2737 case '0': case '1': case '2': case '3': case '4':
2738 case '5': case '6': case '7': case '8': case '9':
2739 {
2740 c = strtoul (p - 1, &end, 10);
2741 p = end;
2742
2743 operands_match[c][i]
2744 = operands_match_p (recog_data.operand[c],
2745 recog_data.operand[i]);
2746
2747 /* An operand may not match itself. */
2748 gcc_assert (c != i);
2749
2750 /* If C can be commuted with C+1, and C might need to match I,
2751 then C+1 might also need to match I. */
2752 if (commutative >= 0)
2753 {
2754 if (c == commutative || c == commutative + 1)
2755 {
2756 int other = c + (c == commutative ? 1 : -1);
2757 operands_match[other][i]
2758 = operands_match_p (recog_data.operand[other],
2759 recog_data.operand[i]);
2760 }
2761 if (i == commutative || i == commutative + 1)
2762 {
2763 int other = i + (i == commutative ? 1 : -1);
2764 operands_match[c][other]
2765 = operands_match_p (recog_data.operand[c],
2766 recog_data.operand[other]);
2767 }
2768 /* Note that C is supposed to be less than I.
2769 No need to consider altering both C and I because in
2770 that case we would alter one into the other. */
2771 }
2772 }
2773 }
2774 }
2775 }
2776
2777 /* Examine each operand that is a memory reference or memory address
2778 and reload parts of the addresses into index registers.
2779 Also here any references to pseudo regs that didn't get hard regs
2780 but are equivalent to constants get replaced in the insn itself
2781 with those constants. Nobody will ever see them again.
2782
2783 Finally, set up the preferred classes of each operand. */
2784
2785 for (i = 0; i < noperands; i++)
2786 {
2787 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2788
2789 address_reloaded[i] = 0;
2790 address_operand_reloaded[i] = 0;
2791 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2792 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2793 : RELOAD_OTHER);
2794 address_type[i]
2795 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2796 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2797 : RELOAD_OTHER);
2798
2799 if (*constraints[i] == 0)
2800 /* Ignore things like match_operator operands. */
2801 ;
2802 else if (constraints[i][0] == 'p'
2803 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2804 {
2805 address_operand_reloaded[i]
2806 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2807 recog_data.operand[i],
2808 recog_data.operand_loc[i],
2809 i, operand_type[i], ind_levels, insn);
2810
2811 /* If we now have a simple operand where we used to have a
2812 PLUS or MULT, re-recognize and try again. */
2813 if ((OBJECT_P (*recog_data.operand_loc[i])
2814 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2815 && (GET_CODE (recog_data.operand[i]) == MULT
2816 || GET_CODE (recog_data.operand[i]) == PLUS))
2817 {
2818 INSN_CODE (insn) = -1;
2819 retval = find_reloads (insn, replace, ind_levels, live_known,
2820 reload_reg_p);
2821 return retval;
2822 }
2823
2824 recog_data.operand[i] = *recog_data.operand_loc[i];
2825 substed_operand[i] = recog_data.operand[i];
2826
2827 /* Address operands are reloaded in their existing mode,
2828 no matter what is specified in the machine description. */
2829 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2830 }
2831 else if (code == MEM)
2832 {
2833 address_reloaded[i]
2834 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2835 recog_data.operand_loc[i],
2836 XEXP (recog_data.operand[i], 0),
2837 &XEXP (recog_data.operand[i], 0),
2838 i, address_type[i], ind_levels, insn);
2839 recog_data.operand[i] = *recog_data.operand_loc[i];
2840 substed_operand[i] = recog_data.operand[i];
2841 }
2842 else if (code == SUBREG)
2843 {
2844 rtx reg = SUBREG_REG (recog_data.operand[i]);
2845 rtx op
2846 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2847 ind_levels,
2848 set != 0
2849 && &SET_DEST (set) == recog_data.operand_loc[i],
2850 insn,
2851 &address_reloaded[i]);
2852
2853 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2854 that didn't get a hard register, emit a USE with a REG_EQUAL
2855 note in front so that we might inherit a previous, possibly
2856 wider reload. */
2857
2858 if (replace
2859 && MEM_P (op)
2860 && REG_P (reg)
2861 && (GET_MODE_SIZE (GET_MODE (reg))
2862 >= GET_MODE_SIZE (GET_MODE (op)))
2863 && reg_equiv_constant (REGNO (reg)) == 0)
2864 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2865 insn),
2866 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2867
2868 substed_operand[i] = recog_data.operand[i] = op;
2869 }
2870 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2871 /* We can get a PLUS as an "operand" as a result of register
2872 elimination. See eliminate_regs and gen_reload. We handle
2873 a unary operator by reloading the operand. */
2874 substed_operand[i] = recog_data.operand[i]
2875 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2876 ind_levels, 0, insn,
2877 &address_reloaded[i]);
2878 else if (code == REG)
2879 {
2880 /* This is equivalent to calling find_reloads_toplev.
2881 The code is duplicated for speed.
2882 When we find a pseudo always equivalent to a constant,
2883 we replace it by the constant. We must be sure, however,
2884 that we don't try to replace it in the insn in which it
2885 is being set. */
2886 int regno = REGNO (recog_data.operand[i]);
2887 if (reg_equiv_constant (regno) != 0
2888 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2889 {
2890 /* Record the existing mode so that the check whether constants
2891 are allowed will work when operand_mode isn't specified. */
2892
2893 if (operand_mode[i] == VOIDmode)
2894 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2895
2896 substed_operand[i] = recog_data.operand[i]
2897 = reg_equiv_constant (regno);
2898 }
2899 if (reg_equiv_memory_loc (regno) != 0
2900 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2901 /* We need not give a valid is_set_dest argument since the case
2902 of a constant equivalence was checked above. */
2903 substed_operand[i] = recog_data.operand[i]
2904 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2905 ind_levels, 0, insn,
2906 &address_reloaded[i]);
2907 }
2908 /* If the operand is still a register (we didn't replace it with an
2909 equivalent), get the preferred class to reload it into. */
2910 code = GET_CODE (recog_data.operand[i]);
2911 preferred_class[i]
2912 = ((code == REG && REGNO (recog_data.operand[i])
2913 >= FIRST_PSEUDO_REGISTER)
2914 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2915 : NO_REGS);
2916 pref_or_nothing[i]
2917 = (code == REG
2918 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2919 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2920 }
2921
2922 /* If this is simply a copy from operand 1 to operand 0, merge the
2923 preferred classes for the operands. */
2924 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2925 && recog_data.operand[1] == SET_SRC (set))
2926 {
2927 preferred_class[0] = preferred_class[1]
2928 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2929 pref_or_nothing[0] |= pref_or_nothing[1];
2930 pref_or_nothing[1] |= pref_or_nothing[0];
2931 }
2932
2933 /* Now see what we need for pseudo-regs that didn't get hard regs
2934 or got the wrong kind of hard reg. For this, we must consider
2935 all the operands together against the register constraints. */
2936
2937 best = MAX_RECOG_OPERANDS * 2 + 600;
2938
2939 swapped = 0;
2940 goal_alternative_swapped = 0;
2941 try_swapped:
2942
2943 /* The constraints are made of several alternatives.
2944 Each operand's constraint looks like foo,bar,... with commas
2945 separating the alternatives. The first alternatives for all
2946 operands go together, the second alternatives go together, etc.
2947
2948 First loop over alternatives. */
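/* As a hypothetical example, a two-operand insn whose constraints are
   "=r,m" for operand 0 and "rm,r" for operand 1 has two alternatives:
   alternative 0 wants operand 0 in a register (written) and allows
   operand 1 in a register or memory, while alternative 1 allows operand 0
   in memory but requires operand 1 in a register.  */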
2949
2950 for (this_alternative_number = 0;
2951 this_alternative_number < n_alternatives;
2952 this_alternative_number++)
2953 {
2954 /* Loop over operands for one constraint alternative. */
2955 /* LOSERS counts those that don't fit this alternative
2956 and would require loading. */
2957 int losers = 0;
2958 /* BAD is set to 1 if some operand can't fit this alternative
2959 even after reloading. */
2960 int bad = 0;
2961 /* REJECT is a count of how undesirable this alternative says it is
2962 if any reloading is required. If the alternative matches exactly
2963 then REJECT is ignored, but otherwise it gets this much
2964 counted against it in addition to the reloading needed. Each
2965 ? counts three times here since we want the disparagement caused by
2966 a bad register class to only count 1/3 as much. */
2967 int reject = 0;
2968
2969 if (!recog_data.alternative_enabled_p[this_alternative_number])
2970 {
2971 int i;
2972
2973 for (i = 0; i < recog_data.n_operands; i++)
2974 constraints[i] = skip_alternative (constraints[i]);
2975
2976 continue;
2977 }
2978
2979 this_earlyclobber = 0;
2980
2981 for (i = 0; i < noperands; i++)
2982 {
2983 const char *p = constraints[i];
2984 char *end;
2985 int len;
2986 int win = 0;
2987 int did_match = 0;
2988 /* 0 => this operand can be reloaded somehow for this alternative. */
2989 int badop = 1;
2990 /* 0 => this operand can be reloaded if the alternative allows regs. */
2991 int winreg = 0;
2992 int c;
2993 int m;
2994 rtx operand = recog_data.operand[i];
2995 int offset = 0;
2996 /* Nonzero means this is a MEM that must be reloaded into a reg
2997 regardless of what the constraint says. */
2998 int force_reload = 0;
2999 int offmemok = 0;
3000 /* Nonzero if a constant forced into memory would be OK for this
3001 operand. */
3002 int constmemok = 0;
3003 int earlyclobber = 0;
3004
3005 /* If the predicate accepts a unary operator, it means that
3006 we need to reload the operand, but do not do this for
3007 match_operator and friends. */
3008 if (UNARY_P (operand) && *p != 0)
3009 operand = XEXP (operand, 0);
3010
3011 /* If the operand is a SUBREG, extract
3012 the REG or MEM (or maybe even a constant) within.
3013 (Constants can occur as a result of reg_equiv_constant.) */
3014
3015 while (GET_CODE (operand) == SUBREG)
3016 {
3017 /* Offset only matters when operand is a REG and
3018 it is a hard reg. This is because it is passed
3019 to reg_fits_class_p if it is a REG, and reg_fits_class_p
3020 returns 0 for all pseudos. */
3021 if (REG_P (SUBREG_REG (operand))
3022 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3023 {
3024 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3025 GET_MODE (SUBREG_REG (operand)),
3026 SUBREG_BYTE (operand),
3027 GET_MODE (operand)) < 0)
3028 force_reload = 1;
3029 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3030 GET_MODE (SUBREG_REG (operand)),
3031 SUBREG_BYTE (operand),
3032 GET_MODE (operand));
3033 }
3034 operand = SUBREG_REG (operand);
3035 /* Force reload if this is a constant or PLUS or if there may
3036 be a problem accessing OPERAND in the outer mode. */
3037 if (CONSTANT_P (operand)
3038 || GET_CODE (operand) == PLUS
3039 /* We must force a reload of paradoxical SUBREGs
3040 of a MEM because the alignment of the inner value
3041 may not be enough to do the outer reference. On
3042 big-endian machines, it may also reference outside
3043 the object.
3044
3045 On machines that extend byte operations and we have a
3046 SUBREG where both the inner and outer modes are no wider
3047 than a word and the inner mode is narrower, is integral,
3048 and gets extended when loaded from memory, combine.c has
3049 made assumptions about the behavior of the machine in such
3050 register access. If the data is, in fact, in memory we
3051 must always load using the size assumed to be in the
3052 register and let the insn do the different-sized
3053 accesses.
3054
3055 This is doubly true if WORD_REGISTER_OPERATIONS. In
3056 this case eliminate_regs has left non-paradoxical
3057 subregs for push_reload to see. Make sure it does
3058 by forcing the reload.
3059
3060 ??? When is it right at this stage to have a subreg
3061 of a mem that is _not_ to be handled specially? IMO
3062 those should have been reduced to just a mem. */
3063 || ((MEM_P (operand)
3064 || (REG_P (operand)
3065 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3066 #ifndef WORD_REGISTER_OPERATIONS
3067 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3068 < BIGGEST_ALIGNMENT)
3069 && (GET_MODE_SIZE (operand_mode[i])
3070 > GET_MODE_SIZE (GET_MODE (operand))))
3071 || BYTES_BIG_ENDIAN
3072 #ifdef LOAD_EXTEND_OP
3073 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3074 && (GET_MODE_SIZE (GET_MODE (operand))
3075 <= UNITS_PER_WORD)
3076 && (GET_MODE_SIZE (operand_mode[i])
3077 > GET_MODE_SIZE (GET_MODE (operand)))
3078 && INTEGRAL_MODE_P (GET_MODE (operand))
3079 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3080 #endif
3081 )
3082 #endif
3083 )
3084 )
3085 force_reload = 1;
3086 }
3087
3088 this_alternative[i] = NO_REGS;
3089 this_alternative_win[i] = 0;
3090 this_alternative_match_win[i] = 0;
3091 this_alternative_offmemok[i] = 0;
3092 this_alternative_earlyclobber[i] = 0;
3093 this_alternative_matches[i] = -1;
3094
3095 /* An empty constraint or empty alternative
3096 allows anything which matched the pattern. */
3097 if (*p == 0 || *p == ',')
3098 win = 1, badop = 0;
3099
3100 /* Scan this alternative's specs for this operand;
3101 set WIN if the operand fits any letter in this alternative.
3102 Otherwise, clear BADOP if this operand could
3103 fit some letter after reloads,
3104 or set WINREG if this operand could fit after reloads
3105 provided the constraint allows some registers. */
3106
3107 do
3108 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3109 {
3110 case '\0':
3111 len = 0;
3112 break;
3113 case ',':
3114 c = '\0';
3115 break;
3116
3117 case '=': case '+': case '*':
3118 break;
3119
3120 case '%':
3121 /* We only support one commutative marker, the first
3122 one. We already set commutative above. */
3123 break;
3124
3125 case '?':
3126 reject += 6;
3127 break;
3128
3129 case '!':
3130 reject = 600;
3131 break;
3132
3133 case '#':
3134 /* Ignore rest of this alternative as far as
3135 reloading is concerned. */
3136 do
3137 p++;
3138 while (*p && *p != ',');
3139 len = 0;
3140 break;
3141
3142 case '0': case '1': case '2': case '3': case '4':
3143 case '5': case '6': case '7': case '8': case '9':
3144 m = strtoul (p, &end, 10);
3145 p = end;
3146 len = 0;
3147
3148 this_alternative_matches[i] = m;
3149 /* We are supposed to match a previous operand.
3150 If we do, we win if that one did.
3151 If we do not, count both of the operands as losers.
3152 (This is too conservative, since most of the time
3153 only a single reload insn will be needed to make
3154 the two operands win. As a result, this alternative
3155 may be rejected when it is actually desirable.) */
3156 if ((swapped && (m != commutative || i != commutative + 1))
3157 /* If we are matching as if two operands were swapped,
3158 also pretend that operands_match had been computed
3159 with swapped.
3160 		       But if I is the second of those and M is the first,
3161 don't exchange them, because operands_match is valid
3162 only on one side of its diagonal. */
3163 ? (operands_match
3164 [(m == commutative || m == commutative + 1)
3165 ? 2 * commutative + 1 - m : m]
3166 [(i == commutative || i == commutative + 1)
3167 ? 2 * commutative + 1 - i : i])
3168 : operands_match[m][i])
3169 {
3170 /* If we are matching a non-offsettable address where an
3171 offsettable address was expected, then we must reject
3172 this combination, because we can't reload it. */
3173 if (this_alternative_offmemok[m]
3174 && MEM_P (recog_data.operand[m])
3175 && this_alternative[m] == NO_REGS
3176 && ! this_alternative_win[m])
3177 bad = 1;
3178
3179 did_match = this_alternative_win[m];
3180 }
3181 else
3182 {
3183 /* Operands don't match. */
3184 rtx value;
3185 int loc1, loc2;
3186 /* Retroactively mark the operand we had to match
3187 as a loser, if it wasn't already. */
3188 if (this_alternative_win[m])
3189 losers++;
3190 this_alternative_win[m] = 0;
3191 if (this_alternative[m] == NO_REGS)
3192 bad = 1;
3193 /* But count the pair only once in the total badness of
3194 this alternative, if the pair can be a dummy reload.
3195 The pointers in operand_loc are not swapped; swap
3196 them by hand if necessary. */
3197 if (swapped && i == commutative)
3198 loc1 = commutative + 1;
3199 else if (swapped && i == commutative + 1)
3200 loc1 = commutative;
3201 else
3202 loc1 = i;
3203 if (swapped && m == commutative)
3204 loc2 = commutative + 1;
3205 else if (swapped && m == commutative + 1)
3206 loc2 = commutative;
3207 else
3208 loc2 = m;
3209 value
3210 = find_dummy_reload (recog_data.operand[i],
3211 recog_data.operand[m],
3212 recog_data.operand_loc[loc1],
3213 recog_data.operand_loc[loc2],
3214 operand_mode[i], operand_mode[m],
3215 this_alternative[m], -1,
3216 this_alternative_earlyclobber[m]);
3217
3218 if (value != 0)
3219 losers--;
3220 }
3221 /* This can be fixed with reloads if the operand
3222 we are supposed to match can be fixed with reloads. */
3223 badop = 0;
3224 this_alternative[i] = this_alternative[m];
3225
3226 /* If we have to reload this operand and some previous
3227 operand also had to match the same thing as this
3228 operand, we don't know how to do that. So reject this
3229 alternative. */
3230 if (! did_match || force_reload)
3231 for (j = 0; j < i; j++)
3232 if (this_alternative_matches[j]
3233 == this_alternative_matches[i])
3234 badop = 1;
3235 break;
3236
3237 case 'p':
3238 /* All necessary reloads for an address_operand
3239 were handled in find_reloads_address. */
3240 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3241 SCRATCH);
3242 win = 1;
3243 badop = 0;
3244 break;
3245
3246 case TARGET_MEM_CONSTRAINT:
3247 if (force_reload)
3248 break;
3249 if (MEM_P (operand)
3250 || (REG_P (operand)
3251 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3252 && reg_renumber[REGNO (operand)] < 0))
3253 win = 1;
3254 if (CONST_POOL_OK_P (operand_mode[i], operand))
3255 badop = 0;
3256 constmemok = 1;
3257 break;
3258
3259 case '<':
3260 if (MEM_P (operand)
3261 && ! address_reloaded[i]
3262 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3263 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3264 win = 1;
3265 break;
3266
3267 case '>':
3268 if (MEM_P (operand)
3269 && ! address_reloaded[i]
3270 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3271 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3272 win = 1;
3273 break;
3274
3275 /* Memory operand whose address is not offsettable. */
3276 case 'V':
3277 if (force_reload)
3278 break;
3279 if (MEM_P (operand)
3280 && ! (ind_levels ? offsettable_memref_p (operand)
3281 : offsettable_nonstrict_memref_p (operand))
3282 /* Certain mem addresses will become offsettable
3283 after they themselves are reloaded. This is important;
3284 we don't want our own handling of unoffsettables
3285 to override the handling of reg_equiv_address. */
3286 && !(REG_P (XEXP (operand, 0))
3287 && (ind_levels == 0
3288 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3289 win = 1;
3290 break;
3291
3292 /* Memory operand whose address is offsettable. */
3293 case 'o':
3294 if (force_reload)
3295 break;
3296 if ((MEM_P (operand)
3297 /* If IND_LEVELS, find_reloads_address won't reload a
3298 pseudo that didn't get a hard reg, so we have to
3299 reject that case. */
3300 && ((ind_levels ? offsettable_memref_p (operand)
3301 : offsettable_nonstrict_memref_p (operand))
3302 /* A reloaded address is offsettable because it is now
3303 just a simple register indirect. */
3304 || address_reloaded[i] == 1))
3305 || (REG_P (operand)
3306 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3307 && reg_renumber[REGNO (operand)] < 0
3308 /* If reg_equiv_address is nonzero, we will be
3309 loading it into a register; hence it will be
3310 offsettable, but we cannot say that reg_equiv_mem
3311 is offsettable without checking. */
3312 && ((reg_equiv_mem (REGNO (operand)) != 0
3313 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3314 || (reg_equiv_address (REGNO (operand)) != 0))))
3315 win = 1;
3316 if (CONST_POOL_OK_P (operand_mode[i], operand)
3317 || MEM_P (operand))
3318 badop = 0;
3319 constmemok = 1;
3320 offmemok = 1;
3321 break;
3322
3323 case '&':
3324 /* Output operand that is stored before the need for the
3325 input operands (and their index registers) is over. */
3326 earlyclobber = 1, this_earlyclobber = 1;
3327 break;
3328
3329 case 'E':
3330 case 'F':
3331 if (GET_CODE (operand) == CONST_DOUBLE
3332 || (GET_CODE (operand) == CONST_VECTOR
3333 && (GET_MODE_CLASS (GET_MODE (operand))
3334 == MODE_VECTOR_FLOAT)))
3335 win = 1;
3336 break;
3337
3338 case 'G':
3339 case 'H':
3340 if (GET_CODE (operand) == CONST_DOUBLE
3341 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3342 win = 1;
3343 break;
3344
3345 case 's':
3346 if (CONST_INT_P (operand)
3347 || (GET_CODE (operand) == CONST_DOUBLE
3348 && GET_MODE (operand) == VOIDmode))
3349 break;
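	      /* Fall through: anything that is not an explicit integer
		 is checked as for 'i'.  */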
3350 case 'i':
3351 if (CONSTANT_P (operand)
3352 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3353 win = 1;
3354 break;
3355
3356 case 'n':
3357 if (CONST_INT_P (operand)
3358 || (GET_CODE (operand) == CONST_DOUBLE
3359 && GET_MODE (operand) == VOIDmode))
3360 win = 1;
3361 break;
3362
3363 case 'I':
3364 case 'J':
3365 case 'K':
3366 case 'L':
3367 case 'M':
3368 case 'N':
3369 case 'O':
3370 case 'P':
3371 if (CONST_INT_P (operand)
3372 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3373 win = 1;
3374 break;
3375
3376 case 'X':
3377 force_reload = 0;
3378 win = 1;
3379 break;
3380
3381 case 'g':
3382 if (! force_reload
3383 /* A PLUS is never a valid operand, but reload can make
3384 it from a register when eliminating registers. */
3385 && GET_CODE (operand) != PLUS
3386 /* A SCRATCH is not a valid operand. */
3387 && GET_CODE (operand) != SCRATCH
3388 && (! CONSTANT_P (operand)
3389 || ! flag_pic
3390 || LEGITIMATE_PIC_OPERAND_P (operand))
3391 && (GENERAL_REGS == ALL_REGS
3392 || !REG_P (operand)
3393 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3394 && reg_renumber[REGNO (operand)] < 0)))
3395 win = 1;
3396 /* Drop through into 'r' case. */
3397
3398 case 'r':
3399 this_alternative[i]
3400 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3401 goto reg;
3402
3403 default:
3404 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3405 {
3406 #ifdef EXTRA_CONSTRAINT_STR
3407 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3408 {
3409 if (force_reload)
3410 break;
3411 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3412 win = 1;
3413 /* If the address was already reloaded,
3414 we win as well. */
3415 else if (MEM_P (operand)
3416 && address_reloaded[i] == 1)
3417 win = 1;
3418 /* Likewise if the address will be reloaded because
3419 reg_equiv_address is nonzero. For reg_equiv_mem
3420 we have to check. */
3421 else if (REG_P (operand)
3422 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3423 && reg_renumber[REGNO (operand)] < 0
3424 && ((reg_equiv_mem (REGNO (operand)) != 0
3425 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3426 || (reg_equiv_address (REGNO (operand)) != 0)))
3427 win = 1;
3428
3429 /* If we didn't already win, we can reload
3430 constants via force_const_mem, and other
3431 MEMs by reloading the address like for 'o'. */
3432 if (CONST_POOL_OK_P (operand_mode[i], operand)
3433 || MEM_P (operand))
3434 badop = 0;
3435 constmemok = 1;
3436 offmemok = 1;
3437 break;
3438 }
3439 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3440 {
3441 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3442 win = 1;
3443
3444 /* If we didn't already win, we can reload
3445 the address into a base register. */
3446 this_alternative[i] = base_reg_class (VOIDmode,
3447 ADDRESS,
3448 SCRATCH);
3449 badop = 0;
3450 break;
3451 }
3452
3453 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3454 win = 1;
3455 #endif
3456 break;
3457 }
3458
3459 this_alternative[i]
3460 = (reg_class_subunion
3461 [this_alternative[i]]
3462 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3463 reg:
3464 if (GET_MODE (operand) == BLKmode)
3465 break;
3466 winreg = 1;
3467 if (REG_P (operand)
3468 && reg_fits_class_p (operand, this_alternative[i],
3469 offset, GET_MODE (recog_data.operand[i])))
3470 win = 1;
3471 break;
3472 }
3473 while ((p += len), c);
3474
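      /* P now points at the start of the next alternative's constraint
	 for this operand (or at the terminating NUL), so the scan of the
	 next alternative resumes from here.  */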
3475 constraints[i] = p;
3476
3477 /* If this operand could be handled with a reg,
3478 and some reg is allowed, then this operand can be handled. */
3479 if (winreg && this_alternative[i] != NO_REGS
3480 && (win || !class_only_fixed_regs[this_alternative[i]]))
3481 badop = 0;
3482
3483 /* Record which operands fit this alternative. */
3484 this_alternative_earlyclobber[i] = earlyclobber;
3485 if (win && ! force_reload)
3486 this_alternative_win[i] = 1;
3487 else if (did_match && ! force_reload)
3488 this_alternative_match_win[i] = 1;
3489 else
3490 {
3491 int const_to_mem = 0;
3492
3493 this_alternative_offmemok[i] = offmemok;
3494 losers++;
3495 if (badop)
3496 bad = 1;
3497 /* Alternative loses if it has no regs for a reg operand. */
3498 if (REG_P (operand)
3499 && this_alternative[i] == NO_REGS
3500 && this_alternative_matches[i] < 0)
3501 bad = 1;
3502
3503 /* If this is a constant that is reloaded into the desired
3504 class by copying it to memory first, count that as another
3505 reload. This is consistent with other code and is
3506 required to avoid choosing another alternative when
3507 the constant is moved into memory by this function on
3508 an early reload pass. Note that the test here is
3509 precisely the same as in the code below that calls
3510 force_const_mem. */
3511 if (CONST_POOL_OK_P (operand_mode[i], operand)
3512 && ((targetm.preferred_reload_class (operand,
3513 this_alternative[i])
3514 == NO_REGS)
3515 || no_input_reloads))
3516 {
3517 const_to_mem = 1;
3518 if (this_alternative[i] != NO_REGS)
3519 losers++;
3520 }
3521
3522 /* Alternative loses if it requires a type of reload not
3523 permitted for this insn. We can always reload SCRATCH
3524 and objects with a REG_UNUSED note. */
3525 if (GET_CODE (operand) != SCRATCH
3526 && modified[i] != RELOAD_READ && no_output_reloads
3527 && ! find_reg_note (insn, REG_UNUSED, operand))
3528 bad = 1;
3529 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3530 && ! const_to_mem)
3531 bad = 1;
3532
3533 /* If we can't reload this value at all, reject this
3534 alternative. Note that we could also lose due to
3535 LIMIT_RELOAD_CLASS, but we don't check that
3536 here. */
3537
3538 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3539 {
3540 if (targetm.preferred_reload_class (operand, this_alternative[i])
3541 == NO_REGS)
3542 reject = 600;
3543
3544 if (operand_type[i] == RELOAD_FOR_OUTPUT
3545 && (targetm.preferred_output_reload_class (operand,
3546 this_alternative[i])
3547 == NO_REGS))
3548 reject = 600;
3549 }
3550
3551 /* We prefer to reload pseudos over reloading other things,
3552 		     since such reloads may be eliminated later.
3553 If we are reloading a SCRATCH, we won't be generating any
3554 insns, just using a register, so it is also preferred.
3555 So bump REJECT in other cases. Don't do this in the
3556 case where we are forcing a constant into memory and
3557 		     it will then win, since we don't want a different
3558 		     alternative to match in that case.  */
3559 if (! (REG_P (operand)
3560 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3561 && GET_CODE (operand) != SCRATCH
3562 && ! (const_to_mem && constmemok))
3563 reject += 2;
3564
3565 /* Input reloads can be inherited more often than output
3566 reloads can be removed, so penalize output reloads. */
3567 if (operand_type[i] != RELOAD_FOR_INPUT
3568 && GET_CODE (operand) != SCRATCH)
3569 reject++;
3570 }
3571
3572 /* If this operand is a pseudo register that didn't get a hard
3573 reg and this alternative accepts some register, see if the
3574 class that we want is a subset of the preferred class for this
3575 register. If not, but it intersects that class, use the
3576 preferred class instead. If it does not intersect the preferred
3577 class, show that usage of this alternative should be discouraged;
3578 it will be discouraged more still if the register is `preferred
3579 or nothing'. We do this because it increases the chance of
3580 reusing our spill register in a later insn and avoiding a pair
3581 of memory stores and loads.
3582
3583 Don't bother with this if this alternative will accept this
3584 operand.
3585
3586 Don't do this for a multiword operand, since it is only a
3587 small win and has the risk of requiring more spill registers,
3588 which could cause a large loss.
3589
3590 Don't do this if the preferred class has only one register
3591 because we might otherwise exhaust the class. */
3592
3593 if (! win && ! did_match
3594 && this_alternative[i] != NO_REGS
3595 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3596 && reg_class_size [(int) preferred_class[i]] > 0
3597 && ! small_register_class_p (preferred_class[i]))
3598 {
3599 if (! reg_class_subset_p (this_alternative[i],
3600 preferred_class[i]))
3601 {
3602 /* Since we don't have a way of forming the intersection,
3603 we just do something special if the preferred class
3604 is a subset of the class we have; that's the most
3605 common case anyway. */
3606 if (reg_class_subset_p (preferred_class[i],
3607 this_alternative[i]))
3608 this_alternative[i] = preferred_class[i];
3609 else
3610 reject += (2 + 2 * pref_or_nothing[i]);
3611 }
3612 }
3613 }
3614
3615 /* Now see if any output operands that are marked "earlyclobber"
3616 in this alternative conflict with any input operands
3617 or any memory addresses. */
3618
3619 for (i = 0; i < noperands; i++)
3620 if (this_alternative_earlyclobber[i]
3621 && (this_alternative_win[i] || this_alternative_match_win[i]))
3622 {
3623 struct decomposition early_data;
3624
3625 early_data = decompose (recog_data.operand[i]);
3626
3627 gcc_assert (modified[i] != RELOAD_READ);
3628
3629 if (this_alternative[i] == NO_REGS)
3630 {
3631 this_alternative_earlyclobber[i] = 0;
3632 gcc_assert (this_insn_is_asm);
3633 error_for_asm (this_insn,
3634 "%<&%> constraint used with no register class");
3635 }
3636
3637 for (j = 0; j < noperands; j++)
3638 /* Is this an input operand or a memory ref? */
3639 if ((MEM_P (recog_data.operand[j])
3640 || modified[j] != RELOAD_WRITE)
3641 && j != i
3642 /* Ignore things like match_operator operands. */
3643 && !recog_data.is_operator[j]
3644 /* Don't count an input operand that is constrained to match
3645 the early clobber operand. */
3646 && ! (this_alternative_matches[j] == i
3647 && rtx_equal_p (recog_data.operand[i],
3648 recog_data.operand[j]))
3649 /* Is it altered by storing the earlyclobber operand? */
3650 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3651 early_data))
3652 {
3653 /* If the output is in a non-empty few-regs class,
3654 it's costly to reload it, so reload the input instead. */
3655 if (small_register_class_p (this_alternative[i])
3656 && (REG_P (recog_data.operand[j])
3657 || GET_CODE (recog_data.operand[j]) == SUBREG))
3658 {
3659 losers++;
3660 this_alternative_win[j] = 0;
3661 this_alternative_match_win[j] = 0;
3662 }
3663 else
3664 break;
3665 }
3666 /* If an earlyclobber operand conflicts with something,
3667 it must be reloaded, so request this and count the cost. */
3668 if (j != noperands)
3669 {
3670 losers++;
3671 this_alternative_win[i] = 0;
3672 this_alternative_match_win[j] = 0;
3673 for (j = 0; j < noperands; j++)
3674 if (this_alternative_matches[j] == i
3675 && this_alternative_match_win[j])
3676 {
3677 this_alternative_win[j] = 0;
3678 this_alternative_match_win[j] = 0;
3679 losers++;
3680 }
3681 }
3682 }
3683
3684 /* If one alternative accepts all the operands, no reload required,
3685 choose that alternative; don't consider the remaining ones. */
3686 if (losers == 0)
3687 {
3688 /* Unswap these so that they are never swapped at `finish'. */
3689 if (commutative >= 0)
3690 {
3691 recog_data.operand[commutative] = substed_operand[commutative];
3692 recog_data.operand[commutative + 1]
3693 = substed_operand[commutative + 1];
3694 }
3695 for (i = 0; i < noperands; i++)
3696 {
3697 goal_alternative_win[i] = this_alternative_win[i];
3698 goal_alternative_match_win[i] = this_alternative_match_win[i];
3699 goal_alternative[i] = this_alternative[i];
3700 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3701 goal_alternative_matches[i] = this_alternative_matches[i];
3702 goal_alternative_earlyclobber[i]
3703 = this_alternative_earlyclobber[i];
3704 }
3705 goal_alternative_number = this_alternative_number;
3706 goal_alternative_swapped = swapped;
3707 goal_earlyclobber = this_earlyclobber;
3708 goto finish;
3709 }
3710
3711 /* REJECT, set by the ! and ? constraint characters and when a register
3712 would be reloaded into a non-preferred class, discourages the use of
3713 this alternative for a reload goal. REJECT is incremented by six
3714 for each ? and two for each non-preferred class. */
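  /* For example, one reload plus a single `?' scores 1*6 + 6 == 12,
     the same as two reloads with no rejects (2*6 == 12).  */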
3715 losers = losers * 6 + reject;
3716
3717 /* If this alternative can be made to work by reloading,
3718 and it needs less reloading than the others checked so far,
3719 record it as the chosen goal for reloading. */
3720 if (! bad)
3721 {
3722 if (best > losers)
3723 {
3724 for (i = 0; i < noperands; i++)
3725 {
3726 goal_alternative[i] = this_alternative[i];
3727 goal_alternative_win[i] = this_alternative_win[i];
3728 goal_alternative_match_win[i]
3729 = this_alternative_match_win[i];
3730 goal_alternative_offmemok[i]
3731 = this_alternative_offmemok[i];
3732 goal_alternative_matches[i] = this_alternative_matches[i];
3733 goal_alternative_earlyclobber[i]
3734 = this_alternative_earlyclobber[i];
3735 }
3736 goal_alternative_swapped = swapped;
3737 best = losers;
3738 goal_alternative_number = this_alternative_number;
3739 goal_earlyclobber = this_earlyclobber;
3740 }
3741 }
3742 }
3743
3744 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3745 then we need to try each alternative twice,
3746 the second time matching those two operands
3747 as if we had exchanged them.
3748 To do this, really exchange them in operands.
3749
3750 If we have just tried the alternatives the second time,
3751 return operands to normal and drop through. */
3752
3753 if (commutative >= 0)
3754 {
3755 swapped = !swapped;
3756 if (swapped)
3757 {
3758 enum reg_class tclass;
3759 int t;
3760
3761 recog_data.operand[commutative] = substed_operand[commutative + 1];
3762 recog_data.operand[commutative + 1] = substed_operand[commutative];
3763 /* Swap the duplicates too. */
3764 for (i = 0; i < recog_data.n_dups; i++)
3765 if (recog_data.dup_num[i] == commutative
3766 || recog_data.dup_num[i] == commutative + 1)
3767 *recog_data.dup_loc[i]
3768 = recog_data.operand[(int) recog_data.dup_num[i]];
3769
3770 tclass = preferred_class[commutative];
3771 preferred_class[commutative] = preferred_class[commutative + 1];
3772 preferred_class[commutative + 1] = tclass;
3773
3774 t = pref_or_nothing[commutative];
3775 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3776 pref_or_nothing[commutative + 1] = t;
3777
3778 t = address_reloaded[commutative];
3779 address_reloaded[commutative] = address_reloaded[commutative + 1];
3780 address_reloaded[commutative + 1] = t;
3781
3782 memcpy (constraints, recog_data.constraints,
3783 noperands * sizeof (const char *));
3784 goto try_swapped;
3785 }
3786 else
3787 {
3788 recog_data.operand[commutative] = substed_operand[commutative];
3789 recog_data.operand[commutative + 1]
3790 = substed_operand[commutative + 1];
3791 /* Unswap the duplicates too. */
3792 for (i = 0; i < recog_data.n_dups; i++)
3793 if (recog_data.dup_num[i] == commutative
3794 || recog_data.dup_num[i] == commutative + 1)
3795 *recog_data.dup_loc[i]
3796 = recog_data.operand[(int) recog_data.dup_num[i]];
3797 }
3798 }
3799
3800 /* The operands don't meet the constraints.
3801 goal_alternative describes the alternative
3802 that we could reach by reloading the fewest operands.
3803 Reload so as to fit it. */
3804
3805 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3806 {
3807 /* No alternative works with reloads?? */
3808 if (insn_code_number >= 0)
3809 fatal_insn ("unable to generate reloads for:", insn);
3810 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3811 /* Avoid further trouble with this insn. */
3812 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3813 n_reloads = 0;
3814 return 0;
3815 }
3816
3817 /* Jump to `finish' from above if all operands are valid already.
3818 In that case, goal_alternative_win is all 1. */
3819 finish:
3820
3821 /* Right now, for any pair of operands I and J that are required to match,
3822 with I < J,
3823 goal_alternative_matches[J] is I.
3824 Set up goal_alternative_matched as the inverse function:
3825 goal_alternative_matched[I] = J. */
3826
3827 for (i = 0; i < noperands; i++)
3828 goal_alternative_matched[i] = -1;
3829
3830 for (i = 0; i < noperands; i++)
3831 if (! goal_alternative_win[i]
3832 && goal_alternative_matches[i] >= 0)
3833 goal_alternative_matched[goal_alternative_matches[i]] = i;
3834
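  /* From here on, operands that won by matching another operand are
     treated like outright winners.  */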
3835 for (i = 0; i < noperands; i++)
3836 goal_alternative_win[i] |= goal_alternative_match_win[i];
3837
3838 /* If the best alternative is with operands 1 and 2 swapped,
3839 consider them swapped before reporting the reloads. Update the
3840 operand numbers of any reloads already pushed. */
3841
3842 if (goal_alternative_swapped)
3843 {
3844 rtx tem;
3845
3846 tem = substed_operand[commutative];
3847 substed_operand[commutative] = substed_operand[commutative + 1];
3848 substed_operand[commutative + 1] = tem;
3849 tem = recog_data.operand[commutative];
3850 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3851 recog_data.operand[commutative + 1] = tem;
3852 tem = *recog_data.operand_loc[commutative];
3853 *recog_data.operand_loc[commutative]
3854 = *recog_data.operand_loc[commutative + 1];
3855 *recog_data.operand_loc[commutative + 1] = tem;
3856
3857 for (i = 0; i < n_reloads; i++)
3858 {
3859 if (rld[i].opnum == commutative)
3860 rld[i].opnum = commutative + 1;
3861 else if (rld[i].opnum == commutative + 1)
3862 rld[i].opnum = commutative;
3863 }
3864 }
3865
3866 for (i = 0; i < noperands; i++)
3867 {
3868 operand_reloadnum[i] = -1;
3869
3870 /* If this is an earlyclobber operand, we need to widen the scope.
3871 The reload must remain valid from the start of the insn being
3872 reloaded until after the operand is stored into its destination.
3873 We approximate this with RELOAD_OTHER even though we know that we
3874 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3875
3876 One special case that is worth checking is when we have an
3877 output that is earlyclobber but isn't used past the insn (typically
3878 a SCRATCH). In this case, we only need have the reload live
3879 through the insn itself, but not for any of our input or output
3880 reloads.
3881 But we must not accidentally narrow the scope of an existing
3882 RELOAD_OTHER reload - leave these alone.
3883
3884 In any case, anything needed to address this operand can remain
3885 	 categorized however it was before.  */
3886
3887 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3888 operand_type[i]
3889 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3890 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3891 }
3892
3893 /* Any constants that aren't allowed and can't be reloaded
3894 into registers are here changed into memory references. */
3895 for (i = 0; i < noperands; i++)
3896 if (! goal_alternative_win[i])
3897 {
3898 rtx op = recog_data.operand[i];
3899 rtx subreg = NULL_RTX;
3900 rtx plus = NULL_RTX;
3901 enum machine_mode mode = operand_mode[i];
3902
3903 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3904 push_reload so we have to let them pass here. */
3905 if (GET_CODE (op) == SUBREG)
3906 {
3907 subreg = op;
3908 op = SUBREG_REG (op);
3909 mode = GET_MODE (op);
3910 }
3911
3912 if (GET_CODE (op) == PLUS)
3913 {
3914 plus = op;
3915 op = XEXP (op, 1);
3916 }
3917
3918 if (CONST_POOL_OK_P (mode, op)
3919 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3920 == NO_REGS)
3921 || no_input_reloads))
3922 {
3923 int this_address_reloaded;
3924 rtx tem = force_const_mem (mode, op);
3925
3926 	      /* If we stripped a SUBREG or a PLUS above, add it back.  */
3927 if (plus != NULL_RTX)
3928 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3929
3930 if (subreg != NULL_RTX)
3931 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3932
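	      /* Rescan the new constant-pool reference so that its address
		 gets any reloads it needs; THIS_ADDRESS_RELOADED records
		 whether that happened.  */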
3933 this_address_reloaded = 0;
3934 substed_operand[i] = recog_data.operand[i]
3935 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3936 0, insn, &this_address_reloaded);
3937
3938 /* If the alternative accepts constant pool refs directly
3939 there will be no reload needed at all. */
3940 if (plus == NULL_RTX
3941 && subreg == NULL_RTX
3942 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3943 ? substed_operand[i]
3944 : NULL,
3945 recog_data.constraints[i],
3946 goal_alternative_number))
3947 goal_alternative_win[i] = 1;
3948 }
3949 }
3950
3951 /* Record the values of the earlyclobber operands for the caller. */
3952 if (goal_earlyclobber)
3953 for (i = 0; i < noperands; i++)
3954 if (goal_alternative_earlyclobber[i])
3955 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3956
3957 /* Now record reloads for all the operands that need them. */
3958 for (i = 0; i < noperands; i++)
3959 if (! goal_alternative_win[i])
3960 {
3961 /* Operands that match previous ones have already been handled. */
3962 if (goal_alternative_matches[i] >= 0)
3963 ;
3964 /* Handle an operand with a nonoffsettable address
3965 appearing where an offsettable address will do
3966 by reloading the address into a base register.
3967
3968 ??? We can also do this when the operand is a register and
3969 reg_equiv_mem is not offsettable, but this is a bit tricky,
3970 so we don't bother with it. It may not be worth doing. */
3971 else if (goal_alternative_matched[i] == -1
3972 && goal_alternative_offmemok[i]
3973 && MEM_P (recog_data.operand[i]))
3974 {
3975 /* If the address to be reloaded is a VOIDmode constant,
3976 	     use the default address mode as the mode of the reload register,
3977 as would have been done by find_reloads_address. */
3978 enum machine_mode address_mode;
3979 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3980 if (address_mode == VOIDmode)
3981 {
3982 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3983 address_mode = targetm.addr_space.address_mode (as);
3984 }
3985
3986 operand_reloadnum[i]
3987 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3988 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3989 base_reg_class (VOIDmode, MEM, SCRATCH),
3990 address_mode,
3991 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3992 rld[operand_reloadnum[i]].inc
3993 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3994
3995 /* If this operand is an output, we will have made any
3996 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3997 now we are treating part of the operand as an input, so
3998 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3999
4000 if (modified[i] == RELOAD_WRITE)
4001 {
4002 for (j = 0; j < n_reloads; j++)
4003 {
4004 if (rld[j].opnum == i)
4005 {
4006 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4007 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4008 else if (rld[j].when_needed
4009 == RELOAD_FOR_OUTADDR_ADDRESS)
4010 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4011 }
4012 }
4013 }
4014 }
4015 else if (goal_alternative_matched[i] == -1)
4016 {
4017 operand_reloadnum[i]
4018 = push_reload ((modified[i] != RELOAD_WRITE
4019 ? recog_data.operand[i] : 0),
4020 (modified[i] != RELOAD_READ
4021 ? recog_data.operand[i] : 0),
4022 (modified[i] != RELOAD_WRITE
4023 ? recog_data.operand_loc[i] : 0),
4024 (modified[i] != RELOAD_READ
4025 ? recog_data.operand_loc[i] : 0),
4026 (enum reg_class) goal_alternative[i],
4027 (modified[i] == RELOAD_WRITE
4028 ? VOIDmode : operand_mode[i]),
4029 (modified[i] == RELOAD_READ
4030 ? VOIDmode : operand_mode[i]),
4031 (insn_code_number < 0 ? 0
4032 : insn_data[insn_code_number].operand[i].strict_low),
4033 0, i, operand_type[i]);
4034 }
4035 /* In a matching pair of operands, one must be input only
4036 and the other must be output only.
4037 Pass the input operand as IN and the other as OUT. */
4038 else if (modified[i] == RELOAD_READ
4039 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4040 {
4041 operand_reloadnum[i]
4042 = push_reload (recog_data.operand[i],
4043 recog_data.operand[goal_alternative_matched[i]],
4044 recog_data.operand_loc[i],
4045 recog_data.operand_loc[goal_alternative_matched[i]],
4046 (enum reg_class) goal_alternative[i],
4047 operand_mode[i],
4048 operand_mode[goal_alternative_matched[i]],
4049 0, 0, i, RELOAD_OTHER);
4050 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4051 }
4052 else if (modified[i] == RELOAD_WRITE
4053 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4054 {
4055 operand_reloadnum[goal_alternative_matched[i]]
4056 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4057 recog_data.operand[i],
4058 recog_data.operand_loc[goal_alternative_matched[i]],
4059 recog_data.operand_loc[i],
4060 (enum reg_class) goal_alternative[i],
4061 operand_mode[goal_alternative_matched[i]],
4062 operand_mode[i],
4063 0, 0, i, RELOAD_OTHER);
4064 operand_reloadnum[i] = output_reloadnum;
4065 }
4066 else
4067 {
4068 gcc_assert (insn_code_number < 0);
4069 error_for_asm (insn, "inconsistent operand constraints "
4070 "in an %<asm%>");
4071 /* Avoid further trouble with this insn. */
4072 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4073 n_reloads = 0;
4074 return 0;
4075 }
4076 }
4077 else if (goal_alternative_matched[i] < 0
4078 && goal_alternative_matches[i] < 0
4079 && address_operand_reloaded[i] != 1
4080 && optimize)
4081 {
4082 /* For each non-matching operand that's a MEM or a pseudo-register
4083 that didn't get a hard register, make an optional reload.
4084 This may get done even if the insn needs no reloads otherwise. */
4085
4086 rtx operand = recog_data.operand[i];
4087
4088 while (GET_CODE (operand) == SUBREG)
4089 operand = SUBREG_REG (operand);
4090 if ((MEM_P (operand)
4091 || (REG_P (operand)
4092 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4093 /* If this is only for an output, the optional reload would not
4094 actually cause us to use a register now, just note that
4095 something is stored here. */
4096 && (goal_alternative[i] != NO_REGS
4097 || modified[i] == RELOAD_WRITE)
4098 && ! no_input_reloads
4099 		   /* An optional output reload might allow INSN to be deleted
4100 		      later.  We mustn't make in-out reloads on insns that are
4101 		      not permitted to have output reloads.
4102 If this is an asm, we can't delete it; we must not even call
4103 push_reload for an optional output reload in this case,
4104 because we can't be sure that the constraint allows a register,
4105 and push_reload verifies the constraints for asms. */
4106 && (modified[i] == RELOAD_READ
4107 || (! no_output_reloads && ! this_insn_is_asm)))
4108 operand_reloadnum[i]
4109 = push_reload ((modified[i] != RELOAD_WRITE
4110 ? recog_data.operand[i] : 0),
4111 (modified[i] != RELOAD_READ
4112 ? recog_data.operand[i] : 0),
4113 (modified[i] != RELOAD_WRITE
4114 ? recog_data.operand_loc[i] : 0),
4115 (modified[i] != RELOAD_READ
4116 ? recog_data.operand_loc[i] : 0),
4117 (enum reg_class) goal_alternative[i],
4118 (modified[i] == RELOAD_WRITE
4119 ? VOIDmode : operand_mode[i]),
4120 (modified[i] == RELOAD_READ
4121 ? VOIDmode : operand_mode[i]),
4122 (insn_code_number < 0 ? 0
4123 : insn_data[insn_code_number].operand[i].strict_low),
4124 1, i, operand_type[i]);
4125 /* If a memory reference remains (either as a MEM or a pseudo that
4126 did not get a hard register), yet we can't make an optional
4127 reload, check if this is actually a pseudo register reference;
4128 we then need to emit a USE and/or a CLOBBER so that reload
4129 inheritance will do the right thing. */
4130 else if (replace
4131 && (MEM_P (operand)
4132 || (REG_P (operand)
4133 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4134 && reg_renumber [REGNO (operand)] < 0)))
4135 {
4136 operand = *recog_data.operand_loc[i];
4137
4138 while (GET_CODE (operand) == SUBREG)
4139 operand = SUBREG_REG (operand);
4140 if (REG_P (operand))
4141 {
4142 if (modified[i] != RELOAD_WRITE)
4143 /* We mark the USE with QImode so that we recognize
4144 it as one that can be safely deleted at the end
4145 of reload. */
4146 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4147 insn), QImode);
4148 if (modified[i] != RELOAD_READ)
4149 emit_insn_after (gen_clobber (operand), insn);
4150 }
4151 }
4152 }
4153 else if (goal_alternative_matches[i] >= 0
4154 && goal_alternative_win[goal_alternative_matches[i]]
4155 && modified[i] == RELOAD_READ
4156 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4157 && ! no_input_reloads && ! no_output_reloads
4158 && optimize)
4159 {
4160 /* Similarly, make an optional reload for a pair of matching
4161 objects that are in MEM or a pseudo that didn't get a hard reg. */
4162
4163 rtx operand = recog_data.operand[i];
4164
4165 while (GET_CODE (operand) == SUBREG)
4166 operand = SUBREG_REG (operand);
4167 if ((MEM_P (operand)
4168 || (REG_P (operand)
4169 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4170 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4171 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4172 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4173 recog_data.operand[i],
4174 recog_data.operand_loc[goal_alternative_matches[i]],
4175 recog_data.operand_loc[i],
4176 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4177 operand_mode[goal_alternative_matches[i]],
4178 operand_mode[i],
4179 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4180 }
4181
4182 /* Perform whatever substitutions on the operands we are supposed
4183 to make due to commutativity or replacement of registers
4184 with equivalent constants or memory slots. */
4185
4186 for (i = 0; i < noperands; i++)
4187 {
4188 /* We only do this on the last pass through reload, because it is
4189 possible for some data (like reg_equiv_address) to be changed during
4190 later passes. Moreover, we lose the opportunity to get a useful
4191 reload_{in,out}_reg when we do these replacements. */
4192
4193 if (replace)
4194 {
4195 rtx substitution = substed_operand[i];
4196
4197 *recog_data.operand_loc[i] = substitution;
4198
4199 /* If we're replacing an operand with a LABEL_REF, we need to
4200 make sure that there's a REG_LABEL_OPERAND note attached to
4201 this instruction. */
4202 if (GET_CODE (substitution) == LABEL_REF
4203 && !find_reg_note (insn, REG_LABEL_OPERAND,
4204 XEXP (substitution, 0))
4205 /* For a JUMP_P, if it was a branch target it must have
4206 already been recorded as such. */
4207 && (!JUMP_P (insn)
4208 || !label_is_jump_target_p (XEXP (substitution, 0),
4209 insn)))
4210 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4211 }
4212 else
4213 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4214 }
4215
4216 /* If this insn pattern contains any MATCH_DUP's, make sure that
4217 they will be substituted if the operands they match are substituted.
4218 Also do now any substitutions we already did on the operands.
4219
4220 Don't do this if we aren't making replacements because we might be
4221 propagating things allocated by frame pointer elimination into places
4222 it doesn't expect. */
4223
4224 if (insn_code_number >= 0 && replace)
4225 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4226 {
4227 int opno = recog_data.dup_num[i];
4228 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4229 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4230 }
4231
4232 #if 0
4233 /* This loses because reloading of prior insns can invalidate the equivalence
4234 (or at least find_equiv_reg isn't smart enough to find it any more),
4235 causing this insn to need more reload regs than it needed before.
4236 It may be too late to make the reload regs available.
4237 Now this optimization is done safely in choose_reload_regs. */
4238
4239 /* For each reload of a reg into some other class of reg,
4240 search for an existing equivalent reg (same value now) in the right class.
4241 We can use it as long as we don't need to change its contents. */
4242 for (i = 0; i < n_reloads; i++)
4243 if (rld[i].reg_rtx == 0
4244 && rld[i].in != 0
4245 && REG_P (rld[i].in)
4246 && rld[i].out == 0)
4247 {
4248 rld[i].reg_rtx
4249 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4250 static_reload_reg_p, 0, rld[i].inmode);
4251 /* Prevent generation of insn to load the value
4252 because the one we found already has the value. */
4253 if (rld[i].reg_rtx)
4254 rld[i].in = rld[i].reg_rtx;
4255 }
4256 #endif
4257
4258 	/* If we detected an error and replaced the asm instruction with a USE,
4259 	   forget about the reloads.  */
4260 if (GET_CODE (PATTERN (insn)) == USE
4261 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4262 n_reloads = 0;
4263
4264 /* Perhaps an output reload can be combined with another
4265 to reduce needs by one. */
4266 if (!goal_earlyclobber)
4267 combine_reloads ();
4268
4269 /* If we have a pair of reloads for parts of an address, they are reloading
4270 the same object, the operands themselves were not reloaded, and they
4271 are for two operands that are supposed to match, merge the reloads and
4272 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4273
4274 for (i = 0; i < n_reloads; i++)
4275 {
4276 int k;
4277
4278 for (j = i + 1; j < n_reloads; j++)
4279 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4280 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4281 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4282 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4283 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4284 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4285 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4286 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4287 && rtx_equal_p (rld[i].in, rld[j].in)
4288 && (operand_reloadnum[rld[i].opnum] < 0
4289 || rld[operand_reloadnum[rld[i].opnum]].optional)
4290 && (operand_reloadnum[rld[j].opnum] < 0
4291 || rld[operand_reloadnum[rld[j].opnum]].optional)
4292 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4293 || (goal_alternative_matches[rld[j].opnum]
4294 == rld[i].opnum)))
4295 {
4296 for (k = 0; k < n_replacements; k++)
4297 if (replacements[k].what == j)
4298 replacements[k].what = i;
4299
4300 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4301 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4302 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4303 else
4304 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4305 rld[j].in = 0;
4306 }
4307 }
4308
4309 /* Scan all the reloads and update their type.
4310 If a reload is for the address of an operand and we didn't reload
4311 that operand, change the type. Similarly, change the operand number
4312 of a reload when two operands match. If a reload is optional, treat it
4313 as though the operand isn't reloaded.
4314
4315 ??? This latter case is somewhat odd because if we do the optional
4316 reload, it means the object is hanging around. Thus we need only
4317 do the address reload if the optional reload was NOT done.
4318
4319 Change secondary reloads to be the address type of their operand, not
4320 the normal type.
4321
4322 If an operand's reload is now RELOAD_OTHER, change any
4323 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4324 RELOAD_FOR_OTHER_ADDRESS. */
4325
4326 for (i = 0; i < n_reloads; i++)
4327 {
4328 if (rld[i].secondary_p
4329 && rld[i].when_needed == operand_type[rld[i].opnum])
4330 rld[i].when_needed = address_type[rld[i].opnum];
4331
4332 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4334 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4335 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4336 && (operand_reloadnum[rld[i].opnum] < 0
4337 || rld[operand_reloadnum[rld[i].opnum]].optional))
4338 {
4339 /* If we have a secondary reload to go along with this reload,
4340 change its type to RELOAD_FOR_OPADDR_ADDR. */
4341
4342 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4343 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4344 && rld[i].secondary_in_reload != -1)
4345 {
4346 int secondary_in_reload = rld[i].secondary_in_reload;
4347
4348 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4349
4350 /* If there's a tertiary reload we have to change it also. */
4351 if (secondary_in_reload > 0
4352 && rld[secondary_in_reload].secondary_in_reload != -1)
4353 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4354 = RELOAD_FOR_OPADDR_ADDR;
4355 }
4356
4357 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4358 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4359 && rld[i].secondary_out_reload != -1)
4360 {
4361 int secondary_out_reload = rld[i].secondary_out_reload;
4362
4363 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4364
4365 /* If there's a tertiary reload we have to change it also. */
4366 if (secondary_out_reload
4367 && rld[secondary_out_reload].secondary_out_reload != -1)
4368 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4369 = RELOAD_FOR_OPADDR_ADDR;
4370 }
4371
4372 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4373 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4374 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4375 else
4376 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4377 }
4378
4379 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4380 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4381 && operand_reloadnum[rld[i].opnum] >= 0
4382 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4383 == RELOAD_OTHER))
4384 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4385
4386 if (goal_alternative_matches[rld[i].opnum] >= 0)
4387 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4388 }
4389
4390 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4391 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4392 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4393
4394 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4395 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4396 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4397 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4398 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4399 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4400 This is complicated by the fact that a single operand can have more
4401 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4402 choose_reload_regs without affecting code quality, and cases that
4403 actually fail are extremely rare, so it turns out to be better to fix
4404 the problem here by not generating cases that choose_reload_regs will
4405 fail for. */
4406 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4407 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4408 a single operand.
4409 	 We can reduce register pressure by exploiting the fact that a
4410 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4411 does not conflict with any of them, if it is only used for the first of
4412 the RELOAD_FOR_X_ADDRESS reloads. */
4413 {
4414 int first_op_addr_num = -2;
4415 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4416 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4417 int need_change = 0;
4418 	 /* We use first_op_addr_num and the contents of the above arrays
4419 first as flags - -2 means no instance encountered, -1 means exactly
4420 one instance encountered.
4421 If more than one instance has been encountered, we store the reload
4422 number of the first reload of the kind in question; reload numbers
4423 are known to be non-negative. */
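    /* For example, the first RELOAD_FOR_OPERAND_ADDRESS seen bumps
       first_op_addr_num from -2 to -1; a second one bumps it to 0, at
       which point we record that reload's number and set need_change.
       Since the scan below runs from the last reload to the first, the
       number finally stored is that of the first reload of the kind.  */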
4424 for (i = 0; i < noperands; i++)
4425 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4426 for (i = n_reloads - 1; i >= 0; i--)
4427 {
4428 switch (rld[i].when_needed)
4429 {
4430 case RELOAD_FOR_OPERAND_ADDRESS:
4431 if (++first_op_addr_num >= 0)
4432 {
4433 first_op_addr_num = i;
4434 need_change = 1;
4435 }
4436 break;
4437 case RELOAD_FOR_INPUT_ADDRESS:
4438 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4439 {
4440 first_inpaddr_num[rld[i].opnum] = i;
4441 need_change = 1;
4442 }
4443 break;
4444 case RELOAD_FOR_OUTPUT_ADDRESS:
4445 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4446 {
4447 first_outpaddr_num[rld[i].opnum] = i;
4448 need_change = 1;
4449 }
4450 break;
4451 default:
4452 break;
4453 }
4454 }
4455
4456 if (need_change)
4457 {
4458 for (i = 0; i < n_reloads; i++)
4459 {
4460 int first_num;
4461 enum reload_type type;
4462
4463 switch (rld[i].when_needed)
4464 {
4465 case RELOAD_FOR_OPADDR_ADDR:
4466 first_num = first_op_addr_num;
4467 type = RELOAD_FOR_OPERAND_ADDRESS;
4468 break;
4469 case RELOAD_FOR_INPADDR_ADDRESS:
4470 first_num = first_inpaddr_num[rld[i].opnum];
4471 type = RELOAD_FOR_INPUT_ADDRESS;
4472 break;
4473 case RELOAD_FOR_OUTADDR_ADDRESS:
4474 first_num = first_outpaddr_num[rld[i].opnum];
4475 type = RELOAD_FOR_OUTPUT_ADDRESS;
4476 break;
4477 default:
4478 continue;
4479 }
4480 if (first_num < 0)
4481 continue;
4482 else if (i > first_num)
4483 rld[i].when_needed = type;
4484 else
4485 {
4486 /* Check if the only TYPE reload that uses reload I is
4487 reload FIRST_NUM. */
4488 for (j = n_reloads - 1; j > first_num; j--)
4489 {
4490 if (rld[j].when_needed == type
4491 && (rld[i].secondary_p
4492 ? rld[j].secondary_in_reload == i
4493 : reg_mentioned_p (rld[i].in, rld[j].in)))
4494 {
4495 rld[i].when_needed = type;
4496 break;
4497 }
4498 }
4499 }
4500 }
4501 }
4502 }
4503
4504 /* See if we have any reloads that are now allowed to be merged
4505 because we've changed when the reload is needed to
4506 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4507 check for the most common cases. */
4508
4509 for (i = 0; i < n_reloads; i++)
4510 if (rld[i].in != 0 && rld[i].out == 0
4511 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4512 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4513 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4514 for (j = 0; j < n_reloads; j++)
4515 if (i != j && rld[j].in != 0 && rld[j].out == 0
4516 && rld[j].when_needed == rld[i].when_needed
4517 && MATCHES (rld[i].in, rld[j].in)
4518 && rld[i].rclass == rld[j].rclass
4519 && !rld[i].nocombine && !rld[j].nocombine
4520 && rld[i].reg_rtx == rld[j].reg_rtx)
4521 {
4522 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4523 transfer_replacements (i, j);
4524 rld[j].in = 0;
4525 }
4526
4527 #ifdef HAVE_cc0
4528 /* If we made any reloads for addresses, see if they violate a
4529 "no input reloads" requirement for this insn. But loads that we
4530 do after the insn (such as for output addresses) are fine. */
4531 if (no_input_reloads)
4532 for (i = 0; i < n_reloads; i++)
4533 gcc_assert (rld[i].in == 0
4534 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4535 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4536 #endif
4537
4538 /* Compute reload_mode and reload_nregs. */
4539 for (i = 0; i < n_reloads; i++)
4540 {
4541 rld[i].mode
4542 = (rld[i].inmode == VOIDmode
4543 || (GET_MODE_SIZE (rld[i].outmode)
4544 > GET_MODE_SIZE (rld[i].inmode)))
4545 ? rld[i].outmode : rld[i].inmode;
4546
4547 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4548 }
4549
4550 	/* Special case a simple move with an input reload and a
4551 	   destination that is a hard reg: if the hard reg is ok, use it.  */
4552 for (i = 0; i < n_reloads; i++)
4553 if (rld[i].when_needed == RELOAD_FOR_INPUT
4554 && GET_CODE (PATTERN (insn)) == SET
4555 && REG_P (SET_DEST (PATTERN (insn)))
4556 && (SET_SRC (PATTERN (insn)) == rld[i].in
4557 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4558 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4559 {
4560 rtx dest = SET_DEST (PATTERN (insn));
4561 unsigned int regno = REGNO (dest);
4562
4563 if (regno < FIRST_PSEUDO_REGISTER
4564 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4565 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4566 {
4567 int nr = hard_regno_nregs[regno][rld[i].mode];
4568 int ok = 1, nri;
4569
4570 for (nri = 1; nri < nr; nri ++)
4571 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4572 ok = 0;
4573
4574 if (ok)
4575 rld[i].reg_rtx = dest;
4576 }
4577 }
4578
4579 return retval;
4580 }
4581
4582 /* Return true if alternative number ALTNUM in constraint-string
4583 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4584 MEM gives the reference if it didn't need any reloads, otherwise it
4585 is null. */
4586
4587 static bool
4588 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4589 const char *constraint, int altnum)
4590 {
4591 int c;
4592
4593 /* Skip alternatives before the one requested. */
4594 while (altnum > 0)
4595 {
4596 while (*constraint++ != ',');
4597 altnum--;
4598 }
4599 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4600 If one of them is present, this alternative accepts the result of
4601 passing a constant-pool reference through find_reloads_toplev.
4602
4603 The same is true of extra memory constraints if the address
4604 was reloaded into a register. However, the target may elect
4605 to disallow the original constant address, forcing it to be
4606 reloaded into a register instead. */
4607 for (; (c = *constraint) && c != ',' && c != '#';
4608 constraint += CONSTRAINT_LEN (c, constraint))
4609 {
4610 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4611 return true;
4612 #ifdef EXTRA_CONSTRAINT_STR
4613 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4614 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4615 return true;
4616 #endif
4617 }
4618 return false;
4619 }
4620 \f
4621 /* Scan X for memory references and scan the addresses for reloading.
4622 Also checks for references to "constant" regs that we want to eliminate
4623 and replaces them with the values they stand for.
4624 We may alter X destructively if it contains a reference to such.
4625 If X is just a constant reg, we return the equivalent value
4626 instead of X.
4627
4628 IND_LEVELS says how many levels of indirect addressing this machine
4629 supports.
4630
4631 OPNUM and TYPE identify the purpose of the reload.
4632
4633 IS_SET_DEST is true if X is the destination of a SET, which is not
4634 appropriate to be replaced by a constant.
4635
4636 INSN, if nonzero, is the insn in which we do the reload. It is used
4637 to determine if we may generate output reloads, and where to put USEs
4638 for pseudos that we have to replace with stack slots.
4639
4640 	 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4641 	 result of find_reloads_address.  */
4642
4643 static rtx
4644 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4645 int ind_levels, int is_set_dest, rtx insn,
4646 int *address_reloaded)
4647 {
4648 RTX_CODE code = GET_CODE (x);
4649
4650 const char *fmt = GET_RTX_FORMAT (code);
4651 int i;
4652 int copied;
4653
4654 if (code == REG)
4655 {
4656 /* This code is duplicated for speed in find_reloads. */
4657 int regno = REGNO (x);
4658 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4659 x = reg_equiv_constant (regno);
4660 #if 0
4661 /* This creates (subreg (mem...)) which would cause an unnecessary
4662 reload of the mem. */
4663 else if (reg_equiv_mem (regno) != 0)
4664 x = reg_equiv_mem (regno);
4665 #endif
4666 else if (reg_equiv_memory_loc (regno)
4667 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4668 {
4669 rtx mem = make_memloc (x, regno);
4670 if (reg_equiv_address (regno)
4671 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4672 {
4673 /* If this is not a toplevel operand, find_reloads doesn't see
4674 this substitution. We have to emit a USE of the pseudo so
4675 that delete_output_reload can see it. */
4676 if (replace_reloads && recog_data.operand[opnum] != x)
4677 /* We mark the USE with QImode so that we recognize it
4678 as one that can be safely deleted at the end of
4679 reload. */
4680 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4681 QImode);
4682 x = mem;
4683 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4684 opnum, type, ind_levels, insn);
4685 if (!rtx_equal_p (x, mem))
4686 push_reg_equiv_alt_mem (regno, x);
4687 if (address_reloaded)
4688 *address_reloaded = i;
4689 }
4690 }
4691 return x;
4692 }
4693 if (code == MEM)
4694 {
4695 rtx tem = x;
4696
4697 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4698 opnum, type, ind_levels, insn);
4699 if (address_reloaded)
4700 *address_reloaded = i;
4701
4702 return tem;
4703 }
4704
4705 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4706 {
4707 /* Check for SUBREG containing a REG that's equivalent to a
4708 constant. If the constant has a known value, truncate it
4709 	 right now.  Similarly if we are extracting a single word of a
4710 multi-word constant. If the constant is symbolic, allow it
4711 to be substituted normally. push_reload will strip the
4712 subreg later. The constant must not be VOIDmode, because we
4713 will lose the mode of the register (this should never happen
4714 because one of the cases above should handle it). */
4715
4716 int regno = REGNO (SUBREG_REG (x));
4717 rtx tem;
4718
4719 if (regno >= FIRST_PSEUDO_REGISTER
4720 && reg_renumber[regno] < 0
4721 && reg_equiv_constant (regno) != 0)
4722 {
4723 tem =
4724 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4725 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4726 gcc_assert (tem);
4727 if (CONSTANT_P (tem)
4728 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4729 {
4730 tem = force_const_mem (GET_MODE (x), tem);
4731 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4732 &XEXP (tem, 0), opnum, type,
4733 ind_levels, insn);
4734 if (address_reloaded)
4735 *address_reloaded = i;
4736 }
4737 return tem;
4738 }
4739
4740 /* If the subreg contains a reg that will be converted to a mem,
4741 convert the subreg to a narrower memref now.
4742 Otherwise, we would get (subreg (mem ...) ...),
4743 which would force reload of the mem.
4744
4745 We also need to do this if there is an equivalent MEM that is
4746 not offsettable. In that case, alter_subreg would produce an
4747 invalid address on big-endian machines.
4748
4749 For machines that extend byte loads, we must not reload using
4750 a wider mode if we have a paradoxical SUBREG. find_reloads will
4751 force a reload in that case. So we should not do anything here. */
4752
4753 if (regno >= FIRST_PSEUDO_REGISTER
4754 #ifdef LOAD_EXTEND_OP
4755 && (GET_MODE_SIZE (GET_MODE (x))
4756 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4757 #endif
4758 && (reg_equiv_address (regno) != 0
4759 || (reg_equiv_mem (regno) != 0
4760 && (! strict_memory_address_addr_space_p
4761 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4762 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4763 || ! offsettable_memref_p (reg_equiv_mem (regno))
4764 || num_not_at_initial_offset))))
4765 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4766 insn, address_reloaded);
4767 }
4768
4769 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4770 {
4771 if (fmt[i] == 'e')
4772 {
4773 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4774 ind_levels, is_set_dest, insn,
4775 address_reloaded);
4776 /* If we have replaced a reg with its equivalent memory loc -
4777 that can still be handled here e.g. if it's in a paradoxical
4778 subreg - we must make the change in a copy, rather than using
4779 a destructive change. This way, find_reloads can still elect
4780 not to do the change. */
4781 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4782 {
4783 x = shallow_copy_rtx (x);
4784 copied = 1;
4785 }
4786 XEXP (x, i) = new_part;
4787 }
4788 }
4789 return x;
4790 }
4791
4792 /* Return a mem ref for the memory equivalent of reg REGNO.
4793 This mem ref is not shared with anything. */
4794
4795 static rtx
4796 make_memloc (rtx ad, int regno)
4797 {
4798 /* We must rerun eliminate_regs, in case the elimination
4799 offsets have changed. */
4800 rtx tem
4801 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4802 0);
4803
4804 /* If TEM might contain a pseudo, we must copy it to avoid
4805 modifying it when we do the substitution for the reload. */
4806 if (rtx_varies_p (tem, 0))
4807 tem = copy_rtx (tem);
4808
4809 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4810 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4811
4812 /* Copy the result if it's still the same as the equivalence, to avoid
4813 modifying it when we do the substitution for the reload. */
4814 if (tem == reg_equiv_memory_loc (regno))
4815 tem = copy_rtx (tem);
4816 return tem;
4817 }
4818
4819 /* Returns true if AD could be turned into a valid memory reference
4820 to mode MODE in address space AS by reloading the part pointed to
4821 by PART into a register. */
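/* For instance (operands purely illustrative): with
     AD = (plus:SI (reg:SI 65) (const_int 100000))
   and PART = &XEXP (ad, 0), this asks whether the address would be
   valid for a MODE memory reference if the base term were a freshly
   reloaded register, i.e. whether (plus:SI (reg:SI NEW) (const_int
   100000)) is acceptable, where NEW stands for the next unused pseudo.
   The original contents of *PART are restored before returning.  */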
4822
4823 static int
4824 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4825 addr_space_t as, rtx *part)
4826 {
4827 int retv;
4828 rtx tem = *part;
4829 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4830
4831 *part = reg;
4832 retv = memory_address_addr_space_p (mode, ad, as);
4833 *part = tem;
4834
4835 return retv;
4836 }
4837
4838 /* Record all reloads needed for handling memory address AD
4839 which appears in *LOC in a memory reference to mode MODE
4840 which itself is found in location *MEMREFLOC.
4841 Note that we take shortcuts assuming that no multi-reg machine mode
4842 occurs as part of an address.
4843
4844 OPNUM and TYPE specify the purpose of this reload.
4845
4846 IND_LEVELS says how many levels of indirect addressing this machine
4847 supports.
4848
4849 INSN, if nonzero, is the insn in which we do the reload. It is used
4850 to determine if we may generate output reloads, and where to put USEs
4851 for pseudos that we have to replace with stack slots.
4852
4853 Value is one if this address is reloaded or replaced as a whole; it is
4854 zero if the top level of this address was not reloaded or replaced, and
4855 it is -1 if it may or may not have been reloaded or replaced.
4856
4857 Note that there is no verification that the address will be valid after
4858 this routine does its work. Instead, we rely on the fact that the address
4859 was valid when reload started. So we need only undo things that reload
4860 could have broken. These are wrong register types, pseudos not allocated
4861 to a hard register, and frame pointer elimination. */
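/* A sketch of the common outcomes (register numbers invented): if AD is
   (reg:SI 70) and pseudo 70 is equivalent to a constant, the constant
   is reloaded into a base register and 1 is returned; if AD is a hard
   register that is valid as a base register and not clobbered by this
   insn, nothing is done and 0 is returned; an address rewritten by
   LEGITIMIZE_RELOAD_ADDRESS yields -1.  */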
4862
4863 static int
4864 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4865 rtx *loc, int opnum, enum reload_type type,
4866 int ind_levels, rtx insn)
4867 {
4868 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4869 : ADDR_SPACE_GENERIC;
4870 int regno;
4871 int removed_and = 0;
4872 int op_index;
4873 rtx tem;
4874
4875 /* If the address is a register, see if it is a legitimate address and
4876 reload if not. We first handle the cases where we need not reload
4877 or where we must reload in a non-standard way. */
4878
4879 if (REG_P (ad))
4880 {
4881 regno = REGNO (ad);
4882
4883 if (reg_equiv_constant (regno) != 0)
4884 {
4885 find_reloads_address_part (reg_equiv_constant (regno), loc,
4886 base_reg_class (mode, MEM, SCRATCH),
4887 GET_MODE (ad), opnum, type, ind_levels);
4888 return 1;
4889 }
4890
4891 tem = reg_equiv_memory_loc (regno);
4892 if (tem != 0)
4893 {
4894 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4895 {
4896 tem = make_memloc (ad, regno);
4897 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4898 XEXP (tem, 0),
4899 MEM_ADDR_SPACE (tem)))
4900 {
4901 rtx orig = tem;
4902
4903 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4904 &XEXP (tem, 0), opnum,
4905 ADDR_TYPE (type), ind_levels, insn);
4906 if (!rtx_equal_p (tem, orig))
4907 push_reg_equiv_alt_mem (regno, tem);
4908 }
4909 /* We can avoid a reload if the register's equivalent memory
4910 expression is valid as an indirect memory address.
4911 But not all addresses are valid in a mem used as an indirect
4912 address: only reg or reg+constant. */
4913
4914 if (ind_levels > 0
4915 && strict_memory_address_addr_space_p (mode, tem, as)
4916 && (REG_P (XEXP (tem, 0))
4917 || (GET_CODE (XEXP (tem, 0)) == PLUS
4918 && REG_P (XEXP (XEXP (tem, 0), 0))
4919 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4920 {
4921 /* If TEM is not the same as what we'll be replacing the
4922 pseudo with after reload, put a USE in front of INSN
4923 in the final reload pass. */
4924 if (replace_reloads
4925 && num_not_at_initial_offset
4926 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4927 {
4928 *loc = tem;
4929 /* We mark the USE with QImode so that we
4930 recognize it as one that can be safely
4931 deleted at the end of reload. */
4932 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4933 insn), QImode);
4934
4935 /* This doesn't really count as replacing the address
4936 as a whole, since it is still a memory access. */
4937 }
4938 return 0;
4939 }
4940 ad = tem;
4941 }
4942 }
4943
4944 /* The only remaining case where we can avoid a reload is if this is a
4945 hard register that is valid as a base register and which is not the
4946 subject of a CLOBBER in this insn. */
4947
4948 else if (regno < FIRST_PSEUDO_REGISTER
4949 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4950 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4951 return 0;
4952
4953 /* If we do not have one of the cases above, we must do the reload. */
4954 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4955 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4956 return 1;
4957 }
4958
4959 if (strict_memory_address_addr_space_p (mode, ad, as))
4960 {
4961 /* The address appears valid, so reloads are not needed.
4962 But the address may contain an eliminable register.
4963 This can happen because a machine with indirect addressing
4964 may consider a pseudo register by itself a valid address even when
4965 it has failed to get a hard reg.
4966 So do a tree-walk to find and eliminate all such regs. */
4967
4968 /* But first quickly dispose of a common case. */
4969 if (GET_CODE (ad) == PLUS
4970 && CONST_INT_P (XEXP (ad, 1))
4971 && REG_P (XEXP (ad, 0))
4972 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4973 return 0;
4974
4975 subst_reg_equivs_changed = 0;
4976 *loc = subst_reg_equivs (ad, insn);
4977
4978 if (! subst_reg_equivs_changed)
4979 return 0;
4980
4981 /* Check result for validity after substitution. */
4982 if (strict_memory_address_addr_space_p (mode, ad, as))
4983 return 0;
4984 }
4985
4986 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4987 do
4988 {
4989 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4990 {
4991 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4992 ind_levels, win);
4993 }
4994 break;
4995 win:
4996 *memrefloc = copy_rtx (*memrefloc);
4997 XEXP (*memrefloc, 0) = ad;
4998 move_replacements (&ad, &XEXP (*memrefloc, 0));
4999 return -1;
5000 }
5001 while (0);
5002 #endif
5003
5004 /* The address is not valid. We have to figure out why. First see if
5005 we have an outer AND and remove it if so. Then analyze what's inside. */
5006
5007 if (GET_CODE (ad) == AND)
5008 {
5009 removed_and = 1;
5010 loc = &XEXP (ad, 0);
5011 ad = *loc;
5012 }
5013
5014 /* One possibility for why the address is invalid is that it is itself
5015 a MEM. This can happen when the frame pointer is being eliminated, a
5016 pseudo is not allocated to a hard register, and the offset between the
5017 frame and stack pointers is not its initial value. In that case the
5018 pseudo will have been replaced by a MEM referring to the
5019 stack pointer. */
5020 if (MEM_P (ad))
5021 {
5022 /* First ensure that the address in this MEM is valid. Then, unless
5023 indirect addresses are valid, reload the MEM into a register. */
5024 tem = ad;
5025 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5026 opnum, ADDR_TYPE (type),
5027 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5028
5029 /* If tem was changed, then we must create a new memory reference to
5030 hold it and store it back into memrefloc. */
5031 if (tem != ad && memrefloc)
5032 {
5033 *memrefloc = copy_rtx (*memrefloc);
5034 copy_replacements (tem, XEXP (*memrefloc, 0));
5035 loc = &XEXP (*memrefloc, 0);
5036 if (removed_and)
5037 loc = &XEXP (*loc, 0);
5038 }
5039
5040 /* Check similar cases as for indirect addresses as above except
5041 that we can allow pseudos and a MEM since they should have been
5042 taken care of above. */
5043
5044 if (ind_levels == 0
5045 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5046 || MEM_P (XEXP (tem, 0))
5047 || ! (REG_P (XEXP (tem, 0))
5048 || (GET_CODE (XEXP (tem, 0)) == PLUS
5049 && REG_P (XEXP (XEXP (tem, 0), 0))
5050 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5051 {
5052 /* Must use TEM here, not AD, since it is the one that will
5053 have any subexpressions reloaded, if needed. */
5054 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5055 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5056 VOIDmode, 0,
5057 0, opnum, type);
5058 return ! removed_and;
5059 }
5060 else
5061 return 0;
5062 }
5063
5064 /* If we have the address of a stack slot but it's not valid because the
5065 displacement is too large, compute the sum in a register.
5066 Handle all base registers here, not just fp/ap/sp, because on some
5067 targets (namely SH) we can also get too large displacements from
5068 big-endian corrections. */
5069 else if (GET_CODE (ad) == PLUS
5070 && REG_P (XEXP (ad, 0))
5071 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5072 && CONST_INT_P (XEXP (ad, 1))
5073 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5074 CONST_INT)
5075 /* Similarly, if we were to reload the base register and the
5076 mem+offset address is still invalid, then we want to reload
5077 the whole address, not just the base register. */
5078 || ! maybe_memory_address_addr_space_p
5079 (mode, ad, as, &(XEXP (ad, 0)))))
5080
5081 {
5082 /* Unshare the MEM rtx so we can safely alter it. */
5083 if (memrefloc)
5084 {
5085 *memrefloc = copy_rtx (*memrefloc);
5086 loc = &XEXP (*memrefloc, 0);
5087 if (removed_and)
5088 loc = &XEXP (*loc, 0);
5089 }
5090
5091 if (double_reg_address_ok
5092 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode,
5093 PLUS, CONST_INT))
5094 {
5095 /* Unshare the sum as well. */
5096 *loc = ad = copy_rtx (ad);
5097
5098 /* Reload the displacement into an index reg.
5099 We assume the frame pointer or arg pointer is a base reg. */
5100 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5101 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5102 type, ind_levels);
5103 return 0;
5104 }
5105 else
5106 {
5107 /* If the sum of two regs is not necessarily valid,
5108 reload the sum into a base reg.
5109 That will at least work. */
5110 find_reloads_address_part (ad, loc,
5111 base_reg_class (mode, MEM, SCRATCH),
5112 GET_MODE (ad), opnum, type, ind_levels);
5113 }
5114 return ! removed_and;
5115 }
5116
5117 /* If we have an indexed stack slot, there are three possible reasons why
5118 it might be invalid: The index might need to be reloaded, the address
5119 might have been made by frame pointer elimination and hence have a
5120 constant out of range, or both reasons might apply.
5121
5122 We can easily check for an index needing reload, but even if that is the
5123 case, we might also have an invalid constant. To avoid making the
5124 conservative assumption and requiring two reloads, we see if this address
5125 is valid when not interpreted strictly. If it is, the only problem is
5126 that the index needs a reload and find_reloads_address_1 will take care
5127 of it.
5128
5129 Handle all base registers here, not just fp/ap/sp, because on some
5130 targets (namely SPARC) we can also get invalid addresses from preventive
5131 subreg big-endian corrections made by find_reloads_toplev. We
5132 can also get expressions involving LO_SUM (rather than PLUS) from
5133 find_reloads_subreg_address.
5134
5135 If we decide to do something, it must be that `double_reg_address_ok'
5136 is true. We generate a reload of the base register + constant and
5137 rework the sum so that the reload register will be added to the index.
5138 This is safe because we know the address isn't shared.
5139
5140 We check for the base register as both the first and second operand of
5141 the innermost PLUS and/or LO_SUM. */
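/* For example (illustrative only, FP standing for the frame pointer):
   with AD = (plus:SI (plus:SI (reg:SI FP) (reg:SI 3)) (const_int 4096))
   and a displacement of 4096 being out of range, we reload
   (plus:SI (reg:SI FP) (const_int 4096)) into a base register R and
   rewrite the address so that R is added to the index register 3.  */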
5142
5143 for (op_index = 0; op_index < 2; ++op_index)
5144 {
5145 rtx operand, addend;
5146 enum rtx_code inner_code;
5147
5148 if (GET_CODE (ad) != PLUS)
5149 continue;
5150
5151 inner_code = GET_CODE (XEXP (ad, 0));
5152 if (!(GET_CODE (ad) == PLUS
5153 && CONST_INT_P (XEXP (ad, 1))
5154 && (inner_code == PLUS || inner_code == LO_SUM)))
5155 continue;
5156
5157 operand = XEXP (XEXP (ad, 0), op_index);
5158 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5159 continue;
5160
5161 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5162
5163 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5164 GET_CODE (addend))
5165 || operand == frame_pointer_rtx
5166 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5167 || operand == hard_frame_pointer_rtx
5168 #endif
5169 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5170 || operand == arg_pointer_rtx
5171 #endif
5172 || operand == stack_pointer_rtx)
5173 && ! maybe_memory_address_addr_space_p
5174 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5175 {
5176 rtx offset_reg;
5177 enum reg_class cls;
5178
5179 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5180
5181 /* Form the adjusted address. */
5182 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5183 ad = gen_rtx_PLUS (GET_MODE (ad),
5184 op_index == 0 ? offset_reg : addend,
5185 op_index == 0 ? addend : offset_reg);
5186 else
5187 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5188 op_index == 0 ? offset_reg : addend,
5189 op_index == 0 ? addend : offset_reg);
5190 *loc = ad;
5191
5192 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5193 find_reloads_address_part (XEXP (ad, op_index),
5194 &XEXP (ad, op_index), cls,
5195 GET_MODE (ad), opnum, type, ind_levels);
5196 find_reloads_address_1 (mode,
5197 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5198 GET_CODE (XEXP (ad, op_index)),
5199 &XEXP (ad, 1 - op_index), opnum,
5200 type, 0, insn);
5201
5202 return 0;
5203 }
5204 }
5205
5206 /* See if address becomes valid when an eliminable register
5207 in a sum is replaced. */
5208
5209 tem = ad;
5210 if (GET_CODE (ad) == PLUS)
5211 tem = subst_indexed_address (ad);
5212 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5213 {
5214 /* Ok, we win that way. Replace any additional eliminable
5215 registers. */
5216
5217 subst_reg_equivs_changed = 0;
5218 tem = subst_reg_equivs (tem, insn);
5219
5220 /* Make sure that didn't make the address invalid again. */
5221
5222 if (! subst_reg_equivs_changed
5223 || strict_memory_address_addr_space_p (mode, tem, as))
5224 {
5225 *loc = tem;
5226 return 0;
5227 }
5228 }
5229
5230 /* If constants aren't valid addresses, reload the constant address
5231 into a register. */
5232 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5233 {
5234 enum machine_mode address_mode = GET_MODE (ad);
5235 if (address_mode == VOIDmode)
5236 address_mode = targetm.addr_space.address_mode (as);
5237
5238 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5239 Unshare it so we can safely alter it. */
5240 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5241 && CONSTANT_POOL_ADDRESS_P (ad))
5242 {
5243 *memrefloc = copy_rtx (*memrefloc);
5244 loc = &XEXP (*memrefloc, 0);
5245 if (removed_and)
5246 loc = &XEXP (*loc, 0);
5247 }
5248
5249 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5250 address_mode, opnum, type, ind_levels);
5251 return ! removed_and;
5252 }
5253
5254 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5255 ind_levels, insn);
5256 }
5257 \f
5258 /* Find all pseudo regs appearing in AD
5259 that are eliminable in favor of equivalent values
5260 and do not have hard regs; replace them by their equivalents.
5261 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5262 front of it for pseudos that we have to replace with stack slots. */
5263
5264 static rtx
5265 subst_reg_equivs (rtx ad, rtx insn)
5266 {
5267 RTX_CODE code = GET_CODE (ad);
5268 int i;
5269 const char *fmt;
5270
5271 switch (code)
5272 {
5273 case HIGH:
5274 case CONST_INT:
5275 case CONST:
5276 case CONST_DOUBLE:
5277 case CONST_FIXED:
5278 case CONST_VECTOR:
5279 case SYMBOL_REF:
5280 case LABEL_REF:
5281 case PC:
5282 case CC0:
5283 return ad;
5284
5285 case REG:
5286 {
5287 int regno = REGNO (ad);
5288
5289 if (reg_equiv_constant (regno) != 0)
5290 {
5291 subst_reg_equivs_changed = 1;
5292 return reg_equiv_constant (regno);
5293 }
5294 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5295 {
5296 rtx mem = make_memloc (ad, regno);
5297 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5298 {
5299 subst_reg_equivs_changed = 1;
5300 /* We mark the USE with QImode so that we recognize it
5301 as one that can be safely deleted at the end of
5302 reload. */
5303 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5304 QImode);
5305 return mem;
5306 }
5307 }
5308 }
5309 return ad;
5310
5311 case PLUS:
5312 /* Quickly dispose of a common case. */
5313 if (XEXP (ad, 0) == frame_pointer_rtx
5314 && CONST_INT_P (XEXP (ad, 1)))
5315 return ad;
5316 break;
5317
5318 default:
5319 break;
5320 }
5321
5322 fmt = GET_RTX_FORMAT (code);
5323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5324 if (fmt[i] == 'e')
5325 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5326 return ad;
5327 }
5328 \f
5329 /* Compute the sum of X and Y, making canonicalizations assumed in an
5330 address, namely: sum constant integers, surround the sum of two
5331 constants with a CONST, put the constant as the second operand, and
5332 group the constant on the outermost sum.
5333
5334 This routine assumes both inputs are already in canonical form. */
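/* Two illustrative cases (assuming Pmode is SImode):
     form_sum (SImode, (plus:SI (reg:SI 65) (const_int 4)), (const_int 8))
       => (plus:SI (reg:SI 65) (const_int 12))
     form_sum (SImode, (symbol_ref "x"), (const_int 4))
       => (const:SI (plus:SI (symbol_ref "x") (const_int 4)))  */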
5335
5336 rtx
5337 form_sum (enum machine_mode mode, rtx x, rtx y)
5338 {
5339 rtx tem;
5340
5341 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5342 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5343
5344 if (CONST_INT_P (x))
5345 return plus_constant (y, INTVAL (x));
5346 else if (CONST_INT_P (y))
5347 return plus_constant (x, INTVAL (y));
5348 else if (CONSTANT_P (x))
5349 tem = x, x = y, y = tem;
5350
5351 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5352 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5353
5354 /* Note that if the operands of Y are specified in the opposite
5355 order in the recursive calls below, infinite recursion will occur. */
5356 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5357 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5358
5359 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5360 constant will have been placed second. */
5361 if (CONSTANT_P (x) && CONSTANT_P (y))
5362 {
5363 if (GET_CODE (x) == CONST)
5364 x = XEXP (x, 0);
5365 if (GET_CODE (y) == CONST)
5366 y = XEXP (y, 0);
5367
5368 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5369 }
5370
5371 return gen_rtx_PLUS (mode, x, y);
5372 }
5373 \f
5374 /* If ADDR is a sum containing a pseudo register that should be
5375 replaced with a constant (from reg_equiv_constant),
5376 return the result of doing so, and also apply the associative
5377 law so that the result is more likely to be a valid address.
5378 (But it is not guaranteed to be one.)
5379
5380 Note that at most one register is replaced, even if more are
5381 replaceable. Also, we try to put the result into a canonical form
5382 so it is more likely to be a valid address.
5383
5384 In all other cases, return ADDR. */
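/* For illustration (pseudo 66 is hypothetical): if pseudo 66 got no
   hard register and reg_equiv_constant (66) is (const_int 16), then
     ADDR = (plus:SI (plus:SI (reg:SI 65) (reg:SI 66)) (const_int 4))
   becomes (plus:SI (reg:SI 65) (const_int 20)) after the replacement
   and the re-association performed by form_sum.  */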
5385
5386 static rtx
5387 subst_indexed_address (rtx addr)
5388 {
5389 rtx op0 = 0, op1 = 0, op2 = 0;
5390 rtx tem;
5391 int regno;
5392
5393 if (GET_CODE (addr) == PLUS)
5394 {
5395 /* Try to find a register to replace. */
5396 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5397 if (REG_P (op0)
5398 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5399 && reg_renumber[regno] < 0
5400 && reg_equiv_constant (regno) != 0)
5401 op0 = reg_equiv_constant (regno);
5402 else if (REG_P (op1)
5403 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5404 && reg_renumber[regno] < 0
5405 && reg_equiv_constant (regno) != 0)
5406 op1 = reg_equiv_constant (regno);
5407 else if (GET_CODE (op0) == PLUS
5408 && (tem = subst_indexed_address (op0)) != op0)
5409 op0 = tem;
5410 else if (GET_CODE (op1) == PLUS
5411 && (tem = subst_indexed_address (op1)) != op1)
5412 op1 = tem;
5413 else
5414 return addr;
5415
5416 /* Pick out up to three things to add. */
5417 if (GET_CODE (op1) == PLUS)
5418 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5419 else if (GET_CODE (op0) == PLUS)
5420 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5421
5422 /* Compute the sum. */
5423 if (op2 != 0)
5424 op1 = form_sum (GET_MODE (addr), op1, op2);
5425 if (op1 != 0)
5426 op0 = form_sum (GET_MODE (addr), op0, op1);
5427
5428 return op0;
5429 }
5430 return addr;
5431 }
5432 \f
5433 /* Update the REG_INC notes for an insn. It updates all REG_INC
5434 notes for the instruction which refer to REGNO so that they refer
5435 to the reload number.
5436
5437 INSN is the insn for which any REG_INC notes need updating.
5438
5439 REGNO is the register number which has been reloaded.
5440
5441 RELOADNUM is the reload number. */
5442
5443 static void
5444 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5445 int reloadnum ATTRIBUTE_UNUSED)
5446 {
5447 #ifdef AUTO_INC_DEC
5448 rtx link;
5449
5450 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5451 if (REG_NOTE_KIND (link) == REG_INC
5452 && (int) REGNO (XEXP (link, 0)) == regno)
5453 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5454 #endif
5455 }
5456 \f
5457 /* Record the pseudo registers we must reload into hard registers in a
5458 subexpression of a would-be memory address, X referring to a value
5459 in mode MODE. (This function is not called if the address we find
5460 is strictly valid.)
5461
5462 CONTEXT = 1 means we are considering regs as index regs,
5463 = 0 means we are considering them as base regs.
5464 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5465 or an autoinc code.
5466 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5467 is the code of the index part of the address. Otherwise, pass SCRATCH
5468 for this argument.
5469 OPNUM and TYPE specify the purpose of any reloads made.
5470
5471 IND_LEVELS says how many levels of indirect addressing are
5472 supported at this point in the address.
5473
5474 INSN, if nonzero, is the insn in which we do the reload. It is used
5475 to determine if we may generate output reloads.
5476
5477 We return nonzero if X, as a whole, is reloaded or replaced. */
5478
5479 /* Note that we take shortcuts assuming that no multi-reg machine mode
5480 occurs as part of an address.
5481 Also, this is not fully machine-customizable; it works for machines
5482 such as VAXen and 68000's and 32000's, but other possible machines
5483 could have addressing modes that this does not handle right.
5484 If you add push_reload calls here, you need to make sure gen_reload
5485 handles those cases gracefully. */
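/* A small illustration (register numbers invented): for an address such
   as (plus:SI (reg:SI 65) (reg:SI 66)) one register must be acceptable
   as a base (regno_ok_for_base_p) and the other as an index
   (REGNO_OK_FOR_INDEX_P); the PLUS case below tries both assignments
   and reloads whichever operand fails its role, using CONTEXT to pick
   the reload register class.  */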
5486
5487 static int
5488 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5489 enum rtx_code outer_code, enum rtx_code index_code,
5490 rtx *loc, int opnum, enum reload_type type,
5491 int ind_levels, rtx insn)
5492 {
5493 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5494 ((CONTEXT) == 0 \
5495 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5496 : REGNO_OK_FOR_INDEX_P (REGNO))
5497
5498 enum reg_class context_reg_class;
5499 RTX_CODE code = GET_CODE (x);
5500
5501 if (context == 1)
5502 context_reg_class = INDEX_REG_CLASS;
5503 else
5504 context_reg_class = base_reg_class (mode, outer_code, index_code);
5505
5506 switch (code)
5507 {
5508 case PLUS:
5509 {
5510 rtx orig_op0 = XEXP (x, 0);
5511 rtx orig_op1 = XEXP (x, 1);
5512 RTX_CODE code0 = GET_CODE (orig_op0);
5513 RTX_CODE code1 = GET_CODE (orig_op1);
5514 rtx op0 = orig_op0;
5515 rtx op1 = orig_op1;
5516
5517 if (GET_CODE (op0) == SUBREG)
5518 {
5519 op0 = SUBREG_REG (op0);
5520 code0 = GET_CODE (op0);
5521 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5522 op0 = gen_rtx_REG (word_mode,
5523 (REGNO (op0) +
5524 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5525 GET_MODE (SUBREG_REG (orig_op0)),
5526 SUBREG_BYTE (orig_op0),
5527 GET_MODE (orig_op0))));
5528 }
5529
5530 if (GET_CODE (op1) == SUBREG)
5531 {
5532 op1 = SUBREG_REG (op1);
5533 code1 = GET_CODE (op1);
5534 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5535 /* ??? Why is this given op1's mode and above for
5536 ??? op0 SUBREGs we use word_mode? */
5537 op1 = gen_rtx_REG (GET_MODE (op1),
5538 (REGNO (op1) +
5539 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5540 GET_MODE (SUBREG_REG (orig_op1)),
5541 SUBREG_BYTE (orig_op1),
5542 GET_MODE (orig_op1))));
5543 }
5544 /* A PLUS in the index register may be created only as a result of
5545 register rematerialization for an expression like &localvar*4. Reload it.
5546 It may be possible to combine the displacement on the outer level,
5547 but it is probably not worthwhile to do so. */
5548 if (context == 1)
5549 {
5550 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5551 opnum, ADDR_TYPE (type), ind_levels, insn);
5552 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5553 context_reg_class,
5554 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5555 return 1;
5556 }
5557
5558 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5559 || code0 == ZERO_EXTEND || code1 == MEM)
5560 {
5561 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5562 &XEXP (x, 0), opnum, type, ind_levels,
5563 insn);
5564 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5565 &XEXP (x, 1), opnum, type, ind_levels,
5566 insn);
5567 }
5568
5569 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5570 || code1 == ZERO_EXTEND || code0 == MEM)
5571 {
5572 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5573 &XEXP (x, 0), opnum, type, ind_levels,
5574 insn);
5575 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5576 &XEXP (x, 1), opnum, type, ind_levels,
5577 insn);
5578 }
5579
5580 else if (code0 == CONST_INT || code0 == CONST
5581 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5582 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5583 &XEXP (x, 1), opnum, type, ind_levels,
5584 insn);
5585
5586 else if (code1 == CONST_INT || code1 == CONST
5587 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5588 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5589 &XEXP (x, 0), opnum, type, ind_levels,
5590 insn);
5591
5592 else if (code0 == REG && code1 == REG)
5593 {
5594 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5595 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5596 return 0;
5597 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5598 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5599 return 0;
5600 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5601 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5602 &XEXP (x, 1), opnum, type, ind_levels,
5603 insn);
5604 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5605 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5606 &XEXP (x, 0), opnum, type, ind_levels,
5607 insn);
5608 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5609 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5613 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5614 &XEXP (x, 1), opnum, type, ind_levels,
5615 insn);
5616 else
5617 {
5618 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5622 &XEXP (x, 1), opnum, type, ind_levels,
5623 insn);
5624 }
5625 }
5626
5627 else if (code0 == REG)
5628 {
5629 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5630 &XEXP (x, 0), opnum, type, ind_levels,
5631 insn);
5632 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5633 &XEXP (x, 1), opnum, type, ind_levels,
5634 insn);
5635 }
5636
5637 else if (code1 == REG)
5638 {
5639 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5640 &XEXP (x, 1), opnum, type, ind_levels,
5641 insn);
5642 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5643 &XEXP (x, 0), opnum, type, ind_levels,
5644 insn);
5645 }
5646 }
5647
5648 return 0;
5649
5650 case POST_MODIFY:
5651 case PRE_MODIFY:
5652 {
5653 rtx op0 = XEXP (x, 0);
5654 rtx op1 = XEXP (x, 1);
5655 enum rtx_code index_code;
5656 int regno;
5657 int reloadnum;
5658
5659 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5660 return 0;
5661
5662 /* Currently, we only support {PRE,POST}_MODIFY constructs
5663 where a base register is {inc,dec}remented by the contents
5664 of another register or by a constant value. Thus, these
5665 operands must match. */
5666 gcc_assert (op0 == XEXP (op1, 0));
5667
5668 /* Require index register (or constant). Let's just handle the
5669 register case in the meantime... If the target allows
5670 auto-modify by a constant then we could try replacing a pseudo
5671 register with its equivalent constant where applicable.
5672
5673 We also handle the case where the register was eliminated
5674 resulting in a PLUS subexpression.
5675
5676 If we later decide to reload the whole PRE_MODIFY or
5677 POST_MODIFY, inc_for_reload might clobber the reload register
5678 before reading the index. The index register might therefore
5679 need to live longer than a TYPE reload normally would, so be
5680 conservative and class it as RELOAD_OTHER. */
5681 if ((REG_P (XEXP (op1, 1))
5682 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5683 || GET_CODE (XEXP (op1, 1)) == PLUS)
5684 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5685 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5686 ind_levels, insn);
5687
5688 gcc_assert (REG_P (XEXP (op1, 0)));
5689
5690 regno = REGNO (XEXP (op1, 0));
5691 index_code = GET_CODE (XEXP (op1, 1));
5692
5693 /* A register that is incremented cannot be constant! */
5694 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5695 || reg_equiv_constant (regno) == 0);
5696
5697 /* Handle a register that is equivalent to a memory location
5698 which cannot be addressed directly. */
5699 if (reg_equiv_memory_loc (regno) != 0
5700 && (reg_equiv_address (regno) != 0
5701 || num_not_at_initial_offset))
5702 {
5703 rtx tem = make_memloc (XEXP (x, 0), regno);
5704
5705 if (reg_equiv_address (regno)
5706 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5707 {
5708 rtx orig = tem;
5709
5710 /* First reload the memory location's address.
5711 We can't use ADDR_TYPE (type) here, because we need to
5712 write back the value after reading it, hence we actually
5713 need two registers. */
5714 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5715 &XEXP (tem, 0), opnum,
5716 RELOAD_OTHER,
5717 ind_levels, insn);
5718
5719 if (!rtx_equal_p (tem, orig))
5720 push_reg_equiv_alt_mem (regno, tem);
5721
5722 /* Then reload the memory location into a base
5723 register. */
5724 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5725 &XEXP (op1, 0),
5726 base_reg_class (mode, code,
5727 index_code),
5728 GET_MODE (x), GET_MODE (x), 0,
5729 0, opnum, RELOAD_OTHER);
5730
5731 update_auto_inc_notes (this_insn, regno, reloadnum);
5732 return 0;
5733 }
5734 }
5735
5736 if (reg_renumber[regno] >= 0)
5737 regno = reg_renumber[regno];
5738
5739 /* We require a base register here... */
5740 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5741 {
5742 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5743 &XEXP (op1, 0), &XEXP (x, 0),
5744 base_reg_class (mode, code, index_code),
5745 GET_MODE (x), GET_MODE (x), 0, 0,
5746 opnum, RELOAD_OTHER);
5747
5748 update_auto_inc_notes (this_insn, regno, reloadnum);
5749 return 0;
5750 }
5751 }
5752 return 0;
5753
5754 case POST_INC:
5755 case POST_DEC:
5756 case PRE_INC:
5757 case PRE_DEC:
5758 if (REG_P (XEXP (x, 0)))
5759 {
5760 int regno = REGNO (XEXP (x, 0));
5761 int value = 0;
5762 rtx x_orig = x;
5763
5764 /* A register that is incremented cannot be constant! */
5765 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5766 || reg_equiv_constant (regno) == 0);
5767
5768 /* Handle a register that is equivalent to a memory location
5769 which cannot be addressed directly. */
5770 if (reg_equiv_memory_loc (regno) != 0
5771 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5772 {
5773 rtx tem = make_memloc (XEXP (x, 0), regno);
5774 if (reg_equiv_address (regno)
5775 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5776 {
5777 rtx orig = tem;
5778
5779 /* First reload the memory location's address.
5780 We can't use ADDR_TYPE (type) here, because we need to
5781 write back the value after reading it, hence we actually
5782 need two registers. */
5783 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5784 &XEXP (tem, 0), opnum, type,
5785 ind_levels, insn);
5786 if (!rtx_equal_p (tem, orig))
5787 push_reg_equiv_alt_mem (regno, tem);
5788 /* Put this inside a new increment-expression. */
5789 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5790 /* Proceed to reload that, as if it contained a register. */
5791 }
5792 }
5793
5794 /* If we have a hard register that is ok in this incdec context,
5795 don't make a reload. If the register isn't nice enough for
5796 autoincdec, we can reload it. But if an autoincrement of a
5797 register that we have verified here as acceptable is nevertheless
5798 not "valid" in the surrounding context, no autoincrement is "valid".
5799 If that is true and something made an autoincrement anyway,
5800 this must be a special context where one is allowed.
5801 (For example, a "push" instruction.)
5802 We can't improve this address, so leave it alone. */
5803
5804 /* Otherwise, reload the autoincrement into a suitable hard reg
5805 and record how much to increment by. */
5806
5807 if (reg_renumber[regno] >= 0)
5808 regno = reg_renumber[regno];
5809 if (regno >= FIRST_PSEUDO_REGISTER
5810 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5811 index_code))
5812 {
5813 int reloadnum;
5814
5815 /* If we can output the register afterwards, do so, this
5816 saves the extra update.
5817 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5818 CALL_INSN - and it does not set CC0.
5819 But don't do this if we cannot directly address the
5820 memory location, since this will make it harder to
5821 reuse address reloads, and increases register pressure.
5822 Also don't do this if we can probably update x directly. */
5823 rtx equiv = (MEM_P (XEXP (x, 0))
5824 ? XEXP (x, 0)
5825 : reg_equiv_mem (regno));
5826 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5827 if (insn && NONJUMP_INSN_P (insn) && equiv
5828 && memory_operand (equiv, GET_MODE (equiv))
5829 #ifdef HAVE_cc0
5830 && ! sets_cc0_p (PATTERN (insn))
5831 #endif
5832 && ! (icode != CODE_FOR_nothing
5833 && insn_operand_matches (icode, 0, equiv)
5834 && insn_operand_matches (icode, 1, equiv)))
5835 {
5836 /* We use the original pseudo for loc, so that
5837 emit_reload_insns() knows which pseudo this
5838 reload refers to and updates the pseudo rtx, not
5839 its equivalent memory location, as well as the
5840 corresponding entry in reg_last_reload_reg. */
5841 loc = &XEXP (x_orig, 0);
5842 x = XEXP (x, 0);
5843 reloadnum
5844 = push_reload (x, x, loc, loc,
5845 context_reg_class,
5846 GET_MODE (x), GET_MODE (x), 0, 0,
5847 opnum, RELOAD_OTHER);
5848 }
5849 else
5850 {
5851 reloadnum
5852 = push_reload (x, x, loc, (rtx*) 0,
5853 context_reg_class,
5854 GET_MODE (x), GET_MODE (x), 0, 0,
5855 opnum, type);
5856 rld[reloadnum].inc
5857 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5858
5859 value = 1;
5860 }
5861
5862 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5863 reloadnum);
5864 }
5865 return value;
5866 }
5867 return 0;
5868
5869 case TRUNCATE:
5870 case SIGN_EXTEND:
5871 case ZERO_EXTEND:
5872 /* Look for parts to reload in the inner expression and reload them
5873 too, in addition to this operation. Reloading all inner parts in
5874 addition to this one shouldn't be necessary, but at this point,
5875 we don't know if we can possibly omit any part that *can* be
5876 reloaded. Targets that are better off reloading just either part
5877 (or perhaps even a different part of an outer expression), should
5878 define LEGITIMIZE_RELOAD_ADDRESS. */
5879 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5880 context, code, SCRATCH, &XEXP (x, 0), opnum,
5881 type, ind_levels, insn);
5882 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5883 context_reg_class,
5884 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5885 return 1;
5886
5887 case MEM:
5888 /* This is probably the result of a substitution, by eliminate_regs, of
5889 an equivalent address for a pseudo that was not allocated to a hard
5890 register. Verify that the specified address is valid and reload it
5891 into a register.
5892
5893 Since we know we are going to reload this item, don't decrement for
5894 the indirection level.
5895
5896 Note that this is actually conservative: it would be slightly more
5897 efficient to use the value of SPILL_INDIRECT_LEVELS from
5898 reload1.c here. */
5899
5900 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5901 opnum, ADDR_TYPE (type), ind_levels, insn);
5902 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5903 context_reg_class,
5904 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5905 return 1;
5906
5907 case REG:
5908 {
5909 int regno = REGNO (x);
5910
5911 if (reg_equiv_constant (regno) != 0)
5912 {
5913 find_reloads_address_part (reg_equiv_constant (regno), loc,
5914 context_reg_class,
5915 GET_MODE (x), opnum, type, ind_levels);
5916 return 1;
5917 }
5918
5919 #if 0 /* This might confuse the code in reload1.c that deletes a prior
5920 output-reload that feeds this insn. */
5921 if (reg_equiv_mem (regno) != 0)
5922 {
5923 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5924 context_reg_class,
5925 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5926 return 1;
5927 }
5928 #endif
5929
5930 if (reg_equiv_memory_loc (regno)
5931 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5932 {
5933 rtx tem = make_memloc (x, regno);
5934 if (reg_equiv_address (regno) != 0
5935 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5936 {
5937 x = tem;
5938 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5939 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5940 ind_levels, insn);
5941 if (!rtx_equal_p (x, tem))
5942 push_reg_equiv_alt_mem (regno, x);
5943 }
5944 }
5945
5946 if (reg_renumber[regno] >= 0)
5947 regno = reg_renumber[regno];
5948
5949 if (regno >= FIRST_PSEUDO_REGISTER
5950 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5951 index_code))
5952 {
5953 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5954 context_reg_class,
5955 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5956 return 1;
5957 }
5958
5959 /* If a register appearing in an address is the subject of a CLOBBER
5960 in this insn, reload it into some other register to be safe.
5961 The CLOBBER is supposed to make the register unavailable
5962 from before this insn to after it. */
5963 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5964 {
5965 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5966 context_reg_class,
5967 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5968 return 1;
5969 }
5970 }
5971 return 0;
5972
5973 case SUBREG:
5974 if (REG_P (SUBREG_REG (x)))
5975 {
5976 /* If this is a SUBREG of a hard register and the resulting register
5977 is of the wrong class, reload the whole SUBREG. This avoids
5978 needless copies if SUBREG_REG is multi-word. */
5979 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5980 {
5981 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5982
5983 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5984 index_code))
5985 {
5986 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5987 context_reg_class,
5988 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5989 return 1;
5990 }
5991 }
5992 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5993 is larger than the class size, then reload the whole SUBREG. */
5994 else
5995 {
5996 enum reg_class rclass = context_reg_class;
5997 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5998 > reg_class_size[rclass])
5999 {
6000 x = find_reloads_subreg_address (x, 0, opnum,
6001 ADDR_TYPE (type),
6002 ind_levels, insn, NULL);
6003 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6004 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6005 return 1;
6006 }
6007 }
6008 }
6009 break;
6010
6011 default:
6012 break;
6013 }
6014
6015 {
6016 const char *fmt = GET_RTX_FORMAT (code);
6017 int i;
6018
6019 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6020 {
6021 if (fmt[i] == 'e')
6022 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6023 we get here. */
6024 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6025 &XEXP (x, i), opnum, type, ind_levels, insn);
6026 }
6027 }
6028
6029 #undef REG_OK_FOR_CONTEXT
6030 return 0;
6031 }
6032 \f
6033 /* X, which is found at *LOC, is a part of an address that needs to be
6034 reloaded into a register of class RCLASS. If X is a constant, or if
6035 X is a PLUS that contains a constant, check that the constant is a
6036 legitimate operand and that we are supposed to be able to load
6037 it into the register.
6038
6039 If not, force the constant into memory and reload the MEM instead.
6040
6041 MODE is the mode to use, in case X is an integer constant.
6042
6043 OPNUM and TYPE describe the purpose of any reloads made.
6044
6045 IND_LEVELS says how many levels of indirect addressing this machine
6046 supports. */
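/* As an example (the symbol is hypothetical): on a target where a bare
   SYMBOL_REF is not a legitimate constant operand, an address part such
   as (symbol_ref "table") is forced into the constant pool with
   force_const_mem, its address is processed by find_reloads_address,
   and the resulting MEM is then reloaded into a register of class
   RCLASS.  */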
6047
6048 static void
6049 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6050 enum machine_mode mode, int opnum,
6051 enum reload_type type, int ind_levels)
6052 {
6053 if (CONSTANT_P (x)
6054 && (!targetm.legitimate_constant_p (mode, x)
6055 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6056 {
6057 x = force_const_mem (mode, x);
6058 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6059 opnum, type, ind_levels, 0);
6060 }
6061
6062 else if (GET_CODE (x) == PLUS
6063 && CONSTANT_P (XEXP (x, 1))
6064 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6065 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6066 == NO_REGS))
6067 {
6068 rtx tem;
6069
6070 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6071 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6072 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6073 opnum, type, ind_levels, 0);
6074 }
6075
6076 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6077 mode, VOIDmode, 0, 0, opnum, type);
6078 }
6079 \f
6080 /* X, a subreg of a pseudo, is a part of an address that needs to be
6081 reloaded.
6082
6083 If the pseudo is equivalent to a memory location that cannot be directly
6084 addressed, make the necessary address reloads.
6085
6086 If address reloads have been necessary, or if the address is changed
6087 by register elimination, return the rtx of the memory location;
6088 otherwise, return X.
6089
6090 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6091 memory location.
6092
6093 OPNUM and TYPE identify the purpose of the reload.
6094
6095 IND_LEVELS says how many levels of indirect addressing are
6096 supported at this point in the address.
6097
6098 INSN, if nonzero, is the insn in which we do the reload. It is used
6099 to determine where to put USEs for pseudos that we have to replace with
6100 stack slots. */
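/* A sketch (offsets and register numbers invented, FP standing for the
   frame pointer): suppose pseudo 70 got no hard register and its stack
   slot is (mem:DI (plus:SI (reg:SI FP) (const_int -16))).  On a
   little-endian target, X = (subreg:SI (reg:DI 70) 4) is narrowed to
   (mem:SI (plus:SI (reg:SI FP) (const_int -12))), whose address is then
   handled by find_reloads_address.  */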
6101
6102 static rtx
6103 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6104 enum reload_type type, int ind_levels, rtx insn,
6105 int *address_reloaded)
6106 {
6107 int regno = REGNO (SUBREG_REG (x));
6108 int reloaded = 0;
6109
6110 if (reg_equiv_memory_loc (regno))
6111 {
6112 /* If the address is not directly addressable, or if the address is not
6113 offsettable, then it must be replaced. */
6114 if (! force_replace
6115 && (reg_equiv_address (regno)
6116 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6117 force_replace = 1;
6118
6119 if (force_replace || num_not_at_initial_offset)
6120 {
6121 rtx tem = make_memloc (SUBREG_REG (x), regno);
6122
6123 /* If the address changes because of register elimination, then
6124 it must be replaced. */
6125 if (force_replace
6126 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6127 {
6128 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6129 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6130 int offset;
6131 rtx orig = tem;
6132
6133 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6134 hold the correct (negative) byte offset. */
6135 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6136 offset = inner_size - outer_size;
6137 else
6138 offset = SUBREG_BYTE (x);
6139
6140 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6141 PUT_MODE (tem, GET_MODE (x));
6142 if (MEM_OFFSET (tem))
6143 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6144 if (MEM_SIZE (tem)
6145 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6146 set_mem_size (tem, GEN_INT (outer_size));
6147
6148 /* If this was a paradoxical subreg that we replaced, the
6149 resulting memory must be sufficiently aligned to allow
6150 us to widen the mode of the memory. */
6151 if (outer_size > inner_size)
6152 {
6153 rtx base;
6154
6155 base = XEXP (tem, 0);
6156 if (GET_CODE (base) == PLUS)
6157 {
6158 if (CONST_INT_P (XEXP (base, 1))
6159 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6160 return x;
6161 base = XEXP (base, 0);
6162 }
6163 if (!REG_P (base)
6164 || (REGNO_POINTER_ALIGN (REGNO (base))
6165 < outer_size * BITS_PER_UNIT))
6166 return x;
6167 }
6168
6169 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6170 XEXP (tem, 0), &XEXP (tem, 0),
6171 opnum, type, ind_levels, insn);
6172 /* ??? Do we need to handle nonzero offsets somehow? */
6173 if (!offset && !rtx_equal_p (tem, orig))
6174 push_reg_equiv_alt_mem (regno, tem);
6175
6176 /* For some processors an address may be valid in the
6177 original mode but not in a smaller mode. For
6178 example, ARM accepts a scaled index register in
6179 SImode but not in HImode. Note that this is only
6180 a problem if the address in reg_equiv_mem is already
6181 invalid in the new mode; other cases would be fixed
6182 by find_reloads_address as usual.
6183
6184 ??? We attempt to handle such cases here by doing an
6185 additional reload of the full address after the
6186 usual processing by find_reloads_address. Note that
6187 this may not work in the general case, but it seems
6188 to cover the cases where this situation currently
6189 occurs. A more general fix might be to reload the
6190 *value* instead of the address, but this would not
6191 be expected by the callers of this routine as-is.
6192
6193 If find_reloads_address has already completely replaced
6194 the address, there is nothing further to do.
6195 if (reloaded == 0
6196 && reg_equiv_mem (regno) != 0
6197 && !strict_memory_address_addr_space_p
6198 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6199 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6200 {
6201 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6202 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6203 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6204 opnum, type);
6205 reloaded = 1;
6206 }
6207 /* If this is not a toplevel operand, find_reloads doesn't see
6208 this substitution. We have to emit a USE of the pseudo so
6209 that delete_output_reload can see it. */
6210 if (replace_reloads && recog_data.operand[opnum] != x)
6211 /* We mark the USE with QImode so that we recognize it
6212 as one that can be safely deleted at the end of
6213 reload. */
6214 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6215 SUBREG_REG (x)),
6216 insn), QImode);
6217 x = tem;
6218 }
6219 }
6220 }
6221 if (reloaded && address_reloaded)
6222 *address_reloaded = 1;
6223
6224 return x;
6225 }
6226 \f
6227 /* Substitute into the current INSN the registers into which we have reloaded
6228 the things that need reloading. The array `replacements'
6229 contains the locations of all pointers that must be changed
6230 and says what to replace them with. */
6233
6234 void
6235 subst_reloads (rtx insn)
6236 {
6237 int i;
6238
6239 for (i = 0; i < n_replacements; i++)
6240 {
6241 struct replacement *r = &replacements[i];
6242 rtx reloadreg = rld[r->what].reg_rtx;
6243 if (reloadreg)
6244 {
6245 #ifdef DEBUG_RELOAD
6246 /* This checking takes a very long time on some platforms
6247 causing the gcc.c-torture/compile/limits-fnargs.c test
6248 to time out during testing. See PR 31850.
6249
6250 Internal consistency test. Check that we don't modify
6251 anything in the equivalence arrays. Whenever something from
6252 those arrays needs to be reloaded, it must be unshared before
6253 being substituted into; the equivalence must not be modified.
6254 Otherwise, if the equivalence is used after that, it will
6255 have been modified, and the thing substituted (probably a
6256 register) is likely overwritten and not a usable equivalence. */
6257 int check_regno;
6258
6259 for (check_regno = 0; check_regno < max_regno; check_regno++)
6260 {
6261 #define CHECK_MODF(ARRAY) \
6262 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6263 || !loc_mentioned_in_p (r->where, \
6264 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6265
6266 CHECK_MODF (equiv_constant);
6267 CHECK_MODF (equiv_memory_loc);
6268 CHECK_MODF (equiv_address);
6269 CHECK_MODF (equiv_mem);
6270 #undef CHECK_MODF
6271 }
6272 #endif /* DEBUG_RELOAD */
6273
6274 /* If we're replacing a LABEL_REF with a register, there must
6275 already be an indication (to e.g. flow) which label this
6276 register refers to. */
6277 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6278 || !JUMP_P (insn)
6279 || find_reg_note (insn,
6280 REG_LABEL_OPERAND,
6281 XEXP (*r->where, 0))
6282 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6283
6284 /* Encapsulate RELOADREG so its machine mode matches what
6285 used to be there. Note that gen_lowpart_common will
6286 do the wrong thing if RELOADREG is multi-word. RELOADREG
6287 will always be a REG here. */
6288 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6289 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6290
6291 *r->where = reloadreg;
6292 }
6293 /* If reload got no reg and isn't optional, something's wrong. */
6294 else
6295 gcc_assert (rld[r->what].optional);
6296 }
6297 }
6298 \f
6299 /* Make a copy of any replacements being done into X and move those
6300 copies to locations in Y, a copy of X. */
6301
6302 void
6303 copy_replacements (rtx x, rtx y)
6304 {
6305 copy_replacements_1 (&x, &y, n_replacements);
6306 }
6307
6308 static void
6309 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6310 {
6311 int i, j;
6312 rtx x, y;
6313 struct replacement *r;
6314 enum rtx_code code;
6315 const char *fmt;
6316
6317 for (j = 0; j < orig_replacements; j++)
6318 if (replacements[j].where == px)
6319 {
6320 r = &replacements[n_replacements++];
6321 r->where = py;
6322 r->what = replacements[j].what;
6323 r->mode = replacements[j].mode;
6324 }
6325
6326 x = *px;
6327 y = *py;
6328 code = GET_CODE (x);
6329 fmt = GET_RTX_FORMAT (code);
6330
6331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6332 {
6333 if (fmt[i] == 'e')
6334 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6335 else if (fmt[i] == 'E')
6336 for (j = XVECLEN (x, i); --j >= 0; )
6337 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6338 orig_replacements);
6339 }
6340 }
6341
6342 /* Change any replacements being done to *X to be done to *Y. */
6343
6344 void
6345 move_replacements (rtx *x, rtx *y)
6346 {
6347 int i;
6348
6349 for (i = 0; i < n_replacements; i++)
6350 if (replacements[i].where == x)
6351 replacements[i].where = y;
6352 }
6353 \f
6354 /* If LOC was scheduled to be replaced by something, return the replacement.
6355 Otherwise, return *LOC. */
6356
6357 rtx
6358 find_replacement (rtx *loc)
6359 {
6360 struct replacement *r;
6361
6362 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6363 {
6364 rtx reloadreg = rld[r->what].reg_rtx;
6365
6366 if (reloadreg && r->where == loc)
6367 {
6368 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6369 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6370
6371 return reloadreg;
6372 }
6373 else if (reloadreg && GET_CODE (*loc) == SUBREG
6374 && r->where == &SUBREG_REG (*loc))
6375 {
6376 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6377 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6378
6379 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6380 GET_MODE (SUBREG_REG (*loc)),
6381 SUBREG_BYTE (*loc));
6382 }
6383 }
6384
6385 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6386 what's inside and make a new rtl if so. */
6387 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6388 || GET_CODE (*loc) == MULT)
6389 {
6390 rtx x = find_replacement (&XEXP (*loc, 0));
6391 rtx y = find_replacement (&XEXP (*loc, 1));
6392
6393 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6394 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6395 }
6396
6397 return *loc;
6398 }
6399 \f
6400 /* Return nonzero if register in range [REGNO, ENDREGNO)
6401 appears either explicitly or implicitly in X
6402 other than being stored into (except for earlyclobber operands).
6403
6404 References contained within the substructure at LOC do not count.
6405 LOC may be zero, meaning don't ignore anything.
6406
6407 This is similar to refers_to_regno_p in rtlanal.c except that we
6408 look at equivalences for pseudos that didn't get hard registers. */
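/* For instance (pseudo number invented): if pseudo 70 got no hard
   register and reg_equiv_memory_loc (70) is
   (mem:SI (plus:SI (reg:SI FP) (const_int -8))), then a query for the
   frame pointer's register number against (reg:SI 70) returns nonzero,
   because the equivalence is searched in place of the pseudo.  */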
6409
6410 static int
6411 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6412 rtx x, rtx *loc)
6413 {
6414 int i;
6415 unsigned int r;
6416 RTX_CODE code;
6417 const char *fmt;
6418
6419 if (x == 0)
6420 return 0;
6421
6422 repeat:
6423 code = GET_CODE (x);
6424
6425 switch (code)
6426 {
6427 case REG:
6428 r = REGNO (x);
6429
6430 /* If this is a pseudo, a hard register must not have been allocated.
6431 X must therefore either be a constant or be in memory. */
6432 if (r >= FIRST_PSEUDO_REGISTER)
6433 {
6434 if (reg_equiv_memory_loc (r))
6435 return refers_to_regno_for_reload_p (regno, endregno,
6436 reg_equiv_memory_loc (r),
6437 (rtx*) 0);
6438
6439 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6440 return 0;
6441 }
6442
6443 return (endregno > r
6444 && regno < r + (r < FIRST_PSEUDO_REGISTER
6445 ? hard_regno_nregs[r][GET_MODE (x)]
6446 : 1));
6447
6448 case SUBREG:
6449 /* If this is a SUBREG of a hard reg, we can see exactly which
6450 registers are being modified. Otherwise, handle normally. */
6451 if (REG_P (SUBREG_REG (x))
6452 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6453 {
6454 unsigned int inner_regno = subreg_regno (x);
6455 unsigned int inner_endregno
6456 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6457 ? subreg_nregs (x) : 1);
6458
6459 return endregno > inner_regno && regno < inner_endregno;
6460 }
6461 break;
6462
6463 case CLOBBER:
6464 case SET:
6465 if (&SET_DEST (x) != loc
6466 /* Note that setting a SUBREG counts as referring to the REG it is in
6467 for a pseudo, but not for hard registers, since we can
6468 treat each word individually. */
6469 && ((GET_CODE (SET_DEST (x)) == SUBREG
6470 && loc != &SUBREG_REG (SET_DEST (x))
6471 && REG_P (SUBREG_REG (SET_DEST (x)))
6472 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6473 && refers_to_regno_for_reload_p (regno, endregno,
6474 SUBREG_REG (SET_DEST (x)),
6475 loc))
6476 /* If the output is an earlyclobber operand, this is
6477 a conflict. */
6478 || ((!REG_P (SET_DEST (x))
6479 || earlyclobber_operand_p (SET_DEST (x)))
6480 && refers_to_regno_for_reload_p (regno, endregno,
6481 SET_DEST (x), loc))))
6482 return 1;
6483
6484 if (code == CLOBBER || loc == &SET_SRC (x))
6485 return 0;
6486 x = SET_SRC (x);
6487 goto repeat;
6488
6489 default:
6490 break;
6491 }
6492
6493 /* X does not match, so try its subexpressions. */
6494
6495 fmt = GET_RTX_FORMAT (code);
6496 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6497 {
6498 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6499 {
6500 if (i == 0)
6501 {
6502 x = XEXP (x, 0);
6503 goto repeat;
6504 }
6505 else
6506 if (refers_to_regno_for_reload_p (regno, endregno,
6507 XEXP (x, i), loc))
6508 return 1;
6509 }
6510 else if (fmt[i] == 'E')
6511 {
6512 int j;
6513 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6514 if (loc != &XVECEXP (x, i, j)
6515 && refers_to_regno_for_reload_p (regno, endregno,
6516 XVECEXP (x, i, j), loc))
6517 return 1;
6518 }
6519 }
6520 return 0;
6521 }
6522
6523 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6524 we check if any register number in X conflicts with the relevant register
6525 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6526 contains a MEM (we don't bother checking for memory addresses that can't
6527 conflict because we expect this to be a rare case).
6528
6529 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6530 that we look at equivalences for pseudos that didn't get hard registers. */
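/* For illustration: if X is (reg:SI 1) and IN is
   (plus:SI (reg:SI 1) (const_int 8)), the result is nonzero.  If X is
   a MEM, or a pseudo whose equivalence is a MEM, the result is nonzero
   whenever IN mentions any MEM at all.  */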
6531
6532 int
6533 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6534 {
6535 int regno, endregno;
6536
6537 /* Overly conservative. */
6538 if (GET_CODE (x) == STRICT_LOW_PART
6539 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6540 x = XEXP (x, 0);
6541
6542 /* If either argument is a constant, then modifying X cannot affect IN. */
6543 if (CONSTANT_P (x) || CONSTANT_P (in))
6544 return 0;
6545 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6546 return refers_to_mem_for_reload_p (in);
6547 else if (GET_CODE (x) == SUBREG)
6548 {
6549 regno = REGNO (SUBREG_REG (x));
6550 if (regno < FIRST_PSEUDO_REGISTER)
6551 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6552 GET_MODE (SUBREG_REG (x)),
6553 SUBREG_BYTE (x),
6554 GET_MODE (x));
6555 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6556 ? subreg_nregs (x) : 1);
6557
6558 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6559 }
6560 else if (REG_P (x))
6561 {
6562 regno = REGNO (x);
6563
6564 /* If this is a pseudo, it must not have been assigned a hard register.
6565 Therefore, it must either be in memory or be a constant. */
6566
6567 if (regno >= FIRST_PSEUDO_REGISTER)
6568 {
6569 if (reg_equiv_memory_loc (regno))
6570 return refers_to_mem_for_reload_p (in);
6571 gcc_assert (reg_equiv_constant (regno));
6572 return 0;
6573 }
6574
6575 endregno = END_HARD_REGNO (x);
6576
6577 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6578 }
6579 else if (MEM_P (x))
6580 return refers_to_mem_for_reload_p (in);
6581 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6582 || GET_CODE (x) == CC0)
6583 return reg_mentioned_p (x, in);
6584 else
6585 {
6586 gcc_assert (GET_CODE (x) == PLUS);
6587
6588 /* We actually want to know if X is mentioned somewhere inside IN.
6589 We must not say that (plus (sp) (const_int 124)) is in
6590 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6591 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6592 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6593 while (MEM_P (in))
6594 in = XEXP (in, 0);
6595 if (REG_P (in))
6596 return 0;
6597 else if (GET_CODE (in) == PLUS)
6598 return (rtx_equal_p (x, in)
6599 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6600 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6601 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6602 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6603 }
6604
6605 gcc_unreachable ();
6606 }
6607
6608 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6609 registers. */
6610
6611 static int
6612 refers_to_mem_for_reload_p (rtx x)
6613 {
6614 const char *fmt;
6615 int i;
6616
6617 if (MEM_P (x))
6618 return 1;
6619
6620 if (REG_P (x))
6621 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6622 && reg_equiv_memory_loc (REGNO (x)));
6623
6624 fmt = GET_RTX_FORMAT (GET_CODE (x));
6625 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6626 if (fmt[i] == 'e'
6627 && (MEM_P (XEXP (x, i))
6628 || refers_to_mem_for_reload_p (XEXP (x, i))))
6629 return 1;
6630
6631 return 0;
6632 }
6633 \f
6634 /* Check the insns before INSN to see if there is a suitable register
6635 containing the same value as GOAL.
6636 If OTHER is -1, look for a register in class RCLASS.
6637 Otherwise, just see if register number OTHER shares GOAL's value.
6638
6639 Return an rtx for the register found, or zero if none is found.
6640
6641 If RELOAD_REG_P is (short *)1,
6642 we reject any hard reg that appears in reload_reg_rtx
6643 because such a hard reg is also needed coming into this insn.
6644
6645 If RELOAD_REG_P is any other nonzero value,
6646 it is a vector indexed by hard reg number
6647 and we reject any hard reg whose element in the vector is nonnegative
6648 as well as any that appears in reload_reg_rtx.
6649
6650 If GOAL is zero, then GOALREG is a register number; we look
6651 for an equivalent for that register.
6652
6653 MODE is the machine mode of the value we want an equivalence for.
6654 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6655
6656 This function is used by jump.c as well as in the reload pass.
6657
6658 If GOAL is the sum of the stack pointer and a constant, we treat it
6659 as if it were a constant except that sp is required to be unchanging. */
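/* Hypothetical example of a call:

     equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                             (short *) 0, 0, SImode);

   This scans backwards from INSN for an insn such as
   (set (reg:SI N) GOAL) or (set GOAL (reg:SI N)) and returns
   (reg:SI N) if that hard register is in GENERAL_REGS and provably
   still holds GOAL's value at INSN; otherwise it returns zero.
   GENERAL_REGS and SImode are merely illustrative arguments.  */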
6660
6661 rtx
6662 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6663 short *reload_reg_p, int goalreg, enum machine_mode mode)
6664 {
6665 rtx p = insn;
6666 rtx goaltry, valtry, value, where;
6667 rtx pat;
6668 int regno = -1;
6669 int valueno;
6670 int goal_mem = 0;
6671 int goal_const = 0;
6672 int goal_mem_addr_varies = 0;
6673 int need_stable_sp = 0;
6674 int nregs;
6675 int valuenregs;
6676 int num = 0;
6677
6678 if (goal == 0)
6679 regno = goalreg;
6680 else if (REG_P (goal))
6681 regno = REGNO (goal);
6682 else if (MEM_P (goal))
6683 {
6684 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6685 if (MEM_VOLATILE_P (goal))
6686 return 0;
6687 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6688 return 0;
6689 /* An address with side effects must be reexecuted. */
6690 switch (code)
6691 {
6692 case POST_INC:
6693 case PRE_INC:
6694 case POST_DEC:
6695 case PRE_DEC:
6696 case POST_MODIFY:
6697 case PRE_MODIFY:
6698 return 0;
6699 default:
6700 break;
6701 }
6702 goal_mem = 1;
6703 }
6704 else if (CONSTANT_P (goal))
6705 goal_const = 1;
6706 else if (GET_CODE (goal) == PLUS
6707 && XEXP (goal, 0) == stack_pointer_rtx
6708 && CONSTANT_P (XEXP (goal, 1)))
6709 goal_const = need_stable_sp = 1;
6710 else if (GET_CODE (goal) == PLUS
6711 && XEXP (goal, 0) == frame_pointer_rtx
6712 && CONSTANT_P (XEXP (goal, 1)))
6713 goal_const = 1;
6714 else
6715 return 0;
6716
6717 num = 0;
6718 /* Scan insns back from INSN, looking for one that copies
6719 a value into or out of GOAL.
6720 Stop and give up if we reach a label. */
6721
6722 while (1)
6723 {
6724 p = PREV_INSN (p);
6725 if (p && DEBUG_INSN_P (p))
6726 continue;
6727 num++;
6728 if (p == 0 || LABEL_P (p)
6729 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6730 return 0;
6731
6732 /* Don't reuse register contents from before a setjmp-type
6733 function call; on the second return (from the longjmp) it
6734 might have been clobbered by a later reuse. It doesn't
6735 seem worthwhile to go and check whether it is actually
6736 reused, even if that information would be readily available;
6737 just don't reuse it across the setjmp call. */
6738 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6739 return 0;
6740
6741 if (NONJUMP_INSN_P (p)
6742 /* If we don't want spill regs ... */
6743 && (! (reload_reg_p != 0
6744 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6745 /* ... then ignore insns introduced by reload; they aren't
6746 useful and can cause results in reload_as_needed to be
6747 different from what they were when calculating the need for
6748 spills. If we notice an input-reload insn here, we will
6749 reject it below, but it might hide a usable equivalent.
6750 That makes bad code. It may even fail: perhaps no reg was
6751 spilled for this insn because it was assumed we would find
6752 that equivalent. */
6753 || INSN_UID (p) < reload_first_uid))
6754 {
6755 rtx tem;
6756 pat = single_set (p);
6757
6758 /* First check for something that sets some reg equal to GOAL. */
6759 if (pat != 0
6760 && ((regno >= 0
6761 && true_regnum (SET_SRC (pat)) == regno
6762 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6763 ||
6764 (regno >= 0
6765 && true_regnum (SET_DEST (pat)) == regno
6766 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6767 ||
6768 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6769 /* When looking for stack pointer + const,
6770 make sure we don't use a stack adjust. */
6771 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6772 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6773 || (goal_mem
6774 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6775 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6776 || (goal_mem
6777 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6778 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6779 /* If we are looking for a constant,
6780 and something equivalent to that constant was copied
6781 into a reg, we can use that reg. */
6782 || (goal_const && REG_NOTES (p) != 0
6783 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6784 && ((rtx_equal_p (XEXP (tem, 0), goal)
6785 && (valueno
6786 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6787 || (REG_P (SET_DEST (pat))
6788 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6789 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6790 && CONST_INT_P (goal)
6791 && 0 != (goaltry
6792 = operand_subword (XEXP (tem, 0), 0, 0,
6793 VOIDmode))
6794 && rtx_equal_p (goal, goaltry)
6795 && (valtry
6796 = operand_subword (SET_DEST (pat), 0, 0,
6797 VOIDmode))
6798 && (valueno = true_regnum (valtry)) >= 0)))
6799 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6800 NULL_RTX))
6801 && REG_P (SET_DEST (pat))
6802 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6803 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6804 && CONST_INT_P (goal)
6805 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6806 VOIDmode))
6807 && rtx_equal_p (goal, goaltry)
6808 && (valtry
6809 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6810 && (valueno = true_regnum (valtry)) >= 0)))
6811 {
6812 if (other >= 0)
6813 {
6814 if (valueno != other)
6815 continue;
6816 }
6817 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6818 continue;
6819 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6820 mode, valueno))
6821 continue;
6822 value = valtry;
6823 where = p;
6824 break;
6825 }
6826 }
6827 }
6828
6829 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6830 (or copying VALUE into GOAL, if GOAL is also a register).
6831 Now verify that VALUE is really valid. */
6832
6833 /* VALUENO is the register number of VALUE; a hard register. */
6834
6835 /* Don't try to re-use something that is killed in this insn. We want
6836 to be able to trust REG_UNUSED notes. */
6837 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6838 return 0;
6839
6840 /* If we propose to get the value from the stack pointer or if GOAL is
6841 a MEM based on the stack pointer, we need a stable SP. */
6842 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6843 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6844 goal)))
6845 need_stable_sp = 1;
6846
6847 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6848 if (GET_MODE (value) != mode)
6849 return 0;
6850
6851 /* Reject VALUE if it was loaded from GOAL
6852 and is also a register that appears in the address of GOAL. */
6853
6854 if (goal_mem && value == SET_DEST (single_set (where))
6855 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6856 goal, (rtx*) 0))
6857 return 0;
6858
6859 /* Reject registers that overlap GOAL. */
6860
6861 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6862 nregs = hard_regno_nregs[regno][mode];
6863 else
6864 nregs = 1;
6865 valuenregs = hard_regno_nregs[valueno][mode];
6866
6867 if (!goal_mem && !goal_const
6868 && regno + nregs > valueno && regno < valueno + valuenregs)
6869 return 0;
6870
6871 /* Reject VALUE if it is one of the regs reserved for reloads.
6872 Reload1 knows how to reuse them anyway, and it would get
6873 confused if we allocated one without its knowledge.
6874 (Now that insns introduced by reload are ignored above,
6875 this case shouldn't happen, but I'm not positive.) */
6876
6877 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6878 {
6879 int i;
6880 for (i = 0; i < valuenregs; ++i)
6881 if (reload_reg_p[valueno + i] >= 0)
6882 return 0;
6883 }
6884
6885 /* Reject VALUE if it is a register being used for an input reload
6886 even if it is not one of those reserved. */
6887
6888 if (reload_reg_p != 0)
6889 {
6890 int i;
6891 for (i = 0; i < n_reloads; i++)
6892 if (rld[i].reg_rtx != 0 && rld[i].in)
6893 {
6894 int regno1 = REGNO (rld[i].reg_rtx);
6895 int nregs1 = hard_regno_nregs[regno1]
6896 [GET_MODE (rld[i].reg_rtx)];
6897 if (regno1 < valueno + valuenregs
6898 && regno1 + nregs1 > valueno)
6899 return 0;
6900 }
6901 }
6902
6903 if (goal_mem)
6904 /* We must treat the frame pointer as varying here,
6905 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6906 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6907
6908 /* Now verify that the values of GOAL and VALUE remain unaltered
6909 until INSN is reached. */
6910
6911 p = insn;
6912 while (1)
6913 {
6914 p = PREV_INSN (p);
6915 if (p == where)
6916 return value;
6917
6918 /* Don't trust the conversion past a function call
6919 if either of the two is in a call-clobbered register, or memory. */
6920 if (CALL_P (p))
6921 {
6922 int i;
6923
6924 if (goal_mem || need_stable_sp)
6925 return 0;
6926
6927 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6928 for (i = 0; i < nregs; ++i)
6929 if (call_used_regs[regno + i]
6930 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6931 return 0;
6932
6933 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6934 for (i = 0; i < valuenregs; ++i)
6935 if (call_used_regs[valueno + i]
6936 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6937 return 0;
6938 }
6939
6940 if (INSN_P (p))
6941 {
6942 pat = PATTERN (p);
6943
6944 /* Watch out for unspec_volatile, and volatile asms. */
6945 if (volatile_insn_p (pat))
6946 return 0;
6947
6948 /* If this insn P stores in either GOAL or VALUE, return 0.
6949 If GOAL is a memory ref and this insn writes memory, return 0.
6950 If GOAL is a memory ref and its address is not constant,
6951 and this insn P changes a register used in GOAL, return 0. */
6952
6953 if (GET_CODE (pat) == COND_EXEC)
6954 pat = COND_EXEC_CODE (pat);
6955 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6956 {
6957 rtx dest = SET_DEST (pat);
6958 while (GET_CODE (dest) == SUBREG
6959 || GET_CODE (dest) == ZERO_EXTRACT
6960 || GET_CODE (dest) == STRICT_LOW_PART)
6961 dest = XEXP (dest, 0);
6962 if (REG_P (dest))
6963 {
6964 int xregno = REGNO (dest);
6965 int xnregs;
6966 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6967 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6968 else
6969 xnregs = 1;
6970 if (xregno < regno + nregs && xregno + xnregs > regno)
6971 return 0;
6972 if (xregno < valueno + valuenregs
6973 && xregno + xnregs > valueno)
6974 return 0;
6975 if (goal_mem_addr_varies
6976 && reg_overlap_mentioned_for_reload_p (dest, goal))
6977 return 0;
6978 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6979 return 0;
6980 }
6981 else if (goal_mem && MEM_P (dest)
6982 && ! push_operand (dest, GET_MODE (dest)))
6983 return 0;
6984 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6985 && reg_equiv_memory_loc (regno) != 0)
6986 return 0;
6987 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6988 return 0;
6989 }
6990 else if (GET_CODE (pat) == PARALLEL)
6991 {
6992 int i;
6993 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6994 {
6995 rtx v1 = XVECEXP (pat, 0, i);
6996 if (GET_CODE (v1) == COND_EXEC)
6997 v1 = COND_EXEC_CODE (v1);
6998 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6999 {
7000 rtx dest = SET_DEST (v1);
7001 while (GET_CODE (dest) == SUBREG
7002 || GET_CODE (dest) == ZERO_EXTRACT
7003 || GET_CODE (dest) == STRICT_LOW_PART)
7004 dest = XEXP (dest, 0);
7005 if (REG_P (dest))
7006 {
7007 int xregno = REGNO (dest);
7008 int xnregs;
7009 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7010 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7011 else
7012 xnregs = 1;
7013 if (xregno < regno + nregs
7014 && xregno + xnregs > regno)
7015 return 0;
7016 if (xregno < valueno + valuenregs
7017 && xregno + xnregs > valueno)
7018 return 0;
7019 if (goal_mem_addr_varies
7020 && reg_overlap_mentioned_for_reload_p (dest,
7021 goal))
7022 return 0;
7023 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7024 return 0;
7025 }
7026 else if (goal_mem && MEM_P (dest)
7027 && ! push_operand (dest, GET_MODE (dest)))
7028 return 0;
7029 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7030 && reg_equiv_memory_loc (regno) != 0)
7031 return 0;
7032 else if (need_stable_sp
7033 && push_operand (dest, GET_MODE (dest)))
7034 return 0;
7035 }
7036 }
7037 }
7038
7039 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7040 {
7041 rtx link;
7042
7043 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7044 link = XEXP (link, 1))
7045 {
7046 pat = XEXP (link, 0);
7047 if (GET_CODE (pat) == CLOBBER)
7048 {
7049 rtx dest = SET_DEST (pat);
7050
7051 if (REG_P (dest))
7052 {
7053 int xregno = REGNO (dest);
7054 int xnregs
7055 = hard_regno_nregs[xregno][GET_MODE (dest)];
7056
7057 if (xregno < regno + nregs
7058 && xregno + xnregs > regno)
7059 return 0;
7060 else if (xregno < valueno + valuenregs
7061 && xregno + xnregs > valueno)
7062 return 0;
7063 else if (goal_mem_addr_varies
7064 && reg_overlap_mentioned_for_reload_p (dest,
7065 goal))
7066 return 0;
7067 }
7068
7069 else if (goal_mem && MEM_P (dest)
7070 && ! push_operand (dest, GET_MODE (dest)))
7071 return 0;
7072 else if (need_stable_sp
7073 && push_operand (dest, GET_MODE (dest)))
7074 return 0;
7075 }
7076 }
7077 }
7078
7079 #ifdef AUTO_INC_DEC
7080 /* If this insn auto-increments or auto-decrements
7081 either regno or valueno, return 0 now.
7082 If GOAL is a memory ref and its address is not constant,
7083 and this insn P increments a register used in GOAL, return 0. */
7084 {
7085 rtx link;
7086
7087 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7088 if (REG_NOTE_KIND (link) == REG_INC
7089 && REG_P (XEXP (link, 0)))
7090 {
7091 int incno = REGNO (XEXP (link, 0));
7092 if (incno < regno + nregs && incno >= regno)
7093 return 0;
7094 if (incno < valueno + valuenregs && incno >= valueno)
7095 return 0;
7096 if (goal_mem_addr_varies
7097 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7098 goal))
7099 return 0;
7100 }
7101 }
7102 #endif
7103 }
7104 }
7105 }
7106 \f
7107 /* Find a place where INCED appears in an increment or decrement operator
7108 within X, and return the amount INCED is incremented or decremented by.
7109 The value is always positive. */
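/* For example, on a target where SImode is 4 bytes wide,
   find_inc_amount ((mem:SI (post_inc:SI (reg B))), (reg B)) is 4,
   and the result is 8 when the MEM's address is
   (pre_modify (reg B) (plus (reg B) (const_int -8))).
   If INCED is not incremented or decremented anywhere in X,
   the result is 0.  */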
7110
7111 static int
7112 find_inc_amount (rtx x, rtx inced)
7113 {
7114 enum rtx_code code = GET_CODE (x);
7115 const char *fmt;
7116 int i;
7117
7118 if (code == MEM)
7119 {
7120 rtx addr = XEXP (x, 0);
7121 if ((GET_CODE (addr) == PRE_DEC
7122 || GET_CODE (addr) == POST_DEC
7123 || GET_CODE (addr) == PRE_INC
7124 || GET_CODE (addr) == POST_INC)
7125 && XEXP (addr, 0) == inced)
7126 return GET_MODE_SIZE (GET_MODE (x));
7127 else if ((GET_CODE (addr) == PRE_MODIFY
7128 || GET_CODE (addr) == POST_MODIFY)
7129 && GET_CODE (XEXP (addr, 1)) == PLUS
7130 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7131 && XEXP (addr, 0) == inced
7132 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7133 {
7134 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7135 return i < 0 ? -i : i;
7136 }
7137 }
7138
7139 fmt = GET_RTX_FORMAT (code);
7140 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7141 {
7142 if (fmt[i] == 'e')
7143 {
7144 int tem = find_inc_amount (XEXP (x, i), inced);
7145 if (tem != 0)
7146 return tem;
7147 }
7148 if (fmt[i] == 'E')
7149 {
7150 int j;
7151 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7152 {
7153 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7154 if (tem != 0)
7155 return tem;
7156 }
7157 }
7158 }
7159
7160 return 0;
7161 }
7162 \f
7163 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7164 REG_INC note in insn INSN. REGNO must refer to a hard register. */
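/* For instance, if INSN carries a REG_INC note for (reg:SI 5) because
   its pattern auto-increments hard reg 5, then any range
   [REGNO, ENDREGNO) containing 5 makes this return 1.  Register 5 is
   just an example.  */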
7165
7166 #ifdef AUTO_INC_DEC
7167 static int
7168 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7169 rtx insn)
7170 {
7171 rtx link;
7172
7173 gcc_assert (insn);
7174
7175 if (! INSN_P (insn))
7176 return 0;
7177
7178 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7179 if (REG_NOTE_KIND (link) == REG_INC)
7180 {
7181 unsigned int test = (int) REGNO (XEXP (link, 0));
7182 if (test >= regno && test < endregno)
7183 return 1;
7184 }
7185 return 0;
7186 }
7187 #else
7188
7189 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7190
7191 #endif
7192
7193 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7194 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7195 REG_INC. REGNO must refer to a hard register. */
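/* For illustration, assuming SImode occupies a single hard register:
   for an INSN whose pattern is
   (parallel [(set (reg:SI 0) ...) (clobber (reg:SI 1))]),
   regno_clobbered_p (1, insn, SImode, 0) returns 1, while
   regno_clobbered_p (0, insn, SImode, 0) returns 0 and
   regno_clobbered_p (0, insn, SImode, 1) returns 1.  The register
   numbers are only illustrative.  */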
7196
7197 int
7198 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7199 int sets)
7200 {
7201 unsigned int nregs, endregno;
7202
7203 /* regno must be a hard register. */
7204 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7205
7206 nregs = hard_regno_nregs[regno][mode];
7207 endregno = regno + nregs;
7208
7209 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7210 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7211 && REG_P (XEXP (PATTERN (insn), 0)))
7212 {
7213 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7214
7215 return test >= regno && test < endregno;
7216 }
7217
7218 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7219 return 1;
7220
7221 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7222 {
7223 int i = XVECLEN (PATTERN (insn), 0) - 1;
7224
7225 for (; i >= 0; i--)
7226 {
7227 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7228 if ((GET_CODE (elt) == CLOBBER
7229 || (sets == 1 && GET_CODE (elt) == SET))
7230 && REG_P (XEXP (elt, 0)))
7231 {
7232 unsigned int test = REGNO (XEXP (elt, 0));
7233
7234 if (test >= regno && test < endregno)
7235 return 1;
7236 }
7237 if (sets == 2
7238 && reg_inc_found_and_valid_p (regno, endregno, elt))
7239 return 1;
7240 }
7241 }
7242
7243 return 0;
7244 }
7245
7246 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
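/* For example, on a hypothetical big-endian 32-bit target where
   (reg:DI 10) occupies hard regs 10 and 11, the SImode low part
   returned is (reg:SI 11); on a little-endian target it is
   (reg:SI 10).  */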
7247 rtx
7248 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7249 {
7250 int regno;
7251
7252 if (GET_MODE (reloadreg) == mode)
7253 return reloadreg;
7254
7255 regno = REGNO (reloadreg);
7256
7257 if (WORDS_BIG_ENDIAN)
7258 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7259 - (int) hard_regno_nregs[regno][mode];
7260
7261 return gen_rtx_REG (mode, regno);
7262 }
7263
7264 static const char *const reload_when_needed_name[] =
7265 {
7266 "RELOAD_FOR_INPUT",
7267 "RELOAD_FOR_OUTPUT",
7268 "RELOAD_FOR_INSN",
7269 "RELOAD_FOR_INPUT_ADDRESS",
7270 "RELOAD_FOR_INPADDR_ADDRESS",
7271 "RELOAD_FOR_OUTPUT_ADDRESS",
7272 "RELOAD_FOR_OUTADDR_ADDRESS",
7273 "RELOAD_FOR_OPERAND_ADDRESS",
7274 "RELOAD_FOR_OPADDR_ADDR",
7275 "RELOAD_OTHER",
7276 "RELOAD_FOR_OTHER_ADDRESS"
7277 };
7278
7279 /* These functions are used to print the variables set by 'find_reloads'. */
7280
7281 DEBUG_FUNCTION void
7282 debug_reload_to_stream (FILE *f)
7283 {
7284 int r;
7285 const char *prefix;
7286
7287 if (! f)
7288 f = stderr;
7289 for (r = 0; r < n_reloads; r++)
7290 {
7291 fprintf (f, "Reload %d: ", r);
7292
7293 if (rld[r].in != 0)
7294 {
7295 fprintf (f, "reload_in (%s) = ",
7296 GET_MODE_NAME (rld[r].inmode));
7297 print_inline_rtx (f, rld[r].in, 24);
7298 fprintf (f, "\n\t");
7299 }
7300
7301 if (rld[r].out != 0)
7302 {
7303 fprintf (f, "reload_out (%s) = ",
7304 GET_MODE_NAME (rld[r].outmode));
7305 print_inline_rtx (f, rld[r].out, 24);
7306 fprintf (f, "\n\t");
7307 }
7308
7309 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7310
7311 fprintf (f, "%s (opnum = %d)",
7312 reload_when_needed_name[(int) rld[r].when_needed],
7313 rld[r].opnum);
7314
7315 if (rld[r].optional)
7316 fprintf (f, ", optional");
7317
7318 if (rld[r].nongroup)
7319 fprintf (f, ", nongroup");
7320
7321 if (rld[r].inc != 0)
7322 fprintf (f, ", inc by %d", rld[r].inc);
7323
7324 if (rld[r].nocombine)
7325 fprintf (f, ", can't combine");
7326
7327 if (rld[r].secondary_p)
7328 fprintf (f, ", secondary_reload_p");
7329
7330 if (rld[r].in_reg != 0)
7331 {
7332 fprintf (f, "\n\treload_in_reg: ");
7333 print_inline_rtx (f, rld[r].in_reg, 24);
7334 }
7335
7336 if (rld[r].out_reg != 0)
7337 {
7338 fprintf (f, "\n\treload_out_reg: ");
7339 print_inline_rtx (f, rld[r].out_reg, 24);
7340 }
7341
7342 if (rld[r].reg_rtx != 0)
7343 {
7344 fprintf (f, "\n\treload_reg_rtx: ");
7345 print_inline_rtx (f, rld[r].reg_rtx, 24);
7346 }
7347
7348 prefix = "\n\t";
7349 if (rld[r].secondary_in_reload != -1)
7350 {
7351 fprintf (f, "%ssecondary_in_reload = %d",
7352 prefix, rld[r].secondary_in_reload);
7353 prefix = ", ";
7354 }
7355
7356 if (rld[r].secondary_out_reload != -1)
7357 fprintf (f, "%ssecondary_out_reload = %d\n",
7358 prefix, rld[r].secondary_out_reload);
7359
7360 prefix = "\n\t";
7361 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7362 {
7363 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7364 insn_data[rld[r].secondary_in_icode].name);
7365 prefix = ", ";
7366 }
7367
7368 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7369 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7370 insn_data[rld[r].secondary_out_icode].name);
7371
7372 fprintf (f, "\n");
7373 }
7374 }
7375
7376 DEBUG_FUNCTION void
7377 debug_reload (void)
7378 {
7379 debug_reload_to_stream (stderr);
7380 }