1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35       2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
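
/* The protocol above, shown as a minimal sketch.  This is illustrative
   only: the loop structure, the argument names and the choose_hard_regs
   step are placeholders, not the actual driver code in reload1.c.  */
#if 0
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* Steps 1 and 2: record the reloads this insn needs and the
	   locations to patch (second argument nonzero).  */
	find_reloads (insn, 1, ind_levels, live_known, spill_reg_order);

	/* Choose hard registers, setting rld[r].reg_rtx for each reload,
	   and emit load insns before INSN and store insns after it.
	   (choose_hard_regs is a hypothetical placeholder.)  */
	choose_hard_regs ();

	/* Step 3: substitute the chosen reload registers into the
	   recorded locations.  */
	subst_reloads (insn);
      }
#endif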
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
114
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
122
123 /* True if RCLASS is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
125
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
128 {
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
132 }
133
134 \f
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
139
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
144
145 int reload_n_operands;
146
147 /* Replacing reloads.
148
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
153
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
156
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
159 {
160 rtx *where; /* Location to store in */
161 int what; /* which reload this is for */
162 enum machine_mode mode; /* mode it must have */
163 };
164
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
166
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
169
170 /* Used to track what is modified by an operand. */
171 struct decomposition
172 {
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
178 };
179
180 #ifdef SECONDARY_MEMORY_NEEDED
181
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
184
185 We keep two versions, before and after register elimination. The one
186    after register elimination is recorded separately for each operand.  This
187    is done in case the address is not valid, to be sure that we reload
188    each one separately.  */
189
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
194
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
198
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
201
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
206
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
212
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
215
216 /* On return from push_reload, holds the reload-number for the OUT
217    operand, which can be different from that for the input operand.  */
218 static int output_reloadnum;
219
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
225
226 /* Indicates if two reload purposes are for similar enough things that we
227 can merge their reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
236
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
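
/* Worked examples (illustrative only): MERGABLE_RELOADS holds for two
   RELOAD_FOR_INPUT reloads even when they are for different operands,
   while two RELOAD_FOR_INPUT_ADDRESS reloads for different operands do
   not merge.  MERGE_TO_OTHER is false for two RELOAD_FOR_INPUT reloads,
   so the merged reload keeps its type, whereas merging a RELOAD_OTHER
   reload with a reload of any other type yields RELOAD_OTHER.  */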
244
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
253
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
260 static void push_replacement (rtx *, int, enum machine_mode);
261 static void dup_replacements (rtx *, rtx *);
262 static void combine_reloads (void);
263 static int find_reusable_reload (rtx *, rtx, enum reg_class,
264 enum reload_type, int, int);
265 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
266 enum machine_mode, reg_class_t, int, int);
267 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
268 static struct decomposition decompose (rtx);
269 static int immune_p (rtx, rtx, struct decomposition);
270 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
271 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
272 int *);
273 static rtx make_memloc (rtx, int);
274 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
275 addr_space_t, rtx *);
276 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
277 int, enum reload_type, int, rtx);
278 static rtx subst_reg_equivs (rtx, rtx);
279 static rtx subst_indexed_address (rtx);
280 static void update_auto_inc_notes (rtx, int, int);
281 static int find_reloads_address_1 (enum machine_mode, rtx, int,
282 enum rtx_code, enum rtx_code, rtx *,
283 int, enum reload_type,int, rtx);
284 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
285 enum machine_mode, int,
286 enum reload_type, int);
287 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
288 int, rtx, int *);
289 static void copy_replacements_1 (rtx *, rtx *, int);
290 static int find_inc_amount (rtx, rtx);
291 static int refers_to_mem_for_reload_p (rtx);
292 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
293 rtx, rtx *);
294
295 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
296 list yet. */
297
298 static void
299 push_reg_equiv_alt_mem (int regno, rtx mem)
300 {
301 rtx it;
302
303 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
304 if (rtx_equal_p (XEXP (it, 0), mem))
305 return;
306
307 reg_equiv_alt_mem_list (regno)
308 = alloc_EXPR_LIST (REG_EQUIV, mem,
309 reg_equiv_alt_mem_list (regno));
310 }
311 \f
312 /* Determine if any secondary reloads are needed for loading (if IN_P is
313 nonzero) or storing (if IN_P is zero) X to or from a reload register of
314 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
315 are needed, push them.
316
317 Return the reload number of the secondary reload we made, or -1 if
318 we didn't need one. *PICODE is set to the insn_code to use if we do
319 need a secondary reload. */
320
321 static int
322 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
323 enum reg_class reload_class,
324 enum machine_mode reload_mode, enum reload_type type,
325 enum insn_code *picode, secondary_reload_info *prev_sri)
326 {
327 enum reg_class rclass = NO_REGS;
328 enum reg_class scratch_class;
329 enum machine_mode mode = reload_mode;
330 enum insn_code icode = CODE_FOR_nothing;
331 enum insn_code t_icode = CODE_FOR_nothing;
332 enum reload_type secondary_type;
333 int s_reload, t_reload = -1;
334 const char *scratch_constraint;
335 char letter;
336 secondary_reload_info sri;
337
338 if (type == RELOAD_FOR_INPUT_ADDRESS
339 || type == RELOAD_FOR_OUTPUT_ADDRESS
340 || type == RELOAD_FOR_INPADDR_ADDRESS
341 || type == RELOAD_FOR_OUTADDR_ADDRESS)
342 secondary_type = type;
343 else
344 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
345
346 *picode = CODE_FOR_nothing;
347
348 /* If X is a paradoxical SUBREG, use the inner value to determine both the
349 mode and object being reloaded. */
350 if (paradoxical_subreg_p (x))
351 {
352 x = SUBREG_REG (x);
353 reload_mode = GET_MODE (x);
354 }
355
356 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
357 is still a pseudo-register by now, it *must* have an equivalent MEM
358 but we don't want to assume that), use that equivalent when seeing if
359 a secondary reload is needed since whether or not a reload is needed
360 might be sensitive to the form of the MEM. */
361
362 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
363 && reg_equiv_mem (REGNO (x)))
364 x = reg_equiv_mem (REGNO (x));
365
366 sri.icode = CODE_FOR_nothing;
367 sri.prev_sri = prev_sri;
368 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
369 reload_mode, &sri);
370 icode = (enum insn_code) sri.icode;
371
372 /* If we don't need any secondary registers, done. */
373 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
374 return -1;
375
376 if (rclass != NO_REGS)
377 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
378 reload_mode, type, &t_icode, &sri);
379
380 /* If we will be using an insn, the secondary reload is for a
381 scratch register. */
382
383 if (icode != CODE_FOR_nothing)
384 {
385 /* If IN_P is nonzero, the reload register will be the output in
386 operand 0. If IN_P is zero, the reload register will be the input
387 in operand 1. Outputs should have an initial "=", which we must
388 skip. */
389
390 /* ??? It would be useful to be able to handle only two, or more than
391 three, operands, but for now we can only handle the case of having
392 exactly three: output, input and one temp/scratch. */
393 gcc_assert (insn_data[(int) icode].n_operands == 3);
394
395 /* ??? We currently have no way to represent a reload that needs
396 an icode to reload from an intermediate tertiary reload register.
397 We should probably have a new field in struct reload to tag a
398 chain of scratch operand reloads onto. */
399 gcc_assert (rclass == NO_REGS);
400
401 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
402 gcc_assert (*scratch_constraint == '=');
403 scratch_constraint++;
404 if (*scratch_constraint == '&')
405 scratch_constraint++;
406 letter = *scratch_constraint;
407 scratch_class = (letter == 'r' ? GENERAL_REGS
408 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
409 scratch_constraint));
410
411 rclass = scratch_class;
412 mode = insn_data[(int) icode].operand[2].mode;
413 }
414
415 /* This case isn't valid, so fail. Reload is allowed to use the same
416 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
417 in the case of a secondary register, we actually need two different
418 registers for correct code. We fail here to prevent the possibility of
419 silently generating incorrect code later.
420
421 The convention is that secondary input reloads are valid only if the
422 secondary_class is different from class. If you have such a case, you
423    cannot use secondary reloads; you must work around the problem some
424 other way.
425
426 Allow this when a reload_in/out pattern is being used. I.e. assume
427 that the generated code handles this case. */
428
429 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
430 || t_icode != CODE_FOR_nothing);
431
432 /* See if we can reuse an existing secondary reload. */
433 for (s_reload = 0; s_reload < n_reloads; s_reload++)
434 if (rld[s_reload].secondary_p
435 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
436 || reg_class_subset_p (rld[s_reload].rclass, rclass))
437 && ((in_p && rld[s_reload].inmode == mode)
438 || (! in_p && rld[s_reload].outmode == mode))
439 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
440 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
441 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
442 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
443 && (small_register_class_p (rclass)
444 || targetm.small_register_classes_for_mode_p (VOIDmode))
445 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
446 opnum, rld[s_reload].opnum))
447 {
448 if (in_p)
449 rld[s_reload].inmode = mode;
450 if (! in_p)
451 rld[s_reload].outmode = mode;
452
453 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
454 rld[s_reload].rclass = rclass;
455
456 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
457 rld[s_reload].optional &= optional;
458 rld[s_reload].secondary_p = 1;
459 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
460 opnum, rld[s_reload].opnum))
461 rld[s_reload].when_needed = RELOAD_OTHER;
462
463 break;
464 }
465
466 if (s_reload == n_reloads)
467 {
468 #ifdef SECONDARY_MEMORY_NEEDED
469 /* If we need a memory location to copy between the two reload regs,
470 set it up now. Note that we do the input case before making
471 the reload and the output case after. This is due to the
472 way reloads are output. */
473
474 if (in_p && icode == CODE_FOR_nothing
475 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
476 {
477 get_secondary_mem (x, reload_mode, opnum, type);
478
479 /* We may have just added new reloads. Make sure we add
480 the new reload at the end. */
481 s_reload = n_reloads;
482 }
483 #endif
484
485 /* We need to make a new secondary reload for this register class. */
486 rld[s_reload].in = rld[s_reload].out = 0;
487 rld[s_reload].rclass = rclass;
488
489 rld[s_reload].inmode = in_p ? mode : VOIDmode;
490 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
491 rld[s_reload].reg_rtx = 0;
492 rld[s_reload].optional = optional;
493 rld[s_reload].inc = 0;
494 /* Maybe we could combine these, but it seems too tricky. */
495 rld[s_reload].nocombine = 1;
496 rld[s_reload].in_reg = 0;
497 rld[s_reload].out_reg = 0;
498 rld[s_reload].opnum = opnum;
499 rld[s_reload].when_needed = secondary_type;
500 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
501 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
502 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
503 rld[s_reload].secondary_out_icode
504 = ! in_p ? t_icode : CODE_FOR_nothing;
505 rld[s_reload].secondary_p = 1;
506
507 n_reloads++;
508
509 #ifdef SECONDARY_MEMORY_NEEDED
510 if (! in_p && icode == CODE_FOR_nothing
511 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
512 get_secondary_mem (x, mode, opnum, type);
513 #endif
514 }
515
516 *picode = icode;
517 return s_reload;
518 }
519
520 /* If a secondary reload is needed, return its class. If both an intermediate
521    register and a scratch register are needed, we return the class of the
522 intermediate register. */
523 reg_class_t
524 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
525 rtx x)
526 {
527 enum insn_code icode;
528 secondary_reload_info sri;
529
530 sri.icode = CODE_FOR_nothing;
531 sri.prev_sri = NULL;
532 rclass
533 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
534 icode = (enum insn_code) sri.icode;
535
536 /* If there are no secondary reloads at all, we return NO_REGS.
537 If an intermediate register is needed, we return its class. */
538 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
539 return rclass;
540
541 /* No intermediate register is needed, but we have a special reload
542 pattern, which we assume for now needs a scratch register. */
543 return scratch_reload_class (icode);
544 }
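
/* A minimal sketch (purely illustrative, not a real back end) of a
   TARGET_SECONDARY_RELOAD hook as consumed above: returning a class
   requests an intermediate register of that class, while setting
   sri->icode instead requests a scratch register via a reload pattern.
   The class FP_REGS and the condition tested are assumptions.  */
#if 0
static reg_class_t
example_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri ATTRIBUTE_UNUSED)
{
  /* On this hypothetical target, loads into FP_REGS cannot come
     directly from memory, so ask for a GENERAL_REGS intermediate.  */
  if (in_p && rclass == FP_REGS && MEM_P (x))
    return GENERAL_REGS;

  /* Otherwise no secondary reload is needed.  */
  return NO_REGS;
}
#endif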
545
546 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
547 three operands, verify that operand 2 is an output operand, and return
548 its register class.
549 ??? We'd like to be able to handle any pattern with at least 2 operands,
550 for zero or more scratch registers, but that needs more infrastructure. */
551 enum reg_class
552 scratch_reload_class (enum insn_code icode)
553 {
554 const char *scratch_constraint;
555 char scratch_letter;
556 enum reg_class rclass;
557
558 gcc_assert (insn_data[(int) icode].n_operands == 3);
559 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
560 gcc_assert (*scratch_constraint == '=');
561 scratch_constraint++;
562 if (*scratch_constraint == '&')
563 scratch_constraint++;
564 scratch_letter = *scratch_constraint;
565 if (scratch_letter == 'r')
566 return GENERAL_REGS;
567 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
568 scratch_constraint);
569 gcc_assert (rclass != NO_REGS);
570 return rclass;
571 }
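
/* For example, a scratch operand with constraint "=&r" yields GENERAL_REGS
   here; "=&d" would yield whatever class REG_CLASS_FROM_CONSTRAINT maps
   'd' to on the target (illustrative; 'd' is a hypothetical constraint).  */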
572 \f
573 #ifdef SECONDARY_MEMORY_NEEDED
574
575 /* Return a memory location that will be used to copy X in mode MODE.
576 If we haven't already made a location for this mode in this insn,
577 call find_reloads_address on the location being returned. */
578
579 rtx
580 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
581 int opnum, enum reload_type type)
582 {
583 rtx loc;
584 int mem_valid;
585
586 /* By default, if MODE is narrower than a word, widen it to a word.
587 This is required because most machines that require these memory
588    locations do not support short loads and stores from all registers
589 (e.g., FP registers). */
590
591 #ifdef SECONDARY_MEMORY_NEEDED_MODE
592 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
593 #else
594 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
595 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
596 #endif
597
598 /* If we already have made a MEM for this operand in MODE, return it. */
599 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
600 return secondary_memlocs_elim[(int) mode][opnum];
601
602 /* If this is the first time we've tried to get a MEM for this mode,
603 allocate a new one. `something_changed' in reload will get set
604 by noticing that the frame size has changed. */
605
606 if (secondary_memlocs[(int) mode] == 0)
607 {
608 #ifdef SECONDARY_MEMORY_NEEDED_RTX
609 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
610 #else
611 secondary_memlocs[(int) mode]
612 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
613 #endif
614 }
615
616 /* Get a version of the address doing any eliminations needed. If that
617 didn't give us a new MEM, make a new one if it isn't valid. */
618
619 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
620 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
621 MEM_ADDR_SPACE (loc));
622
623 if (! mem_valid && loc == secondary_memlocs[(int) mode])
624 loc = copy_rtx (loc);
625
626 /* The only time the call below will do anything is if the stack
627 offset is too large. In that case IND_LEVELS doesn't matter, so we
628 can just pass a zero. Adjust the type to be the address of the
629 corresponding object. If the address was valid, save the eliminated
630 address. If it wasn't valid, we need to make a reload each time, so
631 don't save it. */
632
633 if (! mem_valid)
634 {
635 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
636 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
637 : RELOAD_OTHER);
638
639 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
640 opnum, type, 0, 0);
641 }
642
643 secondary_memlocs_elim[(int) mode][opnum] = loc;
644 if (secondary_memlocs_elim_used <= (int)mode)
645 secondary_memlocs_elim_used = (int)mode + 1;
646 return loc;
647 }
648
649 /* Clear any secondary memory locations we've made. */
650
651 void
652 clear_secondary_mem (void)
653 {
654 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
655 }
656 #endif /* SECONDARY_MEMORY_NEEDED */
657 \f
658
659 /* Find the largest class which has at least one register valid in
660 mode INNER, and which for every such register, that register number
661 plus N is also valid in OUTER (if in range) and is cheap to move
662    into DEST_REGNO.  Such a class must exist.  */
663
664 static enum reg_class
665 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
666 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
667 unsigned int dest_regno ATTRIBUTE_UNUSED)
668 {
669 int best_cost = -1;
670 int rclass;
671 int regno;
672 enum reg_class best_class = NO_REGS;
673 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
674 unsigned int best_size = 0;
675 int cost;
676
677 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
678 {
679 int bad = 0;
680 int good = 0;
681 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
682 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
683 {
684 if (HARD_REGNO_MODE_OK (regno, inner))
685 {
686 good = 1;
687 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
688 || ! HARD_REGNO_MODE_OK (regno + n, outer))
689 bad = 1;
690 }
691 }
692
693 if (bad || !good)
694 continue;
695 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
696
697 if ((reg_class_size[rclass] > best_size
698 && (best_cost < 0 || best_cost >= cost))
699 || best_cost > cost)
700 {
701 best_class = (enum reg_class) rclass;
702 best_size = reg_class_size[rclass];
703 best_cost = register_move_cost (outer, (enum reg_class) rclass,
704 dest_class);
705 }
706 }
707
708 gcc_assert (best_size != 0);
709
710 return best_class;
711 }
712 \f
713 /* Return the number of a previously made reload that can be combined with
714 a new one, or n_reloads if none of the existing reloads can be used.
715 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
716    push_reload; they determine the kind of the new reload that we try to
717 combine. P_IN points to the corresponding value of IN, which can be
718 modified by this function.
719 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
720
721 static int
722 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
723 enum reload_type type, int opnum, int dont_share)
724 {
725 rtx in = *p_in;
726 int i;
727 /* We can't merge two reloads if the output of either one is
728 earlyclobbered. */
729
730 if (earlyclobber_operand_p (out))
731 return n_reloads;
732
733 /* We can use an existing reload if the class is right
734 and at least one of IN and OUT is a match
735 and the other is at worst neutral.
736 (A zero compared against anything is neutral.)
737
738 For targets with small register classes, don't use existing reloads
739 unless they are for the same thing since that can cause us to need
740 more reload registers than we otherwise would. */
741
742 for (i = 0; i < n_reloads; i++)
743 if ((reg_class_subset_p (rclass, rld[i].rclass)
744 || reg_class_subset_p (rld[i].rclass, rclass))
745 /* If the existing reload has a register, it must fit our class. */
746 && (rld[i].reg_rtx == 0
747 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
748 true_regnum (rld[i].reg_rtx)))
749 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
750 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
751 || (out != 0 && MATCHES (rld[i].out, out)
752 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
753 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
754 && (small_register_class_p (rclass)
755 || targetm.small_register_classes_for_mode_p (VOIDmode))
756 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
757 return i;
758
759 /* Reloading a plain reg for input can match a reload to postincrement
760 that reg, since the postincrement's value is the right value.
761 Likewise, it can match a preincrement reload, since we regard
762 the preincrementation as happening before any ref in this insn
763 to that register. */
764 for (i = 0; i < n_reloads; i++)
765 if ((reg_class_subset_p (rclass, rld[i].rclass)
766 || reg_class_subset_p (rld[i].rclass, rclass))
767 /* If the existing reload has a register, it must fit our
768 class. */
769 && (rld[i].reg_rtx == 0
770 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
771 true_regnum (rld[i].reg_rtx)))
772 && out == 0 && rld[i].out == 0 && rld[i].in != 0
773 && ((REG_P (in)
774 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
775 && MATCHES (XEXP (rld[i].in, 0), in))
776 || (REG_P (rld[i].in)
777 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
778 && MATCHES (XEXP (in, 0), rld[i].in)))
779 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
780 && (small_register_class_p (rclass)
781 || targetm.small_register_classes_for_mode_p (VOIDmode))
782 && MERGABLE_RELOADS (type, rld[i].when_needed,
783 opnum, rld[i].opnum))
784 {
785 /* Make sure reload_in ultimately has the increment,
786 not the plain register. */
787 if (REG_P (in))
788 *p_in = rld[i].in;
789 return i;
790 }
791 return n_reloads;
792 }
793
794 /* Return nonzero if X is a SUBREG which will require reloading of its
795 SUBREG_REG expression. */
796
797 static int
798 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
799 {
800 rtx inner;
801
802 /* Only SUBREGs are problematical. */
803 if (GET_CODE (x) != SUBREG)
804 return 0;
805
806 inner = SUBREG_REG (x);
807
808 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
809 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
810 return 1;
811
812 /* If INNER is not a hard register, then INNER will not need to
813 be reloaded. */
814 if (!REG_P (inner)
815 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
816 return 0;
817
818 /* If INNER is not ok for MODE, then INNER will need reloading. */
819 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
820 return 1;
821
822   /* If the outer part is a word or smaller, INNER is larger than a
823      word, this is an output reload, and the number of regs for INNER is
824      not the same as the number of words in INNER, then INNER will need
       reloading.  */
825 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
826 && output
827 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
828 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
829 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
830 }
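
/* For instance, on a hypothetical 32-bit target where DFmode fits in a
   single FP register but spans two words, an output reference through
   (subreg:SI (reg:DF fr0) 0) requires reloading the inner register:
   even if SImode is valid in fr0, the outer mode is word-sized, the
   inner mode is wider than a word, and the number of hard regs (1)
   differs from the number of words (2).  */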
831
832 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
833 requiring an extra reload register. The caller has already found that
834 IN contains some reference to REGNO, so check that we can produce the
835 new value in a single step. E.g. if we have
836 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
837 instruction that adds one to a register, this should succeed.
838 However, if we have something like
839 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
840 needs to be loaded into a register first, we need a separate reload
841 register.
842    Such PLUS reloads are generated by find_reloads_address_part.
843 The out-of-range PLUS expressions are usually introduced in the instruction
844 patterns by register elimination and substituting pseudos without a home
845    by their function-invariant equivalents.  */
846 static int
847 can_reload_into (rtx in, int regno, enum machine_mode mode)
848 {
849 rtx dst, test_insn;
850 int r = 0;
851 struct recog_data save_recog_data;
852
853 /* For matching constraints, we often get notional input reloads where
854 we want to use the original register as the reload register. I.e.
855 technically this is a non-optional input-output reload, but IN is
856 already a valid register, and has been chosen as the reload register.
857 Speed this up, since it trivially works. */
858 if (REG_P (in))
859 return 1;
860
861 /* To test MEMs properly, we'd have to take into account all the reloads
862 that are already scheduled, which can become quite complicated.
863 And since we've already handled address reloads for this MEM, it
864 should always succeed anyway. */
865 if (MEM_P (in))
866 return 1;
867
868 /* If we can make a simple SET insn that does the job, everything should
869 be fine. */
870 dst = gen_rtx_REG (mode, regno);
871 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
872 save_recog_data = recog_data;
873 if (recog_memoized (test_insn) >= 0)
874 {
875 extract_insn (test_insn);
876 r = constrain_operands (1);
877 }
878 recog_data = save_recog_data;
879 return r;
880 }
881
882 /* Record one reload that needs to be performed.
883 IN is an rtx saying where the data are to be found before this instruction.
884 OUT says where they must be stored after the instruction.
885 (IN is zero for data not read, and OUT is zero for data not written.)
886 INLOC and OUTLOC point to the places in the instructions where
887 IN and OUT were found.
888 If IN and OUT are both nonzero, it means the same register must be used
889 to reload both IN and OUT.
890
891 RCLASS is a register class required for the reloaded data.
892 INMODE is the machine mode that the instruction requires
893 for the reg that replaces IN and OUTMODE is likewise for OUT.
894
895 If IN is zero, then OUT's location and mode should be passed as
896 INLOC and INMODE.
897
898    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
899
900 OPTIONAL nonzero means this reload does not need to be performed:
901 it can be discarded if that is more convenient.
902
903 OPNUM and TYPE say what the purpose of this reload is.
904
905 The return value is the reload-number for this reload.
906
907 If both IN and OUT are nonzero, in some rare cases we might
908 want to make two separate reloads. (Actually we never do this now.)
909 Therefore, the reload-number for OUT is stored in
910 output_reloadnum when we return; the return value applies to IN.
911 Usually (presently always), when IN and OUT are nonzero,
912 the two reload-numbers are equal, but the caller should be careful to
913 distinguish them. */
914
915 int
916 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
917 enum reg_class rclass, enum machine_mode inmode,
918 enum machine_mode outmode, int strict_low, int optional,
919 int opnum, enum reload_type type)
920 {
921 int i;
922 int dont_share = 0;
923 int dont_remove_subreg = 0;
924 #ifdef LIMIT_RELOAD_CLASS
925 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
926 #endif
927 int secondary_in_reload = -1, secondary_out_reload = -1;
928 enum insn_code secondary_in_icode = CODE_FOR_nothing;
929 enum insn_code secondary_out_icode = CODE_FOR_nothing;
930
931 /* INMODE and/or OUTMODE could be VOIDmode if no mode
932 has been specified for the operand. In that case,
933 use the operand's mode as the mode to reload. */
934 if (inmode == VOIDmode && in != 0)
935 inmode = GET_MODE (in);
936 if (outmode == VOIDmode && out != 0)
937 outmode = GET_MODE (out);
938
939   /* If find_reloads and friends have so far failed to replace a pseudo
940      with a constant from reg_equiv_constant, something went wrong
941      beforehand.
942 Note that it can't simply be done here if we missed it earlier
943 since the constant might need to be pushed into the literal pool
944 and the resulting memref would probably need further
945 reloading. */
946 if (in != 0 && REG_P (in))
947 {
948 int regno = REGNO (in);
949
950 gcc_assert (regno < FIRST_PSEUDO_REGISTER
951 || reg_renumber[regno] >= 0
952 || reg_equiv_constant (regno) == NULL_RTX);
953 }
954
955 /* reg_equiv_constant only contains constants which are obviously
956 not appropriate as destination. So if we would need to replace
957 the destination pseudo with a constant we are in real
958 trouble. */
959 if (out != 0 && REG_P (out))
960 {
961 int regno = REGNO (out);
962
963 gcc_assert (regno < FIRST_PSEUDO_REGISTER
964 || reg_renumber[regno] >= 0
965 || reg_equiv_constant (regno) == NULL_RTX);
966 }
967
968 /* If we have a read-write operand with an address side-effect,
969 change either IN or OUT so the side-effect happens only once. */
970 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
971 switch (GET_CODE (XEXP (in, 0)))
972 {
973 case POST_INC: case POST_DEC: case POST_MODIFY:
974 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
975 break;
976
977 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
978 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
979 break;
980
981 default:
982 break;
983 }
984
985 /* If we are reloading a (SUBREG constant ...), really reload just the
986 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
987 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
988 a pseudo and hence will become a MEM) with M1 wider than M2 and the
989 register is a pseudo, also reload the inside expression.
990 For machines that extend byte loads, do this for any SUBREG of a pseudo
991 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
992 M2 is an integral mode that gets extended when loaded.
993 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
994 either M1 is not valid for R or M2 is wider than a word but we only
995 need one word to store an M2-sized quantity in R.
996 (However, if OUT is nonzero, we need to reload the reg *and*
997      the subreg, so do nothing here, and let the following statement handle it.)
998
999 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1000 we can't handle it here because CONST_INT does not indicate a mode.
1001
1002 Similarly, we must reload the inside expression if we have a
1003 STRICT_LOW_PART (presumably, in == out in this case).
1004
1005 Also reload the inner expression if it does not require a secondary
1006 reload but the SUBREG does.
1007
1008 Finally, reload the inner expression if it is a register that is in
1009 the class whose registers cannot be referenced in a different size
1010 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1011 cannot reload just the inside since we might end up with the wrong
1012 register class. But if it is inside a STRICT_LOW_PART, we have
1013 no choice, so we hope we do get the right register class there. */
1014
1015 if (in != 0 && GET_CODE (in) == SUBREG
1016 && (subreg_lowpart_p (in) || strict_low)
1017 #ifdef CANNOT_CHANGE_MODE_CLASS
1018 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1019 #endif
1020 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1021 && (CONSTANT_P (SUBREG_REG (in))
1022 || GET_CODE (SUBREG_REG (in)) == PLUS
1023 || strict_low
1024 || (((REG_P (SUBREG_REG (in))
1025 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1026 || MEM_P (SUBREG_REG (in)))
1027 && ((GET_MODE_PRECISION (inmode)
1028 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1029 #ifdef LOAD_EXTEND_OP
1030 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1031 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1032 <= UNITS_PER_WORD)
1033 && (GET_MODE_PRECISION (inmode)
1034 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1035 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1036 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1037 #endif
1038 #ifdef WORD_REGISTER_OPERATIONS
1039 || ((GET_MODE_PRECISION (inmode)
1040 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1041 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1042 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1043 / UNITS_PER_WORD)))
1044 #endif
1045 ))
1046 || (REG_P (SUBREG_REG (in))
1047 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1048 /* The case where out is nonzero
1049 is handled differently in the following statement. */
1050 && (out == 0 || subreg_lowpart_p (in))
1051 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1052 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1053 > UNITS_PER_WORD)
1054 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1055 / UNITS_PER_WORD)
1056 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1057 [GET_MODE (SUBREG_REG (in))]))
1058 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1059 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1060 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1061 SUBREG_REG (in))
1062 == NO_REGS))
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 || (REG_P (SUBREG_REG (in))
1065 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1066 && REG_CANNOT_CHANGE_MODE_P
1067 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1068 #endif
1069 ))
1070 {
1071 #ifdef LIMIT_RELOAD_CLASS
1072 in_subreg_loc = inloc;
1073 #endif
1074 inloc = &SUBREG_REG (in);
1075 in = *inloc;
1076 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1077 if (MEM_P (in))
1078 /* This is supposed to happen only for paradoxical subregs made by
1079 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1080 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1081 #endif
1082 inmode = GET_MODE (in);
1083 }
1084
1085 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1086 either M1 is not valid for R or M2 is wider than a word but we only
1087 need one word to store an M2-sized quantity in R.
1088
1089 However, we must reload the inner reg *as well as* the subreg in
1090 that case. */
1091
1092 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1093 code above. This can happen if SUBREG_BYTE != 0. */
1094
1095 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1096 {
1097 enum reg_class in_class = rclass;
1098
1099 if (REG_P (SUBREG_REG (in)))
1100 in_class
1101 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1102 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1103 GET_MODE (SUBREG_REG (in)),
1104 SUBREG_BYTE (in),
1105 GET_MODE (in)),
1106 REGNO (SUBREG_REG (in)));
1107
1108 /* This relies on the fact that emit_reload_insns outputs the
1109 instructions for input reloads of type RELOAD_OTHER in the same
1110 order as the reloads. Thus if the outer reload is also of type
1111 RELOAD_OTHER, we are guaranteed that this inner reload will be
1112 output before the outer reload. */
1113 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1114 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1115 dont_remove_subreg = 1;
1116 }
1117
1118 /* Similarly for paradoxical and problematical SUBREGs on the output.
1119 Note that there is no reason we need worry about the previous value
1120 of SUBREG_REG (out); even if wider than out,
1121 storing in a subreg is entitled to clobber it all
1122 (except in the case of STRICT_LOW_PART,
1123 and in that case the constraint should label it input-output.) */
1124 if (out != 0 && GET_CODE (out) == SUBREG
1125 && (subreg_lowpart_p (out) || strict_low)
1126 #ifdef CANNOT_CHANGE_MODE_CLASS
1127 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1128 #endif
1129 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1130 && (CONSTANT_P (SUBREG_REG (out))
1131 || strict_low
1132 || (((REG_P (SUBREG_REG (out))
1133 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1134 || MEM_P (SUBREG_REG (out)))
1135 && ((GET_MODE_PRECISION (outmode)
1136 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1137 #ifdef WORD_REGISTER_OPERATIONS
1138 || ((GET_MODE_PRECISION (outmode)
1139 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1140 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1141 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1142 / UNITS_PER_WORD)))
1143 #endif
1144 ))
1145 || (REG_P (SUBREG_REG (out))
1146 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1147 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1148 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1149 > UNITS_PER_WORD)
1150 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1151 / UNITS_PER_WORD)
1152 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1153 [GET_MODE (SUBREG_REG (out))]))
1154 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1155 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1156 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1157 SUBREG_REG (out))
1158 == NO_REGS))
1159 #ifdef CANNOT_CHANGE_MODE_CLASS
1160 || (REG_P (SUBREG_REG (out))
1161 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1162 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1163 GET_MODE (SUBREG_REG (out)),
1164 outmode))
1165 #endif
1166 ))
1167 {
1168 #ifdef LIMIT_RELOAD_CLASS
1169 out_subreg_loc = outloc;
1170 #endif
1171 outloc = &SUBREG_REG (out);
1172 out = *outloc;
1173 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1174 gcc_assert (!MEM_P (out)
1175 || GET_MODE_SIZE (GET_MODE (out))
1176 <= GET_MODE_SIZE (outmode));
1177 #endif
1178 outmode = GET_MODE (out);
1179 }
1180
1181 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1182 either M1 is not valid for R or M2 is wider than a word but we only
1183 need one word to store an M2-sized quantity in R.
1184
1185 However, we must reload the inner reg *as well as* the subreg in
1186 that case. In this case, the inner reg is an in-out reload. */
1187
1188 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1189 {
1190 /* This relies on the fact that emit_reload_insns outputs the
1191 instructions for output reloads of type RELOAD_OTHER in reverse
1192 order of the reloads. Thus if the outer reload is also of type
1193 RELOAD_OTHER, we are guaranteed that this inner reload will be
1194 output after the outer reload. */
1195 dont_remove_subreg = 1;
1196 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1197 &SUBREG_REG (out),
1198 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1199 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1200 GET_MODE (SUBREG_REG (out)),
1201 SUBREG_BYTE (out),
1202 GET_MODE (out)),
1203 REGNO (SUBREG_REG (out))),
1204 VOIDmode, VOIDmode, 0, 0,
1205 opnum, RELOAD_OTHER);
1206 }
1207
1208 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1209 if (in != 0 && out != 0 && MEM_P (out)
1210 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1211 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1212 dont_share = 1;
1213
1214 /* If IN is a SUBREG of a hard register, make a new REG. This
1215 simplifies some of the cases below. */
1216
1217 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1218 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1219 && ! dont_remove_subreg)
1220 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1221
1222 /* Similarly for OUT. */
1223 if (out != 0 && GET_CODE (out) == SUBREG
1224 && REG_P (SUBREG_REG (out))
1225 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1226 && ! dont_remove_subreg)
1227 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1228
1229 /* Narrow down the class of register wanted if that is
1230 desirable on this machine for efficiency. */
1231 {
1232 reg_class_t preferred_class = rclass;
1233
1234 if (in != 0)
1235 preferred_class = targetm.preferred_reload_class (in, rclass);
1236
1237 /* Output reloads may need analogous treatment, different in detail. */
1238 if (out != 0)
1239 preferred_class
1240 = targetm.preferred_output_reload_class (out, preferred_class);
1241
1242 /* Discard what the target said if we cannot do it. */
1243 if (preferred_class != NO_REGS
1244 || (optional && type == RELOAD_FOR_OUTPUT))
1245 rclass = (enum reg_class) preferred_class;
1246 }
1247
1248 /* Make sure we use a class that can handle the actual pseudo
1249 inside any subreg. For example, on the 386, QImode regs
1250 can appear within SImode subregs. Although GENERAL_REGS
1251 can handle SImode, QImode needs a smaller class. */
1252 #ifdef LIMIT_RELOAD_CLASS
1253 if (in_subreg_loc)
1254 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1255 else if (in != 0 && GET_CODE (in) == SUBREG)
1256 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1257
1258 if (out_subreg_loc)
1259 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1260 if (out != 0 && GET_CODE (out) == SUBREG)
1261 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1262 #endif
1263
1264 /* Verify that this class is at least possible for the mode that
1265 is specified. */
1266 if (this_insn_is_asm)
1267 {
1268 enum machine_mode mode;
1269 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1270 mode = inmode;
1271 else
1272 mode = outmode;
1273 if (mode == VOIDmode)
1274 {
1275 error_for_asm (this_insn, "cannot reload integer constant "
1276 "operand in %<asm%>");
1277 mode = word_mode;
1278 if (in != 0)
1279 inmode = word_mode;
1280 if (out != 0)
1281 outmode = word_mode;
1282 }
1283 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1284 if (HARD_REGNO_MODE_OK (i, mode)
1285 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1286 break;
1287 if (i == FIRST_PSEUDO_REGISTER)
1288 {
1289 error_for_asm (this_insn, "impossible register constraint "
1290 "in %<asm%>");
1291 /* Avoid further trouble with this insn. */
1292 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1293 /* We used to continue here setting class to ALL_REGS, but it triggers
1294 	   a sanity check on i386 for:
1295 void foo(long double d)
1296 {
1297 asm("" :: "a" (d));
1298 }
1299 Returning zero here ought to be safe as we take care in
1300 find_reloads to not process the reloads when instruction was
1301 replaced by USE. */
1302
1303 return 0;
1304 }
1305 }
1306
1307 /* Optional output reloads are always OK even if we have no register class,
1308 since the function of these reloads is only to have spill_reg_store etc.
1309 set, so that the storing insn can be deleted later. */
1310 gcc_assert (rclass != NO_REGS
1311 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1312
1313 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1314
1315 if (i == n_reloads)
1316 {
1317 /* See if we need a secondary reload register to move between CLASS
1318 and IN or CLASS and OUT. Get the icode and push any required reloads
1319 needed for each of them if so. */
1320
1321 if (in != 0)
1322 secondary_in_reload
1323 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1324 &secondary_in_icode, NULL);
1325 if (out != 0 && GET_CODE (out) != SCRATCH)
1326 secondary_out_reload
1327 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1328 type, &secondary_out_icode, NULL);
1329
1330 /* We found no existing reload suitable for re-use.
1331 So add an additional reload. */
1332
1333 #ifdef SECONDARY_MEMORY_NEEDED
1334 /* If a memory location is needed for the copy, make one. */
1335 if (in != 0
1336 && (REG_P (in)
1337 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1338 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1339 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1340 rclass, inmode))
1341 get_secondary_mem (in, inmode, opnum, type);
1342 #endif
1343
1344 i = n_reloads;
1345 rld[i].in = in;
1346 rld[i].out = out;
1347 rld[i].rclass = rclass;
1348 rld[i].inmode = inmode;
1349 rld[i].outmode = outmode;
1350 rld[i].reg_rtx = 0;
1351 rld[i].optional = optional;
1352 rld[i].inc = 0;
1353 rld[i].nocombine = 0;
1354 rld[i].in_reg = inloc ? *inloc : 0;
1355 rld[i].out_reg = outloc ? *outloc : 0;
1356 rld[i].opnum = opnum;
1357 rld[i].when_needed = type;
1358 rld[i].secondary_in_reload = secondary_in_reload;
1359 rld[i].secondary_out_reload = secondary_out_reload;
1360 rld[i].secondary_in_icode = secondary_in_icode;
1361 rld[i].secondary_out_icode = secondary_out_icode;
1362 rld[i].secondary_p = 0;
1363
1364 n_reloads++;
1365
1366 #ifdef SECONDARY_MEMORY_NEEDED
1367 if (out != 0
1368 && (REG_P (out)
1369 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1370 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1371 && SECONDARY_MEMORY_NEEDED (rclass,
1372 REGNO_REG_CLASS (reg_or_subregno (out)),
1373 outmode))
1374 get_secondary_mem (out, outmode, opnum, type);
1375 #endif
1376 }
1377 else
1378 {
1379 /* We are reusing an existing reload,
1380 but we may have additional information for it.
1381 For example, we may now have both IN and OUT
1382 while the old one may have just one of them. */
1383
1384 /* The modes can be different. If they are, we want to reload in
1385 the larger mode, so that the value is valid for both modes. */
1386 if (inmode != VOIDmode
1387 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1388 rld[i].inmode = inmode;
1389 if (outmode != VOIDmode
1390 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1391 rld[i].outmode = outmode;
1392 if (in != 0)
1393 {
1394 rtx in_reg = inloc ? *inloc : 0;
1395 /* If we merge reloads for two distinct rtl expressions that
1396 are identical in content, there might be duplicate address
1397 reloads. Remove the extra set now, so that if we later find
1398 that we can inherit this reload, we can get rid of the
1399 address reloads altogether.
1400
1401 Do not do this if both reloads are optional since the result
1402 would be an optional reload which could potentially leave
1403 unresolved address replacements.
1404
1405 It is not sufficient to call transfer_replacements since
1406 choose_reload_regs will remove the replacements for address
1407 reloads of inherited reloads which results in the same
1408 problem. */
1409 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1410 && ! (rld[i].optional && optional))
1411 {
1412 /* We must keep the address reload with the lower operand
1413 number alive. */
1414 if (opnum > rld[i].opnum)
1415 {
1416 remove_address_replacements (in);
1417 in = rld[i].in;
1418 in_reg = rld[i].in_reg;
1419 }
1420 else
1421 remove_address_replacements (rld[i].in);
1422 }
1423 	  /* When emitting reloads we don't look only at the in- and outmode,
1424 	     but also directly at the operands (in and out).  So we can't simply
1425 	     overwrite them with whatever we have found for this (to-be-merged)
1426 	     reload; we have to "merge" that too.
1427 Reusing another reload already verified that we deal with the
1428 same operands, just possibly in different modes. So we
1429 overwrite the operands only when the new mode is larger.
1430 See also PR33613. */
1431 if (!rld[i].in
1432 || GET_MODE_SIZE (GET_MODE (in))
1433 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1434 rld[i].in = in;
1435 if (!rld[i].in_reg
1436 || (in_reg
1437 && GET_MODE_SIZE (GET_MODE (in_reg))
1438 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1439 rld[i].in_reg = in_reg;
1440 }
1441 if (out != 0)
1442 {
1443 if (!rld[i].out
1444 || (out
1445 && GET_MODE_SIZE (GET_MODE (out))
1446 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1447 rld[i].out = out;
1448 if (outloc
1449 && (!rld[i].out_reg
1450 || GET_MODE_SIZE (GET_MODE (*outloc))
1451 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1452 rld[i].out_reg = *outloc;
1453 }
1454 if (reg_class_subset_p (rclass, rld[i].rclass))
1455 rld[i].rclass = rclass;
1456 rld[i].optional &= optional;
1457 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1458 opnum, rld[i].opnum))
1459 rld[i].when_needed = RELOAD_OTHER;
1460 rld[i].opnum = MIN (rld[i].opnum, opnum);
1461 }
1462
1463 /* If the ostensible rtx being reloaded differs from the rtx found
1464 in the location to substitute, this reload is not safe to combine
1465 because we cannot reliably tell whether it appears in the insn. */
1466
1467 if (in != 0 && in != *inloc)
1468 rld[i].nocombine = 1;
1469
1470 #if 0
1471 /* This was replaced by changes in find_reloads_address_1 and the new
1472 function inc_for_reload, which go with a new meaning of reload_inc. */
1473
1474 /* If this is an IN/OUT reload in an insn that sets the CC,
1475 it must be for an autoincrement. It doesn't work to store
1476 the incremented value after the insn because that would clobber the CC.
1477 So we must do the increment of the value reloaded from,
1478 increment it, store it back, then decrement again. */
1479 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1480 {
1481 out = 0;
1482 rld[i].out = 0;
1483 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1484 /* If we did not find a nonzero amount-to-increment-by,
1485 that contradicts the belief that IN is being incremented
1486 in an address in this insn. */
1487 gcc_assert (rld[i].inc != 0);
1488 }
1489 #endif
1490
1491 /* If we will replace IN and OUT with the reload-reg,
1492 record where they are located so that substitution need
1493 not do a tree walk. */
1494
1495 if (replace_reloads)
1496 {
1497 if (inloc != 0)
1498 {
1499 struct replacement *r = &replacements[n_replacements++];
1500 r->what = i;
1501 r->where = inloc;
1502 r->mode = inmode;
1503 }
1504 if (outloc != 0 && outloc != inloc)
1505 {
1506 struct replacement *r = &replacements[n_replacements++];
1507 r->what = i;
1508 r->where = outloc;
1509 r->mode = outmode;
1510 }
1511 }
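
 /* For instance (an illustrative note, not from the original sources):
    when IN and OUT of this reload live at two distinct locations in the
    insn, two entries are recorded here, both with WHAT == I but with
    different WHERE pointers; subst_reloads later stores the chosen
    reload register rtx through each recorded location. */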
1512
1513 /* If this reload is just being introduced and it has both
1514 an incoming quantity and an outgoing quantity that are
1515 supposed to be made to match, see if either one of the two
1516 can serve as the place to reload into.
1517
1518 If one of them is acceptable, set rld[i].reg_rtx
1519 to that one. */
1520
1521 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1522 {
1523 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1524 inmode, outmode,
1525 rld[i].rclass, i,
1526 earlyclobber_operand_p (out));
1527
1528 /* If the outgoing register already contains the same value
1529 as the incoming one, we can dispense with loading it.
1530 The easiest way to tell the caller that is to give a phony
1531 value for the incoming operand (same as outgoing one). */
1532 if (rld[i].reg_rtx == out
1533 && (REG_P (in) || CONSTANT_P (in))
1534 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1535 static_reload_reg_p, i, inmode))
1536 rld[i].in = out;
1537 }
1538
1539 /* If this is an input reload and the operand contains a register that
1540 dies in this insn and is used nowhere else, see if it is the right class
1541 to be used for this reload. Use it if so. (This occurs most commonly
1542 in the case of paradoxical SUBREGs and in-out reloads.) We cannot do
1543 this if it is also an output reload that mentions the register unless
1544 the output is a SUBREG that clobbers an entire register.
1545
1546 Note that the operand might be one of the spill regs, if it is a
1547 pseudo reg and we are in a block where spilling has not taken place.
1548 But if there is no spilling in this block, that is OK.
1549 An explicitly used hard reg cannot be a spill reg. */
1550
1551 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1552 {
1553 rtx note;
1554 int regno;
1555 enum machine_mode rel_mode = inmode;
1556
1557 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1558 rel_mode = outmode;
1559
1560 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1561 if (REG_NOTE_KIND (note) == REG_DEAD
1562 && REG_P (XEXP (note, 0))
1563 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1564 && reg_mentioned_p (XEXP (note, 0), in)
1565 /* Check that a former pseudo is valid; see find_dummy_reload. */
1566 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1567 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1568 ORIGINAL_REGNO (XEXP (note, 0)))
1569 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1570 && ! refers_to_regno_for_reload_p (regno,
1571 end_hard_regno (rel_mode,
1572 regno),
1573 PATTERN (this_insn), inloc)
1574 /* If this is also an output reload, IN cannot be used as
1575 the reload register if it is set in this insn unless IN
1576 is also OUT. */
1577 && (out == 0 || in == out
1578 || ! hard_reg_set_here_p (regno,
1579 end_hard_regno (rel_mode, regno),
1580 PATTERN (this_insn)))
1581 /* ??? Why is this code so different from the previous?
1582 Is there any simple coherent way to describe the two together?
1583 What's going on here? */
1584 && (in != out
1585 || (GET_CODE (in) == SUBREG
1586 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1587 / UNITS_PER_WORD)
1588 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1589 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1590 /* Make sure the operand fits in the reg that dies. */
1591 && (GET_MODE_SIZE (rel_mode)
1592 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1593 && HARD_REGNO_MODE_OK (regno, inmode)
1594 && HARD_REGNO_MODE_OK (regno, outmode))
1595 {
1596 unsigned int offs;
1597 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1598 hard_regno_nregs[regno][outmode]);
1599
1600 for (offs = 0; offs < nregs; offs++)
1601 if (fixed_regs[regno + offs]
1602 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1603 regno + offs))
1604 break;
1605
1606 if (offs == nregs
1607 && (! (refers_to_regno_for_reload_p
1608 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1609 || can_reload_into (in, regno, inmode)))
1610 {
1611 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1612 break;
1613 }
1614 }
1615 }
1616
1617 if (out)
1618 output_reloadnum = i;
1619
1620 return i;
1621 }
1622
1623 /* Record an additional place we must replace a value
1624 for which we have already recorded a reload.
1625 RELOADNUM is the value returned by push_reload
1626 when the reload was recorded.
1627 This is used in insn patterns that use match_dup. */
1628
1629 static void
1630 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1631 {
1632 if (replace_reloads)
1633 {
1634 struct replacement *r = &replacements[n_replacements++];
1635 r->what = reloadnum;
1636 r->where = loc;
1637 r->mode = mode;
1638 }
1639 }
1640
1641 /* Duplicate any replacement we have recorded to apply at
1642 location ORIG_LOC to also be performed at DUP_LOC.
1643 This is used in insn patterns that use match_dup. */
1644
1645 static void
1646 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1647 {
1648 int i, n = n_replacements;
1649
1650 for (i = 0; i < n; i++)
1651 {
1652 struct replacement *r = &replacements[i];
1653 if (r->where == orig_loc)
1654 push_replacement (dup_loc, r->what, r->mode);
1655 }
1656 }
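
/* Illustrative sketch (hypothetical call sites, not from the original
   sources): for an insn pattern containing (match_dup 0), find_reloads
   first records a replacement at the primary location of operand 0 and
   then copies it to the duplicate location:

       push_replacement (recog_data.operand_loc[0], rnum, operand_mode[0]);
       dup_replacements (recog_data.dup_loc[d], recog_data.operand_loc[0]);

   Here RNUM and D are placeholders; the sketch only shows how the two
   routines cooperate. */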
1657 \f
1658 /* Transfer all replacements that used to be in reload FROM to be in
1659 reload TO. */
1660
1661 void
1662 transfer_replacements (int to, int from)
1663 {
1664 int i;
1665
1666 for (i = 0; i < n_replacements; i++)
1667 if (replacements[i].what == from)
1668 replacements[i].what = to;
1669 }
1670 \f
1671 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1672 or a subpart of it. If we have any replacements registered for IN_RTX,
1673 cancel the reloads that were supposed to load them.
1674 Return nonzero if we canceled any reloads. */
1675 int
1676 remove_address_replacements (rtx in_rtx)
1677 {
1678 int i, j;
1679 char reload_flags[MAX_RELOADS];
1680 int something_changed = 0;
1681
1682 memset (reload_flags, 0, sizeof reload_flags);
1683 for (i = 0, j = 0; i < n_replacements; i++)
1684 {
1685 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1686 reload_flags[replacements[i].what] |= 1;
1687 else
1688 {
1689 replacements[j++] = replacements[i];
1690 reload_flags[replacements[i].what] |= 2;
1691 }
1692 }
1693 /* Note that the following store must be done before the recursive calls. */
1694 n_replacements = j;
1695
1696 for (i = n_reloads - 1; i >= 0; i--)
1697 {
1698 if (reload_flags[i] == 1)
1699 {
1700 deallocate_reload_reg (i);
1701 remove_address_replacements (rld[i].in);
1702 rld[i].in = 0;
1703 something_changed = 1;
1704 }
1705 }
1706 return something_changed;
1707 }
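
/* A short example of the flag encoding above (illustrative only): a
   reload all of whose replacements lie inside IN_RTX ends up with
   reload_flags[i] == 1 and is cancelled (and its own input is processed
   recursively); a reload that also has replacements elsewhere gets bit
   2 set as well and is kept, since cancelling it would leave those
   other replacements unresolved. */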
1708 \f
1709 /* If there is only one output reload, and it is not for an earlyclobber
1710 operand, try to combine it with a (logically unrelated) input reload
1711 to reduce the number of reload registers needed.
1712
1713 This is safe if the input reload does not appear in
1714 the value being output-reloaded, because this implies
1715 it is not needed any more once the original insn completes.
1716
1717 If that doesn't work, see if we can use any of the registers that
1718 die in this insn as a reload register. We can if it is of the right
1719 class and does not appear in the value being output-reloaded. */
1720
1721 static void
1722 combine_reloads (void)
1723 {
1724 int i, regno;
1725 int output_reload = -1;
1726 int secondary_out = -1;
1727 rtx note;
1728
1729 /* Find the output reload; return unless there is exactly one
1730 and that one is mandatory. */
1731
1732 for (i = 0; i < n_reloads; i++)
1733 if (rld[i].out != 0)
1734 {
1735 if (output_reload >= 0)
1736 return;
1737 output_reload = i;
1738 }
1739
1740 if (output_reload < 0 || rld[output_reload].optional)
1741 return;
1742
1743 /* An input-output reload isn't combinable. */
1744
1745 if (rld[output_reload].in != 0)
1746 return;
1747
1748 /* If this reload is for an earlyclobber operand, we can't do anything. */
1749 if (earlyclobber_operand_p (rld[output_reload].out))
1750 return;
1751
1752 /* If there is a reload for part of the address of this operand, we would
1753 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1754 its life to the point where doing this combine would not lower the
1755 number of spill registers needed. */
1756 for (i = 0; i < n_reloads; i++)
1757 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1758 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1759 && rld[i].opnum == rld[output_reload].opnum)
1760 return;
1761
1762 /* Check each input reload; can we combine it? */
1763
1764 for (i = 0; i < n_reloads; i++)
1765 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1766 /* Life span of this reload must not extend past main insn. */
1767 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1768 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1769 && rld[i].when_needed != RELOAD_OTHER
1770 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1771 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1772 rld[output_reload].outmode))
1773 && rld[i].inc == 0
1774 && rld[i].reg_rtx == 0
1775 #ifdef SECONDARY_MEMORY_NEEDED
1776 /* Don't combine two reloads with different secondary
1777 memory locations. */
1778 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1779 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1780 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1781 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1782 #endif
1783 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1784 ? (rld[i].rclass == rld[output_reload].rclass)
1785 : (reg_class_subset_p (rld[i].rclass,
1786 rld[output_reload].rclass)
1787 || reg_class_subset_p (rld[output_reload].rclass,
1788 rld[i].rclass)))
1789 && (MATCHES (rld[i].in, rld[output_reload].out)
1790 /* Args reversed because the first arg seems to be
1791 the one that we imagine being modified
1792 while the second is the one that might be affected. */
1793 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1794 rld[i].in)
1795 /* However, if the input is a register that appears inside
1796 the output, then we also can't share.
1797 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1798 If the same reload reg is used for both reg 69 and the
1799 result to be stored in memory, then that result
1800 will clobber the address of the memory ref. */
1801 && ! (REG_P (rld[i].in)
1802 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1803 rld[output_reload].out))))
1804 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1805 rld[i].when_needed != RELOAD_FOR_INPUT)
1806 && (reg_class_size[(int) rld[i].rclass]
1807 || targetm.small_register_classes_for_mode_p (VOIDmode))
1808 /* We will allow making things slightly worse by combining an
1809 input and an output, but no worse than that. */
1810 && (rld[i].when_needed == RELOAD_FOR_INPUT
1811 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1812 {
1813 int j;
1814
1815 /* We have found a reload to combine with! */
1816 rld[i].out = rld[output_reload].out;
1817 rld[i].out_reg = rld[output_reload].out_reg;
1818 rld[i].outmode = rld[output_reload].outmode;
1819 /* Mark the old output reload as inoperative. */
1820 rld[output_reload].out = 0;
1821 /* The combined reload is needed for the entire insn. */
1822 rld[i].when_needed = RELOAD_OTHER;
1823 /* If the output reload had a secondary reload, copy it. */
1824 if (rld[output_reload].secondary_out_reload != -1)
1825 {
1826 rld[i].secondary_out_reload
1827 = rld[output_reload].secondary_out_reload;
1828 rld[i].secondary_out_icode
1829 = rld[output_reload].secondary_out_icode;
1830 }
1831
1832 #ifdef SECONDARY_MEMORY_NEEDED
1833 /* Copy any secondary MEM. */
1834 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1835 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1836 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1837 #endif
1838 /* If required, minimize the register class. */
1839 if (reg_class_subset_p (rld[output_reload].rclass,
1840 rld[i].rclass))
1841 rld[i].rclass = rld[output_reload].rclass;
1842
1843 /* Transfer all replacements from the old reload to the combined. */
1844 for (j = 0; j < n_replacements; j++)
1845 if (replacements[j].what == output_reload)
1846 replacements[j].what = i;
1847
1848 return;
1849 }
1850
1851 /* If this insn has only one operand that is modified or written (assumed
1852 to be the first), it must be the one corresponding to this reload. It
1853 is safe to use anything that dies in this insn for that output provided
1854 that it does not occur in the output (we already know it isn't an
1855 earlyclobber). If this is an asm insn, give up. */
1856
1857 if (INSN_CODE (this_insn) == -1)
1858 return;
1859
1860 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1861 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1862 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1863 return;
1864
1865 /* See if some hard register that dies in this insn and is not used in
1866 the output is the right class. Only works if the register we pick
1867 up can fully hold our output reload. */
1868 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1869 if (REG_NOTE_KIND (note) == REG_DEAD
1870 && REG_P (XEXP (note, 0))
1871 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1872 rld[output_reload].out)
1873 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1874 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1875 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1876 regno)
1877 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1878 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1879 /* Ensure that a secondary or tertiary reload for this output
1880 won't want this register. */
1881 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1882 || (!(TEST_HARD_REG_BIT
1883 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1884 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1885 || !(TEST_HARD_REG_BIT
1886 (reg_class_contents[(int) rld[secondary_out].rclass],
1887 regno)))))
1888 && !fixed_regs[regno]
1889 /* Check that a former pseudo is valid; see find_dummy_reload. */
1890 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1891 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1892 ORIGINAL_REGNO (XEXP (note, 0)))
1893 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1894 {
1895 rld[output_reload].reg_rtx
1896 = gen_rtx_REG (rld[output_reload].outmode, regno);
1897 return;
1898 }
1899 }
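
/* Example of the combination above (a sketch, not tied to any
   particular target): given a three-operand add

       (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   where pseudo 100 needs an output reload and pseudo 101 an input
   reload of the same class and size, the two reloads can share one
   reload register R: load 101 into R, let the insn compute into R,
   then store R back into 100.  The combined reload becomes
   RELOAD_OTHER because it is live across the whole insn. */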
1900 \f
1901 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1902 See if one of IN and OUT is a register that may be used;
1903 this is desirable since a spill-register won't be needed.
1904 If so, return the register rtx that proves acceptable.
1905
1906 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1907 RCLASS is the register class required for the reload.
1908
1909 If FOR_REAL is >= 0, it is the number of the reload,
1910 and in some cases when it can be discovered that OUT doesn't need
1911 to be computed, clear out rld[FOR_REAL].out.
1912
1913 If FOR_REAL is -1, this should not be done, because this call
1914 is just to see if a register can be found, not to find and install it.
1915
1916 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1917 puts an additional constraint on being able to use IN for OUT since
1918 IN must not appear elsewhere in the insn (it is assumed that IN itself
1919 is safe from the earlyclobber). */
1920
1921 static rtx
1922 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1923 enum machine_mode inmode, enum machine_mode outmode,
1924 reg_class_t rclass, int for_real, int earlyclobber)
1925 {
1926 rtx in = real_in;
1927 rtx out = real_out;
1928 int in_offset = 0;
1929 int out_offset = 0;
1930 rtx value = 0;
1931
1932 /* If operands exceed a word, we can't use either of them
1933 unless they have the same size. */
1934 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1935 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1936 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1937 return 0;
1938
1939 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1940 respectively refers to a hard register. */
1941
1942 /* Find the inside of any subregs. */
1943 while (GET_CODE (out) == SUBREG)
1944 {
1945 if (REG_P (SUBREG_REG (out))
1946 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1947 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1948 GET_MODE (SUBREG_REG (out)),
1949 SUBREG_BYTE (out),
1950 GET_MODE (out));
1951 out = SUBREG_REG (out);
1952 }
1953 while (GET_CODE (in) == SUBREG)
1954 {
1955 if (REG_P (SUBREG_REG (in))
1956 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1957 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1958 GET_MODE (SUBREG_REG (in)),
1959 SUBREG_BYTE (in),
1960 GET_MODE (in));
1961 in = SUBREG_REG (in);
1962 }
1963
1964 /* Narrow down the reg class, the same way push_reload will;
1965 otherwise we might find a dummy now, but push_reload won't. */
1966 {
1967 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1968 if (preferred_class != NO_REGS)
1969 rclass = (enum reg_class) preferred_class;
1970 }
1971
1972 /* See if OUT will do. */
1973 if (REG_P (out)
1974 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1975 {
1976 unsigned int regno = REGNO (out) + out_offset;
1977 unsigned int nwords = hard_regno_nregs[regno][outmode];
1978 rtx saved_rtx;
1979
1980 /* When we consider whether the insn uses OUT,
1981 ignore references within IN. They don't prevent us
1982 from copying IN into OUT, because those refs would
1983 move into the insn that reloads IN.
1984
1985 However, we only ignore IN in its role as this reload.
1986 If the insn uses IN elsewhere and it contains OUT,
1987 that counts. We can't be sure it's the "same" operand
1988 so it might not go through this reload. */
1989 saved_rtx = *inloc;
1990 *inloc = const0_rtx;
1991
1992 if (regno < FIRST_PSEUDO_REGISTER
1993 && HARD_REGNO_MODE_OK (regno, outmode)
1994 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1995 PATTERN (this_insn), outloc))
1996 {
1997 unsigned int i;
1998
1999 for (i = 0; i < nwords; i++)
2000 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2001 regno + i))
2002 break;
2003
2004 if (i == nwords)
2005 {
2006 if (REG_P (real_out))
2007 value = real_out;
2008 else
2009 value = gen_rtx_REG (outmode, regno);
2010 }
2011 }
2012
2013 *inloc = saved_rtx;
2014 }
2015
2016 /* Consider using IN if OUT was not acceptable
2017 or if OUT dies in this insn (like the quotient in a divmod insn).
2018 We can't use IN unless it dies in this insn,
2019 which means we must know accurately which hard regs are live.
2020 Also, the result can't go in IN if IN is used within OUT,
2021 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2022 if (hard_regs_live_known
2023 && REG_P (in)
2024 && REGNO (in) < FIRST_PSEUDO_REGISTER
2025 && (value == 0
2026 || find_reg_note (this_insn, REG_UNUSED, real_out))
2027 && find_reg_note (this_insn, REG_DEAD, real_in)
2028 && !fixed_regs[REGNO (in)]
2029 && HARD_REGNO_MODE_OK (REGNO (in),
2030 /* The only case where out and real_out might
2031 have different modes is where real_out
2032 is a subreg, and in that case, out
2033 has a real mode. */
2034 (GET_MODE (out) != VOIDmode
2035 ? GET_MODE (out) : outmode))
2036 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2037 /* However only do this if we can be sure that this input
2038 operand doesn't correspond with an uninitialized pseudo.
2039 global can assign some hardreg to it that is the same as
2040 the one assigned to a different, also live pseudo (as it
2041 can ignore the conflict). We must never introduce writes
2042 to such hardregs, as they would clobber the other live
2043 pseudo. See PR 20973. */
2044 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2045 ORIGINAL_REGNO (in))
2046 /* Similarly, only do this if we can be sure that the death
2047 note is still valid. global can assign some hardreg to
2048 the pseudo referenced in the note and simultaneously a
2049 subword of this hardreg to a different, also live pseudo,
2050 because only another subword of the hardreg is actually
2051 used in the insn. This cannot happen if the pseudo has
2052 been assigned exactly one hardreg. See PR 33732. */
2053 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2054 {
2055 unsigned int regno = REGNO (in) + in_offset;
2056 unsigned int nwords = hard_regno_nregs[regno][inmode];
2057
2058 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2059 && ! hard_reg_set_here_p (regno, regno + nwords,
2060 PATTERN (this_insn))
2061 && (! earlyclobber
2062 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2063 PATTERN (this_insn), inloc)))
2064 {
2065 unsigned int i;
2066
2067 for (i = 0; i < nwords; i++)
2068 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2069 regno + i))
2070 break;
2071
2072 if (i == nwords)
2073 {
2074 /* If we were going to use OUT as the reload reg
2075 and changed our mind, it means OUT is a dummy that
2076 dies here. So don't bother copying value to it. */
2077 if (for_real >= 0 && value == real_out)
2078 rld[for_real].out = 0;
2079 if (REG_P (real_in))
2080 value = real_in;
2081 else
2082 value = gen_rtx_REG (inmode, regno);
2083 }
2084 }
2085 }
2086
2087 return value;
2088 }
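
/* Illustrative sketch (not part of the original comments): for a
   two-address insn such as

       (set (reg:SI 2) (plus:SI (reg:SI 100) (const_int 1)))

   where operand 1 (pseudo 100) must be reloaded and match operand 0,
   IN is (reg:SI 100) and OUT is (reg:SI 2).  If hard reg 2 belongs to
   the required class and is not otherwise referenced in the insn, OUT
   itself is returned as the dummy reload register, so no separate
   spill register is needed. */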
2089 \f
2090 /* This page contains subroutines used mainly for determining
2091 whether the IN or an OUT of a reload can serve as the
2092 reload register. */
2093
2094 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2095
2096 int
2097 earlyclobber_operand_p (rtx x)
2098 {
2099 int i;
2100
2101 for (i = 0; i < n_earlyclobbers; i++)
2102 if (reload_earlyclobbers[i] == x)
2103 return 1;
2104
2105 return 0;
2106 }
2107
2108 /* Return 1 if expression X alters a hard reg in the range
2109 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2110 either explicitly or in the guise of a pseudo-reg allocated to a register in that range.
2111 X should be the body of an instruction. */
2112
2113 static int
2114 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2115 {
2116 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2117 {
2118 rtx op0 = SET_DEST (x);
2119
2120 while (GET_CODE (op0) == SUBREG)
2121 op0 = SUBREG_REG (op0);
2122 if (REG_P (op0))
2123 {
2124 unsigned int r = REGNO (op0);
2125
2126 /* See if this reg overlaps the range under consideration. */
2127 if (r < end_regno
2128 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2129 return 1;
2130 }
2131 }
2132 else if (GET_CODE (x) == PARALLEL)
2133 {
2134 int i = XVECLEN (x, 0) - 1;
2135
2136 for (; i >= 0; i--)
2137 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2138 return 1;
2139 }
2140
2141 return 0;
2142 }
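
/* For example (illustrative only), with the insn body

       (parallel [(set (reg:SI 105) (reg:SI 1))
                  (clobber (reg:SI 0))])

   hard_reg_set_here_p (0, 1, body) returns 1 because of the CLOBBER of
   hard reg 0, while hard_reg_set_here_p (1, 2, body) returns 0, since
   hard reg 1 is only used, never set. */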
2143
2144 /* Return 1 if ADDR is a valid memory address for mode MODE
2145 in address space AS, and check that each pseudo reg has the
2146 proper kind of hard reg. */
2147
2148 int
2149 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2150 rtx addr, addr_space_t as)
2151 {
2152 #ifdef GO_IF_LEGITIMATE_ADDRESS
2153 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2154 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2155 return 0;
2156
2157 win:
2158 return 1;
2159 #else
2160 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2161 #endif
2162 }
2163 \f
2164 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2165 if they are the same hard reg, and has special hacks for
2166 autoincrement and autodecrement.
2167 This is specifically intended for find_reloads to use
2168 in determining whether two operands match.
2169 X is the operand whose number is the lower of the two.
2170
2171 The value is 2 if Y contains a pre-increment that matches
2172 a non-incrementing address in X. */
2173
2174 /* ??? To be completely correct, we should arrange to pass
2175 for X the output operand and for Y the input operand.
2176 For now, we assume that the output operand has the lower number
2177 because that is natural in (SET output (... input ...)). */
2178
2179 int
2180 operands_match_p (rtx x, rtx y)
2181 {
2182 int i;
2183 RTX_CODE code = GET_CODE (x);
2184 const char *fmt;
2185 int success_2;
2186
2187 if (x == y)
2188 return 1;
2189 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2190 && (REG_P (y) || (GET_CODE (y) == SUBREG
2191 && REG_P (SUBREG_REG (y)))))
2192 {
2193 int j;
2194
2195 if (code == SUBREG)
2196 {
2197 i = REGNO (SUBREG_REG (x));
2198 if (i >= FIRST_PSEUDO_REGISTER)
2199 goto slow;
2200 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2201 GET_MODE (SUBREG_REG (x)),
2202 SUBREG_BYTE (x),
2203 GET_MODE (x));
2204 }
2205 else
2206 i = REGNO (x);
2207
2208 if (GET_CODE (y) == SUBREG)
2209 {
2210 j = REGNO (SUBREG_REG (y));
2211 if (j >= FIRST_PSEUDO_REGISTER)
2212 goto slow;
2213 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2214 GET_MODE (SUBREG_REG (y)),
2215 SUBREG_BYTE (y),
2216 GET_MODE (y));
2217 }
2218 else
2219 j = REGNO (y);
2220
2221 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2222 multiple hard register group of scalar integer registers, so that
2223 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2224 register. */
2225 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2226 && SCALAR_INT_MODE_P (GET_MODE (x))
2227 && i < FIRST_PSEUDO_REGISTER)
2228 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2229 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2230 && SCALAR_INT_MODE_P (GET_MODE (y))
2231 && j < FIRST_PSEUDO_REGISTER)
2232 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2233
2234 return i == j;
2235 }
2236 /* If two operands must match, because they are really a single
2237 operand of an assembler insn, then two postincrements are invalid
2238 because the assembler insn would increment only once.
2239 On the other hand, a postincrement matches ordinary indexing
2240 if the postincrement is the output operand. */
2241 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2242 return operands_match_p (XEXP (x, 0), y);
2243 /* Two preincrements are invalid
2244 because the assembler insn would increment only once.
2245 On the other hand, a preincrement matches ordinary indexing
2246 if the preincrement is the input operand.
2247 In this case, return 2, since some callers need to do special
2248 things when this happens. */
2249 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2250 || GET_CODE (y) == PRE_MODIFY)
2251 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2252
2253 slow:
2254
2255 /* Now we have disposed of all the cases in which different rtx codes
2256 can match. */
2257 if (code != GET_CODE (y))
2258 return 0;
2259
2260 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2261 if (GET_MODE (x) != GET_MODE (y))
2262 return 0;
2263
2264 /* MEMs referring to different address spaces are not equivalent. */
2265 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2266 return 0;
2267
2268 switch (code)
2269 {
2270 case CONST_INT:
2271 case CONST_DOUBLE:
2272 case CONST_FIXED:
2273 return 0;
2274
2275 case LABEL_REF:
2276 return XEXP (x, 0) == XEXP (y, 0);
2277 case SYMBOL_REF:
2278 return XSTR (x, 0) == XSTR (y, 0);
2279
2280 default:
2281 break;
2282 }
2283
2284 /* Compare the elements. If any pair of corresponding elements
2285 fails to match, return 0 for the whole thing. */
2286
2287 success_2 = 0;
2288 fmt = GET_RTX_FORMAT (code);
2289 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2290 {
2291 int val, j;
2292 switch (fmt[i])
2293 {
2294 case 'w':
2295 if (XWINT (x, i) != XWINT (y, i))
2296 return 0;
2297 break;
2298
2299 case 'i':
2300 if (XINT (x, i) != XINT (y, i))
2301 return 0;
2302 break;
2303
2304 case 'e':
2305 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2306 if (val == 0)
2307 return 0;
2308 /* If any subexpression returns 2,
2309 we should return 2 if we are successful. */
2310 if (val == 2)
2311 success_2 = 1;
2312 break;
2313
2314 case '0':
2315 break;
2316
2317 case 'E':
2318 if (XVECLEN (x, i) != XVECLEN (y, i))
2319 return 0;
2320 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2321 {
2322 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2323 if (val == 0)
2324 return 0;
2325 if (val == 2)
2326 success_2 = 1;
2327 }
2328 break;
2329
2330 /* It is believed that rtx's at this level will never
2331 contain anything but integers and other rtx's,
2332 except for within LABEL_REFs and SYMBOL_REFs. */
2333 default:
2334 gcc_unreachable ();
2335 }
2336 }
2337 return 1 + success_2;
2338 }
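
/* Two illustrative cases (sketches, not from the original comments):
   with 32-bit words on a REG_WORDS_BIG_ENDIAN target,

       operands_match_p ((reg:DI 0), (reg:SI 1))

   returns 1, since both operands then denote the last word, hard reg 1;
   and

       operands_match_p ((mem:SI (reg:SI 3)),
                         (mem:SI (pre_inc:SI (reg:SI 3))))

   returns 2, meaning the operands can be made to match but the caller
   must treat the pre-increment specially. */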
2339 \f
2340 /* Describe the range of registers or memory referenced by X.
2341 If X is a register, set REG_FLAG and put the first register
2342 number into START and the last plus one into END.
2343 If X is a memory reference, put a base address into BASE
2344 and a range of integer offsets into START and END.
2345 If X is pushing on the stack, we can assume it causes no trouble,
2346 so we set the SAFE field. */
2347
2348 static struct decomposition
2349 decompose (rtx x)
2350 {
2351 struct decomposition val;
2352 int all_const = 0;
2353
2354 memset (&val, 0, sizeof (val));
2355
2356 switch (GET_CODE (x))
2357 {
2358 case MEM:
2359 {
2360 rtx base = NULL_RTX, offset = 0;
2361 rtx addr = XEXP (x, 0);
2362
2363 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2364 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2365 {
2366 val.base = XEXP (addr, 0);
2367 val.start = -GET_MODE_SIZE (GET_MODE (x));
2368 val.end = GET_MODE_SIZE (GET_MODE (x));
2369 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2370 return val;
2371 }
2372
2373 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2374 {
2375 if (GET_CODE (XEXP (addr, 1)) == PLUS
2376 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2377 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2378 {
2379 val.base = XEXP (addr, 0);
2380 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2381 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2382 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2383 return val;
2384 }
2385 }
2386
2387 if (GET_CODE (addr) == CONST)
2388 {
2389 addr = XEXP (addr, 0);
2390 all_const = 1;
2391 }
2392 if (GET_CODE (addr) == PLUS)
2393 {
2394 if (CONSTANT_P (XEXP (addr, 0)))
2395 {
2396 base = XEXP (addr, 1);
2397 offset = XEXP (addr, 0);
2398 }
2399 else if (CONSTANT_P (XEXP (addr, 1)))
2400 {
2401 base = XEXP (addr, 0);
2402 offset = XEXP (addr, 1);
2403 }
2404 }
2405
2406 if (offset == 0)
2407 {
2408 base = addr;
2409 offset = const0_rtx;
2410 }
2411 if (GET_CODE (offset) == CONST)
2412 offset = XEXP (offset, 0);
2413 if (GET_CODE (offset) == PLUS)
2414 {
2415 if (CONST_INT_P (XEXP (offset, 0)))
2416 {
2417 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2418 offset = XEXP (offset, 0);
2419 }
2420 else if (CONST_INT_P (XEXP (offset, 1)))
2421 {
2422 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2423 offset = XEXP (offset, 1);
2424 }
2425 else
2426 {
2427 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2428 offset = const0_rtx;
2429 }
2430 }
2431 else if (!CONST_INT_P (offset))
2432 {
2433 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2434 offset = const0_rtx;
2435 }
2436
2437 if (all_const && GET_CODE (base) == PLUS)
2438 base = gen_rtx_CONST (GET_MODE (base), base);
2439
2440 gcc_assert (CONST_INT_P (offset));
2441
2442 val.start = INTVAL (offset);
2443 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2444 val.base = base;
2445 }
2446 break;
2447
2448 case REG:
2449 val.reg_flag = 1;
2450 val.start = true_regnum (x);
2451 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2452 {
2453 /* A pseudo with no hard reg. */
2454 val.start = REGNO (x);
2455 val.end = val.start + 1;
2456 }
2457 else
2458 /* A hard reg. */
2459 val.end = end_hard_regno (GET_MODE (x), val.start);
2460 break;
2461
2462 case SUBREG:
2463 if (!REG_P (SUBREG_REG (x)))
2464 /* This could be more precise, but it's good enough. */
2465 return decompose (SUBREG_REG (x));
2466 val.reg_flag = 1;
2467 val.start = true_regnum (x);
2468 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2469 return decompose (SUBREG_REG (x));
2470 else
2471 /* A hard reg. */
2472 val.end = val.start + subreg_nregs (x);
2473 break;
2474
2475 case SCRATCH:
2476 /* This hasn't been assigned yet, so it can't conflict yet. */
2477 val.safe = 1;
2478 break;
2479
2480 default:
2481 gcc_assert (CONSTANT_P (x));
2482 val.safe = 1;
2483 break;
2484 }
2485 return val;
2486 }
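
/* Illustrative example (not from the original comments): for

       (mem:SI (plus:SI (reg:SI 6) (const_int 8)))

   decompose returns base = (reg:SI 6), start = 8, end = 12 (with
   4-byte SImode), and both reg_flag and safe clear; a push such as
   (mem:SI (pre_dec:SI (reg:SI 7))), with reg 7 the stack pointer, is
   simply marked safe. */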
2487
2488 /* Return 1 if altering Y will not modify the value of X.
2489 Y is also described by YDATA, which should be decompose (Y). */
2490
2491 static int
2492 immune_p (rtx x, rtx y, struct decomposition ydata)
2493 {
2494 struct decomposition xdata;
2495
2496 if (ydata.reg_flag)
2497 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2498 if (ydata.safe)
2499 return 1;
2500
2501 gcc_assert (MEM_P (y));
2502 /* If Y is memory and X is not, Y can't affect X. */
2503 if (!MEM_P (x))
2504 return 1;
2505
2506 xdata = decompose (x);
2507
2508 if (! rtx_equal_p (xdata.base, ydata.base))
2509 {
2510 /* If bases are distinct symbolic constants, there is no overlap. */
2511 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2512 return 1;
2513 /* Constants and stack slots never overlap. */
2514 if (CONSTANT_P (xdata.base)
2515 && (ydata.base == frame_pointer_rtx
2516 || ydata.base == hard_frame_pointer_rtx
2517 || ydata.base == stack_pointer_rtx))
2518 return 1;
2519 if (CONSTANT_P (ydata.base)
2520 && (xdata.base == frame_pointer_rtx
2521 || xdata.base == hard_frame_pointer_rtx
2522 || xdata.base == stack_pointer_rtx))
2523 return 1;
2524 /* If either base is variable, we don't know anything. */
2525 return 0;
2526 }
2527
2528 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2529 }
2530
2531 /* Similar, but calls decompose. */
2532
2533 int
2534 safe_from_earlyclobber (rtx op, rtx clobber)
2535 {
2536 struct decomposition early_data;
2537
2538 early_data = decompose (clobber);
2539 return immune_p (op, clobber, early_data);
2540 }
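
/* Usage sketch (hypothetical, for illustration only): when an asm has
   an earlyclobber output OUT and some other input operand OP,
   find_reloads can ask

       if (! safe_from_earlyclobber (op, out))
         ...  OP overlaps OUT and needs its own reload register ...

   which decomposes OUT once and checks OP against it via immune_p. */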
2541 \f
2542 /* Main entry point of this file: search the body of INSN
2543 for values that need reloading and record them with push_reload.
2544 REPLACE nonzero means record also where the values occur
2545 so that subst_reloads can be used.
2546
2547 IND_LEVELS says how many levels of indirection are supported by this
2548 machine; a value of zero means that a memory reference is not a valid
2549 memory address.
2550
2551 LIVE_KNOWN says we have valid information about which hard
2552 regs are live at each point in the program; this is true when
2553 we are called from global_alloc but false when stupid register
2554 allocation has been done.
2555
2556 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2557 an entry is nonnegative if that reg has been commandeered for reloading into.
2558 It is copied into STATIC_RELOAD_REG_P and referenced from there
2559 by various subroutines.
2560
2561 Return TRUE if some operands need to be changed, because of swapping
2562 commutative operands, reg_equiv_address substitution, or whatever. */
2563
2564 int
2565 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2566 short *reload_reg_p)
2567 {
2568 int insn_code_number;
2569 int i, j;
2570 int noperands;
2571 /* These start out as the constraints for the insn
2572 and they are chewed up as we consider alternatives. */
2573 const char *constraints[MAX_RECOG_OPERANDS];
2574 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2575 a register. */
2576 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2577 char pref_or_nothing[MAX_RECOG_OPERANDS];
2578 /* Nonzero for a MEM operand whose entire address needs a reload.
2579 May be -1 to indicate the entire address may or may not need a reload. */
2580 int address_reloaded[MAX_RECOG_OPERANDS];
2581 /* Nonzero for an address operand that needs to be completely reloaded.
2582 May be -1 to indicate the entire operand may or may not need a reload. */
2583 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2584 /* Value of enum reload_type to use for operand. */
2585 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2586 /* Value of enum reload_type to use within address of operand. */
2587 enum reload_type address_type[MAX_RECOG_OPERANDS];
2588 /* Save the usage of each operand. */
2589 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2590 int no_input_reloads = 0, no_output_reloads = 0;
2591 int n_alternatives;
2592 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2593 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2594 char this_alternative_win[MAX_RECOG_OPERANDS];
2595 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2596 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2597 int this_alternative_matches[MAX_RECOG_OPERANDS];
2598 int swapped;
2599 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2600 int this_alternative_number;
2601 int goal_alternative_number = 0;
2602 int operand_reloadnum[MAX_RECOG_OPERANDS];
2603 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2604 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2605 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2606 char goal_alternative_win[MAX_RECOG_OPERANDS];
2607 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2608 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2609 int goal_alternative_swapped;
2610 int best;
2611 int commutative;
2612 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2613 rtx substed_operand[MAX_RECOG_OPERANDS];
2614 rtx body = PATTERN (insn);
2615 rtx set = single_set (insn);
2616 int goal_earlyclobber = 0, this_earlyclobber;
2617 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2618 int retval = 0;
2619
2620 this_insn = insn;
2621 n_reloads = 0;
2622 n_replacements = 0;
2623 n_earlyclobbers = 0;
2624 replace_reloads = replace;
2625 hard_regs_live_known = live_known;
2626 static_reload_reg_p = reload_reg_p;
2627
2628 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2629 neither are insns that SET cc0. Insns that use CC0 are not allowed
2630 to have any input reloads. */
2631 if (JUMP_P (insn) || CALL_P (insn))
2632 no_output_reloads = 1;
2633
2634 #ifdef HAVE_cc0
2635 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2636 no_input_reloads = 1;
2637 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2638 no_output_reloads = 1;
2639 #endif
2640
2641 #ifdef SECONDARY_MEMORY_NEEDED
2642 /* The eliminated forms of any secondary memory locations are per-insn, so
2643 clear them out here. */
2644
2645 if (secondary_memlocs_elim_used)
2646 {
2647 memset (secondary_memlocs_elim, 0,
2648 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2649 secondary_memlocs_elim_used = 0;
2650 }
2651 #endif
2652
2653 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2654 is cheap to move between them. If it is not, there may not be an insn
2655 to do the copy, so we may need a reload. */
2656 if (GET_CODE (body) == SET
2657 && REG_P (SET_DEST (body))
2658 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2659 && REG_P (SET_SRC (body))
2660 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2661 && register_move_cost (GET_MODE (SET_SRC (body)),
2662 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2663 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2664 return 0;
2665
2666 extract_insn (insn);
2667
2668 noperands = reload_n_operands = recog_data.n_operands;
2669 n_alternatives = recog_data.n_alternatives;
2670
2671 /* Just return "no reloads" if insn has no operands with constraints. */
2672 if (noperands == 0 || n_alternatives == 0)
2673 return 0;
2674
2675 insn_code_number = INSN_CODE (insn);
2676 this_insn_is_asm = insn_code_number < 0;
2677
2678 memcpy (operand_mode, recog_data.operand_mode,
2679 noperands * sizeof (enum machine_mode));
2680 memcpy (constraints, recog_data.constraints,
2681 noperands * sizeof (const char *));
2682
2683 commutative = -1;
2684
2685 /* If we will need to know, later, whether some pair of operands
2686 are the same, we must compare them now and save the result.
2687 Reloading the base and index registers will clobber them
2688 and afterward they will fail to match. */
2689
2690 for (i = 0; i < noperands; i++)
2691 {
2692 const char *p;
2693 int c;
2694 char *end;
2695
2696 substed_operand[i] = recog_data.operand[i];
2697 p = constraints[i];
2698
2699 modified[i] = RELOAD_READ;
2700
2701 /* Scan this operand's constraint to see if it is an output operand,
2702 an in-out operand, is commutative, or should match another. */
2703
2704 while ((c = *p))
2705 {
2706 p += CONSTRAINT_LEN (c, p);
2707 switch (c)
2708 {
2709 case '=':
2710 modified[i] = RELOAD_WRITE;
2711 break;
2712 case '+':
2713 modified[i] = RELOAD_READ_WRITE;
2714 break;
2715 case '%':
2716 {
2717 /* The last operand should not be marked commutative. */
2718 gcc_assert (i != noperands - 1);
2719
2720 /* We currently only support one commutative pair of
2721 operands. Some existing asm code currently uses more
2722 than one pair. Previously, that would usually work,
2723 but sometimes it would crash the compiler. We
2724 continue supporting that case as well as we can by
2725 silently ignoring all but the first pair. In the
2726 future we may handle it correctly. */
2727 if (commutative < 0)
2728 commutative = i;
2729 else
2730 gcc_assert (this_insn_is_asm);
2731 }
2732 break;
2733 /* Use of ISDIGIT is tempting here, but it may get expensive because
2734 of locale support we don't want. */
2735 case '0': case '1': case '2': case '3': case '4':
2736 case '5': case '6': case '7': case '8': case '9':
2737 {
2738 c = strtoul (p - 1, &end, 10);
2739 p = end;
2740
2741 operands_match[c][i]
2742 = operands_match_p (recog_data.operand[c],
2743 recog_data.operand[i]);
2744
2745 /* An operand may not match itself. */
2746 gcc_assert (c != i);
2747
2748 /* If C can be commuted with C+1, and C might need to match I,
2749 then C+1 might also need to match I. */
2750 if (commutative >= 0)
2751 {
2752 if (c == commutative || c == commutative + 1)
2753 {
2754 int other = c + (c == commutative ? 1 : -1);
2755 operands_match[other][i]
2756 = operands_match_p (recog_data.operand[other],
2757 recog_data.operand[i]);
2758 }
2759 if (i == commutative || i == commutative + 1)
2760 {
2761 int other = i + (i == commutative ? 1 : -1);
2762 operands_match[c][other]
2763 = operands_match_p (recog_data.operand[c],
2764 recog_data.operand[other]);
2765 }
2766 /* Note that C is supposed to be less than I.
2767 No need to consider altering both C and I because in
2768 that case we would alter one into the other. */
2769 }
2770 }
2771 }
2772 }
2773 }
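
/* A small example of the bookkeeping above (illustrative only): for a
   commutative add with constraints "=r", "%0", "r", the '%' on operand
   1 sets commutative == 1 and the "0" records operands_match[0][1].
   Because operands 1 and 2 may later be swapped, operands_match[0][2]
   is computed here as well, so the swapped alternative can be judged
   without recomputing matches. */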
2774
2775 /* Examine each operand that is a memory reference or memory address
2776 and reload parts of the addresses into index registers.
2777 Also here any references to pseudo regs that didn't get hard regs
2778 but are equivalent to constants get replaced in the insn itself
2779 with those constants. Nobody will ever see them again.
2780
2781 Finally, set up the preferred classes of each operand. */
2782
2783 for (i = 0; i < noperands; i++)
2784 {
2785 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2786
2787 address_reloaded[i] = 0;
2788 address_operand_reloaded[i] = 0;
2789 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2790 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2791 : RELOAD_OTHER);
2792 address_type[i]
2793 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2794 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2795 : RELOAD_OTHER);
2796
2797 if (*constraints[i] == 0)
2798 /* Ignore things like match_operator operands. */
2799 ;
2800 else if (constraints[i][0] == 'p'
2801 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2802 {
2803 address_operand_reloaded[i]
2804 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2805 recog_data.operand[i],
2806 recog_data.operand_loc[i],
2807 i, operand_type[i], ind_levels, insn);
2808
2809 /* If we now have a simple operand where we used to have a
2810 PLUS or MULT, re-recognize and try again. */
2811 if ((OBJECT_P (*recog_data.operand_loc[i])
2812 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2813 && (GET_CODE (recog_data.operand[i]) == MULT
2814 || GET_CODE (recog_data.operand[i]) == PLUS))
2815 {
2816 INSN_CODE (insn) = -1;
2817 retval = find_reloads (insn, replace, ind_levels, live_known,
2818 reload_reg_p);
2819 return retval;
2820 }
2821
2822 recog_data.operand[i] = *recog_data.operand_loc[i];
2823 substed_operand[i] = recog_data.operand[i];
2824
2825 /* Address operands are reloaded in their existing mode,
2826 no matter what is specified in the machine description. */
2827 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2828 }
2829 else if (code == MEM)
2830 {
2831 address_reloaded[i]
2832 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2833 recog_data.operand_loc[i],
2834 XEXP (recog_data.operand[i], 0),
2835 &XEXP (recog_data.operand[i], 0),
2836 i, address_type[i], ind_levels, insn);
2837 recog_data.operand[i] = *recog_data.operand_loc[i];
2838 substed_operand[i] = recog_data.operand[i];
2839 }
2840 else if (code == SUBREG)
2841 {
2842 rtx reg = SUBREG_REG (recog_data.operand[i]);
2843 rtx op
2844 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2845 ind_levels,
2846 set != 0
2847 && &SET_DEST (set) == recog_data.operand_loc[i],
2848 insn,
2849 &address_reloaded[i]);
2850
2851 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2852 that didn't get a hard register, emit a USE with a REG_EQUAL
2853 note in front so that we might inherit a previous, possibly
2854 wider reload. */
2855
2856 if (replace
2857 && MEM_P (op)
2858 && REG_P (reg)
2859 && (GET_MODE_SIZE (GET_MODE (reg))
2860 >= GET_MODE_SIZE (GET_MODE (op)))
2861 && reg_equiv_constant (REGNO (reg)) == 0)
2862 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2863 insn),
2864 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2865
2866 substed_operand[i] = recog_data.operand[i] = op;
2867 }
2868 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2869 /* We can get a PLUS as an "operand" as a result of register
2870 elimination. See eliminate_regs and gen_reload. We handle
2871 a unary operator by reloading the operand. */
2872 substed_operand[i] = recog_data.operand[i]
2873 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2874 ind_levels, 0, insn,
2875 &address_reloaded[i]);
2876 else if (code == REG)
2877 {
2878 /* This is equivalent to calling find_reloads_toplev.
2879 The code is duplicated for speed.
2880 When we find a pseudo always equivalent to a constant,
2881 we replace it by the constant. We must be sure, however,
2882 that we don't try to replace it in the insn in which it
2883 is being set. */
2884 int regno = REGNO (recog_data.operand[i]);
2885 if (reg_equiv_constant (regno) != 0
2886 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2887 {
2888 /* Record the existing mode so that the check whether constants are
2889 allowed will work when operand_mode isn't specified. */
2890
2891 if (operand_mode[i] == VOIDmode)
2892 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2893
2894 substed_operand[i] = recog_data.operand[i]
2895 = reg_equiv_constant (regno);
2896 }
2897 if (reg_equiv_memory_loc (regno) != 0
2898 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2899 /* We need not give a valid is_set_dest argument since the case
2900 of a constant equivalence was checked above. */
2901 substed_operand[i] = recog_data.operand[i]
2902 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2903 ind_levels, 0, insn,
2904 &address_reloaded[i]);
2905 }
2906 /* If the operand is still a register (we didn't replace it with an
2907 equivalent), get the preferred class to reload it into. */
2908 code = GET_CODE (recog_data.operand[i]);
2909 preferred_class[i]
2910 = ((code == REG && REGNO (recog_data.operand[i])
2911 >= FIRST_PSEUDO_REGISTER)
2912 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2913 : NO_REGS);
2914 pref_or_nothing[i]
2915 = (code == REG
2916 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2917 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2918 }
2919
2920 /* If this is simply a copy from operand 1 to operand 0, merge the
2921 preferred classes for the operands. */
2922 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2923 && recog_data.operand[1] == SET_SRC (set))
2924 {
2925 preferred_class[0] = preferred_class[1]
2926 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2927 pref_or_nothing[0] |= pref_or_nothing[1];
2928 pref_or_nothing[1] |= pref_or_nothing[0];
2929 }
2930
2931 /* Now see what we need for pseudo-regs that didn't get hard regs
2932 or got the wrong kind of hard reg. For this, we must consider
2933 all the operands together against the register constraints. */
2934
2935 best = MAX_RECOG_OPERANDS * 2 + 600;
2936
2937 swapped = 0;
2938 goal_alternative_swapped = 0;
2939 try_swapped:
2940
2941 /* The constraints are made of several alternatives.
2942 Each operand's constraint looks like foo,bar,... with commas
2943 separating the alternatives. The first alternatives for all
2944 operands go together, the second alternatives go together, etc.
2945
2946 First loop over alternatives. */
2947
2948 for (this_alternative_number = 0;
2949 this_alternative_number < n_alternatives;
2950 this_alternative_number++)
2951 {
2952 /* Loop over operands for one constraint alternative. */
2953 /* LOSERS counts those that don't fit this alternative
2954 and would require loading. */
2955 int losers = 0;
2956 /* BAD is set to 1 if some operand can't fit this alternative
2957 even after reloading. */
2958 int bad = 0;
2959 /* REJECT is a count of how undesirable this alternative says it is
2960 if any reloading is required. If the alternative matches exactly
2961 then REJECT is ignored, but otherwise it gets this much
2962 counted against it in addition to the reloading needed. Each
2963 ? counts three times here since we want the disparaging caused by
2964 a bad register class to only count 1/3 as much. */
2965 int reject = 0;
2966
2967 if (!recog_data.alternative_enabled_p[this_alternative_number])
2968 {
2969 int i;
2970
2971 for (i = 0; i < recog_data.n_operands; i++)
2972 constraints[i] = skip_alternative (constraints[i]);
2973
2974 continue;
2975 }
2976
2977 this_earlyclobber = 0;
2978
2979 for (i = 0; i < noperands; i++)
2980 {
2981 const char *p = constraints[i];
2982 char *end;
2983 int len;
2984 int win = 0;
2985 int did_match = 0;
2986 /* 0 => this operand can be reloaded somehow for this alternative. */
2987 int badop = 1;
2988 /* 0 => this operand can be reloaded if the alternative allows regs. */
2989 int winreg = 0;
2990 int c;
2991 int m;
2992 rtx operand = recog_data.operand[i];
2993 int offset = 0;
2994 /* Nonzero means this is a MEM that must be reloaded into a reg
2995 regardless of what the constraint says. */
2996 int force_reload = 0;
2997 int offmemok = 0;
2998 /* Nonzero if a constant forced into memory would be OK for this
2999 operand. */
3000 int constmemok = 0;
3001 int earlyclobber = 0;
3002
3003 /* If the predicate accepts a unary operator, it means that
3004 we need to reload the operand, but do not do this for
3005 match_operator and friends. */
3006 if (UNARY_P (operand) && *p != 0)
3007 operand = XEXP (operand, 0);
3008
3009 /* If the operand is a SUBREG, extract
3010 the REG or MEM (or maybe even a constant) within.
3011 (Constants can occur as a result of reg_equiv_constant.) */
3012
3013 while (GET_CODE (operand) == SUBREG)
3014 {
3015 /* Offset only matters when operand is a REG and
3016 it is a hard reg. This is because it is passed
3017 to reg_fits_class_p if it is a REG and all pseudos
3018 return 0 from that function. */
3019 if (REG_P (SUBREG_REG (operand))
3020 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3021 {
3022 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3023 GET_MODE (SUBREG_REG (operand)),
3024 SUBREG_BYTE (operand),
3025 GET_MODE (operand)) < 0)
3026 force_reload = 1;
3027 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3028 GET_MODE (SUBREG_REG (operand)),
3029 SUBREG_BYTE (operand),
3030 GET_MODE (operand));
3031 }
3032 operand = SUBREG_REG (operand);
3033 /* Force reload if this is a constant or PLUS or if there may
3034 be a problem accessing OPERAND in the outer mode. */
3035 if (CONSTANT_P (operand)
3036 || GET_CODE (operand) == PLUS
3037 /* We must force a reload of paradoxical SUBREGs
3038 of a MEM because the alignment of the inner value
3039 may not be enough to do the outer reference. On
3040 big-endian machines, it may also reference outside
3041 the object.
3042
3043 On machines that extend byte operations and we have a
3044 SUBREG where both the inner and outer modes are no wider
3045 than a word and the inner mode is narrower, is integral,
3046 and gets extended when loaded from memory, combine.c has
3047 made assumptions about the behavior of the machine in such
3048 register access. If the data is, in fact, in memory we
3049 must always load using the size assumed to be in the
3050 register and let the insn do the different-sized
3051 accesses.
3052
3053 This is doubly true if WORD_REGISTER_OPERATIONS. In
3054 this case eliminate_regs has left non-paradoxical
3055 subregs for push_reload to see. Make sure it does
3056 by forcing the reload.
3057
3058 ??? When is it right at this stage to have a subreg
3059 of a mem that is _not_ to be handled specially? IMO
3060 those should have been reduced to just a mem. */
3061 || ((MEM_P (operand)
3062 || (REG_P (operand)
3063 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3064 #ifndef WORD_REGISTER_OPERATIONS
3065 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3066 < BIGGEST_ALIGNMENT)
3067 && (GET_MODE_SIZE (operand_mode[i])
3068 > GET_MODE_SIZE (GET_MODE (operand))))
3069 || BYTES_BIG_ENDIAN
3070 #ifdef LOAD_EXTEND_OP
3071 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3072 && (GET_MODE_SIZE (GET_MODE (operand))
3073 <= UNITS_PER_WORD)
3074 && (GET_MODE_SIZE (operand_mode[i])
3075 > GET_MODE_SIZE (GET_MODE (operand)))
3076 && INTEGRAL_MODE_P (GET_MODE (operand))
3077 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3078 #endif
3079 )
3080 #endif
3081 )
3082 )
3083 force_reload = 1;
3084 }
3085
3086 this_alternative[i] = NO_REGS;
3087 this_alternative_win[i] = 0;
3088 this_alternative_match_win[i] = 0;
3089 this_alternative_offmemok[i] = 0;
3090 this_alternative_earlyclobber[i] = 0;
3091 this_alternative_matches[i] = -1;
3092
3093 /* An empty constraint or empty alternative
3094 allows anything which matched the pattern. */
3095 if (*p == 0 || *p == ',')
3096 win = 1, badop = 0;
3097
3098 /* Scan this alternative's specs for this operand;
3099 set WIN if the operand fits any letter in this alternative.
3100 Otherwise, clear BADOP if this operand could
3101 fit some letter after reloads,
3102 or set WINREG if this operand could fit after reloads
3103 provided the constraint allows some registers. */
3104
3105 do
3106 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3107 {
3108 case '\0':
3109 len = 0;
3110 break;
3111 case ',':
3112 c = '\0';
3113 break;
3114
3115 case '=': case '+': case '*':
3116 break;
3117
3118 case '%':
3119 /* We only support one commutative marker, the first
3120 one. We already set commutative above. */
3121 break;
3122
3123 case '?':
3124 reject += 6;
3125 break;
3126
3127 case '!':
3128 reject = 600;
3129 break;
3130
3131 case '#':
3132 /* Ignore rest of this alternative as far as
3133 reloading is concerned. */
3134 do
3135 p++;
3136 while (*p && *p != ',');
3137 len = 0;
3138 break;
3139
3140 case '0': case '1': case '2': case '3': case '4':
3141 case '5': case '6': case '7': case '8': case '9':
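/* The operand number being matched may have more than one digit,
   so parse the whole number rather than just this character.  */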
3142 m = strtoul (p, &end, 10);
3143 p = end;
3144 len = 0;
3145
3146 this_alternative_matches[i] = m;
3147 /* We are supposed to match a previous operand.
3148 If we do, we win if that one did.
3149 If we do not, count both of the operands as losers.
3150 (This is too conservative, since most of the time
3151 only a single reload insn will be needed to make
3152 the two operands win. As a result, this alternative
3153 may be rejected when it is actually desirable.) */
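/* When SWAPPED, the indices into operands_match below are remapped
   with 2 * commutative + 1 - x; e.g. if COMMUTATIVE is 1, that maps
   operand 1 to 2 and 2 to 1, so we read the entry operands_match
   would have held had the pair been exchanged.  */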
3154 if ((swapped && (m != commutative || i != commutative + 1))
3155 /* If we are matching as if two operands were swapped,
3156 also pretend that operands_match had been computed
3157 with the operands swapped.
3158 But if I is the second of those and M is the first,
3159 don't exchange them, because operands_match is valid
3160 only on one side of its diagonal. */
3161 ? (operands_match
3162 [(m == commutative || m == commutative + 1)
3163 ? 2 * commutative + 1 - m : m]
3164 [(i == commutative || i == commutative + 1)
3165 ? 2 * commutative + 1 - i : i])
3166 : operands_match[m][i])
3167 {
3168 /* If we are matching a non-offsettable address where an
3169 offsettable address was expected, then we must reject
3170 this combination, because we can't reload it. */
3171 if (this_alternative_offmemok[m]
3172 && MEM_P (recog_data.operand[m])
3173 && this_alternative[m] == NO_REGS
3174 && ! this_alternative_win[m])
3175 bad = 1;
3176
3177 did_match = this_alternative_win[m];
3178 }
3179 else
3180 {
3181 /* Operands don't match. */
3182 rtx value;
3183 int loc1, loc2;
3184 /* Retroactively mark the operand we had to match
3185 as a loser, if it wasn't already. */
3186 if (this_alternative_win[m])
3187 losers++;
3188 this_alternative_win[m] = 0;
3189 if (this_alternative[m] == NO_REGS)
3190 bad = 1;
3191 /* But count the pair only once in the total badness of
3192 this alternative, if the pair can be a dummy reload.
3193 The pointers in operand_loc are not swapped; swap
3194 them by hand if necessary. */
3195 if (swapped && i == commutative)
3196 loc1 = commutative + 1;
3197 else if (swapped && i == commutative + 1)
3198 loc1 = commutative;
3199 else
3200 loc1 = i;
3201 if (swapped && m == commutative)
3202 loc2 = commutative + 1;
3203 else if (swapped && m == commutative + 1)
3204 loc2 = commutative;
3205 else
3206 loc2 = m;
3207 value
3208 = find_dummy_reload (recog_data.operand[i],
3209 recog_data.operand[m],
3210 recog_data.operand_loc[loc1],
3211 recog_data.operand_loc[loc2],
3212 operand_mode[i], operand_mode[m],
3213 this_alternative[m], -1,
3214 this_alternative_earlyclobber[m]);
3215
3216 if (value != 0)
3217 losers--;
3218 }
3219 /* This can be fixed with reloads if the operand
3220 we are supposed to match can be fixed with reloads. */
3221 badop = 0;
3222 this_alternative[i] = this_alternative[m];
3223
3224 /* If we have to reload this operand and some previous
3225 operand also had to match the same thing as this
3226 operand, we don't know how to do that. So reject this
3227 alternative. */
3228 if (! did_match || force_reload)
3229 for (j = 0; j < i; j++)
3230 if (this_alternative_matches[j]
3231 == this_alternative_matches[i])
3232 badop = 1;
3233 break;
3234
3235 case 'p':
3236 /* All necessary reloads for an address_operand
3237 were handled in find_reloads_address. */
3238 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3239 SCRATCH);
3240 win = 1;
3241 badop = 0;
3242 break;
3243
3244 case TARGET_MEM_CONSTRAINT:
3245 if (force_reload)
3246 break;
3247 if (MEM_P (operand)
3248 || (REG_P (operand)
3249 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3250 && reg_renumber[REGNO (operand)] < 0))
3251 win = 1;
3252 if (CONST_POOL_OK_P (operand_mode[i], operand))
3253 badop = 0;
3254 constmemok = 1;
3255 break;
3256
3257 case '<':
3258 if (MEM_P (operand)
3259 && ! address_reloaded[i]
3260 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3261 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3262 win = 1;
3263 break;
3264
3265 case '>':
3266 if (MEM_P (operand)
3267 && ! address_reloaded[i]
3268 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3269 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3270 win = 1;
3271 break;
3272
3273 /* Memory operand whose address is not offsettable. */
3274 case 'V':
3275 if (force_reload)
3276 break;
3277 if (MEM_P (operand)
3278 && ! (ind_levels ? offsettable_memref_p (operand)
3279 : offsettable_nonstrict_memref_p (operand))
3280 /* Certain mem addresses will become offsettable
3281 after they themselves are reloaded. This is important;
3282 we don't want our own handling of unoffsettables
3283 to override the handling of reg_equiv_address. */
3284 && !(REG_P (XEXP (operand, 0))
3285 && (ind_levels == 0
3286 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3287 win = 1;
3288 break;
3289
3290 /* Memory operand whose address is offsettable. */
3291 case 'o':
3292 if (force_reload)
3293 break;
3294 if ((MEM_P (operand)
3295 /* If IND_LEVELS, find_reloads_address won't reload a
3296 pseudo that didn't get a hard reg, so we have to
3297 reject that case. */
3298 && ((ind_levels ? offsettable_memref_p (operand)
3299 : offsettable_nonstrict_memref_p (operand))
3300 /* A reloaded address is offsettable because it is now
3301 just a simple register indirect. */
3302 || address_reloaded[i] == 1))
3303 || (REG_P (operand)
3304 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3305 && reg_renumber[REGNO (operand)] < 0
3306 /* If reg_equiv_address is nonzero, we will be
3307 loading it into a register; hence it will be
3308 offsettable, but we cannot say that reg_equiv_mem
3309 is offsettable without checking. */
3310 && ((reg_equiv_mem (REGNO (operand)) != 0
3311 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3312 || (reg_equiv_address (REGNO (operand)) != 0))))
3313 win = 1;
3314 if (CONST_POOL_OK_P (operand_mode[i], operand)
3315 || MEM_P (operand))
3316 badop = 0;
3317 constmemok = 1;
3318 offmemok = 1;
3319 break;
3320
3321 case '&':
3322 /* Output operand that is stored before the need for the
3323 input operands (and their index registers) is over. */
3324 earlyclobber = 1, this_earlyclobber = 1;
3325 break;
3326
3327 case 'E':
3328 case 'F':
3329 if (GET_CODE (operand) == CONST_DOUBLE
3330 || (GET_CODE (operand) == CONST_VECTOR
3331 && (GET_MODE_CLASS (GET_MODE (operand))
3332 == MODE_VECTOR_FLOAT)))
3333 win = 1;
3334 break;
3335
3336 case 'G':
3337 case 'H':
3338 if (GET_CODE (operand) == CONST_DOUBLE
3339 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3340 win = 1;
3341 break;
3342
3343 case 's':
3344 if (CONST_INT_P (operand)
3345 || (GET_CODE (operand) == CONST_DOUBLE
3346 && GET_MODE (operand) == VOIDmode))
3347 break;
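/* Fall through: constants that are not numeric (i.e. symbolic
   ones) are checked by the 'i' code below.  */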
3348 case 'i':
3349 if (CONSTANT_P (operand)
3350 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3351 win = 1;
3352 break;
3353
3354 case 'n':
3355 if (CONST_INT_P (operand)
3356 || (GET_CODE (operand) == CONST_DOUBLE
3357 && GET_MODE (operand) == VOIDmode))
3358 win = 1;
3359 break;
3360
3361 case 'I':
3362 case 'J':
3363 case 'K':
3364 case 'L':
3365 case 'M':
3366 case 'N':
3367 case 'O':
3368 case 'P':
3369 if (CONST_INT_P (operand)
3370 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3371 win = 1;
3372 break;
3373
3374 case 'X':
3375 force_reload = 0;
3376 win = 1;
3377 break;
3378
3379 case 'g':
3380 if (! force_reload
3381 /* A PLUS is never a valid operand, but reload can make
3382 it from a register when eliminating registers. */
3383 && GET_CODE (operand) != PLUS
3384 /* A SCRATCH is not a valid operand. */
3385 && GET_CODE (operand) != SCRATCH
3386 && (! CONSTANT_P (operand)
3387 || ! flag_pic
3388 || LEGITIMATE_PIC_OPERAND_P (operand))
3389 && (GENERAL_REGS == ALL_REGS
3390 || !REG_P (operand)
3391 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3392 && reg_renumber[REGNO (operand)] < 0)))
3393 win = 1;
3394 /* Drop through into 'r' case. */
3395
3396 case 'r':
3397 this_alternative[i]
3398 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3399 goto reg;
3400
3401 default:
3402 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3403 {
3404 #ifdef EXTRA_CONSTRAINT_STR
3405 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3406 {
3407 if (force_reload)
3408 break;
3409 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3410 win = 1;
3411 /* If the address was already reloaded,
3412 we win as well. */
3413 else if (MEM_P (operand)
3414 && address_reloaded[i] == 1)
3415 win = 1;
3416 /* Likewise if the address will be reloaded because
3417 reg_equiv_address is nonzero. For reg_equiv_mem
3418 we have to check. */
3419 else if (REG_P (operand)
3420 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3421 && reg_renumber[REGNO (operand)] < 0
3422 && ((reg_equiv_mem (REGNO (operand)) != 0
3423 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3424 || (reg_equiv_address (REGNO (operand)) != 0)))
3425 win = 1;
3426
3427 /* If we didn't already win, we can reload
3428 constants via force_const_mem, and other
3429 MEMs by reloading the address like for 'o'. */
3430 if (CONST_POOL_OK_P (operand_mode[i], operand)
3431 || MEM_P (operand))
3432 badop = 0;
3433 constmemok = 1;
3434 offmemok = 1;
3435 break;
3436 }
3437 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3438 {
3439 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3440 win = 1;
3441
3442 /* If we didn't already win, we can reload
3443 the address into a base register. */
3444 this_alternative[i] = base_reg_class (VOIDmode,
3445 ADDRESS,
3446 SCRATCH);
3447 badop = 0;
3448 break;
3449 }
3450
3451 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3452 win = 1;
3453 #endif
3454 break;
3455 }
3456
3457 this_alternative[i]
3458 = (reg_class_subunion
3459 [this_alternative[i]]
3460 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3461 reg:
3462 if (GET_MODE (operand) == BLKmode)
3463 break;
3464 winreg = 1;
3465 if (REG_P (operand)
3466 && reg_fits_class_p (operand, this_alternative[i],
3467 offset, GET_MODE (recog_data.operand[i])))
3468 win = 1;
3469 break;
3470 }
3471 while ((p += len), c);
3472
3473 constraints[i] = p;
3474
3475 /* If this operand could be handled with a reg,
3476 and some reg is allowed, then this operand can be handled. */
3477 if (winreg && this_alternative[i] != NO_REGS
3478 && (win || !class_only_fixed_regs[this_alternative[i]]))
3479 badop = 0;
3480
3481 /* Record which operands fit this alternative. */
3482 this_alternative_earlyclobber[i] = earlyclobber;
3483 if (win && ! force_reload)
3484 this_alternative_win[i] = 1;
3485 else if (did_match && ! force_reload)
3486 this_alternative_match_win[i] = 1;
3487 else
3488 {
3489 int const_to_mem = 0;
3490
3491 this_alternative_offmemok[i] = offmemok;
3492 losers++;
3493 if (badop)
3494 bad = 1;
3495 /* Alternative loses if it has no regs for a reg operand. */
3496 if (REG_P (operand)
3497 && this_alternative[i] == NO_REGS
3498 && this_alternative_matches[i] < 0)
3499 bad = 1;
3500
3501 /* If this is a constant that is reloaded into the desired
3502 class by copying it to memory first, count that as another
3503 reload. This is consistent with other code and is
3504 required to avoid choosing another alternative when
3505 the constant is moved into memory by this function on
3506 an early reload pass. Note that the test here is
3507 precisely the same as in the code below that calls
3508 force_const_mem. */
3509 if (CONST_POOL_OK_P (operand_mode[i], operand)
3510 && ((targetm.preferred_reload_class (operand,
3511 this_alternative[i])
3512 == NO_REGS)
3513 || no_input_reloads))
3514 {
3515 const_to_mem = 1;
3516 if (this_alternative[i] != NO_REGS)
3517 losers++;
3518 }
3519
3520 /* Alternative loses if it requires a type of reload not
3521 permitted for this insn. We can always reload SCRATCH
3522 and objects with a REG_UNUSED note. */
3523 if (GET_CODE (operand) != SCRATCH
3524 && modified[i] != RELOAD_READ && no_output_reloads
3525 && ! find_reg_note (insn, REG_UNUSED, operand))
3526 bad = 1;
3527 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3528 && ! const_to_mem)
3529 bad = 1;
3530
3531 /* If we can't reload this value at all, reject this
3532 alternative. Note that we could also lose due to
3533 LIMIT_RELOAD_CLASS, but we don't check that
3534 here. */
3535
3536 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3537 {
3538 if (targetm.preferred_reload_class (operand, this_alternative[i])
3539 == NO_REGS)
3540 reject = 600;
3541
3542 if (operand_type[i] == RELOAD_FOR_OUTPUT
3543 && (targetm.preferred_output_reload_class (operand,
3544 this_alternative[i])
3545 == NO_REGS))
3546 reject = 600;
3547 }
3548
3549 /* We prefer to reload pseudos over reloading other things,
3550 since such reloads may be eliminated later.
3551 If we are reloading a SCRATCH, we won't be generating any
3552 insns, just using a register, so it is also preferred.
3553 So bump REJECT in other cases. Don't do this when we
3554 are forcing a constant into memory and it will then
3555 win, since we don't want a different alternative to
3556 match in that case. */
3557 if (! (REG_P (operand)
3558 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3559 && GET_CODE (operand) != SCRATCH
3560 && ! (const_to_mem && constmemok))
3561 reject += 2;
3562
3563 /* Input reloads can be inherited more often than output
3564 reloads can be removed, so penalize output reloads. */
3565 if (operand_type[i] != RELOAD_FOR_INPUT
3566 && GET_CODE (operand) != SCRATCH)
3567 reject++;
3568 }
3569
3570 /* If this operand is a pseudo register that didn't get a hard
3571 reg and this alternative accepts some register, see if the
3572 class that we want is a subset of the preferred class for this
3573 register. If not, but it intersects that class, use the
3574 preferred class instead. If it does not intersect the preferred
3575 class, show that usage of this alternative should be discouraged;
3576 it will be discouraged more still if the register is `preferred
3577 or nothing'. We do this because it increases the chance of
3578 reusing our spill register in a later insn and avoiding a pair
3579 of memory stores and loads.
3580
3581 Don't bother with this if this alternative will accept this
3582 operand.
3583
3584 Don't do this for a multiword operand, since it is only a
3585 small win and has the risk of requiring more spill registers,
3586 which could cause a large loss.
3587
3588 Don't do this if the preferred class has only one register
3589 because we might otherwise exhaust the class. */
3590
3591 if (! win && ! did_match
3592 && this_alternative[i] != NO_REGS
3593 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3594 && reg_class_size [(int) preferred_class[i]] > 0
3595 && ! small_register_class_p (preferred_class[i]))
3596 {
3597 if (! reg_class_subset_p (this_alternative[i],
3598 preferred_class[i]))
3599 {
3600 /* Since we don't have a way of forming the intersection,
3601 we just do something special if the preferred class
3602 is a subset of the class we have; that's the most
3603 common case anyway. */
3604 if (reg_class_subset_p (preferred_class[i],
3605 this_alternative[i]))
3606 this_alternative[i] = preferred_class[i];
3607 else
3608 reject += (2 + 2 * pref_or_nothing[i]);
3609 }
3610 }
3611 }
3612
3613 /* Now see if any output operands that are marked "earlyclobber"
3614 in this alternative conflict with any input operands
3615 or any memory addresses. */
3616
3617 for (i = 0; i < noperands; i++)
3618 if (this_alternative_earlyclobber[i]
3619 && (this_alternative_win[i] || this_alternative_match_win[i]))
3620 {
3621 struct decomposition early_data;
3622
3623 early_data = decompose (recog_data.operand[i]);
3624
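/* An earlyclobber operand is necessarily written by the insn,
   so it cannot be marked as a pure input.  */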
3625 gcc_assert (modified[i] != RELOAD_READ);
3626
3627 if (this_alternative[i] == NO_REGS)
3628 {
3629 this_alternative_earlyclobber[i] = 0;
3630 gcc_assert (this_insn_is_asm);
3631 error_for_asm (this_insn,
3632 "%<&%> constraint used with no register class");
3633 }
3634
3635 for (j = 0; j < noperands; j++)
3636 /* Is this an input operand or a memory ref? */
3637 if ((MEM_P (recog_data.operand[j])
3638 || modified[j] != RELOAD_WRITE)
3639 && j != i
3640 /* Ignore things like match_operator operands. */
3641 && !recog_data.is_operator[j]
3642 /* Don't count an input operand that is constrained to match
3643 the early clobber operand. */
3644 && ! (this_alternative_matches[j] == i
3645 && rtx_equal_p (recog_data.operand[i],
3646 recog_data.operand[j]))
3647 /* Is it altered by storing the earlyclobber operand? */
3648 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3649 early_data))
3650 {
3651 /* If the output is in a non-empty few-regs class,
3652 it's costly to reload it, so reload the input instead. */
3653 if (small_register_class_p (this_alternative[i])
3654 && (REG_P (recog_data.operand[j])
3655 || GET_CODE (recog_data.operand[j]) == SUBREG))
3656 {
3657 losers++;
3658 this_alternative_win[j] = 0;
3659 this_alternative_match_win[j] = 0;
3660 }
3661 else
3662 break;
3663 }
3664 /* If an earlyclobber operand conflicts with something,
3665 it must be reloaded, so request this and count the cost. */
3666 if (j != noperands)
3667 {
3668 losers++;
3669 this_alternative_win[i] = 0;
3670 this_alternative_match_win[j] = 0;
3671 for (j = 0; j < noperands; j++)
3672 if (this_alternative_matches[j] == i
3673 && this_alternative_match_win[j])
3674 {
3675 this_alternative_win[j] = 0;
3676 this_alternative_match_win[j] = 0;
3677 losers++;
3678 }
3679 }
3680 }
3681
3682 /* If one alternative accepts all the operands with no reload required,
3683 choose that alternative; don't consider the remaining ones. */
3684 if (losers == 0)
3685 {
3686 /* Unswap these so that they are never swapped at `finish'. */
3687 if (commutative >= 0)
3688 {
3689 recog_data.operand[commutative] = substed_operand[commutative];
3690 recog_data.operand[commutative + 1]
3691 = substed_operand[commutative + 1];
3692 }
3693 for (i = 0; i < noperands; i++)
3694 {
3695 goal_alternative_win[i] = this_alternative_win[i];
3696 goal_alternative_match_win[i] = this_alternative_match_win[i];
3697 goal_alternative[i] = this_alternative[i];
3698 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3699 goal_alternative_matches[i] = this_alternative_matches[i];
3700 goal_alternative_earlyclobber[i]
3701 = this_alternative_earlyclobber[i];
3702 }
3703 goal_alternative_number = this_alternative_number;
3704 goal_alternative_swapped = swapped;
3705 goal_earlyclobber = this_earlyclobber;
3706 goto finish;
3707 }
3708
3709 /* REJECT, set by the ! and ? constraint characters and when a register
3710 would be reloaded into a non-preferred class, discourages the use of
3711 this alternative for a reload goal. REJECT is incremented by six
3712 for each ? and two for each non-preferred class. */
3713 losers = losers * 6 + reject;
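/* Each loser is thus weighted like a '?' (6), so one extra reload and
   one '?' cost the same; the 600 from '!' effectively rules an
   alternative out unless nothing else can be made to work.  */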
3714
3715 /* If this alternative can be made to work by reloading,
3716 and it needs less reloading than the others checked so far,
3717 record it as the chosen goal for reloading. */
3718 if (! bad)
3719 {
3720 if (best > losers)
3721 {
3722 for (i = 0; i < noperands; i++)
3723 {
3724 goal_alternative[i] = this_alternative[i];
3725 goal_alternative_win[i] = this_alternative_win[i];
3726 goal_alternative_match_win[i]
3727 = this_alternative_match_win[i];
3728 goal_alternative_offmemok[i]
3729 = this_alternative_offmemok[i];
3730 goal_alternative_matches[i] = this_alternative_matches[i];
3731 goal_alternative_earlyclobber[i]
3732 = this_alternative_earlyclobber[i];
3733 }
3734 goal_alternative_swapped = swapped;
3735 best = losers;
3736 goal_alternative_number = this_alternative_number;
3737 goal_earlyclobber = this_earlyclobber;
3738 }
3739 }
3740 }
3741
3742 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3743 then we need to try each alternative twice,
3744 the second time matching those two operands
3745 as if we had exchanged them.
3746 To do this, really exchange them in operands.
3747
3748 If we have just tried the alternatives the second time,
3749 return operands to normal and drop through. */
3750
3751 if (commutative >= 0)
3752 {
3753 swapped = !swapped;
3754 if (swapped)
3755 {
3756 enum reg_class tclass;
3757 int t;
3758
3759 recog_data.operand[commutative] = substed_operand[commutative + 1];
3760 recog_data.operand[commutative + 1] = substed_operand[commutative];
3761 /* Swap the duplicates too. */
3762 for (i = 0; i < recog_data.n_dups; i++)
3763 if (recog_data.dup_num[i] == commutative
3764 || recog_data.dup_num[i] == commutative + 1)
3765 *recog_data.dup_loc[i]
3766 = recog_data.operand[(int) recog_data.dup_num[i]];
3767
3768 tclass = preferred_class[commutative];
3769 preferred_class[commutative] = preferred_class[commutative + 1];
3770 preferred_class[commutative + 1] = tclass;
3771
3772 t = pref_or_nothing[commutative];
3773 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3774 pref_or_nothing[commutative + 1] = t;
3775
3776 t = address_reloaded[commutative];
3777 address_reloaded[commutative] = address_reloaded[commutative + 1];
3778 address_reloaded[commutative + 1] = t;
3779
3780 memcpy (constraints, recog_data.constraints,
3781 noperands * sizeof (const char *));
3782 goto try_swapped;
3783 }
3784 else
3785 {
3786 recog_data.operand[commutative] = substed_operand[commutative];
3787 recog_data.operand[commutative + 1]
3788 = substed_operand[commutative + 1];
3789 /* Unswap the duplicates too. */
3790 for (i = 0; i < recog_data.n_dups; i++)
3791 if (recog_data.dup_num[i] == commutative
3792 || recog_data.dup_num[i] == commutative + 1)
3793 *recog_data.dup_loc[i]
3794 = recog_data.operand[(int) recog_data.dup_num[i]];
3795 }
3796 }
3797
3798 /* The operands don't meet the constraints.
3799 goal_alternative describes the alternative
3800 that we could reach by reloading the fewest operands.
3801 Reload so as to fit it. */
3802
3803 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3804 {
3805 /* No alternative works with reloads?? */
3806 if (insn_code_number >= 0)
3807 fatal_insn ("unable to generate reloads for:", insn);
3808 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3809 /* Avoid further trouble with this insn. */
3810 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3811 n_reloads = 0;
3812 return 0;
3813 }
3814
3815 /* Jump to `finish' from above if all operands are valid already.
3816 In that case, goal_alternative_win is all 1. */
3817 finish:
3818
3819 /* Right now, for any pair of operands I and J that are required to match,
3820 with I < J,
3821 goal_alternative_matches[J] is I.
3822 Set up goal_alternative_matched as the inverse function:
3823 goal_alternative_matched[I] = J. */
3824
3825 for (i = 0; i < noperands; i++)
3826 goal_alternative_matched[i] = -1;
3827
3828 for (i = 0; i < noperands; i++)
3829 if (! goal_alternative_win[i]
3830 && goal_alternative_matches[i] >= 0)
3831 goal_alternative_matched[goal_alternative_matches[i]] = i;
3832
3833 for (i = 0; i < noperands; i++)
3834 goal_alternative_win[i] |= goal_alternative_match_win[i];
3835
3836 /* If the best alternative is with the commutative pair of operands
3837 swapped, consider them swapped before reporting the reloads. Update
3838 the operand numbers of any reloads already pushed. */
3839
3840 if (goal_alternative_swapped)
3841 {
3842 rtx tem;
3843
3844 tem = substed_operand[commutative];
3845 substed_operand[commutative] = substed_operand[commutative + 1];
3846 substed_operand[commutative + 1] = tem;
3847 tem = recog_data.operand[commutative];
3848 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3849 recog_data.operand[commutative + 1] = tem;
3850 tem = *recog_data.operand_loc[commutative];
3851 *recog_data.operand_loc[commutative]
3852 = *recog_data.operand_loc[commutative + 1];
3853 *recog_data.operand_loc[commutative + 1] = tem;
3854
3855 for (i = 0; i < n_reloads; i++)
3856 {
3857 if (rld[i].opnum == commutative)
3858 rld[i].opnum = commutative + 1;
3859 else if (rld[i].opnum == commutative + 1)
3860 rld[i].opnum = commutative;
3861 }
3862 }
3863
3864 for (i = 0; i < noperands; i++)
3865 {
3866 operand_reloadnum[i] = -1;
3867
3868 /* If this is an earlyclobber operand, we need to widen the scope.
3869 The reload must remain valid from the start of the insn being
3870 reloaded until after the operand is stored into its destination.
3871 We approximate this with RELOAD_OTHER even though we know that we
3872 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3873
3874 One special case that is worth checking is when we have an
3875 output that is earlyclobber but isn't used past the insn (typically
3876 a SCRATCH). In this case, the reload need only be live
3877 through the insn itself, not for any of our input or output
3878 reloads.
3879 But we must not accidentally narrow the scope of an existing
3880 RELOAD_OTHER reload - leave these alone.
3881
3882 In any case, anything needed to address this operand can remain
3883 however they were previously categorized. */
3884
3885 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3886 operand_type[i]
3887 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3888 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3889 }
3890
3891 /* Any constants that aren't allowed and can't be reloaded
3892 into registers are here changed into memory references. */
3893 for (i = 0; i < noperands; i++)
3894 if (! goal_alternative_win[i])
3895 {
3896 rtx op = recog_data.operand[i];
3897 rtx subreg = NULL_RTX;
3898 rtx plus = NULL_RTX;
3899 enum machine_mode mode = operand_mode[i];
3900
3901 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3902 push_reload so we have to let them pass here. */
3903 if (GET_CODE (op) == SUBREG)
3904 {
3905 subreg = op;
3906 op = SUBREG_REG (op);
3907 mode = GET_MODE (op);
3908 }
3909
3910 if (GET_CODE (op) == PLUS)
3911 {
3912 plus = op;
3913 op = XEXP (op, 1);
3914 }
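/* If the operand is a (plus base constant), typically left behind by
   register elimination, only the constant part is a candidate for the
   constant pool; the PLUS is rebuilt around the new MEM below.  */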
3915
3916 if (CONST_POOL_OK_P (mode, op)
3917 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3918 == NO_REGS)
3919 || no_input_reloads))
3920 {
3921 int this_address_reloaded;
3922 rtx tem = force_const_mem (mode, op);
3923
3924 /* If we stripped a SUBREG or a PLUS above add it back. */
3925 if (plus != NULL_RTX)
3926 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3927
3928 if (subreg != NULL_RTX)
3929 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3930
3931 this_address_reloaded = 0;
3932 substed_operand[i] = recog_data.operand[i]
3933 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3934 0, insn, &this_address_reloaded);
3935
3936 /* If the alternative accepts constant pool refs directly
3937 there will be no reload needed at all. */
3938 if (plus == NULL_RTX
3939 && subreg == NULL_RTX
3940 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3941 ? substed_operand[i]
3942 : NULL,
3943 recog_data.constraints[i],
3944 goal_alternative_number))
3945 goal_alternative_win[i] = 1;
3946 }
3947 }
3948
3949 /* Record the values of the earlyclobber operands for the caller. */
3950 if (goal_earlyclobber)
3951 for (i = 0; i < noperands; i++)
3952 if (goal_alternative_earlyclobber[i])
3953 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3954
3955 /* Now record reloads for all the operands that need them. */
3956 for (i = 0; i < noperands; i++)
3957 if (! goal_alternative_win[i])
3958 {
3959 /* Operands that match previous ones have already been handled. */
3960 if (goal_alternative_matches[i] >= 0)
3961 ;
3962 /* Handle an operand with a nonoffsettable address
3963 appearing where an offsettable address will do
3964 by reloading the address into a base register.
3965
3966 ??? We can also do this when the operand is a register and
3967 reg_equiv_mem is not offsettable, but this is a bit tricky,
3968 so we don't bother with it. It may not be worth doing. */
3969 else if (goal_alternative_matched[i] == -1
3970 && goal_alternative_offmemok[i]
3971 && MEM_P (recog_data.operand[i]))
3972 {
3973 /* If the address to be reloaded is a VOIDmode constant,
3974 use the default address mode as mode of the reload register,
3975 as would have been done by find_reloads_address. */
3976 enum machine_mode address_mode;
3977 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3978 if (address_mode == VOIDmode)
3979 {
3980 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3981 address_mode = targetm.addr_space.address_mode (as);
3982 }
3983
3984 operand_reloadnum[i]
3985 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3986 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3987 base_reg_class (VOIDmode, MEM, SCRATCH),
3988 address_mode,
3989 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3990 rld[operand_reloadnum[i]].inc
3991 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3992
3993 /* If this operand is an output, we will have made any
3994 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3995 now we are treating part of the operand as an input, so
3996 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3997
3998 if (modified[i] == RELOAD_WRITE)
3999 {
4000 for (j = 0; j < n_reloads; j++)
4001 {
4002 if (rld[j].opnum == i)
4003 {
4004 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4005 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4006 else if (rld[j].when_needed
4007 == RELOAD_FOR_OUTADDR_ADDRESS)
4008 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4009 }
4010 }
4011 }
4012 }
4013 else if (goal_alternative_matched[i] == -1)
4014 {
4015 operand_reloadnum[i]
4016 = push_reload ((modified[i] != RELOAD_WRITE
4017 ? recog_data.operand[i] : 0),
4018 (modified[i] != RELOAD_READ
4019 ? recog_data.operand[i] : 0),
4020 (modified[i] != RELOAD_WRITE
4021 ? recog_data.operand_loc[i] : 0),
4022 (modified[i] != RELOAD_READ
4023 ? recog_data.operand_loc[i] : 0),
4024 (enum reg_class) goal_alternative[i],
4025 (modified[i] == RELOAD_WRITE
4026 ? VOIDmode : operand_mode[i]),
4027 (modified[i] == RELOAD_READ
4028 ? VOIDmode : operand_mode[i]),
4029 (insn_code_number < 0 ? 0
4030 : insn_data[insn_code_number].operand[i].strict_low),
4031 0, i, operand_type[i]);
4032 }
4033 /* In a matching pair of operands, one must be input only
4034 and the other must be output only.
4035 Pass the input operand as IN and the other as OUT. */
4036 else if (modified[i] == RELOAD_READ
4037 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4038 {
4039 operand_reloadnum[i]
4040 = push_reload (recog_data.operand[i],
4041 recog_data.operand[goal_alternative_matched[i]],
4042 recog_data.operand_loc[i],
4043 recog_data.operand_loc[goal_alternative_matched[i]],
4044 (enum reg_class) goal_alternative[i],
4045 operand_mode[i],
4046 operand_mode[goal_alternative_matched[i]],
4047 0, 0, i, RELOAD_OTHER);
4048 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4049 }
4050 else if (modified[i] == RELOAD_WRITE
4051 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4052 {
4053 operand_reloadnum[goal_alternative_matched[i]]
4054 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4055 recog_data.operand[i],
4056 recog_data.operand_loc[goal_alternative_matched[i]],
4057 recog_data.operand_loc[i],
4058 (enum reg_class) goal_alternative[i],
4059 operand_mode[goal_alternative_matched[i]],
4060 operand_mode[i],
4061 0, 0, i, RELOAD_OTHER);
4062 operand_reloadnum[i] = output_reloadnum;
4063 }
4064 else
4065 {
4066 gcc_assert (insn_code_number < 0);
4067 error_for_asm (insn, "inconsistent operand constraints "
4068 "in an %<asm%>");
4069 /* Avoid further trouble with this insn. */
4070 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4071 n_reloads = 0;
4072 return 0;
4073 }
4074 }
4075 else if (goal_alternative_matched[i] < 0
4076 && goal_alternative_matches[i] < 0
4077 && address_operand_reloaded[i] != 1
4078 && optimize)
4079 {
4080 /* For each non-matching operand that's a MEM or a pseudo-register
4081 that didn't get a hard register, make an optional reload.
4082 This may get done even if the insn needs no reloads otherwise. */
4083
4084 rtx operand = recog_data.operand[i];
4085
4086 while (GET_CODE (operand) == SUBREG)
4087 operand = SUBREG_REG (operand);
4088 if ((MEM_P (operand)
4089 || (REG_P (operand)
4090 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4091 /* If this is only for an output, the optional reload would not
4092 actually cause us to use a register now, just note that
4093 something is stored here. */
4094 && (goal_alternative[i] != NO_REGS
4095 || modified[i] == RELOAD_WRITE)
4096 && ! no_input_reloads
4097 /* An optional output reload might allow us to delete INSN later.
4098 We mustn't make in-out reloads on insns that are not permitted
4099 to have output reloads.
4100 If this is an asm, we can't delete it; we must not even call
4101 push_reload for an optional output reload in this case,
4102 because we can't be sure that the constraint allows a register,
4103 and push_reload verifies the constraints for asms. */
4104 && (modified[i] == RELOAD_READ
4105 || (! no_output_reloads && ! this_insn_is_asm)))
4106 operand_reloadnum[i]
4107 = push_reload ((modified[i] != RELOAD_WRITE
4108 ? recog_data.operand[i] : 0),
4109 (modified[i] != RELOAD_READ
4110 ? recog_data.operand[i] : 0),
4111 (modified[i] != RELOAD_WRITE
4112 ? recog_data.operand_loc[i] : 0),
4113 (modified[i] != RELOAD_READ
4114 ? recog_data.operand_loc[i] : 0),
4115 (enum reg_class) goal_alternative[i],
4116 (modified[i] == RELOAD_WRITE
4117 ? VOIDmode : operand_mode[i]),
4118 (modified[i] == RELOAD_READ
4119 ? VOIDmode : operand_mode[i]),
4120 (insn_code_number < 0 ? 0
4121 : insn_data[insn_code_number].operand[i].strict_low),
4122 1, i, operand_type[i]);
4123 /* If a memory reference remains (either as a MEM or a pseudo that
4124 did not get a hard register), yet we can't make an optional
4125 reload, check if this is actually a pseudo register reference;
4126 we then need to emit a USE and/or a CLOBBER so that reload
4127 inheritance will do the right thing. */
4128 else if (replace
4129 && (MEM_P (operand)
4130 || (REG_P (operand)
4131 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4132 && reg_renumber [REGNO (operand)] < 0)))
4133 {
4134 operand = *recog_data.operand_loc[i];
4135
4136 while (GET_CODE (operand) == SUBREG)
4137 operand = SUBREG_REG (operand);
4138 if (REG_P (operand))
4139 {
4140 if (modified[i] != RELOAD_WRITE)
4141 /* We mark the USE with QImode so that we recognize
4142 it as one that can be safely deleted at the end
4143 of reload. */
4144 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4145 insn), QImode);
4146 if (modified[i] != RELOAD_READ)
4147 emit_insn_after (gen_clobber (operand), insn);
4148 }
4149 }
4150 }
4151 else if (goal_alternative_matches[i] >= 0
4152 && goal_alternative_win[goal_alternative_matches[i]]
4153 && modified[i] == RELOAD_READ
4154 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4155 && ! no_input_reloads && ! no_output_reloads
4156 && optimize)
4157 {
4158 /* Similarly, make an optional reload for a pair of matching
4159 objects that are in MEM or a pseudo that didn't get a hard reg. */
4160
4161 rtx operand = recog_data.operand[i];
4162
4163 while (GET_CODE (operand) == SUBREG)
4164 operand = SUBREG_REG (operand);
4165 if ((MEM_P (operand)
4166 || (REG_P (operand)
4167 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4168 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4169 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4170 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4171 recog_data.operand[i],
4172 recog_data.operand_loc[goal_alternative_matches[i]],
4173 recog_data.operand_loc[i],
4174 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4175 operand_mode[goal_alternative_matches[i]],
4176 operand_mode[i],
4177 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4178 }
4179
4180 /* Perform whatever substitutions on the operands we are supposed
4181 to make due to commutativity or replacement of registers
4182 with equivalent constants or memory slots. */
4183
4184 for (i = 0; i < noperands; i++)
4185 {
4186 /* We only do this on the last pass through reload, because it is
4187 possible for some data (like reg_equiv_address) to be changed during
4188 later passes. Moreover, we lose the opportunity to get a useful
4189 reload_{in,out}_reg when we do these replacements. */
4190
4191 if (replace)
4192 {
4193 rtx substitution = substed_operand[i];
4194
4195 *recog_data.operand_loc[i] = substitution;
4196
4197 /* If we're replacing an operand with a LABEL_REF, we need to
4198 make sure that there's a REG_LABEL_OPERAND note attached to
4199 this instruction. */
4200 if (GET_CODE (substitution) == LABEL_REF
4201 && !find_reg_note (insn, REG_LABEL_OPERAND,
4202 XEXP (substitution, 0))
4203 /* For a JUMP_P, if it was a branch target it must have
4204 already been recorded as such. */
4205 && (!JUMP_P (insn)
4206 || !label_is_jump_target_p (XEXP (substitution, 0),
4207 insn)))
4208 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4209 }
4210 else
4211 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4212 }
4213
4214 /* If this insn pattern contains any MATCH_DUP's, make sure that
4215 they will be substituted if the operands they match are substituted.
4216 Also do now any substitutions we already did on the operands.
4217
4218 Don't do this if we aren't making replacements because we might be
4219 propagating things allocated by frame pointer elimination into places
4220 it doesn't expect. */
4221
4222 if (insn_code_number >= 0 && replace)
4223 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4224 {
4225 int opno = recog_data.dup_num[i];
4226 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4227 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4228 }
4229
4230 #if 0
4231 /* This loses because reloading of prior insns can invalidate the equivalence
4232 (or at least find_equiv_reg isn't smart enough to find it any more),
4233 causing this insn to need more reload regs than it needed before.
4234 It may be too late to make the reload regs available.
4235 Now this optimization is done safely in choose_reload_regs. */
4236
4237 /* For each reload of a reg into some other class of reg,
4238 search for an existing equivalent reg (same value now) in the right class.
4239 We can use it as long as we don't need to change its contents. */
4240 for (i = 0; i < n_reloads; i++)
4241 if (rld[i].reg_rtx == 0
4242 && rld[i].in != 0
4243 && REG_P (rld[i].in)
4244 && rld[i].out == 0)
4245 {
4246 rld[i].reg_rtx
4247 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4248 static_reload_reg_p, 0, rld[i].inmode);
4249 /* Prevent generation of insn to load the value
4250 because the one we found already has the value. */
4251 if (rld[i].reg_rtx)
4252 rld[i].in = rld[i].reg_rtx;
4253 }
4254 #endif
4255
4256 /* If we detected an error and replaced the asm instruction with a USE,
4257 forget about the reloads. */
4258 if (GET_CODE (PATTERN (insn)) == USE
4259 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4260 n_reloads = 0;
4261
4262 /* Perhaps an output reload can be combined with another
4263 to reduce needs by one. */
4264 if (!goal_earlyclobber)
4265 combine_reloads ();
4266
4267 /* If we have a pair of reloads for parts of an address, and they are
4268 reloading the same object, and the operands themselves were not reloaded,
4269 and they are for two operands that are supposed to match, merge the reloads
4270 and change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4271
4272 for (i = 0; i < n_reloads; i++)
4273 {
4274 int k;
4275
4276 for (j = i + 1; j < n_reloads; j++)
4277 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4278 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4279 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4280 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4281 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4282 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4283 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4284 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4285 && rtx_equal_p (rld[i].in, rld[j].in)
4286 && (operand_reloadnum[rld[i].opnum] < 0
4287 || rld[operand_reloadnum[rld[i].opnum]].optional)
4288 && (operand_reloadnum[rld[j].opnum] < 0
4289 || rld[operand_reloadnum[rld[j].opnum]].optional)
4290 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4291 || (goal_alternative_matches[rld[j].opnum]
4292 == rld[i].opnum)))
4293 {
4294 for (k = 0; k < n_replacements; k++)
4295 if (replacements[k].what == j)
4296 replacements[k].what = i;
4297
4298 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4299 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4300 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4301 else
4302 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4303 rld[j].in = 0;
4304 }
4305 }
4306
4307 /* Scan all the reloads and update their type.
4308 If a reload is for the address of an operand and we didn't reload
4309 that operand, change the type. Similarly, change the operand number
4310 of a reload when two operands match. If a reload is optional, treat it
4311 as though the operand isn't reloaded.
4312
4313 ??? This latter case is somewhat odd because if we do the optional
4314 reload, it means the object is hanging around. Thus we need only
4315 do the address reload if the optional reload was NOT done.
4316
4317 Change secondary reloads to be the address type of their operand, not
4318 the normal type.
4319
4320 If an operand's reload is now RELOAD_OTHER, change any
4321 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4322 RELOAD_FOR_OTHER_ADDRESS. */
4323
4324 for (i = 0; i < n_reloads; i++)
4325 {
4326 if (rld[i].secondary_p
4327 && rld[i].when_needed == operand_type[rld[i].opnum])
4328 rld[i].when_needed = address_type[rld[i].opnum];
4329
4330 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4331 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4334 && (operand_reloadnum[rld[i].opnum] < 0
4335 || rld[operand_reloadnum[rld[i].opnum]].optional))
4336 {
4337 /* If we have a secondary reload to go along with this reload,
4338 change its type to RELOAD_FOR_OPADDR_ADDR. */
4339
4340 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4342 && rld[i].secondary_in_reload != -1)
4343 {
4344 int secondary_in_reload = rld[i].secondary_in_reload;
4345
4346 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4347
4348 /* If there's a tertiary reload we have to change it also. */
4349 if (secondary_in_reload > 0
4350 && rld[secondary_in_reload].secondary_in_reload != -1)
4351 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4352 = RELOAD_FOR_OPADDR_ADDR;
4353 }
4354
4355 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4356 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4357 && rld[i].secondary_out_reload != -1)
4358 {
4359 int secondary_out_reload = rld[i].secondary_out_reload;
4360
4361 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4362
4363 /* If there's a tertiary reload we have to change it also. */
4364 if (secondary_out_reload
4365 && rld[secondary_out_reload].secondary_out_reload != -1)
4366 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4367 = RELOAD_FOR_OPADDR_ADDR;
4368 }
4369
4370 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4371 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4372 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4373 else
4374 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4375 }
4376
4377 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4378 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4379 && operand_reloadnum[rld[i].opnum] >= 0
4380 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4381 == RELOAD_OTHER))
4382 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4383
4384 if (goal_alternative_matches[rld[i].opnum] >= 0)
4385 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4386 }
4387
4388 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4389 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4390 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4391
4392 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4393 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4394 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4395 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4396 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4397 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4398 This is complicated by the fact that a single operand can have more
4399 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4400 choose_reload_regs without affecting code quality, and cases that
4401 actually fail are extremely rare, so it turns out to be better to fix
4402 the problem here by not generating cases that choose_reload_regs will
4403 fail for. */
4404 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4405 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4406 a single operand.
4407 We can reduce the register pressure by exploiting that a
4408 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4409 does not conflict with any of them, if it is only used for the first of
4410 the RELOAD_FOR_X_ADDRESS reloads. */
4411 {
4412 int first_op_addr_num = -2;
4413 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4414 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4415 int need_change = 0;
4416 /* We use first_op_addr_num and the contents of the above arrays
4417 initially as flags: -2 means no instance encountered, -1 means exactly
4418 one instance encountered.
4419 If more than one instance has been encountered, we store the reload
4420 number of the first reload of the kind in question; reload numbers
4421 are known to be non-negative. */
4422 for (i = 0; i < noperands; i++)
4423 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4424 for (i = n_reloads - 1; i >= 0; i--)
4425 {
4426 switch (rld[i].when_needed)
4427 {
4428 case RELOAD_FOR_OPERAND_ADDRESS:
4429 if (++first_op_addr_num >= 0)
4430 {
4431 first_op_addr_num = i;
4432 need_change = 1;
4433 }
4434 break;
4435 case RELOAD_FOR_INPUT_ADDRESS:
4436 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4437 {
4438 first_inpaddr_num[rld[i].opnum] = i;
4439 need_change = 1;
4440 }
4441 break;
4442 case RELOAD_FOR_OUTPUT_ADDRESS:
4443 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4444 {
4445 first_outpaddr_num[rld[i].opnum] = i;
4446 need_change = 1;
4447 }
4448 break;
4449 default:
4450 break;
4451 }
4452 }
4453
4454 if (need_change)
4455 {
4456 for (i = 0; i < n_reloads; i++)
4457 {
4458 int first_num;
4459 enum reload_type type;
4460
4461 switch (rld[i].when_needed)
4462 {
4463 case RELOAD_FOR_OPADDR_ADDR:
4464 first_num = first_op_addr_num;
4465 type = RELOAD_FOR_OPERAND_ADDRESS;
4466 break;
4467 case RELOAD_FOR_INPADDR_ADDRESS:
4468 first_num = first_inpaddr_num[rld[i].opnum];
4469 type = RELOAD_FOR_INPUT_ADDRESS;
4470 break;
4471 case RELOAD_FOR_OUTADDR_ADDRESS:
4472 first_num = first_outpaddr_num[rld[i].opnum];
4473 type = RELOAD_FOR_OUTPUT_ADDRESS;
4474 break;
4475 default:
4476 continue;
4477 }
4478 if (first_num < 0)
4479 continue;
4480 else if (i > first_num)
4481 rld[i].when_needed = type;
4482 else
4483 {
4484 /* Check if the only TYPE reload that uses reload I is
4485 reload FIRST_NUM. */
4486 for (j = n_reloads - 1; j > first_num; j--)
4487 {
4488 if (rld[j].when_needed == type
4489 && (rld[i].secondary_p
4490 ? rld[j].secondary_in_reload == i
4491 : reg_mentioned_p (rld[i].in, rld[j].in)))
4492 {
4493 rld[i].when_needed = type;
4494 break;
4495 }
4496 }
4497 }
4498 }
4499 }
4500 }
4501
4502 /* See if we have any reloads that are now allowed to be merged
4503 because we've changed when the reload is needed to
4504 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4505 check for the most common cases. */
4506
4507 for (i = 0; i < n_reloads; i++)
4508 if (rld[i].in != 0 && rld[i].out == 0
4509 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4510 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4511 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4512 for (j = 0; j < n_reloads; j++)
4513 if (i != j && rld[j].in != 0 && rld[j].out == 0
4514 && rld[j].when_needed == rld[i].when_needed
4515 && MATCHES (rld[i].in, rld[j].in)
4516 && rld[i].rclass == rld[j].rclass
4517 && !rld[i].nocombine && !rld[j].nocombine
4518 && rld[i].reg_rtx == rld[j].reg_rtx)
4519 {
4520 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4521 transfer_replacements (i, j);
4522 rld[j].in = 0;
4523 }
4524
4525 #ifdef HAVE_cc0
4526 /* If we made any reloads for addresses, see if they violate a
4527 "no input reloads" requirement for this insn. But loads that we
4528 do after the insn (such as for output addresses) are fine. */
4529 if (no_input_reloads)
4530 for (i = 0; i < n_reloads; i++)
4531 gcc_assert (rld[i].in == 0
4532 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4533 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4534 #endif
4535
4536 /* Compute reload_mode and reload_nregs. */
4537 for (i = 0; i < n_reloads; i++)
4538 {
4539 rld[i].mode
4540 = (rld[i].inmode == VOIDmode
4541 || (GET_MODE_SIZE (rld[i].outmode)
4542 > GET_MODE_SIZE (rld[i].inmode)))
4543 ? rld[i].outmode : rld[i].inmode;
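/* That is, the wider of the input and output modes, with a VOIDmode
   input mode treated as narrower than anything.  */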
4544
4545 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4546 }
4547
4548 /* Special case a simple move with an input reload and a
4549 destination of a hard reg: if the hard reg is ok, use it. */
4550 for (i = 0; i < n_reloads; i++)
4551 if (rld[i].when_needed == RELOAD_FOR_INPUT
4552 && GET_CODE (PATTERN (insn)) == SET
4553 && REG_P (SET_DEST (PATTERN (insn)))
4554 && (SET_SRC (PATTERN (insn)) == rld[i].in
4555 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4556 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4557 {
4558 rtx dest = SET_DEST (PATTERN (insn));
4559 unsigned int regno = REGNO (dest);
4560
4561 if (regno < FIRST_PSEUDO_REGISTER
4562 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4563 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4564 {
4565 int nr = hard_regno_nregs[regno][rld[i].mode];
4566 int ok = 1, nri;
4567
4568 for (nri = 1; nri < nr; nri ++)
4569 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4570 ok = 0;
4571
4572 if (ok)
4573 rld[i].reg_rtx = dest;
4574 }
4575 }
4576
4577 return retval;
4578 }
4579
4580 /* Return true if alternative number ALTNUM in constraint-string
4581 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4582 MEM gives the reference if it didn't need any reloads, otherwise it
4583 is null. */
4584
4585 static bool
4586 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4587 const char *constraint, int altnum)
4588 {
4589 int c;
4590
4591 /* Skip alternatives before the one requested. */
4592 while (altnum > 0)
4593 {
4594 while (*constraint++ != ',');
4595 altnum--;
4596 }
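/* CONSTRAINT now points at the start of alternative ALTNUM.  */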
4597 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4598 If one of them is present, this alternative accepts the result of
4599 passing a constant-pool reference through find_reloads_toplev.
4600
4601 The same is true of extra memory constraints if the address
4602 was reloaded into a register. However, the target may elect
4603 to disallow the original constant address, forcing it to be
4604 reloaded into a register instead. */
4605 for (; (c = *constraint) && c != ',' && c != '#';
4606 constraint += CONSTRAINT_LEN (c, constraint))
4607 {
4608 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4609 return true;
4610 #ifdef EXTRA_CONSTRAINT_STR
4611 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4612 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4613 return true;
4614 #endif
4615 }
4616 return false;
4617 }
4618 \f
4619 /* Scan X for memory references and scan the addresses for reloading.
4620 Also checks for references to "constant" regs that we want to eliminate
4621 and replaces them with the values they stand for.
4622 We may alter X destructively if it contains a reference to such.
4623 If X is just a constant reg, we return the equivalent value
4624 instead of X.
4625
4626 IND_LEVELS says how many levels of indirect addressing this machine
4627 supports.
4628
4629 OPNUM and TYPE identify the purpose of the reload.
4630
4631 IS_SET_DEST is true if X is the destination of a SET, which is not
4632 appropriate to be replaced by a constant.
4633
4634 INSN, if nonzero, is the insn in which we do the reload. It is used
4635 to determine if we may generate output reloads, and where to put USEs
4636 for pseudos that we have to replace with stack slots.
4637
4638 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4639 result of find_reloads_address. */
4640
4641 static rtx
4642 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4643 int ind_levels, int is_set_dest, rtx insn,
4644 int *address_reloaded)
4645 {
4646 RTX_CODE code = GET_CODE (x);
4647
4648 const char *fmt = GET_RTX_FORMAT (code);
4649 int i;
4650 int copied;
4651
4652 if (code == REG)
4653 {
4654 /* This code is duplicated for speed in find_reloads. */
4655 int regno = REGNO (x);
4656 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4657 x = reg_equiv_constant (regno);
4658 #if 0
4659 /* This creates (subreg (mem...)) which would cause an unnecessary
4660 reload of the mem. */
4661 else if (reg_equiv_mem (regno) != 0)
4662 x = reg_equiv_mem (regno);
4663 #endif
4664 else if (reg_equiv_memory_loc (regno)
4665 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4666 {
4667 rtx mem = make_memloc (x, regno);
4668 if (reg_equiv_address (regno)
4669 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4670 {
4671 /* If this is not a toplevel operand, find_reloads doesn't see
4672 this substitution. We have to emit a USE of the pseudo so
4673 that delete_output_reload can see it. */
4674 if (replace_reloads && recog_data.operand[opnum] != x)
4675 /* We mark the USE with QImode so that we recognize it
4676 as one that can be safely deleted at the end of
4677 reload. */
4678 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4679 QImode);
4680 x = mem;
4681 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4682 opnum, type, ind_levels, insn);
4683 if (!rtx_equal_p (x, mem))
4684 push_reg_equiv_alt_mem (regno, x);
4685 if (address_reloaded)
4686 *address_reloaded = i;
4687 }
4688 }
4689 return x;
4690 }
4691 if (code == MEM)
4692 {
4693 rtx tem = x;
4694
4695 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4696 opnum, type, ind_levels, insn);
4697 if (address_reloaded)
4698 *address_reloaded = i;
4699
4700 return tem;
4701 }
4702
4703 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4704 {
4705 /* Check for SUBREG containing a REG that's equivalent to a
4706 constant. If the constant has a known value, truncate it
4707 right now. Similarly if we are extracting a single-word of a
4708 multi-word constant. If the constant is symbolic, allow it
4709 to be substituted normally. push_reload will strip the
4710 subreg later. The constant must not be VOIDmode, because we
4711 will lose the mode of the register (this should never happen
4712 because one of the cases above should handle it). */
4713
4714 int regno = REGNO (SUBREG_REG (x));
4715 rtx tem;
4716
4717 if (regno >= FIRST_PSEUDO_REGISTER
4718 && reg_renumber[regno] < 0
4719 && reg_equiv_constant (regno) != 0)
4720 {
4721 tem =
4722 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4723 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4724 gcc_assert (tem);
4725 if (CONSTANT_P (tem)
4726 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4727 {
4728 tem = force_const_mem (GET_MODE (x), tem);
4729 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4730 &XEXP (tem, 0), opnum, type,
4731 ind_levels, insn);
4732 if (address_reloaded)
4733 *address_reloaded = i;
4734 }
4735 return tem;
4736 }
4737
4738 /* If the subreg contains a reg that will be converted to a mem,
4739 convert the subreg to a narrower memref now.
4740 Otherwise, we would get (subreg (mem ...) ...),
4741 which would force reload of the mem.
4742
4743 We also need to do this if there is an equivalent MEM that is
4744 not offsettable. In that case, alter_subreg would produce an
4745 invalid address on big-endian machines.
4746
4747 For machines that extend byte loads, we must not reload using
4748 a wider mode if we have a paradoxical SUBREG. find_reloads will
4749 force a reload in that case. So we should not do anything here. */
4750
4751 if (regno >= FIRST_PSEUDO_REGISTER
4752 #ifdef LOAD_EXTEND_OP
4753 && !paradoxical_subreg_p (x)
4754 #endif
4755 && (reg_equiv_address (regno) != 0
4756 || (reg_equiv_mem (regno) != 0
4757 && (! strict_memory_address_addr_space_p
4758 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4759 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4760 || ! offsettable_memref_p (reg_equiv_mem (regno))
4761 || num_not_at_initial_offset))))
4762 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4763 insn, address_reloaded);
4764 }
4765
4766 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4767 {
4768 if (fmt[i] == 'e')
4769 {
4770 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4771 ind_levels, is_set_dest, insn,
4772 address_reloaded);
 4773              /* If we have replaced a reg with its equivalent memory loc -
4774 that can still be handled here e.g. if it's in a paradoxical
4775 subreg - we must make the change in a copy, rather than using
4776 a destructive change. This way, find_reloads can still elect
4777 not to do the change. */
4778 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4779 {
4780 x = shallow_copy_rtx (x);
4781 copied = 1;
4782 }
4783 XEXP (x, i) = new_part;
4784 }
4785 }
4786 return x;
4787 }
4788
4789 /* Return a mem ref for the memory equivalent of reg REGNO.
4790 This mem ref is not shared with anything. */
4791
4792 static rtx
4793 make_memloc (rtx ad, int regno)
4794 {
4795 /* We must rerun eliminate_regs, in case the elimination
4796 offsets have changed. */
4797 rtx tem
4798 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4799 0);
4800
4801 /* If TEM might contain a pseudo, we must copy it to avoid
4802 modifying it when we do the substitution for the reload. */
4803 if (rtx_varies_p (tem, 0))
4804 tem = copy_rtx (tem);
4805
4806 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4807 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4808
4809 /* Copy the result if it's still the same as the equivalence, to avoid
4810 modifying it when we do the substitution for the reload. */
4811 if (tem == reg_equiv_memory_loc (regno))
4812 tem = copy_rtx (tem);
4813 return tem;
4814 }
4815
4816 /* Returns true if AD could be turned into a valid memory reference
4817 to mode MODE in address space AS by reloading the part pointed to
4818 by PART into a register. */
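/* A hedged illustration, not from the original sources: if AD is
   (plus (reg fp) (const_int 100000)) and PART points at the base register,
   this asks whether (plus (reg NEW) (const_int 100000)) -- with NEW a
   scratch register number -- would be a valid address for MODE in AS.  */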
4819
4820 static int
4821 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4822 addr_space_t as, rtx *part)
4823 {
4824 int retv;
4825 rtx tem = *part;
4826 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4827
4828 *part = reg;
4829 retv = memory_address_addr_space_p (mode, ad, as);
4830 *part = tem;
4831
4832 return retv;
4833 }
4834
4835 /* Record all reloads needed for handling memory address AD
4836 which appears in *LOC in a memory reference to mode MODE
4837 which itself is found in location *MEMREFLOC.
4838 Note that we take shortcuts assuming that no multi-reg machine mode
4839 occurs as part of an address.
4840
4841 OPNUM and TYPE specify the purpose of this reload.
4842
4843 IND_LEVELS says how many levels of indirect addressing this machine
4844 supports.
4845
4846 INSN, if nonzero, is the insn in which we do the reload. It is used
4847 to determine if we may generate output reloads, and where to put USEs
4848 for pseudos that we have to replace with stack slots.
4849
4850 Value is one if this address is reloaded or replaced as a whole; it is
4851 zero if the top level of this address was not reloaded or replaced, and
4852 it is -1 if it may or may not have been reloaded or replaced.
4853
4854 Note that there is no verification that the address will be valid after
4855 this routine does its work. Instead, we rely on the fact that the address
4856 was valid when reload started. So we need only undo things that reload
4857 could have broken. These are wrong register types, pseudos not allocated
4858 to a hard register, and frame pointer elimination. */
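/* A hedged example of the return convention: when AD is a pseudo whose
   reg_equiv_constant is known, the constant is reloaded into a base register
   and 1 is returned, since the address was replaced as a whole; when AD is
   already a valid hard base register, 0 is returned and nothing is pushed.  */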
4859
4860 static int
4861 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4862 rtx *loc, int opnum, enum reload_type type,
4863 int ind_levels, rtx insn)
4864 {
4865 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4866 : ADDR_SPACE_GENERIC;
4867 int regno;
4868 int removed_and = 0;
4869 int op_index;
4870 rtx tem;
4871
4872 /* If the address is a register, see if it is a legitimate address and
4873 reload if not. We first handle the cases where we need not reload
4874 or where we must reload in a non-standard way. */
4875
4876 if (REG_P (ad))
4877 {
4878 regno = REGNO (ad);
4879
4880 if (reg_equiv_constant (regno) != 0)
4881 {
4882 find_reloads_address_part (reg_equiv_constant (regno), loc,
4883 base_reg_class (mode, MEM, SCRATCH),
4884 GET_MODE (ad), opnum, type, ind_levels);
4885 return 1;
4886 }
4887
4888 tem = reg_equiv_memory_loc (regno);
4889 if (tem != 0)
4890 {
4891 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4892 {
4893 tem = make_memloc (ad, regno);
4894 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4895 XEXP (tem, 0),
4896 MEM_ADDR_SPACE (tem)))
4897 {
4898 rtx orig = tem;
4899
4900 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4901 &XEXP (tem, 0), opnum,
4902 ADDR_TYPE (type), ind_levels, insn);
4903 if (!rtx_equal_p (tem, orig))
4904 push_reg_equiv_alt_mem (regno, tem);
4905 }
4906 /* We can avoid a reload if the register's equivalent memory
4907 expression is valid as an indirect memory address.
4908 But not all addresses are valid in a mem used as an indirect
4909 address: only reg or reg+constant. */
4910
4911 if (ind_levels > 0
4912 && strict_memory_address_addr_space_p (mode, tem, as)
4913 && (REG_P (XEXP (tem, 0))
4914 || (GET_CODE (XEXP (tem, 0)) == PLUS
4915 && REG_P (XEXP (XEXP (tem, 0), 0))
4916 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4917 {
 4918                  /* If TEM is not the same as what we'll be replacing the
4919 pseudo with after reload, put a USE in front of INSN
4920 in the final reload pass. */
4921 if (replace_reloads
4922 && num_not_at_initial_offset
4923 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4924 {
4925 *loc = tem;
4926 /* We mark the USE with QImode so that we
4927 recognize it as one that can be safely
4928 deleted at the end of reload. */
4929 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4930 insn), QImode);
4931
4932 /* This doesn't really count as replacing the address
4933 as a whole, since it is still a memory access. */
4934 }
4935 return 0;
4936 }
4937 ad = tem;
4938 }
4939 }
4940
4941 /* The only remaining case where we can avoid a reload is if this is a
4942 hard register that is valid as a base register and which is not the
4943 subject of a CLOBBER in this insn. */
4944
4945 else if (regno < FIRST_PSEUDO_REGISTER
4946 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4947 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4948 return 0;
4949
4950 /* If we do not have one of the cases above, we must do the reload. */
4951 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4952 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4953 return 1;
4954 }
4955
4956 if (strict_memory_address_addr_space_p (mode, ad, as))
4957 {
4958 /* The address appears valid, so reloads are not needed.
4959 But the address may contain an eliminable register.
4960 This can happen because a machine with indirect addressing
4961 may consider a pseudo register by itself a valid address even when
4962 it has failed to get a hard reg.
4963 So do a tree-walk to find and eliminate all such regs. */
4964
4965 /* But first quickly dispose of a common case. */
4966 if (GET_CODE (ad) == PLUS
4967 && CONST_INT_P (XEXP (ad, 1))
4968 && REG_P (XEXP (ad, 0))
4969 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4970 return 0;
4971
4972 subst_reg_equivs_changed = 0;
4973 *loc = subst_reg_equivs (ad, insn);
4974
4975 if (! subst_reg_equivs_changed)
4976 return 0;
4977
4978 /* Check result for validity after substitution. */
4979 if (strict_memory_address_addr_space_p (mode, ad, as))
4980 return 0;
4981 }
4982
4983 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4984 do
4985 {
4986 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4987 {
4988 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4989 ind_levels, win);
4990 }
4991 break;
4992 win:
4993 *memrefloc = copy_rtx (*memrefloc);
4994 XEXP (*memrefloc, 0) = ad;
4995 move_replacements (&ad, &XEXP (*memrefloc, 0));
4996 return -1;
4997 }
4998 while (0);
4999 #endif
5000
5001 /* The address is not valid. We have to figure out why. First see if
5002 we have an outer AND and remove it if so. Then analyze what's inside. */
5003
5004 if (GET_CODE (ad) == AND)
5005 {
5006 removed_and = 1;
5007 loc = &XEXP (ad, 0);
5008 ad = *loc;
5009 }
5010
5011 /* One possibility for why the address is invalid is that it is itself
5012 a MEM. This can happen when the frame pointer is being eliminated, a
5013 pseudo is not allocated to a hard register, and the offset between the
5014 frame and stack pointers is not its initial value. In that case the
5015 pseudo will have been replaced by a MEM referring to the
5016 stack pointer. */
5017 if (MEM_P (ad))
5018 {
5019 /* First ensure that the address in this MEM is valid. Then, unless
5020 indirect addresses are valid, reload the MEM into a register. */
5021 tem = ad;
5022 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5023 opnum, ADDR_TYPE (type),
5024 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5025
5026 /* If tem was changed, then we must create a new memory reference to
5027 hold it and store it back into memrefloc. */
5028 if (tem != ad && memrefloc)
5029 {
5030 *memrefloc = copy_rtx (*memrefloc);
5031 copy_replacements (tem, XEXP (*memrefloc, 0));
5032 loc = &XEXP (*memrefloc, 0);
5033 if (removed_and)
5034 loc = &XEXP (*loc, 0);
5035 }
5036
 5037        /* Check cases similar to those for indirect addresses above, except
5038 that we can allow pseudos and a MEM since they should have been
5039 taken care of above. */
5040
5041 if (ind_levels == 0
5042 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5043 || MEM_P (XEXP (tem, 0))
5044 || ! (REG_P (XEXP (tem, 0))
5045 || (GET_CODE (XEXP (tem, 0)) == PLUS
5046 && REG_P (XEXP (XEXP (tem, 0), 0))
5047 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5048 {
5049 /* Must use TEM here, not AD, since it is the one that will
5050 have any subexpressions reloaded, if needed. */
5051 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5052 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5053 VOIDmode, 0,
5054 0, opnum, type);
5055 return ! removed_and;
5056 }
5057 else
5058 return 0;
5059 }
5060
 5061    /* If we have the address of a stack slot but it's not valid because the
5062 displacement is too large, compute the sum in a register.
5063 Handle all base registers here, not just fp/ap/sp, because on some
5064 targets (namely SH) we can also get too large displacements from
5065 big-endian corrections. */
5066 else if (GET_CODE (ad) == PLUS
5067 && REG_P (XEXP (ad, 0))
5068 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5069 && CONST_INT_P (XEXP (ad, 1))
5070 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5071 CONST_INT)
5072 /* Similarly, if we were to reload the base register and the
5073 mem+offset address is still invalid, then we want to reload
5074 the whole address, not just the base register. */
5075 || ! maybe_memory_address_addr_space_p
5076 (mode, ad, as, &(XEXP (ad, 0)))))
5077
5078 {
5079 /* Unshare the MEM rtx so we can safely alter it. */
5080 if (memrefloc)
5081 {
5082 *memrefloc = copy_rtx (*memrefloc);
5083 loc = &XEXP (*memrefloc, 0);
5084 if (removed_and)
5085 loc = &XEXP (*loc, 0);
5086 }
5087
5088 if (double_reg_address_ok
5089 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode,
5090 PLUS, CONST_INT))
5091 {
5092 /* Unshare the sum as well. */
5093 *loc = ad = copy_rtx (ad);
5094
5095 /* Reload the displacement into an index reg.
5096 We assume the frame pointer or arg pointer is a base reg. */
5097 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5098 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5099 type, ind_levels);
5100 return 0;
5101 }
5102 else
5103 {
5104 /* If the sum of two regs is not necessarily valid,
5105 reload the sum into a base reg.
5106 That will at least work. */
5107 find_reloads_address_part (ad, loc,
5108 base_reg_class (mode, MEM, SCRATCH),
5109 GET_MODE (ad), opnum, type, ind_levels);
5110 }
5111 return ! removed_and;
5112 }
5113
5114 /* If we have an indexed stack slot, there are three possible reasons why
5115 it might be invalid: The index might need to be reloaded, the address
5116 might have been made by frame pointer elimination and hence have a
5117 constant out of range, or both reasons might apply.
5118
5119 We can easily check for an index needing reload, but even if that is the
5120 case, we might also have an invalid constant. To avoid making the
5121 conservative assumption and requiring two reloads, we see if this address
5122 is valid when not interpreted strictly. If it is, the only problem is
5123 that the index needs a reload and find_reloads_address_1 will take care
5124 of it.
5125
5126 Handle all base registers here, not just fp/ap/sp, because on some
5127 targets (namely SPARC) we can also get invalid addresses from preventive
5128 subreg big-endian corrections made by find_reloads_toplev. We
5129 can also get expressions involving LO_SUM (rather than PLUS) from
5130 find_reloads_subreg_address.
5131
5132 If we decide to do something, it must be that `double_reg_address_ok'
5133 is true. We generate a reload of the base register + constant and
5134 rework the sum so that the reload register will be added to the index.
5135 This is safe because we know the address isn't shared.
5136
5137 We check for the base register as both the first and second operand of
5138 the innermost PLUS and/or LO_SUM. */
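/* Rough illustration with assumed operand shapes, not from the original text:
   an eliminated address such as (plus (plus (reg fp) (reg idx)) (const_int 1000))
   is rewritten as (plus (plus (reg fp) (const_int 1000)) (reg idx)); the inner
   sum is then reloaded into a base register, so the final address is
   effectively base-reg + idx.  */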
5139
5140 for (op_index = 0; op_index < 2; ++op_index)
5141 {
5142 rtx operand, addend;
5143 enum rtx_code inner_code;
5144
5145 if (GET_CODE (ad) != PLUS)
5146 continue;
5147
5148 inner_code = GET_CODE (XEXP (ad, 0));
5149 if (!(GET_CODE (ad) == PLUS
5150 && CONST_INT_P (XEXP (ad, 1))
5151 && (inner_code == PLUS || inner_code == LO_SUM)))
5152 continue;
5153
5154 operand = XEXP (XEXP (ad, 0), op_index);
5155 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5156 continue;
5157
5158 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5159
5160 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5161 GET_CODE (addend))
5162 || operand == frame_pointer_rtx
5163 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5164 || operand == hard_frame_pointer_rtx
5165 #endif
5166 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5167 || operand == arg_pointer_rtx
5168 #endif
5169 || operand == stack_pointer_rtx)
5170 && ! maybe_memory_address_addr_space_p
5171 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5172 {
5173 rtx offset_reg;
5174 enum reg_class cls;
5175
5176 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5177
5178 /* Form the adjusted address. */
5179 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5180 ad = gen_rtx_PLUS (GET_MODE (ad),
5181 op_index == 0 ? offset_reg : addend,
5182 op_index == 0 ? addend : offset_reg);
5183 else
5184 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5185 op_index == 0 ? offset_reg : addend,
5186 op_index == 0 ? addend : offset_reg);
5187 *loc = ad;
5188
5189 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5190 find_reloads_address_part (XEXP (ad, op_index),
5191 &XEXP (ad, op_index), cls,
5192 GET_MODE (ad), opnum, type, ind_levels);
5193 find_reloads_address_1 (mode,
5194 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5195 GET_CODE (XEXP (ad, op_index)),
5196 &XEXP (ad, 1 - op_index), opnum,
5197 type, 0, insn);
5198
5199 return 0;
5200 }
5201 }
5202
5203 /* See if address becomes valid when an eliminable register
5204 in a sum is replaced. */
5205
5206 tem = ad;
5207 if (GET_CODE (ad) == PLUS)
5208 tem = subst_indexed_address (ad);
5209 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5210 {
5211 /* Ok, we win that way. Replace any additional eliminable
5212 registers. */
5213
5214 subst_reg_equivs_changed = 0;
5215 tem = subst_reg_equivs (tem, insn);
5216
5217 /* Make sure that didn't make the address invalid again. */
5218
5219 if (! subst_reg_equivs_changed
5220 || strict_memory_address_addr_space_p (mode, tem, as))
5221 {
5222 *loc = tem;
5223 return 0;
5224 }
5225 }
5226
5227 /* If constants aren't valid addresses, reload the constant address
5228 into a register. */
5229 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5230 {
5231 enum machine_mode address_mode = GET_MODE (ad);
5232 if (address_mode == VOIDmode)
5233 address_mode = targetm.addr_space.address_mode (as);
5234
5235 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5236 Unshare it so we can safely alter it. */
5237 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5238 && CONSTANT_POOL_ADDRESS_P (ad))
5239 {
5240 *memrefloc = copy_rtx (*memrefloc);
5241 loc = &XEXP (*memrefloc, 0);
5242 if (removed_and)
5243 loc = &XEXP (*loc, 0);
5244 }
5245
5246 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5247 address_mode, opnum, type, ind_levels);
5248 return ! removed_and;
5249 }
5250
5251 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5252 ind_levels, insn);
5253 }
5254 \f
5255 /* Find all pseudo regs appearing in AD
5256 that are eliminable in favor of equivalent values
5257 and do not have hard regs; replace them by their equivalents.
5258 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5259 front of it for pseudos that we have to replace with stack slots. */
5260
5261 static rtx
5262 subst_reg_equivs (rtx ad, rtx insn)
5263 {
5264 RTX_CODE code = GET_CODE (ad);
5265 int i;
5266 const char *fmt;
5267
5268 switch (code)
5269 {
5270 case HIGH:
5271 case CONST_INT:
5272 case CONST:
5273 case CONST_DOUBLE:
5274 case CONST_FIXED:
5275 case CONST_VECTOR:
5276 case SYMBOL_REF:
5277 case LABEL_REF:
5278 case PC:
5279 case CC0:
5280 return ad;
5281
5282 case REG:
5283 {
5284 int regno = REGNO (ad);
5285
5286 if (reg_equiv_constant (regno) != 0)
5287 {
5288 subst_reg_equivs_changed = 1;
5289 return reg_equiv_constant (regno);
5290 }
5291 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5292 {
5293 rtx mem = make_memloc (ad, regno);
5294 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5295 {
5296 subst_reg_equivs_changed = 1;
5297 /* We mark the USE with QImode so that we recognize it
5298 as one that can be safely deleted at the end of
5299 reload. */
5300 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5301 QImode);
5302 return mem;
5303 }
5304 }
5305 }
5306 return ad;
5307
5308 case PLUS:
5309 /* Quickly dispose of a common case. */
5310 if (XEXP (ad, 0) == frame_pointer_rtx
5311 && CONST_INT_P (XEXP (ad, 1)))
5312 return ad;
5313 break;
5314
5315 default:
5316 break;
5317 }
5318
5319 fmt = GET_RTX_FORMAT (code);
5320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5321 if (fmt[i] == 'e')
5322 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5323 return ad;
5324 }
5325 \f
5326 /* Compute the sum of X and Y, making canonicalizations assumed in an
5327 address, namely: sum constant integers, surround the sum of two
5328 constants with a CONST, put the constant as the second operand, and
5329 group the constant on the outermost sum.
5330
5331 This routine assumes both inputs are already in canonical form. */
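/* For instance (illustrative only): form_sum (SImode,
   (plus (reg 100) (const_int 4)), (const_int 8)) folds the constants and
   returns (plus (reg 100) (const_int 12)), keeping the constant outermost
   as described above.  */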
5332
5333 rtx
5334 form_sum (enum machine_mode mode, rtx x, rtx y)
5335 {
5336 rtx tem;
5337
5338 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5339 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5340
5341 if (CONST_INT_P (x))
5342 return plus_constant (y, INTVAL (x));
5343 else if (CONST_INT_P (y))
5344 return plus_constant (x, INTVAL (y));
5345 else if (CONSTANT_P (x))
5346 tem = x, x = y, y = tem;
5347
5348 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5349 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5350
5351 /* Note that if the operands of Y are specified in the opposite
5352 order in the recursive calls below, infinite recursion will occur. */
5353 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5354 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5355
 5356    /* If both are constant, encapsulate the sum.  Otherwise, just form the sum.  A
5357 constant will have been placed second. */
5358 if (CONSTANT_P (x) && CONSTANT_P (y))
5359 {
5360 if (GET_CODE (x) == CONST)
5361 x = XEXP (x, 0);
5362 if (GET_CODE (y) == CONST)
5363 y = XEXP (y, 0);
5364
5365 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5366 }
5367
5368 return gen_rtx_PLUS (mode, x, y);
5369 }
5370 \f
5371 /* If ADDR is a sum containing a pseudo register that should be
5372 replaced with a constant (from reg_equiv_constant),
5373 return the result of doing so, and also apply the associative
5374 law so that the result is more likely to be a valid address.
5375 (But it is not guaranteed to be one.)
5376
5377 Note that at most one register is replaced, even if more are
5378 replaceable. Also, we try to put the result into a canonical form
5379 so it is more likely to be a valid address.
5380
5381 In all other cases, return ADDR. */
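/* Illustrative sketch, assuming a hypothetical pseudo 100 that did not get a
   hard register and is equivalent to (symbol_ref "x"):
   (plus (reg 100) (const_int 4)) becomes
   (const (plus (symbol_ref "x") (const_int 4))) via form_sum, which is more
   likely (though not guaranteed) to be a valid address.  */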
5382
5383 static rtx
5384 subst_indexed_address (rtx addr)
5385 {
5386 rtx op0 = 0, op1 = 0, op2 = 0;
5387 rtx tem;
5388 int regno;
5389
5390 if (GET_CODE (addr) == PLUS)
5391 {
5392 /* Try to find a register to replace. */
5393 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5394 if (REG_P (op0)
5395 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5396 && reg_renumber[regno] < 0
5397 && reg_equiv_constant (regno) != 0)
5398 op0 = reg_equiv_constant (regno);
5399 else if (REG_P (op1)
5400 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5401 && reg_renumber[regno] < 0
5402 && reg_equiv_constant (regno) != 0)
5403 op1 = reg_equiv_constant (regno);
5404 else if (GET_CODE (op0) == PLUS
5405 && (tem = subst_indexed_address (op0)) != op0)
5406 op0 = tem;
5407 else if (GET_CODE (op1) == PLUS
5408 && (tem = subst_indexed_address (op1)) != op1)
5409 op1 = tem;
5410 else
5411 return addr;
5412
5413 /* Pick out up to three things to add. */
5414 if (GET_CODE (op1) == PLUS)
5415 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5416 else if (GET_CODE (op0) == PLUS)
5417 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5418
5419 /* Compute the sum. */
5420 if (op2 != 0)
5421 op1 = form_sum (GET_MODE (addr), op1, op2);
5422 if (op1 != 0)
5423 op0 = form_sum (GET_MODE (addr), op0, op1);
5424
5425 return op0;
5426 }
5427 return addr;
5428 }
5429 \f
5430 /* Update the REG_INC notes for an insn. It updates all REG_INC
 5431    notes for the instruction which refer to REGNO so that they refer
5432 to the reload number.
5433
5434 INSN is the insn for which any REG_INC notes need updating.
5435
5436 REGNO is the register number which has been reloaded.
5437
5438 RELOADNUM is the reload number. */
5439
5440 static void
5441 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5442 int reloadnum ATTRIBUTE_UNUSED)
5443 {
5444 #ifdef AUTO_INC_DEC
5445 rtx link;
5446
5447 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5448 if (REG_NOTE_KIND (link) == REG_INC
5449 && (int) REGNO (XEXP (link, 0)) == regno)
5450 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5451 #endif
5452 }
5453 \f
5454 /* Record the pseudo registers we must reload into hard registers in a
5455 subexpression of a would-be memory address, X referring to a value
5456 in mode MODE. (This function is not called if the address we find
5457 is strictly valid.)
5458
5459 CONTEXT = 1 means we are considering regs as index regs,
5460 = 0 means we are considering them as base regs.
5461 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5462 or an autoinc code.
5463 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5464 is the code of the index part of the address. Otherwise, pass SCRATCH
5465 for this argument.
5466 OPNUM and TYPE specify the purpose of any reloads made.
5467
5468 IND_LEVELS says how many levels of indirect addressing are
5469 supported at this point in the address.
5470
5471 INSN, if nonzero, is the insn in which we do the reload. It is used
5472 to determine if we may generate output reloads.
5473
5474 We return nonzero if X, as a whole, is reloaded or replaced. */
5475
5476 /* Note that we take shortcuts assuming that no multi-reg machine mode
5477 occurs as part of an address.
5478 Also, this is not fully machine-customizable; it works for machines
5479 such as VAXen and 68000's and 32000's, but other possible machines
5480 could have addressing modes that this does not handle right.
5481 If you add push_reload calls here, you need to make sure gen_reload
5482 handles those cases gracefully. */
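/* A hedged example of the CONTEXT convention: for an address like
   (plus (mult (reg idx) (const_int 4)) (reg base)), the MULT operand is
   scanned with CONTEXT == 1 (index register rules) and the other operand
   with CONTEXT == 0 (base register rules).  */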
5483
5484 static int
5485 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5486 enum rtx_code outer_code, enum rtx_code index_code,
5487 rtx *loc, int opnum, enum reload_type type,
5488 int ind_levels, rtx insn)
5489 {
5490 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5491 ((CONTEXT) == 0 \
5492 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5493 : REGNO_OK_FOR_INDEX_P (REGNO))
5494
5495 enum reg_class context_reg_class;
5496 RTX_CODE code = GET_CODE (x);
5497
5498 if (context == 1)
5499 context_reg_class = INDEX_REG_CLASS;
5500 else
5501 context_reg_class = base_reg_class (mode, outer_code, index_code);
5502
5503 switch (code)
5504 {
5505 case PLUS:
5506 {
5507 rtx orig_op0 = XEXP (x, 0);
5508 rtx orig_op1 = XEXP (x, 1);
5509 RTX_CODE code0 = GET_CODE (orig_op0);
5510 RTX_CODE code1 = GET_CODE (orig_op1);
5511 rtx op0 = orig_op0;
5512 rtx op1 = orig_op1;
5513
5514 if (GET_CODE (op0) == SUBREG)
5515 {
5516 op0 = SUBREG_REG (op0);
5517 code0 = GET_CODE (op0);
5518 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5519 op0 = gen_rtx_REG (word_mode,
5520 (REGNO (op0) +
5521 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5522 GET_MODE (SUBREG_REG (orig_op0)),
5523 SUBREG_BYTE (orig_op0),
5524 GET_MODE (orig_op0))));
5525 }
5526
5527 if (GET_CODE (op1) == SUBREG)
5528 {
5529 op1 = SUBREG_REG (op1);
5530 code1 = GET_CODE (op1);
5531 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5532 /* ??? Why is this given op1's mode and above for
5533 ??? op0 SUBREGs we use word_mode? */
5534 op1 = gen_rtx_REG (GET_MODE (op1),
5535 (REGNO (op1) +
5536 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5537 GET_MODE (SUBREG_REG (orig_op1)),
5538 SUBREG_BYTE (orig_op1),
5539 GET_MODE (orig_op1))));
5540 }
 5541          /* A PLUS in the index register position may be created only as a result
 5542             of register rematerialization for expressions like &localvar*4.  Reload it.
5543 It may be possible to combine the displacement on the outer level,
5544 but it is probably not worthwhile to do so. */
5545 if (context == 1)
5546 {
5547 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5548 opnum, ADDR_TYPE (type), ind_levels, insn);
5549 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5550 context_reg_class,
5551 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5552 return 1;
5553 }
5554
5555 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5556 || code0 == ZERO_EXTEND || code1 == MEM)
5557 {
5558 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5559 &XEXP (x, 0), opnum, type, ind_levels,
5560 insn);
5561 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5562 &XEXP (x, 1), opnum, type, ind_levels,
5563 insn);
5564 }
5565
5566 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5567 || code1 == ZERO_EXTEND || code0 == MEM)
5568 {
5569 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5570 &XEXP (x, 0), opnum, type, ind_levels,
5571 insn);
5572 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5573 &XEXP (x, 1), opnum, type, ind_levels,
5574 insn);
5575 }
5576
5577 else if (code0 == CONST_INT || code0 == CONST
5578 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5579 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5580 &XEXP (x, 1), opnum, type, ind_levels,
5581 insn);
5582
5583 else if (code1 == CONST_INT || code1 == CONST
5584 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5585 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5586 &XEXP (x, 0), opnum, type, ind_levels,
5587 insn);
5588
5589 else if (code0 == REG && code1 == REG)
5590 {
5591 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5592 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5593 return 0;
5594 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5595 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5596 return 0;
5597 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5598 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5599 &XEXP (x, 1), opnum, type, ind_levels,
5600 insn);
5601 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5602 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5603 &XEXP (x, 0), opnum, type, ind_levels,
5604 insn);
5605 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5606 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5607 &XEXP (x, 0), opnum, type, ind_levels,
5608 insn);
5609 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5610 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5611 &XEXP (x, 1), opnum, type, ind_levels,
5612 insn);
5613 else
5614 {
5615 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5616 &XEXP (x, 0), opnum, type, ind_levels,
5617 insn);
5618 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5619 &XEXP (x, 1), opnum, type, ind_levels,
5620 insn);
5621 }
5622 }
5623
5624 else if (code0 == REG)
5625 {
5626 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5627 &XEXP (x, 0), opnum, type, ind_levels,
5628 insn);
5629 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5630 &XEXP (x, 1), opnum, type, ind_levels,
5631 insn);
5632 }
5633
5634 else if (code1 == REG)
5635 {
5636 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5637 &XEXP (x, 1), opnum, type, ind_levels,
5638 insn);
5639 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5640 &XEXP (x, 0), opnum, type, ind_levels,
5641 insn);
5642 }
5643 }
5644
5645 return 0;
5646
5647 case POST_MODIFY:
5648 case PRE_MODIFY:
5649 {
5650 rtx op0 = XEXP (x, 0);
5651 rtx op1 = XEXP (x, 1);
5652 enum rtx_code index_code;
5653 int regno;
5654 int reloadnum;
5655
5656 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5657 return 0;
5658
5659 /* Currently, we only support {PRE,POST}_MODIFY constructs
5660 where a base register is {inc,dec}remented by the contents
5661 of another register or by a constant value. Thus, these
5662 operands must match. */
5663 gcc_assert (op0 == XEXP (op1, 0));
5664
5665 /* Require index register (or constant). Let's just handle the
5666 register case in the meantime... If the target allows
5667 auto-modify by a constant then we could try replacing a pseudo
5668 register with its equivalent constant where applicable.
5669
5670 We also handle the case where the register was eliminated
5671 resulting in a PLUS subexpression.
5672
5673 If we later decide to reload the whole PRE_MODIFY or
5674 POST_MODIFY, inc_for_reload might clobber the reload register
5675 before reading the index. The index register might therefore
5676 need to live longer than a TYPE reload normally would, so be
5677 conservative and class it as RELOAD_OTHER. */
5678 if ((REG_P (XEXP (op1, 1))
5679 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5680 || GET_CODE (XEXP (op1, 1)) == PLUS)
5681 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5682 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5683 ind_levels, insn);
5684
5685 gcc_assert (REG_P (XEXP (op1, 0)));
5686
5687 regno = REGNO (XEXP (op1, 0));
5688 index_code = GET_CODE (XEXP (op1, 1));
5689
5690 /* A register that is incremented cannot be constant! */
5691 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5692 || reg_equiv_constant (regno) == 0);
5693
5694 /* Handle a register that is equivalent to a memory location
5695 which cannot be addressed directly. */
5696 if (reg_equiv_memory_loc (regno) != 0
5697 && (reg_equiv_address (regno) != 0
5698 || num_not_at_initial_offset))
5699 {
5700 rtx tem = make_memloc (XEXP (x, 0), regno);
5701
5702 if (reg_equiv_address (regno)
5703 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5704 {
5705 rtx orig = tem;
5706
5707 /* First reload the memory location's address.
5708 We can't use ADDR_TYPE (type) here, because we need to
5709 write back the value after reading it, hence we actually
5710 need two registers. */
5711 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5712 &XEXP (tem, 0), opnum,
5713 RELOAD_OTHER,
5714 ind_levels, insn);
5715
5716 if (!rtx_equal_p (tem, orig))
5717 push_reg_equiv_alt_mem (regno, tem);
5718
5719 /* Then reload the memory location into a base
5720 register. */
5721 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5722 &XEXP (op1, 0),
5723 base_reg_class (mode, code,
5724 index_code),
5725 GET_MODE (x), GET_MODE (x), 0,
5726 0, opnum, RELOAD_OTHER);
5727
5728 update_auto_inc_notes (this_insn, regno, reloadnum);
5729 return 0;
5730 }
5731 }
5732
5733 if (reg_renumber[regno] >= 0)
5734 regno = reg_renumber[regno];
5735
5736 /* We require a base register here... */
5737 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5738 {
5739 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5740 &XEXP (op1, 0), &XEXP (x, 0),
5741 base_reg_class (mode, code, index_code),
5742 GET_MODE (x), GET_MODE (x), 0, 0,
5743 opnum, RELOAD_OTHER);
5744
5745 update_auto_inc_notes (this_insn, regno, reloadnum);
5746 return 0;
5747 }
5748 }
5749 return 0;
5750
5751 case POST_INC:
5752 case POST_DEC:
5753 case PRE_INC:
5754 case PRE_DEC:
5755 if (REG_P (XEXP (x, 0)))
5756 {
5757 int regno = REGNO (XEXP (x, 0));
5758 int value = 0;
5759 rtx x_orig = x;
5760
5761 /* A register that is incremented cannot be constant! */
5762 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5763 || reg_equiv_constant (regno) == 0);
5764
5765 /* Handle a register that is equivalent to a memory location
5766 which cannot be addressed directly. */
5767 if (reg_equiv_memory_loc (regno) != 0
5768 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5769 {
5770 rtx tem = make_memloc (XEXP (x, 0), regno);
5771 if (reg_equiv_address (regno)
5772 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5773 {
5774 rtx orig = tem;
5775
5776 /* First reload the memory location's address.
5777 We can't use ADDR_TYPE (type) here, because we need to
5778 write back the value after reading it, hence we actually
5779 need two registers. */
5780 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5781 &XEXP (tem, 0), opnum, type,
5782 ind_levels, insn);
5783 if (!rtx_equal_p (tem, orig))
5784 push_reg_equiv_alt_mem (regno, tem);
5785 /* Put this inside a new increment-expression. */
5786 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5787 /* Proceed to reload that, as if it contained a register. */
5788 }
5789 }
5790
5791 /* If we have a hard register that is ok in this incdec context,
5792 don't make a reload. If the register isn't nice enough for
 5793             autoincdec, we can reload it.  But if an autoincrement of a register
 5794             that we have verified here as acceptable still isn't "valid" in the
 5795             surrounding context, it must be that no autoincrement is "valid".
5796 If that is true and something made an autoincrement anyway,
5797 this must be a special context where one is allowed.
5798 (For example, a "push" instruction.)
5799 We can't improve this address, so leave it alone. */
5800
5801 /* Otherwise, reload the autoincrement into a suitable hard reg
5802 and record how much to increment by. */
5803
5804 if (reg_renumber[regno] >= 0)
5805 regno = reg_renumber[regno];
5806 if (regno >= FIRST_PSEUDO_REGISTER
5807 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5808 index_code))
5809 {
5810 int reloadnum;
5811
 5812              /* If we can output the register afterwards, do so; this
5813 saves the extra update.
5814 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5815 CALL_INSN - and it does not set CC0.
5816 But don't do this if we cannot directly address the
5817 memory location, since this will make it harder to
5818 reuse address reloads, and increases register pressure.
5819 Also don't do this if we can probably update x directly. */
5820 rtx equiv = (MEM_P (XEXP (x, 0))
5821 ? XEXP (x, 0)
5822 : reg_equiv_mem (regno));
5823 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5824 if (insn && NONJUMP_INSN_P (insn) && equiv
5825 && memory_operand (equiv, GET_MODE (equiv))
5826 #ifdef HAVE_cc0
5827 && ! sets_cc0_p (PATTERN (insn))
5828 #endif
5829 && ! (icode != CODE_FOR_nothing
5830 && insn_operand_matches (icode, 0, equiv)
5831 && insn_operand_matches (icode, 1, equiv)))
5832 {
5833 /* We use the original pseudo for loc, so that
5834 emit_reload_insns() knows which pseudo this
5835 reload refers to and updates the pseudo rtx, not
5836 its equivalent memory location, as well as the
5837 corresponding entry in reg_last_reload_reg. */
5838 loc = &XEXP (x_orig, 0);
5839 x = XEXP (x, 0);
5840 reloadnum
5841 = push_reload (x, x, loc, loc,
5842 context_reg_class,
5843 GET_MODE (x), GET_MODE (x), 0, 0,
5844 opnum, RELOAD_OTHER);
5845 }
5846 else
5847 {
5848 reloadnum
5849 = push_reload (x, x, loc, (rtx*) 0,
5850 context_reg_class,
5851 GET_MODE (x), GET_MODE (x), 0, 0,
5852 opnum, type);
5853 rld[reloadnum].inc
5854 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5855
5856 value = 1;
5857 }
5858
5859 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5860 reloadnum);
5861 }
5862 return value;
5863 }
5864 return 0;
5865
5866 case TRUNCATE:
5867 case SIGN_EXTEND:
5868 case ZERO_EXTEND:
5869 /* Look for parts to reload in the inner expression and reload them
5870 too, in addition to this operation. Reloading all inner parts in
5871 addition to this one shouldn't be necessary, but at this point,
5872 we don't know if we can possibly omit any part that *can* be
5873 reloaded. Targets that are better off reloading just either part
5874 (or perhaps even a different part of an outer expression), should
5875 define LEGITIMIZE_RELOAD_ADDRESS. */
5876 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5877 context, code, SCRATCH, &XEXP (x, 0), opnum,
5878 type, ind_levels, insn);
5879 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5880 context_reg_class,
5881 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5882 return 1;
5883
5884 case MEM:
5885 /* This is probably the result of a substitution, by eliminate_regs, of
5886 an equivalent address for a pseudo that was not allocated to a hard
5887 register. Verify that the specified address is valid and reload it
5888 into a register.
5889
5890 Since we know we are going to reload this item, don't decrement for
5891 the indirection level.
5892
5893 Note that this is actually conservative: it would be slightly more
5894 efficient to use the value of SPILL_INDIRECT_LEVELS from
5895 reload1.c here. */
5896
5897 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5898 opnum, ADDR_TYPE (type), ind_levels, insn);
5899 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5900 context_reg_class,
5901 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5902 return 1;
5903
5904 case REG:
5905 {
5906 int regno = REGNO (x);
5907
5908 if (reg_equiv_constant (regno) != 0)
5909 {
5910 find_reloads_address_part (reg_equiv_constant (regno), loc,
5911 context_reg_class,
5912 GET_MODE (x), opnum, type, ind_levels);
5913 return 1;
5914 }
5915
 5916  #if 0 /* This might break the code in reload1.c that deletes a prior
 5917           output-reload feeding this insn.  */
5918 if (reg_equiv_mem (regno) != 0)
5919 {
5920 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5921 context_reg_class,
5922 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5923 return 1;
5924 }
5925 #endif
5926
5927 if (reg_equiv_memory_loc (regno)
5928 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5929 {
5930 rtx tem = make_memloc (x, regno);
5931 if (reg_equiv_address (regno) != 0
5932 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5933 {
5934 x = tem;
5935 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5936 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5937 ind_levels, insn);
5938 if (!rtx_equal_p (x, tem))
5939 push_reg_equiv_alt_mem (regno, x);
5940 }
5941 }
5942
5943 if (reg_renumber[regno] >= 0)
5944 regno = reg_renumber[regno];
5945
5946 if (regno >= FIRST_PSEUDO_REGISTER
5947 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5948 index_code))
5949 {
5950 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5951 context_reg_class,
5952 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5953 return 1;
5954 }
5955
5956 /* If a register appearing in an address is the subject of a CLOBBER
5957 in this insn, reload it into some other register to be safe.
5958 The CLOBBER is supposed to make the register unavailable
5959 from before this insn to after it. */
5960 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5961 {
5962 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5963 context_reg_class,
5964 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5965 return 1;
5966 }
5967 }
5968 return 0;
5969
5970 case SUBREG:
5971 if (REG_P (SUBREG_REG (x)))
5972 {
5973 /* If this is a SUBREG of a hard register and the resulting register
5974 is of the wrong class, reload the whole SUBREG. This avoids
5975 needless copies if SUBREG_REG is multi-word. */
5976 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5977 {
5978 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5979
5980 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5981 index_code))
5982 {
5983 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5984 context_reg_class,
5985 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5986 return 1;
5987 }
5988 }
5989 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5990 is larger than the class size, then reload the whole SUBREG. */
5991 else
5992 {
5993 enum reg_class rclass = context_reg_class;
5994 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5995 > reg_class_size[rclass])
5996 {
5997 x = find_reloads_subreg_address (x, 0, opnum,
5998 ADDR_TYPE (type),
5999 ind_levels, insn, NULL);
6000 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6001 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6002 return 1;
6003 }
6004 }
6005 }
6006 break;
6007
6008 default:
6009 break;
6010 }
6011
6012 {
6013 const char *fmt = GET_RTX_FORMAT (code);
6014 int i;
6015
6016 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6017 {
6018 if (fmt[i] == 'e')
6019 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6020 we get here. */
6021 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6022 &XEXP (x, i), opnum, type, ind_levels, insn);
6023 }
6024 }
6025
6026 #undef REG_OK_FOR_CONTEXT
6027 return 0;
6028 }
6029 \f
6030 /* X, which is found at *LOC, is a part of an address that needs to be
6031 reloaded into a register of class RCLASS. If X is a constant, or if
6032 X is a PLUS that contains a constant, check that the constant is a
6033 legitimate operand and that we are supposed to be able to load
6034 it into the register.
6035
6036 If not, force the constant into memory and reload the MEM instead.
6037
6038 MODE is the mode to use, in case X is an integer constant.
6039
6040 OPNUM and TYPE describe the purpose of any reloads made.
6041
6042 IND_LEVELS says how many levels of indirect addressing this machine
6043 supports. */
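/* A minimal illustration, not from the original comment: if X is a
   CONST_DOUBLE that the target cannot load directly, it is forced into the
   constant pool, the address of that pool entry gets any reloads it needs,
   and the resulting MEM (rather than the constant) is reloaded into RCLASS.  */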
6044
6045 static void
6046 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6047 enum machine_mode mode, int opnum,
6048 enum reload_type type, int ind_levels)
6049 {
6050 if (CONSTANT_P (x)
6051 && (!targetm.legitimate_constant_p (mode, x)
6052 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6053 {
6054 x = force_const_mem (mode, x);
6055 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6056 opnum, type, ind_levels, 0);
6057 }
6058
6059 else if (GET_CODE (x) == PLUS
6060 && CONSTANT_P (XEXP (x, 1))
6061 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6062 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6063 == NO_REGS))
6064 {
6065 rtx tem;
6066
6067 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6068 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6069 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6070 opnum, type, ind_levels, 0);
6071 }
6072
6073 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6074 mode, VOIDmode, 0, 0, opnum, type);
6075 }
6076 \f
6077 /* X, a subreg of a pseudo, is a part of an address that needs to be
6078 reloaded.
6079
6080 If the pseudo is equivalent to a memory location that cannot be directly
6081 addressed, make the necessary address reloads.
6082
6083 If address reloads have been necessary, or if the address is changed
6084 by register elimination, return the rtx of the memory location;
6085 otherwise, return X.
6086
6087 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6088 memory location.
6089
6090 OPNUM and TYPE identify the purpose of the reload.
6091
6092 IND_LEVELS says how many levels of indirect addressing are
6093 supported at this point in the address.
6094
6095 INSN, if nonzero, is the insn in which we do the reload. It is used
6096 to determine where to put USEs for pseudos that we have to replace with
6097 stack slots. */
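/* Hedged example: for (subreg:SI (reg:DI P) 4), where hypothetical pseudo P
   lives in a stack slot, the subreg is rewritten as an SImode MEM at the slot
   address plus 4 (the SUBREG_BYTE), and that address is then reloaded as
   needed.  */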
6098
6099 static rtx
6100 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6101 enum reload_type type, int ind_levels, rtx insn,
6102 int *address_reloaded)
6103 {
6104 int regno = REGNO (SUBREG_REG (x));
6105 int reloaded = 0;
6106
6107 if (reg_equiv_memory_loc (regno))
6108 {
6109 /* If the address is not directly addressable, or if the address is not
6110 offsettable, then it must be replaced. */
6111 if (! force_replace
6112 && (reg_equiv_address (regno)
6113 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6114 force_replace = 1;
6115
6116 if (force_replace || num_not_at_initial_offset)
6117 {
6118 rtx tem = make_memloc (SUBREG_REG (x), regno);
6119
6120 /* If the address changes because of register elimination, then
6121 it must be replaced. */
6122 if (force_replace
6123 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6124 {
6125 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6126 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6127 int offset;
6128 rtx orig = tem;
6129
6130 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6131 hold the correct (negative) byte offset. */
6132 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6133 offset = inner_size - outer_size;
6134 else
6135 offset = SUBREG_BYTE (x);
6136
6137 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6138 PUT_MODE (tem, GET_MODE (x));
6139 if (MEM_OFFSET (tem))
6140 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6141 if (MEM_SIZE (tem)
6142 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6143 set_mem_size (tem, GEN_INT (outer_size));
6144
6145 /* If this was a paradoxical subreg that we replaced, the
6146 resulting memory must be sufficiently aligned to allow
6147 us to widen the mode of the memory. */
6148 if (outer_size > inner_size)
6149 {
6150 rtx base;
6151
6152 base = XEXP (tem, 0);
6153 if (GET_CODE (base) == PLUS)
6154 {
6155 if (CONST_INT_P (XEXP (base, 1))
6156 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6157 return x;
6158 base = XEXP (base, 0);
6159 }
6160 if (!REG_P (base)
6161 || (REGNO_POINTER_ALIGN (REGNO (base))
6162 < outer_size * BITS_PER_UNIT))
6163 return x;
6164 }
6165
6166 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6167 XEXP (tem, 0), &XEXP (tem, 0),
6168 opnum, type, ind_levels, insn);
6169 /* ??? Do we need to handle nonzero offsets somehow? */
6170 if (!offset && !rtx_equal_p (tem, orig))
6171 push_reg_equiv_alt_mem (regno, tem);
6172
6173 /* For some processors an address may be valid in the
6174 original mode but not in a smaller mode. For
6175 example, ARM accepts a scaled index register in
6176 SImode but not in HImode. Note that this is only
6177 a problem if the address in reg_equiv_mem is already
6178 invalid in the new mode; other cases would be fixed
6179 by find_reloads_address as usual.
6180
6181 ??? We attempt to handle such cases here by doing an
6182 additional reload of the full address after the
6183 usual processing by find_reloads_address. Note that
6184 this may not work in the general case, but it seems
6185 to cover the cases where this situation currently
6186 occurs. A more general fix might be to reload the
6187 *value* instead of the address, but this would not
6188 be expected by the callers of this routine as-is.
6189
 6190                 If find_reloads_address already completely replaced
6191 the address, there is nothing further to do. */
6192 if (reloaded == 0
6193 && reg_equiv_mem (regno) != 0
6194 && !strict_memory_address_addr_space_p
6195 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6196 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6197 {
6198 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6199 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6200 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6201 opnum, type);
6202 reloaded = 1;
6203 }
6204 /* If this is not a toplevel operand, find_reloads doesn't see
6205 this substitution. We have to emit a USE of the pseudo so
6206 that delete_output_reload can see it. */
6207 if (replace_reloads && recog_data.operand[opnum] != x)
6208 /* We mark the USE with QImode so that we recognize it
6209 as one that can be safely deleted at the end of
6210 reload. */
6211 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6212 SUBREG_REG (x)),
6213 insn), QImode);
6214 x = tem;
6215 }
6216 }
6217 }
6218 if (reloaded && address_reloaded)
6219 *address_reloaded = 1;
6220
6221 return x;
6222 }
6223 \f
6224 /* Substitute into the current INSN the registers into which we have reloaded
6225 the things that need reloading. The array `replacements'
6226 contains the locations of all pointers that must be changed
6227 and says what to replace them with.
6228
6229 Return the rtx that X translates into; usually X, but modified. */
6230
6231 void
6232 subst_reloads (rtx insn)
6233 {
6234 int i;
6235
6236 for (i = 0; i < n_replacements; i++)
6237 {
6238 struct replacement *r = &replacements[i];
6239 rtx reloadreg = rld[r->what].reg_rtx;
6240 if (reloadreg)
6241 {
6242 #ifdef DEBUG_RELOAD
6243 /* This checking takes a very long time on some platforms
6244 causing the gcc.c-torture/compile/limits-fnargs.c test
6245 to time out during testing. See PR 31850.
6246
6247 Internal consistency test. Check that we don't modify
6248 anything in the equivalence arrays. Whenever something from
6249 those arrays needs to be reloaded, it must be unshared before
6250 being substituted into; the equivalence must not be modified.
6251 Otherwise, if the equivalence is used after that, it will
6252 have been modified, and the thing substituted (probably a
6253 register) is likely overwritten and not a usable equivalence. */
6254 int check_regno;
6255
6256 for (check_regno = 0; check_regno < max_regno; check_regno++)
6257 {
6258 #define CHECK_MODF(ARRAY) \
6259 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6260 || !loc_mentioned_in_p (r->where, \
6261 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6262
6263 CHECK_MODF (equiv_constant);
6264 CHECK_MODF (equiv_memory_loc);
6265 CHECK_MODF (equiv_address);
6266 CHECK_MODF (equiv_mem);
6267 #undef CHECK_MODF
6268 }
6269 #endif /* DEBUG_RELOAD */
6270
6271 /* If we're replacing a LABEL_REF with a register, there must
6272 already be an indication (to e.g. flow) which label this
6273 register refers to. */
6274 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6275 || !JUMP_P (insn)
6276 || find_reg_note (insn,
6277 REG_LABEL_OPERAND,
6278 XEXP (*r->where, 0))
6279 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6280
6281 /* Encapsulate RELOADREG so its machine mode matches what
6282 used to be there. Note that gen_lowpart_common will
6283 do the wrong thing if RELOADREG is multi-word. RELOADREG
6284 will always be a REG here. */
6285 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6286 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6287
6288 *r->where = reloadreg;
6289 }
6290 /* If reload got no reg and isn't optional, something's wrong. */
6291 else
6292 gcc_assert (rld[r->what].optional);
6293 }
6294 }
6295 \f
6296 /* Make a copy of any replacements being done into X and move those
6297 copies to locations in Y, a copy of X. */
6298
6299 void
6300 copy_replacements (rtx x, rtx y)
6301 {
6302 copy_replacements_1 (&x, &y, n_replacements);
6303 }
6304
6305 static void
6306 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6307 {
6308 int i, j;
6309 rtx x, y;
6310 struct replacement *r;
6311 enum rtx_code code;
6312 const char *fmt;
6313
6314 for (j = 0; j < orig_replacements; j++)
6315 if (replacements[j].where == px)
6316 {
6317 r = &replacements[n_replacements++];
6318 r->where = py;
6319 r->what = replacements[j].what;
6320 r->mode = replacements[j].mode;
6321 }
6322
6323 x = *px;
6324 y = *py;
6325 code = GET_CODE (x);
6326 fmt = GET_RTX_FORMAT (code);
6327
6328 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6329 {
6330 if (fmt[i] == 'e')
6331 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6332 else if (fmt[i] == 'E')
6333 for (j = XVECLEN (x, i); --j >= 0; )
6334 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6335 orig_replacements);
6336 }
6337 }
6338
6339 /* Change any replacements being done to *X to be done to *Y. */
6340
6341 void
6342 move_replacements (rtx *x, rtx *y)
6343 {
6344 int i;
6345
6346 for (i = 0; i < n_replacements; i++)
6347 if (replacements[i].where == x)
6348 replacements[i].where = y;
6349 }
6350 \f
6351 /* If LOC was scheduled to be replaced by something, return the replacement.
6352 Otherwise, return *LOC. */
6353
6354 rtx
6355 find_replacement (rtx *loc)
6356 {
6357 struct replacement *r;
6358
6359 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6360 {
6361 rtx reloadreg = rld[r->what].reg_rtx;
6362
6363 if (reloadreg && r->where == loc)
6364 {
6365 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6366 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6367
6368 return reloadreg;
6369 }
6370 else if (reloadreg && GET_CODE (*loc) == SUBREG
6371 && r->where == &SUBREG_REG (*loc))
6372 {
6373 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6374 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6375
6376 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6377 GET_MODE (SUBREG_REG (*loc)),
6378 SUBREG_BYTE (*loc));
6379 }
6380 }
6381
6382 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6383 what's inside and make a new rtl if so. */
6384 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6385 || GET_CODE (*loc) == MULT)
6386 {
6387 rtx x = find_replacement (&XEXP (*loc, 0));
6388 rtx y = find_replacement (&XEXP (*loc, 1));
6389
6390 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6391 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6392 }
6393
6394 return *loc;
6395 }
6396 \f
6397 /* Return nonzero if register in range [REGNO, ENDREGNO)
6398 appears either explicitly or implicitly in X
6399 other than being stored into (except for earlyclobber operands).
6400
6401 References contained within the substructure at LOC do not count.
6402 LOC may be zero, meaning don't ignore anything.
6403
6404 This is similar to refers_to_regno_p in rtlanal.c except that we
6405 look at equivalences for pseudos that didn't get hard registers. */
6406
6407 static int
6408 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6409 rtx x, rtx *loc)
6410 {
6411 int i;
6412 unsigned int r;
6413 RTX_CODE code;
6414 const char *fmt;
6415
6416 if (x == 0)
6417 return 0;
6418
6419 repeat:
6420 code = GET_CODE (x);
6421
6422 switch (code)
6423 {
6424 case REG:
6425 r = REGNO (x);
6426
6427 /* If this is a pseudo, a hard register must not have been allocated.
6428 X must therefore either be a constant or be in memory. */
6429 if (r >= FIRST_PSEUDO_REGISTER)
6430 {
6431 if (reg_equiv_memory_loc (r))
6432 return refers_to_regno_for_reload_p (regno, endregno,
6433 reg_equiv_memory_loc (r),
6434 (rtx*) 0);
6435
6436 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6437 return 0;
6438 }
6439
6440 return (endregno > r
6441 && regno < r + (r < FIRST_PSEUDO_REGISTER
6442 ? hard_regno_nregs[r][GET_MODE (x)]
6443 : 1));
6444
6445 case SUBREG:
6446 /* If this is a SUBREG of a hard reg, we can see exactly which
6447 registers are being modified. Otherwise, handle normally. */
6448 if (REG_P (SUBREG_REG (x))
6449 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6450 {
6451 unsigned int inner_regno = subreg_regno (x);
6452 unsigned int inner_endregno
6453 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6454 ? subreg_nregs (x) : 1);
6455
6456 return endregno > inner_regno && regno < inner_endregno;
6457 }
6458 break;
6459
6460 case CLOBBER:
6461 case SET:
6462 if (&SET_DEST (x) != loc
6463 /* Note that setting a SUBREG counts as referring to the REG it is
6464 in for a pseudo, but not for a hard register, since we can
6465 treat each word individually. */
6466 && ((GET_CODE (SET_DEST (x)) == SUBREG
6467 && loc != &SUBREG_REG (SET_DEST (x))
6468 && REG_P (SUBREG_REG (SET_DEST (x)))
6469 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6470 && refers_to_regno_for_reload_p (regno, endregno,
6471 SUBREG_REG (SET_DEST (x)),
6472 loc))
6473 /* If the output is an earlyclobber operand, this is
6474 a conflict. */
6475 || ((!REG_P (SET_DEST (x))
6476 || earlyclobber_operand_p (SET_DEST (x)))
6477 && refers_to_regno_for_reload_p (regno, endregno,
6478 SET_DEST (x), loc))))
6479 return 1;
6480
6481 if (code == CLOBBER || loc == &SET_SRC (x))
6482 return 0;
6483 x = SET_SRC (x);
6484 goto repeat;
6485
6486 default:
6487 break;
6488 }
6489
6490 /* X does not match, so try its subexpressions. */
6491
6492 fmt = GET_RTX_FORMAT (code);
6493 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6494 {
6495 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6496 {
6497 if (i == 0)
6498 {
6499 x = XEXP (x, 0);
6500 goto repeat;
6501 }
6502 else
6503 if (refers_to_regno_for_reload_p (regno, endregno,
6504 XEXP (x, i), loc))
6505 return 1;
6506 }
6507 else if (fmt[i] == 'E')
6508 {
6509 int j;
6510 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6511 if (loc != &XVECEXP (x, i, j)
6512 && refers_to_regno_for_reload_p (regno, endregno,
6513 XVECEXP (x, i, j), loc))
6514 return 1;
6515 }
6516 }
6517 return 0;
6518 }
6519
6520 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6521 we check if any register number in X conflicts with the relevant register
6522 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6523 contains a MEM (we don't bother checking for memory addresses that can't
6524 conflict because we expect this to be a rare case).
6525
6526 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6527 that we look at equivalences for pseudos that didn't get hard registers. */
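/* For illustration only (hypothetical RTL): if X is (reg:SI 3) and IN is
   (mem:SI (plus:SI (reg:SI 3) (const_int 8))), modifying X can change the
   address IN refers to, so the function returns 1.  */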
6528
6529 int
6530 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6531 {
6532 int regno, endregno;
6533
6534 /* Overly conservative. */
6535 if (GET_CODE (x) == STRICT_LOW_PART
6536 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6537 x = XEXP (x, 0);
6538
6539 /* If either argument is a constant, then modifying X cannot affect IN. */
6540 if (CONSTANT_P (x) || CONSTANT_P (in))
6541 return 0;
6542 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6543 return refers_to_mem_for_reload_p (in);
6544 else if (GET_CODE (x) == SUBREG)
6545 {
6546 regno = REGNO (SUBREG_REG (x));
6547 if (regno < FIRST_PSEUDO_REGISTER)
6548 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6549 GET_MODE (SUBREG_REG (x)),
6550 SUBREG_BYTE (x),
6551 GET_MODE (x));
6552 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6553 ? subreg_nregs (x) : 1);
6554
6555 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6556 }
6557 else if (REG_P (x))
6558 {
6559 regno = REGNO (x);
6560
6561 /* If this is a pseudo, it must not have been assigned a hard register.
6562 Therefore, it must either be in memory or be a constant. */
6563
6564 if (regno >= FIRST_PSEUDO_REGISTER)
6565 {
6566 if (reg_equiv_memory_loc (regno))
6567 return refers_to_mem_for_reload_p (in);
6568 gcc_assert (reg_equiv_constant (regno));
6569 return 0;
6570 }
6571
6572 endregno = END_HARD_REGNO (x);
6573
6574 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6575 }
6576 else if (MEM_P (x))
6577 return refers_to_mem_for_reload_p (in);
6578 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6579 || GET_CODE (x) == CC0)
6580 return reg_mentioned_p (x, in);
6581 else
6582 {
6583 gcc_assert (GET_CODE (x) == PLUS);
6584
6585 /* We actually want to know if X is mentioned somewhere inside IN.
6586 We must not say that (plus (sp) (const_int 124)) is in
6587 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6588 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6589 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6590 while (MEM_P (in))
6591 in = XEXP (in, 0);
6592 if (REG_P (in))
6593 return 0;
6594 else if (GET_CODE (in) == PLUS)
6595 return (rtx_equal_p (x, in)
6596 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6597 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6598 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6599 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6600 }
6601
6602 gcc_unreachable ();
6603 }
6604
6605 /* Return nonzero if anything in X contains a MEM. Also count pseudo
6606 registers that are equivalent to memory. */
6607
6608 static int
6609 refers_to_mem_for_reload_p (rtx x)
6610 {
6611 const char *fmt;
6612 int i;
6613
6614 if (MEM_P (x))
6615 return 1;
6616
6617 if (REG_P (x))
6618 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6619 && reg_equiv_memory_loc (REGNO (x)));
6620
6621 fmt = GET_RTX_FORMAT (GET_CODE (x));
6622 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6623 if (fmt[i] == 'e'
6624 && (MEM_P (XEXP (x, i))
6625 || refers_to_mem_for_reload_p (XEXP (x, i))))
6626 return 1;
6627
6628 return 0;
6629 }
6630 \f
6631 /* Check the insns before INSN to see if there is a suitable register
6632 containing the same value as GOAL.
6633 If OTHER is -1, look for a register in class RCLASS.
6634 Otherwise, just see if register number OTHER shares GOAL's value.
6635
6636 Return an rtx for the register found, or zero if none is found.
6637
6638 If RELOAD_REG_P is (short *)1,
6639 we reject any hard reg that appears in reload_reg_rtx
6640 because such a hard reg is also needed coming into this insn.
6641
6642 If RELOAD_REG_P is any other nonzero value,
6643 it is a vector indexed by hard reg number
6644 and we reject any hard reg whose element in the vector is nonnegative
6645 as well as any that appears in reload_reg_rtx.
6646
6647 If GOAL is zero, then GOALREG is a register number; we look
6648 for an equivalent for that register.
6649
6650 MODE is the machine mode of the value we want an equivalence for.
6651 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6652
6653 This function is used by jump.c as well as in the reload pass.
6654
6655 If GOAL is the sum of the stack pointer and a constant, we treat it
6656 as if it were a constant except that sp is required to be unchanging. */
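/* For illustration only (hypothetical values): if INSN is immediately
   preceded by

     (set (reg:SI 3) (reg:SI 58))

   and neither register is clobbered in between, then

     find_equiv_reg (gen_rtx_REG (SImode, 58), insn, GENERAL_REGS,
                     -1, NULL, -1, SImode)

   may return (reg:SI 3), provided hard register 3 is in GENERAL_REGS and
   passes the other checks below.  */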
6657
6658 rtx
6659 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6660 short *reload_reg_p, int goalreg, enum machine_mode mode)
6661 {
6662 rtx p = insn;
6663 rtx goaltry, valtry, value, where;
6664 rtx pat;
6665 int regno = -1;
6666 int valueno;
6667 int goal_mem = 0;
6668 int goal_const = 0;
6669 int goal_mem_addr_varies = 0;
6670 int need_stable_sp = 0;
6671 int nregs;
6672 int valuenregs;
6673 int num = 0;
6674
6675 if (goal == 0)
6676 regno = goalreg;
6677 else if (REG_P (goal))
6678 regno = REGNO (goal);
6679 else if (MEM_P (goal))
6680 {
6681 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6682 if (MEM_VOLATILE_P (goal))
6683 return 0;
6684 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6685 return 0;
6686 /* An address with side effects must be reexecuted. */
6687 switch (code)
6688 {
6689 case POST_INC:
6690 case PRE_INC:
6691 case POST_DEC:
6692 case PRE_DEC:
6693 case POST_MODIFY:
6694 case PRE_MODIFY:
6695 return 0;
6696 default:
6697 break;
6698 }
6699 goal_mem = 1;
6700 }
6701 else if (CONSTANT_P (goal))
6702 goal_const = 1;
6703 else if (GET_CODE (goal) == PLUS
6704 && XEXP (goal, 0) == stack_pointer_rtx
6705 && CONSTANT_P (XEXP (goal, 1)))
6706 goal_const = need_stable_sp = 1;
6707 else if (GET_CODE (goal) == PLUS
6708 && XEXP (goal, 0) == frame_pointer_rtx
6709 && CONSTANT_P (XEXP (goal, 1)))
6710 goal_const = 1;
6711 else
6712 return 0;
6713
6714 num = 0;
6715 /* Scan insns back from INSN, looking for one that copies
6716 a value into or out of GOAL.
6717 Stop and give up if we reach a label. */
6718
6719 while (1)
6720 {
6721 p = PREV_INSN (p);
6722 if (p && DEBUG_INSN_P (p))
6723 continue;
6724 num++;
6725 if (p == 0 || LABEL_P (p)
6726 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6727 return 0;
6728
6729 /* Don't reuse register contents from before a setjmp-type
6730 function call; on the second return (from the longjmp) it
6731 might have been clobbered by a later reuse. It doesn't
6732 seem worthwhile to check whether it really is reused, even
6733 if that information were readily available; just don't
6734 reuse it across the setjmp call. */
6735 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6736 return 0;
6737
6738 if (NONJUMP_INSN_P (p)
6739 /* If we don't want spill regs ... */
6740 && (! (reload_reg_p != 0
6741 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6742 /* ... then ignore insns introduced by reload; they aren't
6743 useful and can cause results in reload_as_needed to be
6744 different from what they were when calculating the need for
6745 spills. If we notice an input-reload insn here, we will
6746 reject it below, but it might hide a usable equivalent.
6747 That makes bad code. It may even fail: perhaps no reg was
6748 spilled for this insn because it was assumed we would find
6749 that equivalent. */
6750 || INSN_UID (p) < reload_first_uid))
6751 {
6752 rtx tem;
6753 pat = single_set (p);
6754
6755 /* First check for something that sets some reg equal to GOAL. */
6756 if (pat != 0
6757 && ((regno >= 0
6758 && true_regnum (SET_SRC (pat)) == regno
6759 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6760 ||
6761 (regno >= 0
6762 && true_regnum (SET_DEST (pat)) == regno
6763 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6764 ||
6765 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6766 /* When looking for stack pointer + const,
6767 make sure we don't use a stack adjust. */
6768 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6769 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6770 || (goal_mem
6771 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6772 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6773 || (goal_mem
6774 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6775 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6776 /* If we are looking for a constant,
6777 and something equivalent to that constant was copied
6778 into a reg, we can use that reg. */
6779 || (goal_const && REG_NOTES (p) != 0
6780 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6781 && ((rtx_equal_p (XEXP (tem, 0), goal)
6782 && (valueno
6783 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6784 || (REG_P (SET_DEST (pat))
6785 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6786 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6787 && CONST_INT_P (goal)
6788 && 0 != (goaltry
6789 = operand_subword (XEXP (tem, 0), 0, 0,
6790 VOIDmode))
6791 && rtx_equal_p (goal, goaltry)
6792 && (valtry
6793 = operand_subword (SET_DEST (pat), 0, 0,
6794 VOIDmode))
6795 && (valueno = true_regnum (valtry)) >= 0)))
6796 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6797 NULL_RTX))
6798 && REG_P (SET_DEST (pat))
6799 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6800 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6801 && CONST_INT_P (goal)
6802 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6803 VOIDmode))
6804 && rtx_equal_p (goal, goaltry)
6805 && (valtry
6806 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6807 && (valueno = true_regnum (valtry)) >= 0)))
6808 {
6809 if (other >= 0)
6810 {
6811 if (valueno != other)
6812 continue;
6813 }
6814 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6815 continue;
6816 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6817 mode, valueno))
6818 continue;
6819 value = valtry;
6820 where = p;
6821 break;
6822 }
6823 }
6824 }
6825
6826 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6827 (or copying VALUE into GOAL, if GOAL is also a register).
6828 Now verify that VALUE is really valid. */
6829
6830 /* VALUENO is the register number of VALUE; a hard register. */
6831
6832 /* Don't try to re-use something that is killed in this insn. We want
6833 to be able to trust REG_UNUSED notes. */
6834 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6835 return 0;
6836
6837 /* If we propose to get the value from the stack pointer or if GOAL is
6838 a MEM based on the stack pointer, we need a stable SP. */
6839 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6840 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6841 goal)))
6842 need_stable_sp = 1;
6843
6844 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6845 if (GET_MODE (value) != mode)
6846 return 0;
6847
6848 /* Reject VALUE if it was loaded from GOAL
6849 and is also a register that appears in the address of GOAL. */
6850
6851 if (goal_mem && value == SET_DEST (single_set (where))
6852 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6853 goal, (rtx*) 0))
6854 return 0;
6855
6856 /* Reject registers that overlap GOAL. */
6857
6858 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6859 nregs = hard_regno_nregs[regno][mode];
6860 else
6861 nregs = 1;
6862 valuenregs = hard_regno_nregs[valueno][mode];
6863
6864 if (!goal_mem && !goal_const
6865 && regno + nregs > valueno && regno < valueno + valuenregs)
6866 return 0;
6867
6868 /* Reject VALUE if it is one of the regs reserved for reloads.
6869 Reload1 knows how to reuse them anyway, and it would get
6870 confused if we allocated one without its knowledge.
6871 (Now that insns introduced by reload are ignored above,
6872 this case shouldn't happen, but I'm not positive.) */
6873
6874 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6875 {
6876 int i;
6877 for (i = 0; i < valuenregs; ++i)
6878 if (reload_reg_p[valueno + i] >= 0)
6879 return 0;
6880 }
6881
6882 /* Reject VALUE if it is a register being used for an input reload
6883 even if it is not one of those reserved. */
6884
6885 if (reload_reg_p != 0)
6886 {
6887 int i;
6888 for (i = 0; i < n_reloads; i++)
6889 if (rld[i].reg_rtx != 0 && rld[i].in)
6890 {
6891 int regno1 = REGNO (rld[i].reg_rtx);
6892 int nregs1 = hard_regno_nregs[regno1]
6893 [GET_MODE (rld[i].reg_rtx)];
6894 if (regno1 < valueno + valuenregs
6895 && regno1 + nregs1 > valueno)
6896 return 0;
6897 }
6898 }
6899
6900 if (goal_mem)
6901 /* We must treat the frame pointer as varying here,
6902 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6903 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6904
6905 /* Now verify that the values of GOAL and VALUE remain unaltered
6906 until INSN is reached. */
6907
6908 p = insn;
6909 while (1)
6910 {
6911 p = PREV_INSN (p);
6912 if (p == where)
6913 return value;
6914
6915 /* Don't trust the equivalence past a function call
6916 if either GOAL or VALUE is in a call-clobbered register, or in memory. */
6917 if (CALL_P (p))
6918 {
6919 int i;
6920
6921 if (goal_mem || need_stable_sp)
6922 return 0;
6923
6924 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6925 for (i = 0; i < nregs; ++i)
6926 if (call_used_regs[regno + i]
6927 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6928 return 0;
6929
6930 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6931 for (i = 0; i < valuenregs; ++i)
6932 if (call_used_regs[valueno + i]
6933 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6934 return 0;
6935 }
6936
6937 if (INSN_P (p))
6938 {
6939 pat = PATTERN (p);
6940
6941 /* Watch out for unspec_volatile, and volatile asms. */
6942 if (volatile_insn_p (pat))
6943 return 0;
6944
6945 /* If this insn P stores in either GOAL or VALUE, return 0.
6946 If GOAL is a memory ref and this insn writes memory, return 0.
6947 If GOAL is a memory ref and its address is not constant,
6948 and this insn P changes a register used in GOAL, return 0. */
6949
6950 if (GET_CODE (pat) == COND_EXEC)
6951 pat = COND_EXEC_CODE (pat);
6952 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6953 {
6954 rtx dest = SET_DEST (pat);
6955 while (GET_CODE (dest) == SUBREG
6956 || GET_CODE (dest) == ZERO_EXTRACT
6957 || GET_CODE (dest) == STRICT_LOW_PART)
6958 dest = XEXP (dest, 0);
6959 if (REG_P (dest))
6960 {
6961 int xregno = REGNO (dest);
6962 int xnregs;
6963 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6964 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6965 else
6966 xnregs = 1;
6967 if (xregno < regno + nregs && xregno + xnregs > regno)
6968 return 0;
6969 if (xregno < valueno + valuenregs
6970 && xregno + xnregs > valueno)
6971 return 0;
6972 if (goal_mem_addr_varies
6973 && reg_overlap_mentioned_for_reload_p (dest, goal))
6974 return 0;
6975 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6976 return 0;
6977 }
6978 else if (goal_mem && MEM_P (dest)
6979 && ! push_operand (dest, GET_MODE (dest)))
6980 return 0;
6981 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6982 && reg_equiv_memory_loc (regno) != 0)
6983 return 0;
6984 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6985 return 0;
6986 }
6987 else if (GET_CODE (pat) == PARALLEL)
6988 {
6989 int i;
6990 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6991 {
6992 rtx v1 = XVECEXP (pat, 0, i);
6993 if (GET_CODE (v1) == COND_EXEC)
6994 v1 = COND_EXEC_CODE (v1);
6995 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6996 {
6997 rtx dest = SET_DEST (v1);
6998 while (GET_CODE (dest) == SUBREG
6999 || GET_CODE (dest) == ZERO_EXTRACT
7000 || GET_CODE (dest) == STRICT_LOW_PART)
7001 dest = XEXP (dest, 0);
7002 if (REG_P (dest))
7003 {
7004 int xregno = REGNO (dest);
7005 int xnregs;
7006 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7007 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7008 else
7009 xnregs = 1;
7010 if (xregno < regno + nregs
7011 && xregno + xnregs > regno)
7012 return 0;
7013 if (xregno < valueno + valuenregs
7014 && xregno + xnregs > valueno)
7015 return 0;
7016 if (goal_mem_addr_varies
7017 && reg_overlap_mentioned_for_reload_p (dest,
7018 goal))
7019 return 0;
7020 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7021 return 0;
7022 }
7023 else if (goal_mem && MEM_P (dest)
7024 && ! push_operand (dest, GET_MODE (dest)))
7025 return 0;
7026 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7027 && reg_equiv_memory_loc (regno) != 0)
7028 return 0;
7029 else if (need_stable_sp
7030 && push_operand (dest, GET_MODE (dest)))
7031 return 0;
7032 }
7033 }
7034 }
7035
7036 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7037 {
7038 rtx link;
7039
7040 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7041 link = XEXP (link, 1))
7042 {
7043 pat = XEXP (link, 0);
7044 if (GET_CODE (pat) == CLOBBER)
7045 {
7046 rtx dest = SET_DEST (pat);
7047
7048 if (REG_P (dest))
7049 {
7050 int xregno = REGNO (dest);
7051 int xnregs
7052 = hard_regno_nregs[xregno][GET_MODE (dest)];
7053
7054 if (xregno < regno + nregs
7055 && xregno + xnregs > regno)
7056 return 0;
7057 else if (xregno < valueno + valuenregs
7058 && xregno + xnregs > valueno)
7059 return 0;
7060 else if (goal_mem_addr_varies
7061 && reg_overlap_mentioned_for_reload_p (dest,
7062 goal))
7063 return 0;
7064 }
7065
7066 else if (goal_mem && MEM_P (dest)
7067 && ! push_operand (dest, GET_MODE (dest)))
7068 return 0;
7069 else if (need_stable_sp
7070 && push_operand (dest, GET_MODE (dest)))
7071 return 0;
7072 }
7073 }
7074 }
7075
7076 #ifdef AUTO_INC_DEC
7077 /* If this insn auto-increments or auto-decrements
7078 either regno or valueno, return 0 now.
7079 If GOAL is a memory ref and its address is not constant,
7080 and this insn P increments a register used in GOAL, return 0. */
7081 {
7082 rtx link;
7083
7084 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7085 if (REG_NOTE_KIND (link) == REG_INC
7086 && REG_P (XEXP (link, 0)))
7087 {
7088 int incno = REGNO (XEXP (link, 0));
7089 if (incno < regno + nregs && incno >= regno)
7090 return 0;
7091 if (incno < valueno + valuenregs && incno >= valueno)
7092 return 0;
7093 if (goal_mem_addr_varies
7094 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7095 goal))
7096 return 0;
7097 }
7098 }
7099 #endif
7100 }
7101 }
7102 }
7103 \f
7104 /* Find a place where INCED appears in an increment or decrement operator
7105 within X, and return the amount INCED is incremented or decremented by.
7106 The value is always positive. */
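/* For illustration only (hypothetical RTL): if X contains
   (mem:SI (post_inc:SI (reg:SI 5))) and INCED is (reg:SI 5), the result is
   GET_MODE_SIZE (SImode), typically 4; for
   (mem:SI (pre_modify:SI (reg:SI 5) (plus:SI (reg:SI 5) (const_int -8))))
   the result is 8.  */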
7107
7108 static int
7109 find_inc_amount (rtx x, rtx inced)
7110 {
7111 enum rtx_code code = GET_CODE (x);
7112 const char *fmt;
7113 int i;
7114
7115 if (code == MEM)
7116 {
7117 rtx addr = XEXP (x, 0);
7118 if ((GET_CODE (addr) == PRE_DEC
7119 || GET_CODE (addr) == POST_DEC
7120 || GET_CODE (addr) == PRE_INC
7121 || GET_CODE (addr) == POST_INC)
7122 && XEXP (addr, 0) == inced)
7123 return GET_MODE_SIZE (GET_MODE (x));
7124 else if ((GET_CODE (addr) == PRE_MODIFY
7125 || GET_CODE (addr) == POST_MODIFY)
7126 && GET_CODE (XEXP (addr, 1)) == PLUS
7127 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7128 && XEXP (addr, 0) == inced
7129 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7130 {
7131 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7132 return i < 0 ? -i : i;
7133 }
7134 }
7135
7136 fmt = GET_RTX_FORMAT (code);
7137 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7138 {
7139 if (fmt[i] == 'e')
7140 {
7141 int tem = find_inc_amount (XEXP (x, i), inced);
7142 if (tem != 0)
7143 return tem;
7144 }
7145 if (fmt[i] == 'E')
7146 {
7147 int j;
7148 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7149 {
7150 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7151 if (tem != 0)
7152 return tem;
7153 }
7154 }
7155 }
7156
7157 return 0;
7158 }
7159 \f
7160 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7161 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7162
7163 #ifdef AUTO_INC_DEC
7164 static int
7165 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7166 rtx insn)
7167 {
7168 rtx link;
7169
7170 gcc_assert (insn);
7171
7172 if (! INSN_P (insn))
7173 return 0;
7174
7175 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7176 if (REG_NOTE_KIND (link) == REG_INC)
7177 {
7178 unsigned int test = (int) REGNO (XEXP (link, 0));
7179 if (test >= regno && test < endregno)
7180 return 1;
7181 }
7182 return 0;
7183 }
7184 #else
7185
7186 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7187
7188 #endif
7189
7190 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7191 If SETS is 1, also consider SETs. If SETS is 2, also check for
7192 REG_INC notes. REGNO must refer to a hard register. */
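/* For illustration only (hypothetical RTL, assuming SImode fits in one hard
   register): for an insn whose pattern is

     (parallel [(set (reg:SI 0) (const_int 0))
                (clobber (reg:SI 1))])

   regno_clobbered_p (1, insn, SImode, 0) returns 1 because of the explicit
   clobber, while regno_clobbered_p (2, insn, SImode, 0) returns 0.  */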
7193
7194 int
7195 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7196 int sets)
7197 {
7198 unsigned int nregs, endregno;
7199
7200 /* regno must be a hard register. */
7201 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7202
7203 nregs = hard_regno_nregs[regno][mode];
7204 endregno = regno + nregs;
7205
7206 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7207 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7208 && REG_P (XEXP (PATTERN (insn), 0)))
7209 {
7210 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7211
7212 return test >= regno && test < endregno;
7213 }
7214
7215 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7216 return 1;
7217
7218 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7219 {
7220 int i = XVECLEN (PATTERN (insn), 0) - 1;
7221
7222 for (; i >= 0; i--)
7223 {
7224 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7225 if ((GET_CODE (elt) == CLOBBER
7226 || (sets == 1 && GET_CODE (elt) == SET))
7227 && REG_P (XEXP (elt, 0)))
7228 {
7229 unsigned int test = REGNO (XEXP (elt, 0));
7230
7231 if (test >= regno && test < endregno)
7232 return 1;
7233 }
7234 if (sets == 2
7235 && reg_inc_found_and_valid_p (regno, endregno, elt))
7236 return 1;
7237 }
7238 }
7239
7240 return 0;
7241 }
7242
7243 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
7244 rtx
7245 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7246 {
7247 int regno;
7248
7249 if (GET_MODE (reloadreg) == mode)
7250 return reloadreg;
7251
7252 regno = REGNO (reloadreg);
7253
7254 if (REG_WORDS_BIG_ENDIAN)
7255 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7256 - (int) hard_regno_nregs[regno][mode];
7257
7258 return gen_rtx_REG (mode, regno);
7259 }
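/* For illustration only: if RELOADREG is (reg:DI 10) and MODE is SImode,
   assuming DImode occupies two hard registers and SImode one, the result is
   (reg:SI 10) when !REG_WORDS_BIG_ENDIAN and (reg:SI 11) when
   REG_WORDS_BIG_ENDIAN.  */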
7260
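/* Names of the reload_when_needed values, indexed by the enum value;
   used only by the debug output below.  */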
7261 static const char *const reload_when_needed_name[] =
7262 {
7263 "RELOAD_FOR_INPUT",
7264 "RELOAD_FOR_OUTPUT",
7265 "RELOAD_FOR_INSN",
7266 "RELOAD_FOR_INPUT_ADDRESS",
7267 "RELOAD_FOR_INPADDR_ADDRESS",
7268 "RELOAD_FOR_OUTPUT_ADDRESS",
7269 "RELOAD_FOR_OUTADDR_ADDRESS",
7270 "RELOAD_FOR_OPERAND_ADDRESS",
7271 "RELOAD_FOR_OPADDR_ADDR",
7272 "RELOAD_OTHER",
7273 "RELOAD_FOR_OTHER_ADDRESS"
7274 };
7275
7276 /* These functions are used to print the variables set by 'find_reloads'. */
7277
7278 DEBUG_FUNCTION void
7279 debug_reload_to_stream (FILE *f)
7280 {
7281 int r;
7282 const char *prefix;
7283
7284 if (! f)
7285 f = stderr;
7286 for (r = 0; r < n_reloads; r++)
7287 {
7288 fprintf (f, "Reload %d: ", r);
7289
7290 if (rld[r].in != 0)
7291 {
7292 fprintf (f, "reload_in (%s) = ",
7293 GET_MODE_NAME (rld[r].inmode));
7294 print_inline_rtx (f, rld[r].in, 24);
7295 fprintf (f, "\n\t");
7296 }
7297
7298 if (rld[r].out != 0)
7299 {
7300 fprintf (f, "reload_out (%s) = ",
7301 GET_MODE_NAME (rld[r].outmode));
7302 print_inline_rtx (f, rld[r].out, 24);
7303 fprintf (f, "\n\t");
7304 }
7305
7306 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7307
7308 fprintf (f, "%s (opnum = %d)",
7309 reload_when_needed_name[(int) rld[r].when_needed],
7310 rld[r].opnum);
7311
7312 if (rld[r].optional)
7313 fprintf (f, ", optional");
7314
7315 if (rld[r].nongroup)
7316 fprintf (f, ", nongroup");
7317
7318 if (rld[r].inc != 0)
7319 fprintf (f, ", inc by %d", rld[r].inc);
7320
7321 if (rld[r].nocombine)
7322 fprintf (f, ", can't combine");
7323
7324 if (rld[r].secondary_p)
7325 fprintf (f, ", secondary_reload_p");
7326
7327 if (rld[r].in_reg != 0)
7328 {
7329 fprintf (f, "\n\treload_in_reg: ");
7330 print_inline_rtx (f, rld[r].in_reg, 24);
7331 }
7332
7333 if (rld[r].out_reg != 0)
7334 {
7335 fprintf (f, "\n\treload_out_reg: ");
7336 print_inline_rtx (f, rld[r].out_reg, 24);
7337 }
7338
7339 if (rld[r].reg_rtx != 0)
7340 {
7341 fprintf (f, "\n\treload_reg_rtx: ");
7342 print_inline_rtx (f, rld[r].reg_rtx, 24);
7343 }
7344
7345 prefix = "\n\t";
7346 if (rld[r].secondary_in_reload != -1)
7347 {
7348 fprintf (f, "%ssecondary_in_reload = %d",
7349 prefix, rld[r].secondary_in_reload);
7350 prefix = ", ";
7351 }
7352
7353 if (rld[r].secondary_out_reload != -1)
7354 fprintf (f, "%ssecondary_out_reload = %d\n",
7355 prefix, rld[r].secondary_out_reload);
7356
7357 prefix = "\n\t";
7358 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7359 {
7360 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7361 insn_data[rld[r].secondary_in_icode].name);
7362 prefix = ", ";
7363 }
7364
7365 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7366 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7367 insn_data[rld[r].secondary_out_icode].name);
7368
7369 fprintf (f, "\n");
7370 }
7371 }
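/* For illustration only, the output for one reload might look like this
   (hypothetical values):

     Reload 0: reload_in (SI) = (reg/v:SI 58 [ i ])
        GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
        reload_in_reg: (reg/v:SI 58 [ i ])

   Exactly which fields appear depends on which parts of the reload are
   set.  */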
7372
7373 DEBUG_FUNCTION void
7374 debug_reload (void)
7375 {
7376 debug_reload_to_stream (stderr);
7377 }