re PR libfortran/47567 (Wrong output for small absolute values with F editing)
[gcc.git] / gcc / reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
114
115 /* True if X is a constant that can be forced into the constant pool. */
116 #define CONST_POOL_OK_P(X) \
117 (CONSTANT_P (X) \
118 && GET_CODE (X) != HIGH \
119 && !targetm.cannot_force_const_mem (X))
120
121 /* True if C is a non-empty register class that has too few registers
122 to be safely used as a reload target class. */
123
124 static inline bool
125 small_register_class_p (reg_class_t rclass)
126 {
127 return (reg_class_size [(int) rclass] == 1
128 || (reg_class_size [(int) rclass] >= 1
129 && targetm.class_likely_spilled_p (rclass)));
130 }
131
132 \f
133 /* All reloads of the current insn are recorded here. See reload.h for
134 comments. */
135 int n_reloads;
136 struct reload rld[MAX_RELOADS];
137
138 /* All the "earlyclobber" operands of the current insn
139 are recorded here. */
140 int n_earlyclobbers;
141 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
142
143 int reload_n_operands;
144
145 /* Replacing reloads.
146
147 If `replace_reloads' is nonzero, then as each reload is recorded
148 an entry is made for it in the table `replacements'.
149 Then later `subst_reloads' can look through that table and
150 perform all the replacements needed. */
151
152 /* Nonzero means record the places to replace. */
153 static int replace_reloads;
154
155 /* Each replacement is recorded with a structure like this. */
156 struct replacement
157 {
158 rtx *where; /* Location to store in */
159 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
160 a SUBREG; 0 otherwise. */
161 int what; /* which reload this is for */
162 enum machine_mode mode; /* mode it must have */
163 };
164
165 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
166
167 /* Number of replacements currently recorded. */
168 static int n_replacements;
169
170 /* Used to track what is modified by an operand. */
171 struct decomposition
172 {
173 int reg_flag; /* Nonzero if referencing a register. */
174 int safe; /* Nonzero if this can't conflict with anything. */
175 rtx base; /* Base address for MEM. */
176 HOST_WIDE_INT start; /* Starting offset or register number. */
177 HOST_WIDE_INT end; /* Ending offset or register number. */
178 };
179
180 #ifdef SECONDARY_MEMORY_NEEDED
181
182 /* Save MEMs needed to copy from one class of registers to another. One MEM
183 is used per mode, but normally only one or two modes are ever used.
184
185 We keep two versions, before and after register elimination. The one
186 after register elimination is record separately for each operand. This
187 is done in case the address is not valid to be sure that we separately
188 reload each. */
189
190 static rtx secondary_memlocs[NUM_MACHINE_MODES];
191 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
192 static int secondary_memlocs_elim_used = 0;
193 #endif
194
195 /* The instruction we are doing reloads for;
196 so we can test whether a register dies in it. */
197 static rtx this_insn;
198
199 /* Nonzero if this instruction is a user-specified asm with operands. */
200 static int this_insn_is_asm;
201
202 /* If hard_regs_live_known is nonzero,
203 we can tell which hard regs are currently live,
204 at least enough to succeed in choosing dummy reloads. */
205 static int hard_regs_live_known;
206
207 /* Indexed by hard reg number,
208 element is nonnegative if hard reg has been spilled.
209 This vector is passed to `find_reloads' as an argument
210 and is not changed here. */
211 static short *static_reload_reg_p;
212
213 /* Set to 1 in subst_reg_equivs if it changes anything. */
214 static int subst_reg_equivs_changed;
215
216 /* On return from push_reload, holds the reload-number for the OUT
217 operand, which can be different for that from the input operand. */
218 static int output_reloadnum;
219
220 /* Compare two RTX's. */
221 #define MATCHES(x, y) \
222 (x == y || (x != 0 && (REG_P (x) \
223 ? REG_P (y) && REGNO (x) == REGNO (y) \
224 : rtx_equal_p (x, y) && ! side_effects_p (x))))
225
226 /* Indicates if two reloads purposes are for similar enough things that we
227 can merge their reloads. */
228 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
229 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
230 || ((when1) == (when2) && (op1) == (op2)) \
231 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
232 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
233 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
234 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
235 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
236
237 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
238 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
239 ((when1) != (when2) \
240 || ! ((op1) == (op2) \
241 || (when1) == RELOAD_FOR_INPUT \
242 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
243 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
244
245 /* If we are going to reload an address, compute the reload type to
246 use. */
247 #define ADDR_TYPE(type) \
248 ((type) == RELOAD_FOR_INPUT_ADDRESS \
249 ? RELOAD_FOR_INPADDR_ADDRESS \
250 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
251 ? RELOAD_FOR_OUTADDR_ADDRESS \
252 : (type)))
253
254 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
255 enum machine_mode, enum reload_type,
256 enum insn_code *, secondary_reload_info *);
257 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
258 int, unsigned int);
259 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
260 static void push_replacement (rtx *, int, enum machine_mode);
261 static void dup_replacements (rtx *, rtx *);
262 static void combine_reloads (void);
263 static int find_reusable_reload (rtx *, rtx, enum reg_class,
264 enum reload_type, int, int);
265 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
266 enum machine_mode, reg_class_t, int, int);
267 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
268 static struct decomposition decompose (rtx);
269 static int immune_p (rtx, rtx, struct decomposition);
270 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
271 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
272 int *);
273 static rtx make_memloc (rtx, int);
274 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
275 addr_space_t, rtx *);
276 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
277 int, enum reload_type, int, rtx);
278 static rtx subst_reg_equivs (rtx, rtx);
279 static rtx subst_indexed_address (rtx);
280 static void update_auto_inc_notes (rtx, int, int);
281 static int find_reloads_address_1 (enum machine_mode, rtx, int,
282 enum rtx_code, enum rtx_code, rtx *,
283 int, enum reload_type,int, rtx);
284 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
285 enum machine_mode, int,
286 enum reload_type, int);
287 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
288 int, rtx);
289 static void copy_replacements_1 (rtx *, rtx *, int);
290 static int find_inc_amount (rtx, rtx);
291 static int refers_to_mem_for_reload_p (rtx);
292 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
293 rtx, rtx *);
294
295 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
296 list yet. */
297
298 static void
299 push_reg_equiv_alt_mem (int regno, rtx mem)
300 {
301 rtx it;
302
303 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
304 if (rtx_equal_p (XEXP (it, 0), mem))
305 return;
306
307 reg_equiv_alt_mem_list [regno]
308 = alloc_EXPR_LIST (REG_EQUIV, mem,
309 reg_equiv_alt_mem_list [regno]);
310 }
311 \f
312 /* Determine if any secondary reloads are needed for loading (if IN_P is
313 nonzero) or storing (if IN_P is zero) X to or from a reload register of
314 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
315 are needed, push them.
316
317 Return the reload number of the secondary reload we made, or -1 if
318 we didn't need one. *PICODE is set to the insn_code to use if we do
319 need a secondary reload. */
320
321 static int
322 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
323 enum reg_class reload_class,
324 enum machine_mode reload_mode, enum reload_type type,
325 enum insn_code *picode, secondary_reload_info *prev_sri)
326 {
327 enum reg_class rclass = NO_REGS;
328 enum reg_class scratch_class;
329 enum machine_mode mode = reload_mode;
330 enum insn_code icode = CODE_FOR_nothing;
331 enum insn_code t_icode = CODE_FOR_nothing;
332 enum reload_type secondary_type;
333 int s_reload, t_reload = -1;
334 const char *scratch_constraint;
335 char letter;
336 secondary_reload_info sri;
337
338 if (type == RELOAD_FOR_INPUT_ADDRESS
339 || type == RELOAD_FOR_OUTPUT_ADDRESS
340 || type == RELOAD_FOR_INPADDR_ADDRESS
341 || type == RELOAD_FOR_OUTADDR_ADDRESS)
342 secondary_type = type;
343 else
344 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
345
346 *picode = CODE_FOR_nothing;
347
348 /* If X is a paradoxical SUBREG, use the inner value to determine both the
349 mode and object being reloaded. */
350 if (GET_CODE (x) == SUBREG
351 && (GET_MODE_SIZE (GET_MODE (x))
352 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
353 {
354 x = SUBREG_REG (x);
355 reload_mode = GET_MODE (x);
356 }
357
358 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
359 is still a pseudo-register by now, it *must* have an equivalent MEM
360 but we don't want to assume that), use that equivalent when seeing if
361 a secondary reload is needed since whether or not a reload is needed
362 might be sensitive to the form of the MEM. */
363
364 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
365 && reg_equiv_mem[REGNO (x)] != 0)
366 x = reg_equiv_mem[REGNO (x)];
367
368 sri.icode = CODE_FOR_nothing;
369 sri.prev_sri = prev_sri;
370 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
371 reload_mode, &sri);
372 icode = (enum insn_code) sri.icode;
373
374 /* If we don't need any secondary registers, done. */
375 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
376 return -1;
377
378 if (rclass != NO_REGS)
379 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
380 reload_mode, type, &t_icode, &sri);
381
382 /* If we will be using an insn, the secondary reload is for a
383 scratch register. */
384
385 if (icode != CODE_FOR_nothing)
386 {
387 /* If IN_P is nonzero, the reload register will be the output in
388 operand 0. If IN_P is zero, the reload register will be the input
389 in operand 1. Outputs should have an initial "=", which we must
390 skip. */
391
392 /* ??? It would be useful to be able to handle only two, or more than
393 three, operands, but for now we can only handle the case of having
394 exactly three: output, input and one temp/scratch. */
395 gcc_assert (insn_data[(int) icode].n_operands == 3);
396
397 /* ??? We currently have no way to represent a reload that needs
398 an icode to reload from an intermediate tertiary reload register.
399 We should probably have a new field in struct reload to tag a
400 chain of scratch operand reloads onto. */
401 gcc_assert (rclass == NO_REGS);
402
403 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
404 gcc_assert (*scratch_constraint == '=');
405 scratch_constraint++;
406 if (*scratch_constraint == '&')
407 scratch_constraint++;
408 letter = *scratch_constraint;
409 scratch_class = (letter == 'r' ? GENERAL_REGS
410 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
411 scratch_constraint));
412
413 rclass = scratch_class;
414 mode = insn_data[(int) icode].operand[2].mode;
415 }
416
417 /* This case isn't valid, so fail. Reload is allowed to use the same
418 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
419 in the case of a secondary register, we actually need two different
420 registers for correct code. We fail here to prevent the possibility of
421 silently generating incorrect code later.
422
423 The convention is that secondary input reloads are valid only if the
424 secondary_class is different from class. If you have such a case, you
425 can not use secondary reloads, you must work around the problem some
426 other way.
427
428 Allow this when a reload_in/out pattern is being used. I.e. assume
429 that the generated code handles this case. */
430
431 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
432 || t_icode != CODE_FOR_nothing);
433
434 /* See if we can reuse an existing secondary reload. */
435 for (s_reload = 0; s_reload < n_reloads; s_reload++)
436 if (rld[s_reload].secondary_p
437 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
438 || reg_class_subset_p (rld[s_reload].rclass, rclass))
439 && ((in_p && rld[s_reload].inmode == mode)
440 || (! in_p && rld[s_reload].outmode == mode))
441 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
442 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
443 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
444 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
445 && (small_register_class_p (rclass)
446 || targetm.small_register_classes_for_mode_p (VOIDmode))
447 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 {
450 if (in_p)
451 rld[s_reload].inmode = mode;
452 if (! in_p)
453 rld[s_reload].outmode = mode;
454
455 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
456 rld[s_reload].rclass = rclass;
457
458 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
459 rld[s_reload].optional &= optional;
460 rld[s_reload].secondary_p = 1;
461 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
462 opnum, rld[s_reload].opnum))
463 rld[s_reload].when_needed = RELOAD_OTHER;
464
465 break;
466 }
467
468 if (s_reload == n_reloads)
469 {
470 #ifdef SECONDARY_MEMORY_NEEDED
471 /* If we need a memory location to copy between the two reload regs,
472 set it up now. Note that we do the input case before making
473 the reload and the output case after. This is due to the
474 way reloads are output. */
475
476 if (in_p && icode == CODE_FOR_nothing
477 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
478 {
479 get_secondary_mem (x, reload_mode, opnum, type);
480
481 /* We may have just added new reloads. Make sure we add
482 the new reload at the end. */
483 s_reload = n_reloads;
484 }
485 #endif
486
487 /* We need to make a new secondary reload for this register class. */
488 rld[s_reload].in = rld[s_reload].out = 0;
489 rld[s_reload].rclass = rclass;
490
491 rld[s_reload].inmode = in_p ? mode : VOIDmode;
492 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
493 rld[s_reload].reg_rtx = 0;
494 rld[s_reload].optional = optional;
495 rld[s_reload].inc = 0;
496 /* Maybe we could combine these, but it seems too tricky. */
497 rld[s_reload].nocombine = 1;
498 rld[s_reload].in_reg = 0;
499 rld[s_reload].out_reg = 0;
500 rld[s_reload].opnum = opnum;
501 rld[s_reload].when_needed = secondary_type;
502 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
503 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
504 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
505 rld[s_reload].secondary_out_icode
506 = ! in_p ? t_icode : CODE_FOR_nothing;
507 rld[s_reload].secondary_p = 1;
508
509 n_reloads++;
510
511 #ifdef SECONDARY_MEMORY_NEEDED
512 if (! in_p && icode == CODE_FOR_nothing
513 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
514 get_secondary_mem (x, mode, opnum, type);
515 #endif
516 }
517
518 *picode = icode;
519 return s_reload;
520 }
521
522 /* If a secondary reload is needed, return its class. If both an intermediate
523 register and a scratch register is needed, we return the class of the
524 intermediate register. */
525 reg_class_t
526 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
527 rtx x)
528 {
529 enum insn_code icode;
530 secondary_reload_info sri;
531
532 sri.icode = CODE_FOR_nothing;
533 sri.prev_sri = NULL;
534 rclass
535 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
536 icode = (enum insn_code) sri.icode;
537
538 /* If there are no secondary reloads at all, we return NO_REGS.
539 If an intermediate register is needed, we return its class. */
540 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
541 return rclass;
542
543 /* No intermediate register is needed, but we have a special reload
544 pattern, which we assume for now needs a scratch register. */
545 return scratch_reload_class (icode);
546 }
547
548 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
549 three operands, verify that operand 2 is an output operand, and return
550 its register class.
551 ??? We'd like to be able to handle any pattern with at least 2 operands,
552 for zero or more scratch registers, but that needs more infrastructure. */
553 enum reg_class
554 scratch_reload_class (enum insn_code icode)
555 {
556 const char *scratch_constraint;
557 char scratch_letter;
558 enum reg_class rclass;
559
560 gcc_assert (insn_data[(int) icode].n_operands == 3);
561 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
562 gcc_assert (*scratch_constraint == '=');
563 scratch_constraint++;
564 if (*scratch_constraint == '&')
565 scratch_constraint++;
566 scratch_letter = *scratch_constraint;
567 if (scratch_letter == 'r')
568 return GENERAL_REGS;
569 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
570 scratch_constraint);
571 gcc_assert (rclass != NO_REGS);
572 return rclass;
573 }
574 \f
575 #ifdef SECONDARY_MEMORY_NEEDED
576
577 /* Return a memory location that will be used to copy X in mode MODE.
578 If we haven't already made a location for this mode in this insn,
579 call find_reloads_address on the location being returned. */
580
581 rtx
582 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
583 int opnum, enum reload_type type)
584 {
585 rtx loc;
586 int mem_valid;
587
588 /* By default, if MODE is narrower than a word, widen it to a word.
589 This is required because most machines that require these memory
590 locations do not support short load and stores from all registers
591 (e.g., FP registers). */
592
593 #ifdef SECONDARY_MEMORY_NEEDED_MODE
594 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
595 #else
596 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
597 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
598 #endif
599
600 /* If we already have made a MEM for this operand in MODE, return it. */
601 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
602 return secondary_memlocs_elim[(int) mode][opnum];
603
604 /* If this is the first time we've tried to get a MEM for this mode,
605 allocate a new one. `something_changed' in reload will get set
606 by noticing that the frame size has changed. */
607
608 if (secondary_memlocs[(int) mode] == 0)
609 {
610 #ifdef SECONDARY_MEMORY_NEEDED_RTX
611 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
612 #else
613 secondary_memlocs[(int) mode]
614 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
615 #endif
616 }
617
618 /* Get a version of the address doing any eliminations needed. If that
619 didn't give us a new MEM, make a new one if it isn't valid. */
620
621 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
622 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
623 MEM_ADDR_SPACE (loc));
624
625 if (! mem_valid && loc == secondary_memlocs[(int) mode])
626 loc = copy_rtx (loc);
627
628 /* The only time the call below will do anything is if the stack
629 offset is too large. In that case IND_LEVELS doesn't matter, so we
630 can just pass a zero. Adjust the type to be the address of the
631 corresponding object. If the address was valid, save the eliminated
632 address. If it wasn't valid, we need to make a reload each time, so
633 don't save it. */
634
635 if (! mem_valid)
636 {
637 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
638 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
639 : RELOAD_OTHER);
640
641 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
642 opnum, type, 0, 0);
643 }
644
645 secondary_memlocs_elim[(int) mode][opnum] = loc;
646 if (secondary_memlocs_elim_used <= (int)mode)
647 secondary_memlocs_elim_used = (int)mode + 1;
648 return loc;
649 }
650
651 /* Clear any secondary memory locations we've made. */
652
653 void
654 clear_secondary_mem (void)
655 {
656 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
657 }
658 #endif /* SECONDARY_MEMORY_NEEDED */
659 \f
660
661 /* Find the largest class which has at least one register valid in
662 mode INNER, and which for every such register, that register number
663 plus N is also valid in OUTER (if in range) and is cheap to move
664 into REGNO. Such a class must exist. */
665
666 static enum reg_class
667 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
668 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
669 unsigned int dest_regno ATTRIBUTE_UNUSED)
670 {
671 int best_cost = -1;
672 int rclass;
673 int regno;
674 enum reg_class best_class = NO_REGS;
675 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
676 unsigned int best_size = 0;
677 int cost;
678
679 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
680 {
681 int bad = 0;
682 int good = 0;
683 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
684 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
685 {
686 if (HARD_REGNO_MODE_OK (regno, inner))
687 {
688 good = 1;
689 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
690 || ! HARD_REGNO_MODE_OK (regno + n, outer))
691 bad = 1;
692 }
693 }
694
695 if (bad || !good)
696 continue;
697 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
698
699 if ((reg_class_size[rclass] > best_size
700 && (best_cost < 0 || best_cost >= cost))
701 || best_cost > cost)
702 {
703 best_class = (enum reg_class) rclass;
704 best_size = reg_class_size[rclass];
705 best_cost = register_move_cost (outer, (enum reg_class) rclass,
706 dest_class);
707 }
708 }
709
710 gcc_assert (best_size != 0);
711
712 return best_class;
713 }
714 \f
715 /* Return the number of a previously made reload that can be combined with
716 a new one, or n_reloads if none of the existing reloads can be used.
717 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
718 push_reload, they determine the kind of the new reload that we try to
719 combine. P_IN points to the corresponding value of IN, which can be
720 modified by this function.
721 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
722
723 static int
724 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
725 enum reload_type type, int opnum, int dont_share)
726 {
727 rtx in = *p_in;
728 int i;
729 /* We can't merge two reloads if the output of either one is
730 earlyclobbered. */
731
732 if (earlyclobber_operand_p (out))
733 return n_reloads;
734
735 /* We can use an existing reload if the class is right
736 and at least one of IN and OUT is a match
737 and the other is at worst neutral.
738 (A zero compared against anything is neutral.)
739
740 For targets with small register classes, don't use existing reloads
741 unless they are for the same thing since that can cause us to need
742 more reload registers than we otherwise would. */
743
744 for (i = 0; i < n_reloads; i++)
745 if ((reg_class_subset_p (rclass, rld[i].rclass)
746 || reg_class_subset_p (rld[i].rclass, rclass))
747 /* If the existing reload has a register, it must fit our class. */
748 && (rld[i].reg_rtx == 0
749 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
750 true_regnum (rld[i].reg_rtx)))
751 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
752 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
753 || (out != 0 && MATCHES (rld[i].out, out)
754 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
755 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
756 && (small_register_class_p (rclass)
757 || targetm.small_register_classes_for_mode_p (VOIDmode))
758 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
759 return i;
760
761 /* Reloading a plain reg for input can match a reload to postincrement
762 that reg, since the postincrement's value is the right value.
763 Likewise, it can match a preincrement reload, since we regard
764 the preincrementation as happening before any ref in this insn
765 to that register. */
766 for (i = 0; i < n_reloads; i++)
767 if ((reg_class_subset_p (rclass, rld[i].rclass)
768 || reg_class_subset_p (rld[i].rclass, rclass))
769 /* If the existing reload has a register, it must fit our
770 class. */
771 && (rld[i].reg_rtx == 0
772 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
773 true_regnum (rld[i].reg_rtx)))
774 && out == 0 && rld[i].out == 0 && rld[i].in != 0
775 && ((REG_P (in)
776 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
777 && MATCHES (XEXP (rld[i].in, 0), in))
778 || (REG_P (rld[i].in)
779 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
780 && MATCHES (XEXP (in, 0), rld[i].in)))
781 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
782 && (small_register_class_p (rclass)
783 || targetm.small_register_classes_for_mode_p (VOIDmode))
784 && MERGABLE_RELOADS (type, rld[i].when_needed,
785 opnum, rld[i].opnum))
786 {
787 /* Make sure reload_in ultimately has the increment,
788 not the plain register. */
789 if (REG_P (in))
790 *p_in = rld[i].in;
791 return i;
792 }
793 return n_reloads;
794 }
795
796 /* Return nonzero if X is a SUBREG which will require reloading of its
797 SUBREG_REG expression. */
798
799 static int
800 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
801 {
802 rtx inner;
803
804 /* Only SUBREGs are problematical. */
805 if (GET_CODE (x) != SUBREG)
806 return 0;
807
808 inner = SUBREG_REG (x);
809
810 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
811 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
812 return 1;
813
814 /* If INNER is not a hard register, then INNER will not need to
815 be reloaded. */
816 if (!REG_P (inner)
817 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
818 return 0;
819
820 /* If INNER is not ok for MODE, then INNER will need reloading. */
821 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
822 return 1;
823
824 /* If the outer part is a word or smaller, INNER larger than a
825 word and the number of regs for INNER is not the same as the
826 number of words in INNER, then INNER will need reloading. */
827 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
828 && output
829 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
830 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
831 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
832 }
833
834 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
835 requiring an extra reload register. The caller has already found that
836 IN contains some reference to REGNO, so check that we can produce the
837 new value in a single step. E.g. if we have
838 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
839 instruction that adds one to a register, this should succeed.
840 However, if we have something like
841 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
842 needs to be loaded into a register first, we need a separate reload
843 register.
844 Such PLUS reloads are generated by find_reload_address_part.
845 The out-of-range PLUS expressions are usually introduced in the instruction
846 patterns by register elimination and substituting pseudos without a home
847 by their function-invariant equivalences. */
848 static int
849 can_reload_into (rtx in, int regno, enum machine_mode mode)
850 {
851 rtx dst, test_insn;
852 int r = 0;
853 struct recog_data save_recog_data;
854
855 /* For matching constraints, we often get notional input reloads where
856 we want to use the original register as the reload register. I.e.
857 technically this is a non-optional input-output reload, but IN is
858 already a valid register, and has been chosen as the reload register.
859 Speed this up, since it trivially works. */
860 if (REG_P (in))
861 return 1;
862
863 /* To test MEMs properly, we'd have to take into account all the reloads
864 that are already scheduled, which can become quite complicated.
865 And since we've already handled address reloads for this MEM, it
866 should always succeed anyway. */
867 if (MEM_P (in))
868 return 1;
869
870 /* If we can make a simple SET insn that does the job, everything should
871 be fine. */
872 dst = gen_rtx_REG (mode, regno);
873 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
874 save_recog_data = recog_data;
875 if (recog_memoized (test_insn) >= 0)
876 {
877 extract_insn (test_insn);
878 r = constrain_operands (1);
879 }
880 recog_data = save_recog_data;
881 return r;
882 }
883
884 /* Record one reload that needs to be performed.
885 IN is an rtx saying where the data are to be found before this instruction.
886 OUT says where they must be stored after the instruction.
887 (IN is zero for data not read, and OUT is zero for data not written.)
888 INLOC and OUTLOC point to the places in the instructions where
889 IN and OUT were found.
890 If IN and OUT are both nonzero, it means the same register must be used
891 to reload both IN and OUT.
892
893 RCLASS is a register class required for the reloaded data.
894 INMODE is the machine mode that the instruction requires
895 for the reg that replaces IN and OUTMODE is likewise for OUT.
896
897 If IN is zero, then OUT's location and mode should be passed as
898 INLOC and INMODE.
899
900 STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
901
902 OPTIONAL nonzero means this reload does not need to be performed:
903 it can be discarded if that is more convenient.
904
905 OPNUM and TYPE say what the purpose of this reload is.
906
907 The return value is the reload-number for this reload.
908
909 If both IN and OUT are nonzero, in some rare cases we might
910 want to make two separate reloads. (Actually we never do this now.)
911 Therefore, the reload-number for OUT is stored in
912 output_reloadnum when we return; the return value applies to IN.
913 Usually (presently always), when IN and OUT are nonzero,
914 the two reload-numbers are equal, but the caller should be careful to
915 distinguish them. */
916
917 int
918 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
919 enum reg_class rclass, enum machine_mode inmode,
920 enum machine_mode outmode, int strict_low, int optional,
921 int opnum, enum reload_type type)
922 {
923 int i;
924 int dont_share = 0;
925 int dont_remove_subreg = 0;
926 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
927 int secondary_in_reload = -1, secondary_out_reload = -1;
928 enum insn_code secondary_in_icode = CODE_FOR_nothing;
929 enum insn_code secondary_out_icode = CODE_FOR_nothing;
930
931 /* INMODE and/or OUTMODE could be VOIDmode if no mode
932 has been specified for the operand. In that case,
933 use the operand's mode as the mode to reload. */
934 if (inmode == VOIDmode && in != 0)
935 inmode = GET_MODE (in);
936 if (outmode == VOIDmode && out != 0)
937 outmode = GET_MODE (out);
938
939 /* If find_reloads and friends until now missed to replace a pseudo
940 with a constant of reg_equiv_constant something went wrong
941 beforehand.
942 Note that it can't simply be done here if we missed it earlier
943 since the constant might need to be pushed into the literal pool
944 and the resulting memref would probably need further
945 reloading. */
946 if (in != 0 && REG_P (in))
947 {
948 int regno = REGNO (in);
949
950 gcc_assert (regno < FIRST_PSEUDO_REGISTER
951 || reg_renumber[regno] >= 0
952 || reg_equiv_constant[regno] == NULL_RTX);
953 }
954
955 /* reg_equiv_constant only contains constants which are obviously
956 not appropriate as destination. So if we would need to replace
957 the destination pseudo with a constant we are in real
958 trouble. */
959 if (out != 0 && REG_P (out))
960 {
961 int regno = REGNO (out);
962
963 gcc_assert (regno < FIRST_PSEUDO_REGISTER
964 || reg_renumber[regno] >= 0
965 || reg_equiv_constant[regno] == NULL_RTX);
966 }
967
968 /* If we have a read-write operand with an address side-effect,
969 change either IN or OUT so the side-effect happens only once. */
970 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
971 switch (GET_CODE (XEXP (in, 0)))
972 {
973 case POST_INC: case POST_DEC: case POST_MODIFY:
974 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
975 break;
976
977 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
978 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
979 break;
980
981 default:
982 break;
983 }
984
985 /* If we are reloading a (SUBREG constant ...), really reload just the
986 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
987 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
988 a pseudo and hence will become a MEM) with M1 wider than M2 and the
989 register is a pseudo, also reload the inside expression.
990 For machines that extend byte loads, do this for any SUBREG of a pseudo
991 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
992 M2 is an integral mode that gets extended when loaded.
993 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
994 either M1 is not valid for R or M2 is wider than a word but we only
995 need one word to store an M2-sized quantity in R.
996 (However, if OUT is nonzero, we need to reload the reg *and*
997 the subreg, so do nothing here, and let following statement handle it.)
998
999 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1000 we can't handle it here because CONST_INT does not indicate a mode.
1001
1002 Similarly, we must reload the inside expression if we have a
1003 STRICT_LOW_PART (presumably, in == out in this case).
1004
1005 Also reload the inner expression if it does not require a secondary
1006 reload but the SUBREG does.
1007
1008 Finally, reload the inner expression if it is a register that is in
1009 the class whose registers cannot be referenced in a different size
1010 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1011 cannot reload just the inside since we might end up with the wrong
1012 register class. But if it is inside a STRICT_LOW_PART, we have
1013 no choice, so we hope we do get the right register class there. */
1014
1015 if (in != 0 && GET_CODE (in) == SUBREG
1016 && (subreg_lowpart_p (in) || strict_low)
1017 #ifdef CANNOT_CHANGE_MODE_CLASS
1018 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1019 #endif
1020 && (CONSTANT_P (SUBREG_REG (in))
1021 || GET_CODE (SUBREG_REG (in)) == PLUS
1022 || strict_low
1023 || (((REG_P (SUBREG_REG (in))
1024 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1025 || MEM_P (SUBREG_REG (in)))
1026 && ((GET_MODE_SIZE (inmode)
1027 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1028 #ifdef LOAD_EXTEND_OP
1029 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1030 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1031 <= UNITS_PER_WORD)
1032 && (GET_MODE_SIZE (inmode)
1033 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1034 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1035 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1036 #endif
1037 #ifdef WORD_REGISTER_OPERATIONS
1038 || ((GET_MODE_SIZE (inmode)
1039 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1040 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1041 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1042 / UNITS_PER_WORD)))
1043 #endif
1044 ))
1045 || (REG_P (SUBREG_REG (in))
1046 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1047 /* The case where out is nonzero
1048 is handled differently in the following statement. */
1049 && (out == 0 || subreg_lowpart_p (in))
1050 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1051 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1052 > UNITS_PER_WORD)
1053 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1054 / UNITS_PER_WORD)
1055 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1056 [GET_MODE (SUBREG_REG (in))]))
1057 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1058 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1059 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1060 SUBREG_REG (in))
1061 == NO_REGS))
1062 #ifdef CANNOT_CHANGE_MODE_CLASS
1063 || (REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1065 && REG_CANNOT_CHANGE_MODE_P
1066 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1067 #endif
1068 ))
1069 {
1070 in_subreg_loc = inloc;
1071 inloc = &SUBREG_REG (in);
1072 in = *inloc;
1073 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1074 if (MEM_P (in))
1075 /* This is supposed to happen only for paradoxical subregs made by
1076 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1077 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1078 #endif
1079 inmode = GET_MODE (in);
1080 }
1081
1082 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1083 either M1 is not valid for R or M2 is wider than a word but we only
1084 need one word to store an M2-sized quantity in R.
1085
1086 However, we must reload the inner reg *as well as* the subreg in
1087 that case. */
1088
1089 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1090 code above. This can happen if SUBREG_BYTE != 0. */
1091
1092 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1093 {
1094 enum reg_class in_class = rclass;
1095
1096 if (REG_P (SUBREG_REG (in)))
1097 in_class
1098 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1099 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1100 GET_MODE (SUBREG_REG (in)),
1101 SUBREG_BYTE (in),
1102 GET_MODE (in)),
1103 REGNO (SUBREG_REG (in)));
1104
1105 /* This relies on the fact that emit_reload_insns outputs the
1106 instructions for input reloads of type RELOAD_OTHER in the same
1107 order as the reloads. Thus if the outer reload is also of type
1108 RELOAD_OTHER, we are guaranteed that this inner reload will be
1109 output before the outer reload. */
1110 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1111 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1112 dont_remove_subreg = 1;
1113 }
1114
1115 /* Similarly for paradoxical and problematical SUBREGs on the output.
1116 Note that there is no reason we need worry about the previous value
1117 of SUBREG_REG (out); even if wider than out,
1118 storing in a subreg is entitled to clobber it all
1119 (except in the case of STRICT_LOW_PART,
1120 and in that case the constraint should label it input-output.) */
1121 if (out != 0 && GET_CODE (out) == SUBREG
1122 && (subreg_lowpart_p (out) || strict_low)
1123 #ifdef CANNOT_CHANGE_MODE_CLASS
1124 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1125 #endif
1126 && (CONSTANT_P (SUBREG_REG (out))
1127 || strict_low
1128 || (((REG_P (SUBREG_REG (out))
1129 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1130 || MEM_P (SUBREG_REG (out)))
1131 && ((GET_MODE_SIZE (outmode)
1132 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1133 #ifdef WORD_REGISTER_OPERATIONS
1134 || ((GET_MODE_SIZE (outmode)
1135 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1136 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1137 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1138 / UNITS_PER_WORD)))
1139 #endif
1140 ))
1141 || (REG_P (SUBREG_REG (out))
1142 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1143 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1144 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1145 > UNITS_PER_WORD)
1146 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1147 / UNITS_PER_WORD)
1148 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1149 [GET_MODE (SUBREG_REG (out))]))
1150 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1151 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1152 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1153 SUBREG_REG (out))
1154 == NO_REGS))
1155 #ifdef CANNOT_CHANGE_MODE_CLASS
1156 || (REG_P (SUBREG_REG (out))
1157 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1158 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1159 GET_MODE (SUBREG_REG (out)),
1160 outmode))
1161 #endif
1162 ))
1163 {
1164 out_subreg_loc = outloc;
1165 outloc = &SUBREG_REG (out);
1166 out = *outloc;
1167 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1168 gcc_assert (!MEM_P (out)
1169 || GET_MODE_SIZE (GET_MODE (out))
1170 <= GET_MODE_SIZE (outmode));
1171 #endif
1172 outmode = GET_MODE (out);
1173 }
1174
1175 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1176 either M1 is not valid for R or M2 is wider than a word but we only
1177 need one word to store an M2-sized quantity in R.
1178
1179 However, we must reload the inner reg *as well as* the subreg in
1180 that case. In this case, the inner reg is an in-out reload. */
1181
1182 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1183 {
1184 /* This relies on the fact that emit_reload_insns outputs the
1185 instructions for output reloads of type RELOAD_OTHER in reverse
1186 order of the reloads. Thus if the outer reload is also of type
1187 RELOAD_OTHER, we are guaranteed that this inner reload will be
1188 output after the outer reload. */
1189 dont_remove_subreg = 1;
1190 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1191 &SUBREG_REG (out),
1192 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1193 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1194 GET_MODE (SUBREG_REG (out)),
1195 SUBREG_BYTE (out),
1196 GET_MODE (out)),
1197 REGNO (SUBREG_REG (out))),
1198 VOIDmode, VOIDmode, 0, 0,
1199 opnum, RELOAD_OTHER);
1200 }
1201
1202 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1203 if (in != 0 && out != 0 && MEM_P (out)
1204 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1205 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1206 dont_share = 1;
1207
1208 /* If IN is a SUBREG of a hard register, make a new REG. This
1209 simplifies some of the cases below. */
1210
1211 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1212 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1213 && ! dont_remove_subreg)
1214 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1215
1216 /* Similarly for OUT. */
1217 if (out != 0 && GET_CODE (out) == SUBREG
1218 && REG_P (SUBREG_REG (out))
1219 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1220 && ! dont_remove_subreg)
1221 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1222
1223 /* Narrow down the class of register wanted if that is
1224 desirable on this machine for efficiency. */
1225 {
1226 reg_class_t preferred_class = rclass;
1227
1228 if (in != 0)
1229 preferred_class = targetm.preferred_reload_class (in, rclass);
1230
1231 /* Output reloads may need analogous treatment, different in detail. */
1232 if (out != 0)
1233 preferred_class
1234 = targetm.preferred_output_reload_class (out, preferred_class);
1235
1236 /* Discard what the target said if we cannot do it. */
1237 if (preferred_class != NO_REGS
1238 || (optional && type == RELOAD_FOR_OUTPUT))
1239 rclass = (enum reg_class) preferred_class;
1240 }
1241
1242 /* Make sure we use a class that can handle the actual pseudo
1243 inside any subreg. For example, on the 386, QImode regs
1244 can appear within SImode subregs. Although GENERAL_REGS
1245 can handle SImode, QImode needs a smaller class. */
1246 #ifdef LIMIT_RELOAD_CLASS
1247 if (in_subreg_loc)
1248 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1249 else if (in != 0 && GET_CODE (in) == SUBREG)
1250 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1251
1252 if (out_subreg_loc)
1253 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1254 if (out != 0 && GET_CODE (out) == SUBREG)
1255 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1256 #endif
1257
1258 /* Verify that this class is at least possible for the mode that
1259 is specified. */
1260 if (this_insn_is_asm)
1261 {
1262 enum machine_mode mode;
1263 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1264 mode = inmode;
1265 else
1266 mode = outmode;
1267 if (mode == VOIDmode)
1268 {
1269 error_for_asm (this_insn, "cannot reload integer constant "
1270 "operand in %<asm%>");
1271 mode = word_mode;
1272 if (in != 0)
1273 inmode = word_mode;
1274 if (out != 0)
1275 outmode = word_mode;
1276 }
1277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1278 if (HARD_REGNO_MODE_OK (i, mode)
1279 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1280 break;
1281 if (i == FIRST_PSEUDO_REGISTER)
1282 {
1283 error_for_asm (this_insn, "impossible register constraint "
1284 "in %<asm%>");
1285 /* Avoid further trouble with this insn. */
1286 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1287 /* We used to continue here setting class to ALL_REGS, but it triggers
1288 sanity check on i386 for:
1289 void foo(long double d)
1290 {
1291 asm("" :: "a" (d));
1292 }
1293 Returning zero here ought to be safe as we take care in
1294 find_reloads to not process the reloads when instruction was
1295 replaced by USE. */
1296
1297 return 0;
1298 }
1299 }
1300
1301 /* Optional output reloads are always OK even if we have no register class,
1302 since the function of these reloads is only to have spill_reg_store etc.
1303 set, so that the storing insn can be deleted later. */
1304 gcc_assert (rclass != NO_REGS
1305 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1306
1307 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1308
1309 if (i == n_reloads)
1310 {
1311 /* See if we need a secondary reload register to move between CLASS
1312 and IN or CLASS and OUT. Get the icode and push any required reloads
1313 needed for each of them if so. */
1314
1315 if (in != 0)
1316 secondary_in_reload
1317 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1318 &secondary_in_icode, NULL);
1319 if (out != 0 && GET_CODE (out) != SCRATCH)
1320 secondary_out_reload
1321 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1322 type, &secondary_out_icode, NULL);
1323
1324 /* We found no existing reload suitable for re-use.
1325 So add an additional reload. */
1326
1327 #ifdef SECONDARY_MEMORY_NEEDED
1328 /* If a memory location is needed for the copy, make one. */
1329 if (in != 0
1330 && (REG_P (in)
1331 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1332 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1333 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1334 rclass, inmode))
1335 get_secondary_mem (in, inmode, opnum, type);
1336 #endif
1337
1338 i = n_reloads;
1339 rld[i].in = in;
1340 rld[i].out = out;
1341 rld[i].rclass = rclass;
1342 rld[i].inmode = inmode;
1343 rld[i].outmode = outmode;
1344 rld[i].reg_rtx = 0;
1345 rld[i].optional = optional;
1346 rld[i].inc = 0;
1347 rld[i].nocombine = 0;
1348 rld[i].in_reg = inloc ? *inloc : 0;
1349 rld[i].out_reg = outloc ? *outloc : 0;
1350 rld[i].opnum = opnum;
1351 rld[i].when_needed = type;
1352 rld[i].secondary_in_reload = secondary_in_reload;
1353 rld[i].secondary_out_reload = secondary_out_reload;
1354 rld[i].secondary_in_icode = secondary_in_icode;
1355 rld[i].secondary_out_icode = secondary_out_icode;
1356 rld[i].secondary_p = 0;
1357
1358 n_reloads++;
1359
1360 #ifdef SECONDARY_MEMORY_NEEDED
1361 if (out != 0
1362 && (REG_P (out)
1363 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1364 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1365 && SECONDARY_MEMORY_NEEDED (rclass,
1366 REGNO_REG_CLASS (reg_or_subregno (out)),
1367 outmode))
1368 get_secondary_mem (out, outmode, opnum, type);
1369 #endif
1370 }
1371 else
1372 {
1373 /* We are reusing an existing reload,
1374 but we may have additional information for it.
1375 For example, we may now have both IN and OUT
1376 while the old one may have just one of them. */
1377
1378 /* The modes can be different. If they are, we want to reload in
1379 the larger mode, so that the value is valid for both modes. */
1380 if (inmode != VOIDmode
1381 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1382 rld[i].inmode = inmode;
1383 if (outmode != VOIDmode
1384 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1385 rld[i].outmode = outmode;
1386 if (in != 0)
1387 {
1388 rtx in_reg = inloc ? *inloc : 0;
1389 /* If we merge reloads for two distinct rtl expressions that
1390 are identical in content, there might be duplicate address
1391 reloads. Remove the extra set now, so that if we later find
1392 that we can inherit this reload, we can get rid of the
1393 address reloads altogether.
1394
1395 Do not do this if both reloads are optional since the result
1396 would be an optional reload which could potentially leave
1397 unresolved address replacements.
1398
1399 It is not sufficient to call transfer_replacements since
1400 choose_reload_regs will remove the replacements for address
1401 reloads of inherited reloads which results in the same
1402 problem. */
1403 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1404 && ! (rld[i].optional && optional))
1405 {
1406 /* We must keep the address reload with the lower operand
1407 number alive. */
1408 if (opnum > rld[i].opnum)
1409 {
1410 remove_address_replacements (in);
1411 in = rld[i].in;
1412 in_reg = rld[i].in_reg;
1413 }
1414 else
1415 remove_address_replacements (rld[i].in);
1416 }
1417 /* When emitting reloads we don't necessarily look at the in-
1418 and outmode, but also directly at the operands (in and out).
1419 So we can't simply overwrite them with whatever we have found
1420 for this (to-be-merged) reload, we have to "merge" that too.
1421 Reusing another reload already verified that we deal with the
1422 same operands, just possibly in different modes. So we
1423 overwrite the operands only when the new mode is larger.
1424 See also PR33613. */
1425 if (!rld[i].in
1426 || GET_MODE_SIZE (GET_MODE (in))
1427 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1428 rld[i].in = in;
1429 if (!rld[i].in_reg
1430 || (in_reg
1431 && GET_MODE_SIZE (GET_MODE (in_reg))
1432 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1433 rld[i].in_reg = in_reg;
1434 }
1435 if (out != 0)
1436 {
1437 if (!rld[i].out
1438 || (out
1439 && GET_MODE_SIZE (GET_MODE (out))
1440 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1441 rld[i].out = out;
1442 if (outloc
1443 && (!rld[i].out_reg
1444 || GET_MODE_SIZE (GET_MODE (*outloc))
1445 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1446 rld[i].out_reg = *outloc;
1447 }
1448 if (reg_class_subset_p (rclass, rld[i].rclass))
1449 rld[i].rclass = rclass;
1450 rld[i].optional &= optional;
1451 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1452 opnum, rld[i].opnum))
1453 rld[i].when_needed = RELOAD_OTHER;
1454 rld[i].opnum = MIN (rld[i].opnum, opnum);
1455 }
1456
1457 /* If the ostensible rtx being reloaded differs from the rtx found
1458 in the location to substitute, this reload is not safe to combine
1459 because we cannot reliably tell whether it appears in the insn. */
1460
1461 if (in != 0 && in != *inloc)
1462 rld[i].nocombine = 1;
1463
1464 #if 0
1465 /* This was replaced by changes in find_reloads_address_1 and the new
1466 function inc_for_reload, which go with a new meaning of reload_inc. */
1467
1468 /* If this is an IN/OUT reload in an insn that sets the CC,
1469 it must be for an autoincrement. It doesn't work to store
1470 the incremented value after the insn because that would clobber the CC.
1471 So we must do the increment of the value reloaded from,
1472 increment it, store it back, then decrement again. */
1473 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1474 {
1475 out = 0;
1476 rld[i].out = 0;
1477 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1478 /* If we did not find a nonzero amount-to-increment-by,
1479 that contradicts the belief that IN is being incremented
1480 in an address in this insn. */
1481 gcc_assert (rld[i].inc != 0);
1482 }
1483 #endif
1484
1485 /* If we will replace IN and OUT with the reload-reg,
1486 record where they are located so that substitution need
1487 not do a tree walk. */
1488
1489 if (replace_reloads)
1490 {
1491 if (inloc != 0)
1492 {
1493 struct replacement *r = &replacements[n_replacements++];
1494 r->what = i;
1495 r->subreg_loc = in_subreg_loc;
1496 r->where = inloc;
1497 r->mode = inmode;
1498 }
1499 if (outloc != 0 && outloc != inloc)
1500 {
1501 struct replacement *r = &replacements[n_replacements++];
1502 r->what = i;
1503 r->where = outloc;
1504 r->subreg_loc = out_subreg_loc;
1505 r->mode = outmode;
1506 }
1507 }
1508
1509 /* If this reload is just being introduced and it has both
1510 an incoming quantity and an outgoing quantity that are
1511 supposed to be made to match, see if either one of the two
1512 can serve as the place to reload into.
1513
1514 If one of them is acceptable, set rld[i].reg_rtx
1515 to that one. */
1516
1517 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1518 {
1519 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1520 inmode, outmode,
1521 rld[i].rclass, i,
1522 earlyclobber_operand_p (out));
1523
1524 /* If the outgoing register already contains the same value
1525 as the incoming one, we can dispense with loading it.
1526 The easiest way to tell the caller that is to give a phony
1527 value for the incoming operand (same as outgoing one). */
1528 if (rld[i].reg_rtx == out
1529 && (REG_P (in) || CONSTANT_P (in))
1530 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1531 static_reload_reg_p, i, inmode))
1532 rld[i].in = out;
1533 }
1534
1535 /* If this is an input reload and the operand contains a register that
1536 dies in this insn and is used nowhere else, see if it is the right class
1537 to be used for this reload. Use it if so. (This occurs most commonly
1538 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1539 this if it is also an output reload that mentions the register unless
1540 the output is a SUBREG that clobbers an entire register.
1541
1542 Note that the operand might be one of the spill regs, if it is a
1543 pseudo reg and we are in a block where spilling has not taken place.
1544 But if there is no spilling in this block, that is OK.
1545 An explicitly used hard reg cannot be a spill reg. */
1546
1547 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1548 {
1549 rtx note;
1550 int regno;
1551 enum machine_mode rel_mode = inmode;
1552
1553 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1554 rel_mode = outmode;
1555
1556 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1557 if (REG_NOTE_KIND (note) == REG_DEAD
1558 && REG_P (XEXP (note, 0))
1559 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1560 && reg_mentioned_p (XEXP (note, 0), in)
1561 /* Check that a former pseudo is valid; see find_dummy_reload. */
1562 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1563 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1564 ORIGINAL_REGNO (XEXP (note, 0)))
1565 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1566 && ! refers_to_regno_for_reload_p (regno,
1567 end_hard_regno (rel_mode,
1568 regno),
1569 PATTERN (this_insn), inloc)
1570 /* If this is also an output reload, IN cannot be used as
1571 the reload register if it is set in this insn unless IN
1572 is also OUT. */
1573 && (out == 0 || in == out
1574 || ! hard_reg_set_here_p (regno,
1575 end_hard_regno (rel_mode, regno),
1576 PATTERN (this_insn)))
1577 /* ??? Why is this code so different from the previous?
1578 Is there any simple coherent way to describe the two together?
1579 What's going on here. */
1580 && (in != out
1581 || (GET_CODE (in) == SUBREG
1582 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1583 / UNITS_PER_WORD)
1584 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1585 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1586 /* Make sure the operand fits in the reg that dies. */
1587 && (GET_MODE_SIZE (rel_mode)
1588 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1589 && HARD_REGNO_MODE_OK (regno, inmode)
1590 && HARD_REGNO_MODE_OK (regno, outmode))
1591 {
1592 unsigned int offs;
1593 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1594 hard_regno_nregs[regno][outmode]);
1595
1596 for (offs = 0; offs < nregs; offs++)
1597 if (fixed_regs[regno + offs]
1598 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1599 regno + offs))
1600 break;
1601
1602 if (offs == nregs
1603 && (! (refers_to_regno_for_reload_p
1604 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1605 || can_reload_into (in, regno, inmode)))
1606 {
1607 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1608 break;
1609 }
1610 }
1611 }
1612
1613 if (out)
1614 output_reloadnum = i;
1615
1616 return i;
1617 }
1618
1619 /* Record an additional place we must replace a value
1620 for which we have already recorded a reload.
1621 RELOADNUM is the value returned by push_reload
1622 when the reload was recorded.
1623 This is used in insn patterns that use match_dup. */
1624
1625 static void
1626 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1627 {
1628 if (replace_reloads)
1629 {
1630 struct replacement *r = &replacements[n_replacements++];
1631 r->what = reloadnum;
1632 r->where = loc;
1633 r->subreg_loc = 0;
1634 r->mode = mode;
1635 }
1636 }
1637
1638 /* Duplicate any replacement we have recorded to apply at
1639 location ORIG_LOC to also be performed at DUP_LOC.
1640 This is used in insn patterns that use match_dup. */
1641
1642 static void
1643 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1644 {
1645 int i, n = n_replacements;
1646
1647 for (i = 0; i < n; i++)
1648 {
1649 struct replacement *r = &replacements[i];
1650 if (r->where == orig_loc)
1651 push_replacement (dup_loc, r->what, r->mode);
1652 }
1653 }
1654 \f
1655 /* Transfer all replacements that used to be in reload FROM to be in
1656 reload TO. */
1657
1658 void
1659 transfer_replacements (int to, int from)
1660 {
1661 int i;
1662
1663 for (i = 0; i < n_replacements; i++)
1664 if (replacements[i].what == from)
1665 replacements[i].what = to;
1666 }
1667 \f
1668 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1669 or a subpart of it. If we have any replacements registered for IN_RTX,
1670 cancel the reloads that were supposed to load them.
1671 Return nonzero if we canceled any reloads. */
1672 int
1673 remove_address_replacements (rtx in_rtx)
1674 {
1675 int i, j;
1676 char reload_flags[MAX_RELOADS];
1677 int something_changed = 0;
1678
1679 memset (reload_flags, 0, sizeof reload_flags);
1680 for (i = 0, j = 0; i < n_replacements; i++)
1681 {
1682 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1683 reload_flags[replacements[i].what] |= 1;
1684 else
1685 {
1686 replacements[j++] = replacements[i];
1687 reload_flags[replacements[i].what] |= 2;
1688 }
1689 }
1690 /* Note that the following store must be done before the recursive calls. */
1691 n_replacements = j;
1692
1693 for (i = n_reloads - 1; i >= 0; i--)
1694 {
1695 if (reload_flags[i] == 1)
1696 {
1697 deallocate_reload_reg (i);
1698 remove_address_replacements (rld[i].in);
1699 rld[i].in = 0;
1700 something_changed = 1;
1701 }
1702 }
1703 return something_changed;
1704 }
1705 \f
1706 /* If there is only one output reload, and it is not for an earlyclobber
1707 operand, try to combine it with a (logically unrelated) input reload
1708 to reduce the number of reload registers needed.
1709
1710 This is safe if the input reload does not appear in
1711 the value being output-reloaded, because this implies
1712 it is not needed any more once the original insn completes.
1713
1714 If that doesn't work, see we can use any of the registers that
1715 die in this insn as a reload register. We can if it is of the right
1716 class and does not appear in the value being output-reloaded. */
1717
1718 static void
1719 combine_reloads (void)
1720 {
1721 int i, regno;
1722 int output_reload = -1;
1723 int secondary_out = -1;
1724 rtx note;
1725
1726 /* Find the output reload; return unless there is exactly one
1727 and that one is mandatory. */
1728
1729 for (i = 0; i < n_reloads; i++)
1730 if (rld[i].out != 0)
1731 {
1732 if (output_reload >= 0)
1733 return;
1734 output_reload = i;
1735 }
1736
1737 if (output_reload < 0 || rld[output_reload].optional)
1738 return;
1739
1740 /* An input-output reload isn't combinable. */
1741
1742 if (rld[output_reload].in != 0)
1743 return;
1744
1745 /* If this reload is for an earlyclobber operand, we can't do anything. */
1746 if (earlyclobber_operand_p (rld[output_reload].out))
1747 return;
1748
1749 /* If there is a reload for part of the address of this operand, we would
1750 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1751 its life to the point where doing this combine would not lower the
1752 number of spill registers needed. */
1753 for (i = 0; i < n_reloads; i++)
1754 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1755 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1756 && rld[i].opnum == rld[output_reload].opnum)
1757 return;
1758
1759 /* Check each input reload; can we combine it? */
1760
1761 for (i = 0; i < n_reloads; i++)
1762 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1763 /* Life span of this reload must not extend past main insn. */
1764 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1765 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1766 && rld[i].when_needed != RELOAD_OTHER
1767 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1768 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1769 rld[output_reload].outmode))
1770 && rld[i].inc == 0
1771 && rld[i].reg_rtx == 0
1772 #ifdef SECONDARY_MEMORY_NEEDED
1773 /* Don't combine two reloads with different secondary
1774 memory locations. */
1775 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1776 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1777 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1778 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1779 #endif
1780 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1781 ? (rld[i].rclass == rld[output_reload].rclass)
1782 : (reg_class_subset_p (rld[i].rclass,
1783 rld[output_reload].rclass)
1784 || reg_class_subset_p (rld[output_reload].rclass,
1785 rld[i].rclass)))
1786 && (MATCHES (rld[i].in, rld[output_reload].out)
1787 /* Args reversed because the first arg seems to be
1788 the one that we imagine being modified
1789 while the second is the one that might be affected. */
1790 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1791 rld[i].in)
1792 /* However, if the input is a register that appears inside
1793 the output, then we also can't share.
1794 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1795 If the same reload reg is used for both reg 69 and the
1796 result to be stored in memory, then that result
1797 will clobber the address of the memory ref. */
1798 && ! (REG_P (rld[i].in)
1799 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1800 rld[output_reload].out))))
1801 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1802 rld[i].when_needed != RELOAD_FOR_INPUT)
1803 && (reg_class_size[(int) rld[i].rclass]
1804 || targetm.small_register_classes_for_mode_p (VOIDmode))
1805 /* We will allow making things slightly worse by combining an
1806 input and an output, but no worse than that. */
1807 && (rld[i].when_needed == RELOAD_FOR_INPUT
1808 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1809 {
1810 int j;
1811
1812 /* We have found a reload to combine with! */
1813 rld[i].out = rld[output_reload].out;
1814 rld[i].out_reg = rld[output_reload].out_reg;
1815 rld[i].outmode = rld[output_reload].outmode;
1816 /* Mark the old output reload as inoperative. */
1817 rld[output_reload].out = 0;
1818 /* The combined reload is needed for the entire insn. */
1819 rld[i].when_needed = RELOAD_OTHER;
1820 /* If the output reload had a secondary reload, copy it. */
1821 if (rld[output_reload].secondary_out_reload != -1)
1822 {
1823 rld[i].secondary_out_reload
1824 = rld[output_reload].secondary_out_reload;
1825 rld[i].secondary_out_icode
1826 = rld[output_reload].secondary_out_icode;
1827 }
1828
1829 #ifdef SECONDARY_MEMORY_NEEDED
1830 /* Copy any secondary MEM. */
1831 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1832 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1833 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1834 #endif
1835 /* If required, minimize the register class. */
1836 if (reg_class_subset_p (rld[output_reload].rclass,
1837 rld[i].rclass))
1838 rld[i].rclass = rld[output_reload].rclass;
1839
1840 /* Transfer all replacements from the old reload to the combined. */
1841 for (j = 0; j < n_replacements; j++)
1842 if (replacements[j].what == output_reload)
1843 replacements[j].what = i;
1844
1845 return;
1846 }
1847
1848 /* If this insn has only one operand that is modified or written (assumed
1849 to be the first), it must be the one corresponding to this reload. It
1850 is safe to use anything that dies in this insn for that output provided
1851 that it does not occur in the output (we already know it isn't an
1852 earlyclobber. If this is an asm insn, give up. */
1853
1854 if (INSN_CODE (this_insn) == -1)
1855 return;
1856
1857 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1858 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1859 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1860 return;
1861
1862 /* See if some hard register that dies in this insn and is not used in
1863 the output is the right class. Only works if the register we pick
1864 up can fully hold our output reload. */
1865 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1866 if (REG_NOTE_KIND (note) == REG_DEAD
1867 && REG_P (XEXP (note, 0))
1868 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1869 rld[output_reload].out)
1870 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1871 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1872 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1873 regno)
1874 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1875 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1876 /* Ensure that a secondary or tertiary reload for this output
1877 won't want this register. */
1878 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1879 || (!(TEST_HARD_REG_BIT
1880 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1881 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1882 || !(TEST_HARD_REG_BIT
1883 (reg_class_contents[(int) rld[secondary_out].rclass],
1884 regno)))))
1885 && !fixed_regs[regno]
1886 /* Check that a former pseudo is valid; see find_dummy_reload. */
1887 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1888 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1889 ORIGINAL_REGNO (XEXP (note, 0)))
1890 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1891 {
1892 rld[output_reload].reg_rtx
1893 = gen_rtx_REG (rld[output_reload].outmode, regno);
1894 return;
1895 }
1896 }
1897 \f
1898 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1899 See if one of IN and OUT is a register that may be used;
1900 this is desirable since a spill-register won't be needed.
1901 If so, return the register rtx that proves acceptable.
1902
1903 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1904 RCLASS is the register class required for the reload.
1905
1906 If FOR_REAL is >= 0, it is the number of the reload,
1907 and in some cases when it can be discovered that OUT doesn't need
1908 to be computed, clear out rld[FOR_REAL].out.
1909
1910 If FOR_REAL is -1, this should not be done, because this call
1911 is just to see if a register can be found, not to find and install it.
1912
1913 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1914 puts an additional constraint on being able to use IN for OUT since
1915 IN must not appear elsewhere in the insn (it is assumed that IN itself
1916 is safe from the earlyclobber). */
1917
1918 static rtx
1919 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1920 enum machine_mode inmode, enum machine_mode outmode,
1921 reg_class_t rclass, int for_real, int earlyclobber)
1922 {
1923 rtx in = real_in;
1924 rtx out = real_out;
1925 int in_offset = 0;
1926 int out_offset = 0;
1927 rtx value = 0;
1928
1929 /* If operands exceed a word, we can't use either of them
1930 unless they have the same size. */
1931 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1932 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1933 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1934 return 0;
1935
1936 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1937 respectively refers to a hard register. */
1938
1939 /* Find the inside of any subregs. */
1940 while (GET_CODE (out) == SUBREG)
1941 {
1942 if (REG_P (SUBREG_REG (out))
1943 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1944 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1945 GET_MODE (SUBREG_REG (out)),
1946 SUBREG_BYTE (out),
1947 GET_MODE (out));
1948 out = SUBREG_REG (out);
1949 }
1950 while (GET_CODE (in) == SUBREG)
1951 {
1952 if (REG_P (SUBREG_REG (in))
1953 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1954 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1955 GET_MODE (SUBREG_REG (in)),
1956 SUBREG_BYTE (in),
1957 GET_MODE (in));
1958 in = SUBREG_REG (in);
1959 }
1960
1961 /* Narrow down the reg class, the same way push_reload will;
1962 otherwise we might find a dummy now, but push_reload won't. */
1963 {
1964 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1965 if (preferred_class != NO_REGS)
1966 rclass = (enum reg_class) preferred_class;
1967 }
1968
1969 /* See if OUT will do. */
1970 if (REG_P (out)
1971 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1972 {
1973 unsigned int regno = REGNO (out) + out_offset;
1974 unsigned int nwords = hard_regno_nregs[regno][outmode];
1975 rtx saved_rtx;
1976
1977 /* When we consider whether the insn uses OUT,
1978 ignore references within IN. They don't prevent us
1979 from copying IN into OUT, because those refs would
1980 move into the insn that reloads IN.
1981
1982 However, we only ignore IN in its role as this reload.
1983 If the insn uses IN elsewhere and it contains OUT,
1984 that counts. We can't be sure it's the "same" operand
1985 so it might not go through this reload. */
1986 saved_rtx = *inloc;
1987 *inloc = const0_rtx;
1988
1989 if (regno < FIRST_PSEUDO_REGISTER
1990 && HARD_REGNO_MODE_OK (regno, outmode)
1991 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1992 PATTERN (this_insn), outloc))
1993 {
1994 unsigned int i;
1995
1996 for (i = 0; i < nwords; i++)
1997 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1998 regno + i))
1999 break;
2000
2001 if (i == nwords)
2002 {
2003 if (REG_P (real_out))
2004 value = real_out;
2005 else
2006 value = gen_rtx_REG (outmode, regno);
2007 }
2008 }
2009
2010 *inloc = saved_rtx;
2011 }
2012
2013 /* Consider using IN if OUT was not acceptable
2014 or if OUT dies in this insn (like the quotient in a divmod insn).
2015 We can't use IN unless it is dies in this insn,
2016 which means we must know accurately which hard regs are live.
2017 Also, the result can't go in IN if IN is used within OUT,
2018 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2019 if (hard_regs_live_known
2020 && REG_P (in)
2021 && REGNO (in) < FIRST_PSEUDO_REGISTER
2022 && (value == 0
2023 || find_reg_note (this_insn, REG_UNUSED, real_out))
2024 && find_reg_note (this_insn, REG_DEAD, real_in)
2025 && !fixed_regs[REGNO (in)]
2026 && HARD_REGNO_MODE_OK (REGNO (in),
2027 /* The only case where out and real_out might
2028 have different modes is where real_out
2029 is a subreg, and in that case, out
2030 has a real mode. */
2031 (GET_MODE (out) != VOIDmode
2032 ? GET_MODE (out) : outmode))
2033 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2034 /* However only do this if we can be sure that this input
2035 operand doesn't correspond with an uninitialized pseudo.
2036 global can assign some hardreg to it that is the same as
2037 the one assigned to a different, also live pseudo (as it
2038 can ignore the conflict). We must never introduce writes
2039 to such hardregs, as they would clobber the other live
2040 pseudo. See PR 20973. */
2041 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2042 ORIGINAL_REGNO (in))
2043 /* Similarly, only do this if we can be sure that the death
2044 note is still valid. global can assign some hardreg to
2045 the pseudo referenced in the note and simultaneously a
2046 subword of this hardreg to a different, also live pseudo,
2047 because only another subword of the hardreg is actually
2048 used in the insn. This cannot happen if the pseudo has
2049 been assigned exactly one hardreg. See PR 33732. */
2050 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2051 {
2052 unsigned int regno = REGNO (in) + in_offset;
2053 unsigned int nwords = hard_regno_nregs[regno][inmode];
2054
2055 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2056 && ! hard_reg_set_here_p (regno, regno + nwords,
2057 PATTERN (this_insn))
2058 && (! earlyclobber
2059 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2060 PATTERN (this_insn), inloc)))
2061 {
2062 unsigned int i;
2063
2064 for (i = 0; i < nwords; i++)
2065 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2066 regno + i))
2067 break;
2068
2069 if (i == nwords)
2070 {
2071 /* If we were going to use OUT as the reload reg
2072 and changed our mind, it means OUT is a dummy that
2073 dies here. So don't bother copying value to it. */
2074 if (for_real >= 0 && value == real_out)
2075 rld[for_real].out = 0;
2076 if (REG_P (real_in))
2077 value = real_in;
2078 else
2079 value = gen_rtx_REG (inmode, regno);
2080 }
2081 }
2082 }
2083
2084 return value;
2085 }
2086 \f
2087 /* This page contains subroutines used mainly for determining
2088 whether the IN or an OUT of a reload can serve as the
2089 reload register. */
2090
2091 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2092
2093 int
2094 earlyclobber_operand_p (rtx x)
2095 {
2096 int i;
2097
2098 for (i = 0; i < n_earlyclobbers; i++)
2099 if (reload_earlyclobbers[i] == x)
2100 return 1;
2101
2102 return 0;
2103 }
2104
2105 /* Return 1 if expression X alters a hard reg in the range
2106 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2107 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2108 X should be the body of an instruction. */
2109
2110 static int
2111 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2112 {
2113 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2114 {
2115 rtx op0 = SET_DEST (x);
2116
2117 while (GET_CODE (op0) == SUBREG)
2118 op0 = SUBREG_REG (op0);
2119 if (REG_P (op0))
2120 {
2121 unsigned int r = REGNO (op0);
2122
2123 /* See if this reg overlaps range under consideration. */
2124 if (r < end_regno
2125 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2126 return 1;
2127 }
2128 }
2129 else if (GET_CODE (x) == PARALLEL)
2130 {
2131 int i = XVECLEN (x, 0) - 1;
2132
2133 for (; i >= 0; i--)
2134 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2135 return 1;
2136 }
2137
2138 return 0;
2139 }
2140
2141 /* Return 1 if ADDR is a valid memory address for mode MODE
2142 in address space AS, and check that each pseudo reg has the
2143 proper kind of hard reg. */
2144
2145 int
2146 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2147 rtx addr, addr_space_t as)
2148 {
2149 #ifdef GO_IF_LEGITIMATE_ADDRESS
2150 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2151 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2152 return 0;
2153
2154 win:
2155 return 1;
2156 #else
2157 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2158 #endif
2159 }
2160 \f
2161 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2162 if they are the same hard reg, and has special hacks for
2163 autoincrement and autodecrement.
2164 This is specifically intended for find_reloads to use
2165 in determining whether two operands match.
2166 X is the operand whose number is the lower of the two.
2167
2168 The value is 2 if Y contains a pre-increment that matches
2169 a non-incrementing address in X. */
2170
2171 /* ??? To be completely correct, we should arrange to pass
2172 for X the output operand and for Y the input operand.
2173 For now, we assume that the output operand has the lower number
2174 because that is natural in (SET output (... input ...)). */
2175
2176 int
2177 operands_match_p (rtx x, rtx y)
2178 {
2179 int i;
2180 RTX_CODE code = GET_CODE (x);
2181 const char *fmt;
2182 int success_2;
2183
2184 if (x == y)
2185 return 1;
2186 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2187 && (REG_P (y) || (GET_CODE (y) == SUBREG
2188 && REG_P (SUBREG_REG (y)))))
2189 {
2190 int j;
2191
2192 if (code == SUBREG)
2193 {
2194 i = REGNO (SUBREG_REG (x));
2195 if (i >= FIRST_PSEUDO_REGISTER)
2196 goto slow;
2197 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2198 GET_MODE (SUBREG_REG (x)),
2199 SUBREG_BYTE (x),
2200 GET_MODE (x));
2201 }
2202 else
2203 i = REGNO (x);
2204
2205 if (GET_CODE (y) == SUBREG)
2206 {
2207 j = REGNO (SUBREG_REG (y));
2208 if (j >= FIRST_PSEUDO_REGISTER)
2209 goto slow;
2210 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2211 GET_MODE (SUBREG_REG (y)),
2212 SUBREG_BYTE (y),
2213 GET_MODE (y));
2214 }
2215 else
2216 j = REGNO (y);
2217
2218 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2219 multiple hard register group of scalar integer registers, so that
2220 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2221 register. */
2222 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2223 && SCALAR_INT_MODE_P (GET_MODE (x))
2224 && i < FIRST_PSEUDO_REGISTER)
2225 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2226 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2227 && SCALAR_INT_MODE_P (GET_MODE (y))
2228 && j < FIRST_PSEUDO_REGISTER)
2229 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2230
2231 return i == j;
2232 }
2233 /* If two operands must match, because they are really a single
2234 operand of an assembler insn, then two postincrements are invalid
2235 because the assembler insn would increment only once.
2236 On the other hand, a postincrement matches ordinary indexing
2237 if the postincrement is the output operand. */
2238 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2239 return operands_match_p (XEXP (x, 0), y);
2240 /* Two preincrements are invalid
2241 because the assembler insn would increment only once.
2242 On the other hand, a preincrement matches ordinary indexing
2243 if the preincrement is the input operand.
2244 In this case, return 2, since some callers need to do special
2245 things when this happens. */
2246 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2247 || GET_CODE (y) == PRE_MODIFY)
2248 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2249
2250 slow:
2251
2252 /* Now we have disposed of all the cases in which different rtx codes
2253 can match. */
2254 if (code != GET_CODE (y))
2255 return 0;
2256
2257 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2258 if (GET_MODE (x) != GET_MODE (y))
2259 return 0;
2260
2261 /* MEMs refering to different address space are not equivalent. */
2262 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2263 return 0;
2264
2265 switch (code)
2266 {
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_FIXED:
2270 return 0;
2271
2272 case LABEL_REF:
2273 return XEXP (x, 0) == XEXP (y, 0);
2274 case SYMBOL_REF:
2275 return XSTR (x, 0) == XSTR (y, 0);
2276
2277 default:
2278 break;
2279 }
2280
2281 /* Compare the elements. If any pair of corresponding elements
2282 fail to match, return 0 for the whole things. */
2283
2284 success_2 = 0;
2285 fmt = GET_RTX_FORMAT (code);
2286 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2287 {
2288 int val, j;
2289 switch (fmt[i])
2290 {
2291 case 'w':
2292 if (XWINT (x, i) != XWINT (y, i))
2293 return 0;
2294 break;
2295
2296 case 'i':
2297 if (XINT (x, i) != XINT (y, i))
2298 return 0;
2299 break;
2300
2301 case 'e':
2302 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2303 if (val == 0)
2304 return 0;
2305 /* If any subexpression returns 2,
2306 we should return 2 if we are successful. */
2307 if (val == 2)
2308 success_2 = 1;
2309 break;
2310
2311 case '0':
2312 break;
2313
2314 case 'E':
2315 if (XVECLEN (x, i) != XVECLEN (y, i))
2316 return 0;
2317 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2318 {
2319 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2320 if (val == 0)
2321 return 0;
2322 if (val == 2)
2323 success_2 = 1;
2324 }
2325 break;
2326
2327 /* It is believed that rtx's at this level will never
2328 contain anything but integers and other rtx's,
2329 except for within LABEL_REFs and SYMBOL_REFs. */
2330 default:
2331 gcc_unreachable ();
2332 }
2333 }
2334 return 1 + success_2;
2335 }
2336 \f
2337 /* Describe the range of registers or memory referenced by X.
2338 If X is a register, set REG_FLAG and put the first register
2339 number into START and the last plus one into END.
2340 If X is a memory reference, put a base address into BASE
2341 and a range of integer offsets into START and END.
2342 If X is pushing on the stack, we can assume it causes no trouble,
2343 so we set the SAFE field. */
2344
2345 static struct decomposition
2346 decompose (rtx x)
2347 {
2348 struct decomposition val;
2349 int all_const = 0;
2350
2351 memset (&val, 0, sizeof (val));
2352
2353 switch (GET_CODE (x))
2354 {
2355 case MEM:
2356 {
2357 rtx base = NULL_RTX, offset = 0;
2358 rtx addr = XEXP (x, 0);
2359
2360 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2361 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2362 {
2363 val.base = XEXP (addr, 0);
2364 val.start = -GET_MODE_SIZE (GET_MODE (x));
2365 val.end = GET_MODE_SIZE (GET_MODE (x));
2366 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2367 return val;
2368 }
2369
2370 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2371 {
2372 if (GET_CODE (XEXP (addr, 1)) == PLUS
2373 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2374 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2375 {
2376 val.base = XEXP (addr, 0);
2377 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2378 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2379 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2380 return val;
2381 }
2382 }
2383
2384 if (GET_CODE (addr) == CONST)
2385 {
2386 addr = XEXP (addr, 0);
2387 all_const = 1;
2388 }
2389 if (GET_CODE (addr) == PLUS)
2390 {
2391 if (CONSTANT_P (XEXP (addr, 0)))
2392 {
2393 base = XEXP (addr, 1);
2394 offset = XEXP (addr, 0);
2395 }
2396 else if (CONSTANT_P (XEXP (addr, 1)))
2397 {
2398 base = XEXP (addr, 0);
2399 offset = XEXP (addr, 1);
2400 }
2401 }
2402
2403 if (offset == 0)
2404 {
2405 base = addr;
2406 offset = const0_rtx;
2407 }
2408 if (GET_CODE (offset) == CONST)
2409 offset = XEXP (offset, 0);
2410 if (GET_CODE (offset) == PLUS)
2411 {
2412 if (CONST_INT_P (XEXP (offset, 0)))
2413 {
2414 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2415 offset = XEXP (offset, 0);
2416 }
2417 else if (CONST_INT_P (XEXP (offset, 1)))
2418 {
2419 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2420 offset = XEXP (offset, 1);
2421 }
2422 else
2423 {
2424 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2425 offset = const0_rtx;
2426 }
2427 }
2428 else if (!CONST_INT_P (offset))
2429 {
2430 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2431 offset = const0_rtx;
2432 }
2433
2434 if (all_const && GET_CODE (base) == PLUS)
2435 base = gen_rtx_CONST (GET_MODE (base), base);
2436
2437 gcc_assert (CONST_INT_P (offset));
2438
2439 val.start = INTVAL (offset);
2440 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2441 val.base = base;
2442 }
2443 break;
2444
2445 case REG:
2446 val.reg_flag = 1;
2447 val.start = true_regnum (x);
2448 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2449 {
2450 /* A pseudo with no hard reg. */
2451 val.start = REGNO (x);
2452 val.end = val.start + 1;
2453 }
2454 else
2455 /* A hard reg. */
2456 val.end = end_hard_regno (GET_MODE (x), val.start);
2457 break;
2458
2459 case SUBREG:
2460 if (!REG_P (SUBREG_REG (x)))
2461 /* This could be more precise, but it's good enough. */
2462 return decompose (SUBREG_REG (x));
2463 val.reg_flag = 1;
2464 val.start = true_regnum (x);
2465 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2466 return decompose (SUBREG_REG (x));
2467 else
2468 /* A hard reg. */
2469 val.end = val.start + subreg_nregs (x);
2470 break;
2471
2472 case SCRATCH:
2473 /* This hasn't been assigned yet, so it can't conflict yet. */
2474 val.safe = 1;
2475 break;
2476
2477 default:
2478 gcc_assert (CONSTANT_P (x));
2479 val.safe = 1;
2480 break;
2481 }
2482 return val;
2483 }
2484
2485 /* Return 1 if altering Y will not modify the value of X.
2486 Y is also described by YDATA, which should be decompose (Y). */
2487
2488 static int
2489 immune_p (rtx x, rtx y, struct decomposition ydata)
2490 {
2491 struct decomposition xdata;
2492
2493 if (ydata.reg_flag)
2494 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2495 if (ydata.safe)
2496 return 1;
2497
2498 gcc_assert (MEM_P (y));
2499 /* If Y is memory and X is not, Y can't affect X. */
2500 if (!MEM_P (x))
2501 return 1;
2502
2503 xdata = decompose (x);
2504
2505 if (! rtx_equal_p (xdata.base, ydata.base))
2506 {
2507 /* If bases are distinct symbolic constants, there is no overlap. */
2508 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2509 return 1;
2510 /* Constants and stack slots never overlap. */
2511 if (CONSTANT_P (xdata.base)
2512 && (ydata.base == frame_pointer_rtx
2513 || ydata.base == hard_frame_pointer_rtx
2514 || ydata.base == stack_pointer_rtx))
2515 return 1;
2516 if (CONSTANT_P (ydata.base)
2517 && (xdata.base == frame_pointer_rtx
2518 || xdata.base == hard_frame_pointer_rtx
2519 || xdata.base == stack_pointer_rtx))
2520 return 1;
2521 /* If either base is variable, we don't know anything. */
2522 return 0;
2523 }
2524
2525 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2526 }
2527
2528 /* Similar, but calls decompose. */
2529
2530 int
2531 safe_from_earlyclobber (rtx op, rtx clobber)
2532 {
2533 struct decomposition early_data;
2534
2535 early_data = decompose (clobber);
2536 return immune_p (op, clobber, early_data);
2537 }
2538 \f
2539 /* Main entry point of this file: search the body of INSN
2540 for values that need reloading and record them with push_reload.
2541 REPLACE nonzero means record also where the values occur
2542 so that subst_reloads can be used.
2543
2544 IND_LEVELS says how many levels of indirection are supported by this
2545 machine; a value of zero means that a memory reference is not a valid
2546 memory address.
2547
2548 LIVE_KNOWN says we have valid information about which hard
2549 regs are live at each point in the program; this is true when
2550 we are called from global_alloc but false when stupid register
2551 allocation has been done.
2552
2553 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2554 which is nonnegative if the reg has been commandeered for reloading into.
2555 It is copied into STATIC_RELOAD_REG_P and referenced from there
2556 by various subroutines.
2557
2558 Return TRUE if some operands need to be changed, because of swapping
2559 commutative operands, reg_equiv_address substitution, or whatever. */
2560
2561 int
2562 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2563 short *reload_reg_p)
2564 {
2565 int insn_code_number;
2566 int i, j;
2567 int noperands;
2568 /* These start out as the constraints for the insn
2569 and they are chewed up as we consider alternatives. */
2570 const char *constraints[MAX_RECOG_OPERANDS];
2571 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2572 a register. */
2573 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2574 char pref_or_nothing[MAX_RECOG_OPERANDS];
2575 /* Nonzero for a MEM operand whose entire address needs a reload.
2576 May be -1 to indicate the entire address may or may not need a reload. */
2577 int address_reloaded[MAX_RECOG_OPERANDS];
2578 /* Nonzero for an address operand that needs to be completely reloaded.
2579 May be -1 to indicate the entire operand may or may not need a reload. */
2580 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2581 /* Value of enum reload_type to use for operand. */
2582 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2583 /* Value of enum reload_type to use within address of operand. */
2584 enum reload_type address_type[MAX_RECOG_OPERANDS];
2585 /* Save the usage of each operand. */
2586 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2587 int no_input_reloads = 0, no_output_reloads = 0;
2588 int n_alternatives;
2589 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2590 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2591 char this_alternative_win[MAX_RECOG_OPERANDS];
2592 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2593 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2594 int this_alternative_matches[MAX_RECOG_OPERANDS];
2595 int swapped;
2596 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2597 int this_alternative_number;
2598 int goal_alternative_number = 0;
2599 int operand_reloadnum[MAX_RECOG_OPERANDS];
2600 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2601 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2602 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2603 char goal_alternative_win[MAX_RECOG_OPERANDS];
2604 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2605 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2606 int goal_alternative_swapped;
2607 int best;
2608 int commutative;
2609 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2610 rtx substed_operand[MAX_RECOG_OPERANDS];
2611 rtx body = PATTERN (insn);
2612 rtx set = single_set (insn);
2613 int goal_earlyclobber = 0, this_earlyclobber;
2614 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2615 int retval = 0;
2616
2617 this_insn = insn;
2618 n_reloads = 0;
2619 n_replacements = 0;
2620 n_earlyclobbers = 0;
2621 replace_reloads = replace;
2622 hard_regs_live_known = live_known;
2623 static_reload_reg_p = reload_reg_p;
2624
2625 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2626 neither are insns that SET cc0. Insns that use CC0 are not allowed
2627 to have any input reloads. */
2628 if (JUMP_P (insn) || CALL_P (insn))
2629 no_output_reloads = 1;
2630
2631 #ifdef HAVE_cc0
2632 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2633 no_input_reloads = 1;
2634 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2635 no_output_reloads = 1;
2636 #endif
2637
2638 #ifdef SECONDARY_MEMORY_NEEDED
2639 /* The eliminated forms of any secondary memory locations are per-insn, so
2640 clear them out here. */
2641
2642 if (secondary_memlocs_elim_used)
2643 {
2644 memset (secondary_memlocs_elim, 0,
2645 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2646 secondary_memlocs_elim_used = 0;
2647 }
2648 #endif
2649
2650 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2651 is cheap to move between them. If it is not, there may not be an insn
2652 to do the copy, so we may need a reload. */
2653 if (GET_CODE (body) == SET
2654 && REG_P (SET_DEST (body))
2655 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2656 && REG_P (SET_SRC (body))
2657 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2658 && register_move_cost (GET_MODE (SET_SRC (body)),
2659 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2660 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2661 return 0;
2662
2663 extract_insn (insn);
2664
2665 noperands = reload_n_operands = recog_data.n_operands;
2666 n_alternatives = recog_data.n_alternatives;
2667
2668 /* Just return "no reloads" if insn has no operands with constraints. */
2669 if (noperands == 0 || n_alternatives == 0)
2670 return 0;
2671
2672 insn_code_number = INSN_CODE (insn);
2673 this_insn_is_asm = insn_code_number < 0;
2674
2675 memcpy (operand_mode, recog_data.operand_mode,
2676 noperands * sizeof (enum machine_mode));
2677 memcpy (constraints, recog_data.constraints,
2678 noperands * sizeof (const char *));
2679
2680 commutative = -1;
2681
2682 /* If we will need to know, later, whether some pair of operands
2683 are the same, we must compare them now and save the result.
2684 Reloading the base and index registers will clobber them
2685 and afterward they will fail to match. */
2686
2687 for (i = 0; i < noperands; i++)
2688 {
2689 const char *p;
2690 int c;
2691 char *end;
2692
2693 substed_operand[i] = recog_data.operand[i];
2694 p = constraints[i];
2695
2696 modified[i] = RELOAD_READ;
2697
2698 /* Scan this operand's constraint to see if it is an output operand,
2699 an in-out operand, is commutative, or should match another. */
2700
2701 while ((c = *p))
2702 {
2703 p += CONSTRAINT_LEN (c, p);
2704 switch (c)
2705 {
2706 case '=':
2707 modified[i] = RELOAD_WRITE;
2708 break;
2709 case '+':
2710 modified[i] = RELOAD_READ_WRITE;
2711 break;
2712 case '%':
2713 {
2714 /* The last operand should not be marked commutative. */
2715 gcc_assert (i != noperands - 1);
2716
2717 /* We currently only support one commutative pair of
2718 operands. Some existing asm code currently uses more
2719 than one pair. Previously, that would usually work,
2720 but sometimes it would crash the compiler. We
2721 continue supporting that case as well as we can by
2722 silently ignoring all but the first pair. In the
2723 future we may handle it correctly. */
2724 if (commutative < 0)
2725 commutative = i;
2726 else
2727 gcc_assert (this_insn_is_asm);
2728 }
2729 break;
2730 /* Use of ISDIGIT is tempting here, but it may get expensive because
2731 of locale support we don't want. */
2732 case '0': case '1': case '2': case '3': case '4':
2733 case '5': case '6': case '7': case '8': case '9':
2734 {
2735 c = strtoul (p - 1, &end, 10);
2736 p = end;
2737
2738 operands_match[c][i]
2739 = operands_match_p (recog_data.operand[c],
2740 recog_data.operand[i]);
2741
2742 /* An operand may not match itself. */
2743 gcc_assert (c != i);
2744
2745 /* If C can be commuted with C+1, and C might need to match I,
2746 then C+1 might also need to match I. */
2747 if (commutative >= 0)
2748 {
2749 if (c == commutative || c == commutative + 1)
2750 {
2751 int other = c + (c == commutative ? 1 : -1);
2752 operands_match[other][i]
2753 = operands_match_p (recog_data.operand[other],
2754 recog_data.operand[i]);
2755 }
2756 if (i == commutative || i == commutative + 1)
2757 {
2758 int other = i + (i == commutative ? 1 : -1);
2759 operands_match[c][other]
2760 = operands_match_p (recog_data.operand[c],
2761 recog_data.operand[other]);
2762 }
2763 /* Note that C is supposed to be less than I.
2764 No need to consider altering both C and I because in
2765 that case we would alter one into the other. */
2766 }
2767 }
2768 }
2769 }
2770 }
2771
2772 /* Examine each operand that is a memory reference or memory address
2773 and reload parts of the addresses into index registers.
2774 Also here any references to pseudo regs that didn't get hard regs
2775 but are equivalent to constants get replaced in the insn itself
2776 with those constants. Nobody will ever see them again.
2777
2778 Finally, set up the preferred classes of each operand. */
2779
2780 for (i = 0; i < noperands; i++)
2781 {
2782 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2783
2784 address_reloaded[i] = 0;
2785 address_operand_reloaded[i] = 0;
2786 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2787 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2788 : RELOAD_OTHER);
2789 address_type[i]
2790 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2791 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2792 : RELOAD_OTHER);
2793
2794 if (*constraints[i] == 0)
2795 /* Ignore things like match_operator operands. */
2796 ;
2797 else if (constraints[i][0] == 'p'
2798 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2799 {
2800 address_operand_reloaded[i]
2801 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2802 recog_data.operand[i],
2803 recog_data.operand_loc[i],
2804 i, operand_type[i], ind_levels, insn);
2805
2806 /* If we now have a simple operand where we used to have a
2807 PLUS or MULT, re-recognize and try again. */
2808 if ((OBJECT_P (*recog_data.operand_loc[i])
2809 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2810 && (GET_CODE (recog_data.operand[i]) == MULT
2811 || GET_CODE (recog_data.operand[i]) == PLUS))
2812 {
2813 INSN_CODE (insn) = -1;
2814 retval = find_reloads (insn, replace, ind_levels, live_known,
2815 reload_reg_p);
2816 return retval;
2817 }
2818
2819 recog_data.operand[i] = *recog_data.operand_loc[i];
2820 substed_operand[i] = recog_data.operand[i];
2821
2822 /* Address operands are reloaded in their existing mode,
2823 no matter what is specified in the machine description. */
2824 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2825 }
2826 else if (code == MEM)
2827 {
2828 address_reloaded[i]
2829 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2830 recog_data.operand_loc[i],
2831 XEXP (recog_data.operand[i], 0),
2832 &XEXP (recog_data.operand[i], 0),
2833 i, address_type[i], ind_levels, insn);
2834 recog_data.operand[i] = *recog_data.operand_loc[i];
2835 substed_operand[i] = recog_data.operand[i];
2836 }
2837 else if (code == SUBREG)
2838 {
2839 rtx reg = SUBREG_REG (recog_data.operand[i]);
2840 rtx op
2841 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2842 ind_levels,
2843 set != 0
2844 && &SET_DEST (set) == recog_data.operand_loc[i],
2845 insn,
2846 &address_reloaded[i]);
2847
2848 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2849 that didn't get a hard register, emit a USE with a REG_EQUAL
2850 note in front so that we might inherit a previous, possibly
2851 wider reload. */
2852
2853 if (replace
2854 && MEM_P (op)
2855 && REG_P (reg)
2856 && (GET_MODE_SIZE (GET_MODE (reg))
2857 >= GET_MODE_SIZE (GET_MODE (op)))
2858 && reg_equiv_constant[REGNO (reg)] == 0)
2859 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2860 insn),
2861 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2862
2863 substed_operand[i] = recog_data.operand[i] = op;
2864 }
2865 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2866 /* We can get a PLUS as an "operand" as a result of register
2867 elimination. See eliminate_regs and gen_reload. We handle
2868 a unary operator by reloading the operand. */
2869 substed_operand[i] = recog_data.operand[i]
2870 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2871 ind_levels, 0, insn,
2872 &address_reloaded[i]);
2873 else if (code == REG)
2874 {
2875 /* This is equivalent to calling find_reloads_toplev.
2876 The code is duplicated for speed.
2877 When we find a pseudo always equivalent to a constant,
2878 we replace it by the constant. We must be sure, however,
2879 that we don't try to replace it in the insn in which it
2880 is being set. */
2881 int regno = REGNO (recog_data.operand[i]);
2882 if (reg_equiv_constant[regno] != 0
2883 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2884 {
2885 /* Record the existing mode so that the check if constants are
2886 allowed will work when operand_mode isn't specified. */
2887
2888 if (operand_mode[i] == VOIDmode)
2889 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2890
2891 substed_operand[i] = recog_data.operand[i]
2892 = reg_equiv_constant[regno];
2893 }
2894 if (reg_equiv_memory_loc[regno] != 0
2895 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2896 /* We need not give a valid is_set_dest argument since the case
2897 of a constant equivalence was checked above. */
2898 substed_operand[i] = recog_data.operand[i]
2899 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2900 ind_levels, 0, insn,
2901 &address_reloaded[i]);
2902 }
2903 /* If the operand is still a register (we didn't replace it with an
2904 equivalent), get the preferred class to reload it into. */
2905 code = GET_CODE (recog_data.operand[i]);
2906 preferred_class[i]
2907 = ((code == REG && REGNO (recog_data.operand[i])
2908 >= FIRST_PSEUDO_REGISTER)
2909 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2910 : NO_REGS);
2911 pref_or_nothing[i]
2912 = (code == REG
2913 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2914 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2915 }
2916
2917 /* If this is simply a copy from operand 1 to operand 0, merge the
2918 preferred classes for the operands. */
2919 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2920 && recog_data.operand[1] == SET_SRC (set))
2921 {
2922 preferred_class[0] = preferred_class[1]
2923 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2924 pref_or_nothing[0] |= pref_or_nothing[1];
2925 pref_or_nothing[1] |= pref_or_nothing[0];
2926 }
2927
2928 /* Now see what we need for pseudo-regs that didn't get hard regs
2929 or got the wrong kind of hard reg. For this, we must consider
2930 all the operands together against the register constraints. */
2931
2932 best = MAX_RECOG_OPERANDS * 2 + 600;
2933
2934 swapped = 0;
2935 goal_alternative_swapped = 0;
2936 try_swapped:
2937
2938 /* The constraints are made of several alternatives.
2939 Each operand's constraint looks like foo,bar,... with commas
2940 separating the alternatives. The first alternatives for all
2941 operands go together, the second alternatives go together, etc.
2942
2943 First loop over alternatives. */
2944
2945 for (this_alternative_number = 0;
2946 this_alternative_number < n_alternatives;
2947 this_alternative_number++)
2948 {
2949 /* Loop over operands for one constraint alternative. */
2950 /* LOSERS counts those that don't fit this alternative
2951 and would require loading. */
2952 int losers = 0;
2953 /* BAD is set to 1 if it some operand can't fit this alternative
2954 even after reloading. */
2955 int bad = 0;
2956 /* REJECT is a count of how undesirable this alternative says it is
2957 if any reloading is required. If the alternative matches exactly
2958 then REJECT is ignored, but otherwise it gets this much
2959 counted against it in addition to the reloading needed. Each
2960 ? counts three times here since we want the disparaging caused by
2961 a bad register class to only count 1/3 as much. */
2962 int reject = 0;
2963
2964 if (!recog_data.alternative_enabled_p[this_alternative_number])
2965 {
2966 int i;
2967
2968 for (i = 0; i < recog_data.n_operands; i++)
2969 constraints[i] = skip_alternative (constraints[i]);
2970
2971 continue;
2972 }
2973
2974 this_earlyclobber = 0;
2975
2976 for (i = 0; i < noperands; i++)
2977 {
2978 const char *p = constraints[i];
2979 char *end;
2980 int len;
2981 int win = 0;
2982 int did_match = 0;
2983 /* 0 => this operand can be reloaded somehow for this alternative. */
2984 int badop = 1;
2985 /* 0 => this operand can be reloaded if the alternative allows regs. */
2986 int winreg = 0;
2987 int c;
2988 int m;
2989 rtx operand = recog_data.operand[i];
2990 int offset = 0;
2991 /* Nonzero means this is a MEM that must be reloaded into a reg
2992 regardless of what the constraint says. */
2993 int force_reload = 0;
2994 int offmemok = 0;
2995 /* Nonzero if a constant forced into memory would be OK for this
2996 operand. */
2997 int constmemok = 0;
2998 int earlyclobber = 0;
2999
3000 /* If the predicate accepts a unary operator, it means that
3001 we need to reload the operand, but do not do this for
3002 match_operator and friends. */
3003 if (UNARY_P (operand) && *p != 0)
3004 operand = XEXP (operand, 0);
3005
3006 /* If the operand is a SUBREG, extract
3007 the REG or MEM (or maybe even a constant) within.
3008 (Constants can occur as a result of reg_equiv_constant.) */
3009
3010 while (GET_CODE (operand) == SUBREG)
3011 {
3012 /* Offset only matters when operand is a REG and
3013 it is a hard reg. This is because it is passed
3014 to reg_fits_class_p if it is a REG and all pseudos
3015 return 0 from that function. */
3016 if (REG_P (SUBREG_REG (operand))
3017 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3018 {
3019 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3020 GET_MODE (SUBREG_REG (operand)),
3021 SUBREG_BYTE (operand),
3022 GET_MODE (operand)) < 0)
3023 force_reload = 1;
3024 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3025 GET_MODE (SUBREG_REG (operand)),
3026 SUBREG_BYTE (operand),
3027 GET_MODE (operand));
3028 }
3029 operand = SUBREG_REG (operand);
3030 /* Force reload if this is a constant or PLUS or if there may
3031 be a problem accessing OPERAND in the outer mode. */
3032 if (CONSTANT_P (operand)
3033 || GET_CODE (operand) == PLUS
3034 /* We must force a reload of paradoxical SUBREGs
3035 of a MEM because the alignment of the inner value
3036 may not be enough to do the outer reference. On
3037 big-endian machines, it may also reference outside
3038 the object.
3039
3040 On machines that extend byte operations and we have a
3041 SUBREG where both the inner and outer modes are no wider
3042 than a word and the inner mode is narrower, is integral,
3043 and gets extended when loaded from memory, combine.c has
3044 made assumptions about the behavior of the machine in such
3045 register access. If the data is, in fact, in memory we
3046 must always load using the size assumed to be in the
3047 register and let the insn do the different-sized
3048 accesses.
3049
3050 This is doubly true if WORD_REGISTER_OPERATIONS. In
3051 this case eliminate_regs has left non-paradoxical
3052 subregs for push_reload to see. Make sure it does
3053 by forcing the reload.
3054
3055 ??? When is it right at this stage to have a subreg
3056 of a mem that is _not_ to be handled specially? IMO
3057 those should have been reduced to just a mem. */
3058 || ((MEM_P (operand)
3059 || (REG_P (operand)
3060 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3061 #ifndef WORD_REGISTER_OPERATIONS
3062 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3063 < BIGGEST_ALIGNMENT)
3064 && (GET_MODE_SIZE (operand_mode[i])
3065 > GET_MODE_SIZE (GET_MODE (operand))))
3066 || BYTES_BIG_ENDIAN
3067 #ifdef LOAD_EXTEND_OP
3068 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3069 && (GET_MODE_SIZE (GET_MODE (operand))
3070 <= UNITS_PER_WORD)
3071 && (GET_MODE_SIZE (operand_mode[i])
3072 > GET_MODE_SIZE (GET_MODE (operand)))
3073 && INTEGRAL_MODE_P (GET_MODE (operand))
3074 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3075 #endif
3076 )
3077 #endif
3078 )
3079 )
3080 force_reload = 1;
3081 }
3082
3083 this_alternative[i] = NO_REGS;
3084 this_alternative_win[i] = 0;
3085 this_alternative_match_win[i] = 0;
3086 this_alternative_offmemok[i] = 0;
3087 this_alternative_earlyclobber[i] = 0;
3088 this_alternative_matches[i] = -1;
3089
3090 /* An empty constraint or empty alternative
3091 allows anything which matched the pattern. */
3092 if (*p == 0 || *p == ',')
3093 win = 1, badop = 0;
3094
3095 /* Scan this alternative's specs for this operand;
3096 set WIN if the operand fits any letter in this alternative.
3097 Otherwise, clear BADOP if this operand could
3098 fit some letter after reloads,
3099 or set WINREG if this operand could fit after reloads
3100 provided the constraint allows some registers. */
3101
3102 do
3103 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3104 {
3105 case '\0':
3106 len = 0;
3107 break;
3108 case ',':
3109 c = '\0';
3110 break;
3111
3112 case '=': case '+': case '*':
3113 break;
3114
3115 case '%':
3116 /* We only support one commutative marker, the first
3117 one. We already set commutative above. */
3118 break;
3119
3120 case '?':
3121 reject += 6;
3122 break;
3123
3124 case '!':
3125 reject = 600;
3126 break;
3127
3128 case '#':
3129 /* Ignore rest of this alternative as far as
3130 reloading is concerned. */
3131 do
3132 p++;
3133 while (*p && *p != ',');
3134 len = 0;
3135 break;
3136
3137 case '0': case '1': case '2': case '3': case '4':
3138 case '5': case '6': case '7': case '8': case '9':
3139 m = strtoul (p, &end, 10);
3140 p = end;
3141 len = 0;
3142
3143 this_alternative_matches[i] = m;
3144 /* We are supposed to match a previous operand.
3145 If we do, we win if that one did.
3146 If we do not, count both of the operands as losers.
3147 (This is too conservative, since most of the time
3148 only a single reload insn will be needed to make
3149 the two operands win. As a result, this alternative
3150 may be rejected when it is actually desirable.) */
3151 if ((swapped && (m != commutative || i != commutative + 1))
3152 /* If we are matching as if two operands were swapped,
3153 also pretend that operands_match had been computed
3154 with swapped.
3155 But if I is the second of those and C is the first,
3156 don't exchange them, because operands_match is valid
3157 only on one side of its diagonal. */
3158 ? (operands_match
3159 [(m == commutative || m == commutative + 1)
3160 ? 2 * commutative + 1 - m : m]
3161 [(i == commutative || i == commutative + 1)
3162 ? 2 * commutative + 1 - i : i])
3163 : operands_match[m][i])
3164 {
3165 /* If we are matching a non-offsettable address where an
3166 offsettable address was expected, then we must reject
3167 this combination, because we can't reload it. */
3168 if (this_alternative_offmemok[m]
3169 && MEM_P (recog_data.operand[m])
3170 && this_alternative[m] == NO_REGS
3171 && ! this_alternative_win[m])
3172 bad = 1;
3173
3174 did_match = this_alternative_win[m];
3175 }
3176 else
3177 {
3178 /* Operands don't match. */
3179 rtx value;
3180 int loc1, loc2;
3181 /* Retroactively mark the operand we had to match
3182 as a loser, if it wasn't already. */
3183 if (this_alternative_win[m])
3184 losers++;
3185 this_alternative_win[m] = 0;
3186 if (this_alternative[m] == NO_REGS)
3187 bad = 1;
3188 /* But count the pair only once in the total badness of
3189 this alternative, if the pair can be a dummy reload.
3190 The pointers in operand_loc are not swapped; swap
3191 them by hand if necessary. */
3192 if (swapped && i == commutative)
3193 loc1 = commutative + 1;
3194 else if (swapped && i == commutative + 1)
3195 loc1 = commutative;
3196 else
3197 loc1 = i;
3198 if (swapped && m == commutative)
3199 loc2 = commutative + 1;
3200 else if (swapped && m == commutative + 1)
3201 loc2 = commutative;
3202 else
3203 loc2 = m;
3204 value
3205 = find_dummy_reload (recog_data.operand[i],
3206 recog_data.operand[m],
3207 recog_data.operand_loc[loc1],
3208 recog_data.operand_loc[loc2],
3209 operand_mode[i], operand_mode[m],
3210 this_alternative[m], -1,
3211 this_alternative_earlyclobber[m]);
3212
3213 if (value != 0)
3214 losers--;
3215 }
3216 /* This can be fixed with reloads if the operand
3217 we are supposed to match can be fixed with reloads. */
3218 badop = 0;
3219 this_alternative[i] = this_alternative[m];
3220
3221 /* If we have to reload this operand and some previous
3222 operand also had to match the same thing as this
3223 operand, we don't know how to do that. So reject this
3224 alternative. */
3225 if (! did_match || force_reload)
3226 for (j = 0; j < i; j++)
3227 if (this_alternative_matches[j]
3228 == this_alternative_matches[i])
3229 badop = 1;
3230 break;
3231
3232 case 'p':
3233 /* All necessary reloads for an address_operand
3234 were handled in find_reloads_address. */
3235 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3236 SCRATCH);
3237 win = 1;
3238 badop = 0;
3239 break;
3240
3241 case TARGET_MEM_CONSTRAINT:
3242 if (force_reload)
3243 break;
3244 if (MEM_P (operand)
3245 || (REG_P (operand)
3246 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3247 && reg_renumber[REGNO (operand)] < 0))
3248 win = 1;
3249 if (CONST_POOL_OK_P (operand))
3250 badop = 0;
3251 constmemok = 1;
3252 break;
3253
3254 case '<':
3255 if (MEM_P (operand)
3256 && ! address_reloaded[i]
3257 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3258 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3259 win = 1;
3260 break;
3261
3262 case '>':
3263 if (MEM_P (operand)
3264 && ! address_reloaded[i]
3265 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3266 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3267 win = 1;
3268 break;
3269
3270 /* Memory operand whose address is not offsettable. */
3271 case 'V':
3272 if (force_reload)
3273 break;
3274 if (MEM_P (operand)
3275 && ! (ind_levels ? offsettable_memref_p (operand)
3276 : offsettable_nonstrict_memref_p (operand))
3277 /* Certain mem addresses will become offsettable
3278 after they themselves are reloaded. This is important;
3279 we don't want our own handling of unoffsettables
3280 to override the handling of reg_equiv_address. */
3281 && !(REG_P (XEXP (operand, 0))
3282 && (ind_levels == 0
3283 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3284 win = 1;
3285 break;
3286
3287 /* Memory operand whose address is offsettable. */
3288 case 'o':
3289 if (force_reload)
3290 break;
3291 if ((MEM_P (operand)
3292 /* If IND_LEVELS, find_reloads_address won't reload a
3293 pseudo that didn't get a hard reg, so we have to
3294 reject that case. */
3295 && ((ind_levels ? offsettable_memref_p (operand)
3296 : offsettable_nonstrict_memref_p (operand))
3297 /* A reloaded address is offsettable because it is now
3298 just a simple register indirect. */
3299 || address_reloaded[i] == 1))
3300 || (REG_P (operand)
3301 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3302 && reg_renumber[REGNO (operand)] < 0
3303 /* If reg_equiv_address is nonzero, we will be
3304 loading it into a register; hence it will be
3305 offsettable, but we cannot say that reg_equiv_mem
3306 is offsettable without checking. */
3307 && ((reg_equiv_mem[REGNO (operand)] != 0
3308 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3309 || (reg_equiv_address[REGNO (operand)] != 0))))
3310 win = 1;
3311 if (CONST_POOL_OK_P (operand)
3312 || MEM_P (operand))
3313 badop = 0;
3314 constmemok = 1;
3315 offmemok = 1;
3316 break;
3317
3318 case '&':
3319 /* Output operand that is stored before the need for the
3320 input operands (and their index registers) is over. */
3321 earlyclobber = 1, this_earlyclobber = 1;
3322 break;
3323
3324 case 'E':
3325 case 'F':
3326 if (GET_CODE (operand) == CONST_DOUBLE
3327 || (GET_CODE (operand) == CONST_VECTOR
3328 && (GET_MODE_CLASS (GET_MODE (operand))
3329 == MODE_VECTOR_FLOAT)))
3330 win = 1;
3331 break;
3332
3333 case 'G':
3334 case 'H':
3335 if (GET_CODE (operand) == CONST_DOUBLE
3336 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3337 win = 1;
3338 break;
3339
3340 case 's':
3341 if (CONST_INT_P (operand)
3342 || (GET_CODE (operand) == CONST_DOUBLE
3343 && GET_MODE (operand) == VOIDmode))
3344 break;
3345 case 'i':
3346 if (CONSTANT_P (operand)
3347 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3348 win = 1;
3349 break;
3350
3351 case 'n':
3352 if (CONST_INT_P (operand)
3353 || (GET_CODE (operand) == CONST_DOUBLE
3354 && GET_MODE (operand) == VOIDmode))
3355 win = 1;
3356 break;
3357
3358 case 'I':
3359 case 'J':
3360 case 'K':
3361 case 'L':
3362 case 'M':
3363 case 'N':
3364 case 'O':
3365 case 'P':
3366 if (CONST_INT_P (operand)
3367 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3368 win = 1;
3369 break;
3370
3371 case 'X':
3372 force_reload = 0;
3373 win = 1;
3374 break;
3375
3376 case 'g':
3377 if (! force_reload
3378 /* A PLUS is never a valid operand, but reload can make
3379 it from a register when eliminating registers. */
3380 && GET_CODE (operand) != PLUS
3381 /* A SCRATCH is not a valid operand. */
3382 && GET_CODE (operand) != SCRATCH
3383 && (! CONSTANT_P (operand)
3384 || ! flag_pic
3385 || LEGITIMATE_PIC_OPERAND_P (operand))
3386 && (GENERAL_REGS == ALL_REGS
3387 || !REG_P (operand)
3388 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3389 && reg_renumber[REGNO (operand)] < 0)))
3390 win = 1;
3391 /* Drop through into 'r' case. */
3392
3393 case 'r':
3394 this_alternative[i]
3395 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3396 goto reg;
3397
3398 default:
3399 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3400 {
3401 #ifdef EXTRA_CONSTRAINT_STR
3402 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3403 {
3404 if (force_reload)
3405 break;
3406 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3407 win = 1;
3408 /* If the address was already reloaded,
3409 we win as well. */
3410 else if (MEM_P (operand)
3411 && address_reloaded[i] == 1)
3412 win = 1;
3413 /* Likewise if the address will be reloaded because
3414 reg_equiv_address is nonzero. For reg_equiv_mem
3415 we have to check. */
3416 else if (REG_P (operand)
3417 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3418 && reg_renumber[REGNO (operand)] < 0
3419 && ((reg_equiv_mem[REGNO (operand)] != 0
3420 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3421 || (reg_equiv_address[REGNO (operand)] != 0)))
3422 win = 1;
3423
3424 /* If we didn't already win, we can reload
3425 constants via force_const_mem, and other
3426 MEMs by reloading the address like for 'o'. */
3427 if (CONST_POOL_OK_P (operand)
3428 || MEM_P (operand))
3429 badop = 0;
3430 constmemok = 1;
3431 offmemok = 1;
3432 break;
3433 }
3434 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3435 {
3436 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3437 win = 1;
3438
3439 /* If we didn't already win, we can reload
3440 the address into a base register. */
3441 this_alternative[i] = base_reg_class (VOIDmode,
3442 ADDRESS,
3443 SCRATCH);
3444 badop = 0;
3445 break;
3446 }
3447
3448 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3449 win = 1;
3450 #endif
3451 break;
3452 }
3453
3454 this_alternative[i]
3455 = (reg_class_subunion
3456 [this_alternative[i]]
3457 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3458 reg:
3459 if (GET_MODE (operand) == BLKmode)
3460 break;
3461 winreg = 1;
3462 if (REG_P (operand)
3463 && reg_fits_class_p (operand, this_alternative[i],
3464 offset, GET_MODE (recog_data.operand[i])))
3465 win = 1;
3466 break;
3467 }
3468 while ((p += len), c);
3469
3470 constraints[i] = p;
3471
3472 /* If this operand could be handled with a reg,
3473 and some reg is allowed, then this operand can be handled. */
3474 if (winreg && this_alternative[i] != NO_REGS
3475 && (win || !class_only_fixed_regs[this_alternative[i]]))
3476 badop = 0;
3477
3478 /* Record which operands fit this alternative. */
3479 this_alternative_earlyclobber[i] = earlyclobber;
3480 if (win && ! force_reload)
3481 this_alternative_win[i] = 1;
3482 else if (did_match && ! force_reload)
3483 this_alternative_match_win[i] = 1;
3484 else
3485 {
3486 int const_to_mem = 0;
3487
3488 this_alternative_offmemok[i] = offmemok;
3489 losers++;
3490 if (badop)
3491 bad = 1;
3492 /* Alternative loses if it has no regs for a reg operand. */
3493 if (REG_P (operand)
3494 && this_alternative[i] == NO_REGS
3495 && this_alternative_matches[i] < 0)
3496 bad = 1;
3497
3498 /* If this is a constant that is reloaded into the desired
3499 class by copying it to memory first, count that as another
3500 reload. This is consistent with other code and is
3501 required to avoid choosing another alternative when
3502 the constant is moved into memory by this function on
3503 an early reload pass. Note that the test here is
3504 precisely the same as in the code below that calls
3505 force_const_mem. */
3506 if (CONST_POOL_OK_P (operand)
3507 && ((targetm.preferred_reload_class (operand,
3508 this_alternative[i])
3509 == NO_REGS)
3510 || no_input_reloads)
3511 && operand_mode[i] != VOIDmode)
3512 {
3513 const_to_mem = 1;
3514 if (this_alternative[i] != NO_REGS)
3515 losers++;
3516 }
3517
3518 /* Alternative loses if it requires a type of reload not
3519 permitted for this insn. We can always reload SCRATCH
3520 and objects with a REG_UNUSED note. */
3521 if (GET_CODE (operand) != SCRATCH
3522 && modified[i] != RELOAD_READ && no_output_reloads
3523 && ! find_reg_note (insn, REG_UNUSED, operand))
3524 bad = 1;
3525 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3526 && ! const_to_mem)
3527 bad = 1;
3528
3529 /* If we can't reload this value at all, reject this
3530 alternative. Note that we could also lose due to
3531 LIMIT_RELOAD_CLASS, but we don't check that
3532 here. */
3533
3534 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3535 {
3536 if (targetm.preferred_reload_class (operand, this_alternative[i])
3537 == NO_REGS)
3538 reject = 600;
3539
3540 if (operand_type[i] == RELOAD_FOR_OUTPUT
3541 && (targetm.preferred_output_reload_class (operand,
3542 this_alternative[i])
3543 == NO_REGS))
3544 reject = 600;
3545 }
3546
3547 /* We prefer to reload pseudos over reloading other things,
3548 since such reloads may be able to be eliminated later.
3549 If we are reloading a SCRATCH, we won't be generating any
3550 insns, just using a register, so it is also preferred.
3551 So bump REJECT in other cases. Don't do this in the
3552 case where we are forcing a constant into memory and
3553 it will then win since we don't want to have a different
3554 alternative match then. */
3555 if (! (REG_P (operand)
3556 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3557 && GET_CODE (operand) != SCRATCH
3558 && ! (const_to_mem && constmemok))
3559 reject += 2;
3560
3561 /* Input reloads can be inherited more often than output
3562 reloads can be removed, so penalize output reloads. */
3563 if (operand_type[i] != RELOAD_FOR_INPUT
3564 && GET_CODE (operand) != SCRATCH)
3565 reject++;
3566 }
3567
3568 /* If this operand is a pseudo register that didn't get a hard
3569 reg and this alternative accepts some register, see if the
3570 class that we want is a subset of the preferred class for this
3571 register. If not, but it intersects that class, use the
3572 preferred class instead. If it does not intersect the preferred
3573 class, show that usage of this alternative should be discouraged;
3574 it will be discouraged more still if the register is `preferred
3575 or nothing'. We do this because it increases the chance of
3576 reusing our spill register in a later insn and avoiding a pair
3577 of memory stores and loads.
3578
3579 Don't bother with this if this alternative will accept this
3580 operand.
3581
3582 Don't do this for a multiword operand, since it is only a
3583 small win and has the risk of requiring more spill registers,
3584 which could cause a large loss.
3585
3586 Don't do this if the preferred class has only one register
3587 because we might otherwise exhaust the class. */
3588
3589 if (! win && ! did_match
3590 && this_alternative[i] != NO_REGS
3591 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3592 && reg_class_size [(int) preferred_class[i]] > 0
3593 && ! small_register_class_p (preferred_class[i]))
3594 {
3595 if (! reg_class_subset_p (this_alternative[i],
3596 preferred_class[i]))
3597 {
3598 /* Since we don't have a way of forming the intersection,
3599 we just do something special if the preferred class
3600 is a subset of the class we have; that's the most
3601 common case anyway. */
3602 if (reg_class_subset_p (preferred_class[i],
3603 this_alternative[i]))
3604 this_alternative[i] = preferred_class[i];
3605 else
3606 reject += (2 + 2 * pref_or_nothing[i]);
3607 }
3608 }
3609 }
3610
3611 /* Now see if any output operands that are marked "earlyclobber"
3612 in this alternative conflict with any input operands
3613 or any memory addresses. */
3614
3615 for (i = 0; i < noperands; i++)
3616 if (this_alternative_earlyclobber[i]
3617 && (this_alternative_win[i] || this_alternative_match_win[i]))
3618 {
3619 struct decomposition early_data;
3620
3621 early_data = decompose (recog_data.operand[i]);
3622
3623 gcc_assert (modified[i] != RELOAD_READ);
3624
3625 if (this_alternative[i] == NO_REGS)
3626 {
3627 this_alternative_earlyclobber[i] = 0;
3628 gcc_assert (this_insn_is_asm);
3629 error_for_asm (this_insn,
3630 "%<&%> constraint used with no register class");
3631 }
3632
3633 for (j = 0; j < noperands; j++)
3634 /* Is this an input operand or a memory ref? */
3635 if ((MEM_P (recog_data.operand[j])
3636 || modified[j] != RELOAD_WRITE)
3637 && j != i
3638 /* Ignore things like match_operator operands. */
3639 && !recog_data.is_operator[j]
3640 /* Don't count an input operand that is constrained to match
3641 the early clobber operand. */
3642 && ! (this_alternative_matches[j] == i
3643 && rtx_equal_p (recog_data.operand[i],
3644 recog_data.operand[j]))
3645 /* Is it altered by storing the earlyclobber operand? */
3646 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3647 early_data))
3648 {
3649 /* If the output is in a non-empty few-regs class,
3650 it's costly to reload it, so reload the input instead. */
3651 if (small_register_class_p (this_alternative[i])
3652 && (REG_P (recog_data.operand[j])
3653 || GET_CODE (recog_data.operand[j]) == SUBREG))
3654 {
3655 losers++;
3656 this_alternative_win[j] = 0;
3657 this_alternative_match_win[j] = 0;
3658 }
3659 else
3660 break;
3661 }
3662 /* If an earlyclobber operand conflicts with something,
3663 it must be reloaded, so request this and count the cost. */
3664 if (j != noperands)
3665 {
3666 losers++;
3667 this_alternative_win[i] = 0;
3668 this_alternative_match_win[j] = 0;
3669 for (j = 0; j < noperands; j++)
3670 if (this_alternative_matches[j] == i
3671 && this_alternative_match_win[j])
3672 {
3673 this_alternative_win[j] = 0;
3674 this_alternative_match_win[j] = 0;
3675 losers++;
3676 }
3677 }
3678 }
3679
3680 /* If one alternative accepts all the operands, no reload required,
3681 choose that alternative; don't consider the remaining ones. */
3682 if (losers == 0)
3683 {
3684 /* Unswap these so that they are never swapped at `finish'. */
3685 if (commutative >= 0)
3686 {
3687 recog_data.operand[commutative] = substed_operand[commutative];
3688 recog_data.operand[commutative + 1]
3689 = substed_operand[commutative + 1];
3690 }
3691 for (i = 0; i < noperands; i++)
3692 {
3693 goal_alternative_win[i] = this_alternative_win[i];
3694 goal_alternative_match_win[i] = this_alternative_match_win[i];
3695 goal_alternative[i] = this_alternative[i];
3696 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3697 goal_alternative_matches[i] = this_alternative_matches[i];
3698 goal_alternative_earlyclobber[i]
3699 = this_alternative_earlyclobber[i];
3700 }
3701 goal_alternative_number = this_alternative_number;
3702 goal_alternative_swapped = swapped;
3703 goal_earlyclobber = this_earlyclobber;
3704 goto finish;
3705 }
3706
3707 /* REJECT, set by the ! and ? constraint characters and when a register
3708 would be reloaded into a non-preferred class, discourages the use of
3709 this alternative for a reload goal. REJECT is incremented by six
3710 for each ? and two for each non-preferred class. */
3711 losers = losers * 6 + reject;
3712
3713 /* If this alternative can be made to work by reloading,
3714 and it needs less reloading than the others checked so far,
3715 record it as the chosen goal for reloading. */
3716 if (! bad)
3717 {
3718 if (best > losers)
3719 {
3720 for (i = 0; i < noperands; i++)
3721 {
3722 goal_alternative[i] = this_alternative[i];
3723 goal_alternative_win[i] = this_alternative_win[i];
3724 goal_alternative_match_win[i]
3725 = this_alternative_match_win[i];
3726 goal_alternative_offmemok[i]
3727 = this_alternative_offmemok[i];
3728 goal_alternative_matches[i] = this_alternative_matches[i];
3729 goal_alternative_earlyclobber[i]
3730 = this_alternative_earlyclobber[i];
3731 }
3732 goal_alternative_swapped = swapped;
3733 best = losers;
3734 goal_alternative_number = this_alternative_number;
3735 goal_earlyclobber = this_earlyclobber;
3736 }
3737 }
3738 }
3739
3740 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3741 then we need to try each alternative twice,
3742 the second time matching those two operands
3743 as if we had exchanged them.
3744 To do this, really exchange them in operands.
3745
3746 If we have just tried the alternatives the second time,
3747 return operands to normal and drop through. */
3748
3749 if (commutative >= 0)
3750 {
3751 swapped = !swapped;
3752 if (swapped)
3753 {
3754 enum reg_class tclass;
3755 int t;
3756
3757 recog_data.operand[commutative] = substed_operand[commutative + 1];
3758 recog_data.operand[commutative + 1] = substed_operand[commutative];
3759 /* Swap the duplicates too. */
3760 for (i = 0; i < recog_data.n_dups; i++)
3761 if (recog_data.dup_num[i] == commutative
3762 || recog_data.dup_num[i] == commutative + 1)
3763 *recog_data.dup_loc[i]
3764 = recog_data.operand[(int) recog_data.dup_num[i]];
3765
3766 tclass = preferred_class[commutative];
3767 preferred_class[commutative] = preferred_class[commutative + 1];
3768 preferred_class[commutative + 1] = tclass;
3769
3770 t = pref_or_nothing[commutative];
3771 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3772 pref_or_nothing[commutative + 1] = t;
3773
3774 t = address_reloaded[commutative];
3775 address_reloaded[commutative] = address_reloaded[commutative + 1];
3776 address_reloaded[commutative + 1] = t;
3777
3778 memcpy (constraints, recog_data.constraints,
3779 noperands * sizeof (const char *));
3780 goto try_swapped;
3781 }
3782 else
3783 {
3784 recog_data.operand[commutative] = substed_operand[commutative];
3785 recog_data.operand[commutative + 1]
3786 = substed_operand[commutative + 1];
3787 /* Unswap the duplicates too. */
3788 for (i = 0; i < recog_data.n_dups; i++)
3789 if (recog_data.dup_num[i] == commutative
3790 || recog_data.dup_num[i] == commutative + 1)
3791 *recog_data.dup_loc[i]
3792 = recog_data.operand[(int) recog_data.dup_num[i]];
3793 }
3794 }
3795
3796 /* The operands don't meet the constraints.
3797 goal_alternative describes the alternative
3798 that we could reach by reloading the fewest operands.
3799 Reload so as to fit it. */
3800
3801 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3802 {
3803 /* No alternative works with reloads?? */
3804 if (insn_code_number >= 0)
3805 fatal_insn ("unable to generate reloads for:", insn);
3806 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3807 /* Avoid further trouble with this insn. */
3808 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3809 n_reloads = 0;
3810 return 0;
3811 }
3812
3813 /* Jump to `finish' from above if all operands are valid already.
3814 In that case, goal_alternative_win is all 1. */
3815 finish:
3816
3817 /* Right now, for any pair of operands I and J that are required to match,
3818 with I < J,
3819 goal_alternative_matches[J] is I.
3820 Set up goal_alternative_matched as the inverse function:
3821 goal_alternative_matched[I] = J. */
3822
3823 for (i = 0; i < noperands; i++)
3824 goal_alternative_matched[i] = -1;
3825
3826 for (i = 0; i < noperands; i++)
3827 if (! goal_alternative_win[i]
3828 && goal_alternative_matches[i] >= 0)
3829 goal_alternative_matched[goal_alternative_matches[i]] = i;
3830
3831 for (i = 0; i < noperands; i++)
3832 goal_alternative_win[i] |= goal_alternative_match_win[i];
3833
3834 /* If the best alternative is with operands 1 and 2 swapped,
3835 consider them swapped before reporting the reloads. Update the
3836 operand numbers of any reloads already pushed. */
3837
3838 if (goal_alternative_swapped)
3839 {
3840 rtx tem;
3841
3842 tem = substed_operand[commutative];
3843 substed_operand[commutative] = substed_operand[commutative + 1];
3844 substed_operand[commutative + 1] = tem;
3845 tem = recog_data.operand[commutative];
3846 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3847 recog_data.operand[commutative + 1] = tem;
3848 tem = *recog_data.operand_loc[commutative];
3849 *recog_data.operand_loc[commutative]
3850 = *recog_data.operand_loc[commutative + 1];
3851 *recog_data.operand_loc[commutative + 1] = tem;
3852
3853 for (i = 0; i < n_reloads; i++)
3854 {
3855 if (rld[i].opnum == commutative)
3856 rld[i].opnum = commutative + 1;
3857 else if (rld[i].opnum == commutative + 1)
3858 rld[i].opnum = commutative;
3859 }
3860 }
3861
3862 for (i = 0; i < noperands; i++)
3863 {
3864 operand_reloadnum[i] = -1;
3865
3866 /* If this is an earlyclobber operand, we need to widen the scope.
3867 The reload must remain valid from the start of the insn being
3868 reloaded until after the operand is stored into its destination.
3869 We approximate this with RELOAD_OTHER even though we know that we
3870 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3871
3872 One special case that is worth checking is when we have an
3873 output that is earlyclobber but isn't used past the insn (typically
3874 a SCRATCH). In this case, we only need have the reload live
3875 through the insn itself, but not for any of our input or output
3876 reloads.
3877 But we must not accidentally narrow the scope of an existing
3878 RELOAD_OTHER reload - leave these alone.
3879
3880 In any case, anything needed to address this operand can remain
3881 however they were previously categorized. */
3882
3883 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3884 operand_type[i]
3885 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3886 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3887 }
3888
3889 /* Any constants that aren't allowed and can't be reloaded
3890 into registers are here changed into memory references. */
3891 for (i = 0; i < noperands; i++)
3892 if (! goal_alternative_win[i])
3893 {
3894 rtx op = recog_data.operand[i];
3895 rtx subreg = NULL_RTX;
3896 rtx plus = NULL_RTX;
3897 enum machine_mode mode = operand_mode[i];
3898
3899 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3900 push_reload so we have to let them pass here. */
3901 if (GET_CODE (op) == SUBREG)
3902 {
3903 subreg = op;
3904 op = SUBREG_REG (op);
3905 mode = GET_MODE (op);
3906 }
3907
3908 if (GET_CODE (op) == PLUS)
3909 {
3910 plus = op;
3911 op = XEXP (op, 1);
3912 }
3913
3914 if (CONST_POOL_OK_P (op)
3915 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3916 == NO_REGS)
3917 || no_input_reloads)
3918 && mode != VOIDmode)
3919 {
3920 int this_address_reloaded;
3921 rtx tem = force_const_mem (mode, op);
3922
3923 /* If we stripped a SUBREG or a PLUS above add it back. */
3924 if (plus != NULL_RTX)
3925 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3926
3927 if (subreg != NULL_RTX)
3928 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3929
3930 this_address_reloaded = 0;
3931 substed_operand[i] = recog_data.operand[i]
3932 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3933 0, insn, &this_address_reloaded);
3934
3935 /* If the alternative accepts constant pool refs directly
3936 there will be no reload needed at all. */
3937 if (plus == NULL_RTX
3938 && subreg == NULL_RTX
3939 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3940 ? substed_operand[i]
3941 : NULL,
3942 recog_data.constraints[i],
3943 goal_alternative_number))
3944 goal_alternative_win[i] = 1;
3945 }
3946 }
3947
3948 /* Record the values of the earlyclobber operands for the caller. */
3949 if (goal_earlyclobber)
3950 for (i = 0; i < noperands; i++)
3951 if (goal_alternative_earlyclobber[i])
3952 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3953
3954 /* Now record reloads for all the operands that need them. */
3955 for (i = 0; i < noperands; i++)
3956 if (! goal_alternative_win[i])
3957 {
3958 /* Operands that match previous ones have already been handled. */
3959 if (goal_alternative_matches[i] >= 0)
3960 ;
3961 /* Handle an operand with a nonoffsettable address
3962 appearing where an offsettable address will do
3963 by reloading the address into a base register.
3964
3965 ??? We can also do this when the operand is a register and
3966 reg_equiv_mem is not offsettable, but this is a bit tricky,
3967 so we don't bother with it. It may not be worth doing. */
3968 else if (goal_alternative_matched[i] == -1
3969 && goal_alternative_offmemok[i]
3970 && MEM_P (recog_data.operand[i]))
3971 {
3972 /* If the address to be reloaded is a VOIDmode constant,
3973 use the default address mode as mode of the reload register,
3974 as would have been done by find_reloads_address. */
3975 enum machine_mode address_mode;
3976 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3977 if (address_mode == VOIDmode)
3978 {
3979 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3980 address_mode = targetm.addr_space.address_mode (as);
3981 }
3982
3983 operand_reloadnum[i]
3984 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3985 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3986 base_reg_class (VOIDmode, MEM, SCRATCH),
3987 address_mode,
3988 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3989 rld[operand_reloadnum[i]].inc
3990 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3991
3992 /* If this operand is an output, we will have made any
3993 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3994 now we are treating part of the operand as an input, so
3995 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3996
3997 if (modified[i] == RELOAD_WRITE)
3998 {
3999 for (j = 0; j < n_reloads; j++)
4000 {
4001 if (rld[j].opnum == i)
4002 {
4003 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4004 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4005 else if (rld[j].when_needed
4006 == RELOAD_FOR_OUTADDR_ADDRESS)
4007 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4008 }
4009 }
4010 }
4011 }
4012 else if (goal_alternative_matched[i] == -1)
4013 {
4014 operand_reloadnum[i]
4015 = push_reload ((modified[i] != RELOAD_WRITE
4016 ? recog_data.operand[i] : 0),
4017 (modified[i] != RELOAD_READ
4018 ? recog_data.operand[i] : 0),
4019 (modified[i] != RELOAD_WRITE
4020 ? recog_data.operand_loc[i] : 0),
4021 (modified[i] != RELOAD_READ
4022 ? recog_data.operand_loc[i] : 0),
4023 (enum reg_class) goal_alternative[i],
4024 (modified[i] == RELOAD_WRITE
4025 ? VOIDmode : operand_mode[i]),
4026 (modified[i] == RELOAD_READ
4027 ? VOIDmode : operand_mode[i]),
4028 (insn_code_number < 0 ? 0
4029 : insn_data[insn_code_number].operand[i].strict_low),
4030 0, i, operand_type[i]);
4031 }
4032 /* In a matching pair of operands, one must be input only
4033 and the other must be output only.
4034 Pass the input operand as IN and the other as OUT. */
4035 else if (modified[i] == RELOAD_READ
4036 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4037 {
4038 operand_reloadnum[i]
4039 = push_reload (recog_data.operand[i],
4040 recog_data.operand[goal_alternative_matched[i]],
4041 recog_data.operand_loc[i],
4042 recog_data.operand_loc[goal_alternative_matched[i]],
4043 (enum reg_class) goal_alternative[i],
4044 operand_mode[i],
4045 operand_mode[goal_alternative_matched[i]],
4046 0, 0, i, RELOAD_OTHER);
4047 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4048 }
4049 else if (modified[i] == RELOAD_WRITE
4050 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4051 {
4052 operand_reloadnum[goal_alternative_matched[i]]
4053 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4054 recog_data.operand[i],
4055 recog_data.operand_loc[goal_alternative_matched[i]],
4056 recog_data.operand_loc[i],
4057 (enum reg_class) goal_alternative[i],
4058 operand_mode[goal_alternative_matched[i]],
4059 operand_mode[i],
4060 0, 0, i, RELOAD_OTHER);
4061 operand_reloadnum[i] = output_reloadnum;
4062 }
4063 else
4064 {
4065 gcc_assert (insn_code_number < 0);
4066 error_for_asm (insn, "inconsistent operand constraints "
4067 "in an %<asm%>");
4068 /* Avoid further trouble with this insn. */
4069 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4070 n_reloads = 0;
4071 return 0;
4072 }
4073 }
4074 else if (goal_alternative_matched[i] < 0
4075 && goal_alternative_matches[i] < 0
4076 && address_operand_reloaded[i] != 1
4077 && optimize)
4078 {
4079 /* For each non-matching operand that's a MEM or a pseudo-register
4080 that didn't get a hard register, make an optional reload.
4081 This may get done even if the insn needs no reloads otherwise. */
4082
4083 rtx operand = recog_data.operand[i];
4084
4085 while (GET_CODE (operand) == SUBREG)
4086 operand = SUBREG_REG (operand);
4087 if ((MEM_P (operand)
4088 || (REG_P (operand)
4089 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4090 /* If this is only for an output, the optional reload would not
4091 actually cause us to use a register now, just note that
4092 something is stored here. */
4093 && (goal_alternative[i] != NO_REGS
4094 || modified[i] == RELOAD_WRITE)
4095 && ! no_input_reloads
4096 /* An optional output reload might allow to delete INSN later.
4097 We mustn't make in-out reloads on insns that are not permitted
4098 output reloads.
4099 If this is an asm, we can't delete it; we must not even call
4100 push_reload for an optional output reload in this case,
4101 because we can't be sure that the constraint allows a register,
4102 and push_reload verifies the constraints for asms. */
4103 && (modified[i] == RELOAD_READ
4104 || (! no_output_reloads && ! this_insn_is_asm)))
4105 operand_reloadnum[i]
4106 = push_reload ((modified[i] != RELOAD_WRITE
4107 ? recog_data.operand[i] : 0),
4108 (modified[i] != RELOAD_READ
4109 ? recog_data.operand[i] : 0),
4110 (modified[i] != RELOAD_WRITE
4111 ? recog_data.operand_loc[i] : 0),
4112 (modified[i] != RELOAD_READ
4113 ? recog_data.operand_loc[i] : 0),
4114 (enum reg_class) goal_alternative[i],
4115 (modified[i] == RELOAD_WRITE
4116 ? VOIDmode : operand_mode[i]),
4117 (modified[i] == RELOAD_READ
4118 ? VOIDmode : operand_mode[i]),
4119 (insn_code_number < 0 ? 0
4120 : insn_data[insn_code_number].operand[i].strict_low),
4121 1, i, operand_type[i]);
4122 /* If a memory reference remains (either as a MEM or a pseudo that
4123 did not get a hard register), yet we can't make an optional
4124 reload, check if this is actually a pseudo register reference;
4125 we then need to emit a USE and/or a CLOBBER so that reload
4126 inheritance will do the right thing. */
4127 else if (replace
4128 && (MEM_P (operand)
4129 || (REG_P (operand)
4130 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4131 && reg_renumber [REGNO (operand)] < 0)))
4132 {
4133 operand = *recog_data.operand_loc[i];
4134
4135 while (GET_CODE (operand) == SUBREG)
4136 operand = SUBREG_REG (operand);
4137 if (REG_P (operand))
4138 {
4139 if (modified[i] != RELOAD_WRITE)
4140 /* We mark the USE with QImode so that we recognize
4141 it as one that can be safely deleted at the end
4142 of reload. */
4143 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4144 insn), QImode);
4145 if (modified[i] != RELOAD_READ)
4146 emit_insn_after (gen_clobber (operand), insn);
4147 }
4148 }
4149 }
4150 else if (goal_alternative_matches[i] >= 0
4151 && goal_alternative_win[goal_alternative_matches[i]]
4152 && modified[i] == RELOAD_READ
4153 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4154 && ! no_input_reloads && ! no_output_reloads
4155 && optimize)
4156 {
4157 /* Similarly, make an optional reload for a pair of matching
4158 objects that are in MEM or a pseudo that didn't get a hard reg. */
4159
4160 rtx operand = recog_data.operand[i];
4161
4162 while (GET_CODE (operand) == SUBREG)
4163 operand = SUBREG_REG (operand);
4164 if ((MEM_P (operand)
4165 || (REG_P (operand)
4166 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4167 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4168 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4169 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4170 recog_data.operand[i],
4171 recog_data.operand_loc[goal_alternative_matches[i]],
4172 recog_data.operand_loc[i],
4173 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4174 operand_mode[goal_alternative_matches[i]],
4175 operand_mode[i],
4176 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4177 }
4178
4179 /* Perform whatever substitutions on the operands we are supposed
4180 to make due to commutativity or replacement of registers
4181 with equivalent constants or memory slots. */
4182
4183 for (i = 0; i < noperands; i++)
4184 {
4185 /* We only do this on the last pass through reload, because it is
4186 possible for some data (like reg_equiv_address) to be changed during
4187 later passes. Moreover, we lose the opportunity to get a useful
4188 reload_{in,out}_reg when we do these replacements. */
4189
4190 if (replace)
4191 {
4192 rtx substitution = substed_operand[i];
4193
4194 *recog_data.operand_loc[i] = substitution;
4195
4196 /* If we're replacing an operand with a LABEL_REF, we need to
4197 make sure that there's a REG_LABEL_OPERAND note attached to
4198 this instruction. */
4199 if (GET_CODE (substitution) == LABEL_REF
4200 && !find_reg_note (insn, REG_LABEL_OPERAND,
4201 XEXP (substitution, 0))
4202 /* For a JUMP_P, if it was a branch target it must have
4203 already been recorded as such. */
4204 && (!JUMP_P (insn)
4205 || !label_is_jump_target_p (XEXP (substitution, 0),
4206 insn)))
4207 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4208 }
4209 else
4210 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4211 }
4212
4213 /* If this insn pattern contains any MATCH_DUP's, make sure that
4214 they will be substituted if the operands they match are substituted.
4215 Also do now any substitutions we already did on the operands.
4216
4217 Don't do this if we aren't making replacements because we might be
4218 propagating things allocated by frame pointer elimination into places
4219 it doesn't expect. */
4220
4221 if (insn_code_number >= 0 && replace)
4222 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4223 {
4224 int opno = recog_data.dup_num[i];
4225 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4226 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4227 }
4228
4229 #if 0
4230 /* This loses because reloading of prior insns can invalidate the equivalence
4231 (or at least find_equiv_reg isn't smart enough to find it any more),
4232 causing this insn to need more reload regs than it needed before.
4233 It may be too late to make the reload regs available.
4234 Now this optimization is done safely in choose_reload_regs. */
4235
4236 /* For each reload of a reg into some other class of reg,
4237 search for an existing equivalent reg (same value now) in the right class.
4238 We can use it as long as we don't need to change its contents. */
4239 for (i = 0; i < n_reloads; i++)
4240 if (rld[i].reg_rtx == 0
4241 && rld[i].in != 0
4242 && REG_P (rld[i].in)
4243 && rld[i].out == 0)
4244 {
4245 rld[i].reg_rtx
4246 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4247 static_reload_reg_p, 0, rld[i].inmode);
4248 /* Prevent generation of insn to load the value
4249 because the one we found already has the value. */
4250 if (rld[i].reg_rtx)
4251 rld[i].in = rld[i].reg_rtx;
4252 }
4253 #endif
4254
4255 /* If we detected error and replaced asm instruction by USE, forget about the
4256 reloads. */
4257 if (GET_CODE (PATTERN (insn)) == USE
4258 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4259 n_reloads = 0;
4260
4261 /* Perhaps an output reload can be combined with another
4262 to reduce needs by one. */
4263 if (!goal_earlyclobber)
4264 combine_reloads ();
4265
4266 /* If we have a pair of reloads for parts of an address, they are reloading
4267 the same object, the operands themselves were not reloaded, and they
4268 are for two operands that are supposed to match, merge the reloads and
4269 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4270
4271 for (i = 0; i < n_reloads; i++)
4272 {
4273 int k;
4274
4275 for (j = i + 1; j < n_reloads; j++)
4276 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4277 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4278 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4279 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4280 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4281 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4282 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4283 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4284 && rtx_equal_p (rld[i].in, rld[j].in)
4285 && (operand_reloadnum[rld[i].opnum] < 0
4286 || rld[operand_reloadnum[rld[i].opnum]].optional)
4287 && (operand_reloadnum[rld[j].opnum] < 0
4288 || rld[operand_reloadnum[rld[j].opnum]].optional)
4289 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4290 || (goal_alternative_matches[rld[j].opnum]
4291 == rld[i].opnum)))
4292 {
4293 for (k = 0; k < n_replacements; k++)
4294 if (replacements[k].what == j)
4295 replacements[k].what = i;
4296
4297 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4300 else
4301 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4302 rld[j].in = 0;
4303 }
4304 }
4305
4306 /* Scan all the reloads and update their type.
4307 If a reload is for the address of an operand and we didn't reload
4308 that operand, change the type. Similarly, change the operand number
4309 of a reload when two operands match. If a reload is optional, treat it
4310 as though the operand isn't reloaded.
4311
4312 ??? This latter case is somewhat odd because if we do the optional
4313 reload, it means the object is hanging around. Thus we need only
4314 do the address reload if the optional reload was NOT done.
4315
4316 Change secondary reloads to be the address type of their operand, not
4317 the normal type.
4318
4319 If an operand's reload is now RELOAD_OTHER, change any
4320 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4321 RELOAD_FOR_OTHER_ADDRESS. */
4322
4323 for (i = 0; i < n_reloads; i++)
4324 {
4325 if (rld[i].secondary_p
4326 && rld[i].when_needed == operand_type[rld[i].opnum])
4327 rld[i].when_needed = address_type[rld[i].opnum];
4328
4329 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4330 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4331 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4333 && (operand_reloadnum[rld[i].opnum] < 0
4334 || rld[operand_reloadnum[rld[i].opnum]].optional))
4335 {
4336 /* If we have a secondary reload to go along with this reload,
4337 change its type to RELOAD_FOR_OPADDR_ADDR. */
4338
4339 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4340 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4341 && rld[i].secondary_in_reload != -1)
4342 {
4343 int secondary_in_reload = rld[i].secondary_in_reload;
4344
4345 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4346
4347 /* If there's a tertiary reload we have to change it also. */
4348 if (secondary_in_reload > 0
4349 && rld[secondary_in_reload].secondary_in_reload != -1)
4350 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4351 = RELOAD_FOR_OPADDR_ADDR;
4352 }
4353
4354 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4355 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4356 && rld[i].secondary_out_reload != -1)
4357 {
4358 int secondary_out_reload = rld[i].secondary_out_reload;
4359
4360 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4361
4362 /* If there's a tertiary reload we have to change it also. */
4363 if (secondary_out_reload
4364 && rld[secondary_out_reload].secondary_out_reload != -1)
4365 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4366 = RELOAD_FOR_OPADDR_ADDR;
4367 }
4368
4369 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4370 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4371 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4372 else
4373 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4374 }
4375
4376 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4377 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4378 && operand_reloadnum[rld[i].opnum] >= 0
4379 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4380 == RELOAD_OTHER))
4381 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4382
4383 if (goal_alternative_matches[rld[i].opnum] >= 0)
4384 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4385 }
4386
4387 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4388 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4389 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4390
4391 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4392 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4393 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4394 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4395 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4396 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4397 This is complicated by the fact that a single operand can have more
4398 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4399 choose_reload_regs without affecting code quality, and cases that
4400 actually fail are extremely rare, so it turns out to be better to fix
4401 the problem here by not generating cases that choose_reload_regs will
4402 fail for. */
4403 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4404 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4405 a single operand.
4406 We can reduce the register pressure by exploiting that a
4407 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4408 does not conflict with any of them, if it is only used for the first of
4409 the RELOAD_FOR_X_ADDRESS reloads. */
4410 {
4411 int first_op_addr_num = -2;
4412 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4413 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4414 int need_change = 0;
4415 /* We use last_op_addr_reload and the contents of the above arrays
4416 first as flags - -2 means no instance encountered, -1 means exactly
4417 one instance encountered.
4418 If more than one instance has been encountered, we store the reload
4419 number of the first reload of the kind in question; reload numbers
4420 are known to be non-negative. */
4421 for (i = 0; i < noperands; i++)
4422 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4423 for (i = n_reloads - 1; i >= 0; i--)
4424 {
4425 switch (rld[i].when_needed)
4426 {
4427 case RELOAD_FOR_OPERAND_ADDRESS:
4428 if (++first_op_addr_num >= 0)
4429 {
4430 first_op_addr_num = i;
4431 need_change = 1;
4432 }
4433 break;
4434 case RELOAD_FOR_INPUT_ADDRESS:
4435 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4436 {
4437 first_inpaddr_num[rld[i].opnum] = i;
4438 need_change = 1;
4439 }
4440 break;
4441 case RELOAD_FOR_OUTPUT_ADDRESS:
4442 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4443 {
4444 first_outpaddr_num[rld[i].opnum] = i;
4445 need_change = 1;
4446 }
4447 break;
4448 default:
4449 break;
4450 }
4451 }
4452
4453 if (need_change)
4454 {
4455 for (i = 0; i < n_reloads; i++)
4456 {
4457 int first_num;
4458 enum reload_type type;
4459
4460 switch (rld[i].when_needed)
4461 {
4462 case RELOAD_FOR_OPADDR_ADDR:
4463 first_num = first_op_addr_num;
4464 type = RELOAD_FOR_OPERAND_ADDRESS;
4465 break;
4466 case RELOAD_FOR_INPADDR_ADDRESS:
4467 first_num = first_inpaddr_num[rld[i].opnum];
4468 type = RELOAD_FOR_INPUT_ADDRESS;
4469 break;
4470 case RELOAD_FOR_OUTADDR_ADDRESS:
4471 first_num = first_outpaddr_num[rld[i].opnum];
4472 type = RELOAD_FOR_OUTPUT_ADDRESS;
4473 break;
4474 default:
4475 continue;
4476 }
4477 if (first_num < 0)
4478 continue;
4479 else if (i > first_num)
4480 rld[i].when_needed = type;
4481 else
4482 {
4483 /* Check if the only TYPE reload that uses reload I is
4484 reload FIRST_NUM. */
4485 for (j = n_reloads - 1; j > first_num; j--)
4486 {
4487 if (rld[j].when_needed == type
4488 && (rld[i].secondary_p
4489 ? rld[j].secondary_in_reload == i
4490 : reg_mentioned_p (rld[i].in, rld[j].in)))
4491 {
4492 rld[i].when_needed = type;
4493 break;
4494 }
4495 }
4496 }
4497 }
4498 }
4499 }
4500
4501 /* See if we have any reloads that are now allowed to be merged
4502 because we've changed when the reload is needed to
4503 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4504 check for the most common cases. */
4505
4506 for (i = 0; i < n_reloads; i++)
4507 if (rld[i].in != 0 && rld[i].out == 0
4508 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4509 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4510 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4511 for (j = 0; j < n_reloads; j++)
4512 if (i != j && rld[j].in != 0 && rld[j].out == 0
4513 && rld[j].when_needed == rld[i].when_needed
4514 && MATCHES (rld[i].in, rld[j].in)
4515 && rld[i].rclass == rld[j].rclass
4516 && !rld[i].nocombine && !rld[j].nocombine
4517 && rld[i].reg_rtx == rld[j].reg_rtx)
4518 {
4519 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4520 transfer_replacements (i, j);
4521 rld[j].in = 0;
4522 }
4523
4524 #ifdef HAVE_cc0
4525 /* If we made any reloads for addresses, see if they violate a
4526 "no input reloads" requirement for this insn. But loads that we
4527 do after the insn (such as for output addresses) are fine. */
4528 if (no_input_reloads)
4529 for (i = 0; i < n_reloads; i++)
4530 gcc_assert (rld[i].in == 0
4531 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4532 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4533 #endif
4534
4535 /* Compute reload_mode and reload_nregs. */
4536 for (i = 0; i < n_reloads; i++)
4537 {
4538 rld[i].mode
4539 = (rld[i].inmode == VOIDmode
4540 || (GET_MODE_SIZE (rld[i].outmode)
4541 > GET_MODE_SIZE (rld[i].inmode)))
4542 ? rld[i].outmode : rld[i].inmode;
4543
4544 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4545 }
4546
4547 /* Special case a simple move with an input reload and a
4548 destination of a hard reg, if the hard reg is ok, use it. */
4549 for (i = 0; i < n_reloads; i++)
4550 if (rld[i].when_needed == RELOAD_FOR_INPUT
4551 && GET_CODE (PATTERN (insn)) == SET
4552 && REG_P (SET_DEST (PATTERN (insn)))
4553 && (SET_SRC (PATTERN (insn)) == rld[i].in
4554 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4555 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4556 {
4557 rtx dest = SET_DEST (PATTERN (insn));
4558 unsigned int regno = REGNO (dest);
4559
4560 if (regno < FIRST_PSEUDO_REGISTER
4561 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4562 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4563 {
4564 int nr = hard_regno_nregs[regno][rld[i].mode];
4565 int ok = 1, nri;
4566
4567 for (nri = 1; nri < nr; nri ++)
4568 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4569 ok = 0;
4570
4571 if (ok)
4572 rld[i].reg_rtx = dest;
4573 }
4574 }
4575
4576 return retval;
4577 }
4578
4579 /* Return true if alternative number ALTNUM in constraint-string
4580 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4581 MEM gives the reference if it didn't need any reloads, otherwise it
4582 is null. */
4583
4584 static bool
4585 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4586 const char *constraint, int altnum)
4587 {
4588 int c;
4589
4590 /* Skip alternatives before the one requested. */
4591 while (altnum > 0)
4592 {
4593 while (*constraint++ != ',');
4594 altnum--;
4595 }
4596 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4597 If one of them is present, this alternative accepts the result of
4598 passing a constant-pool reference through find_reloads_toplev.
4599
4600 The same is true of extra memory constraints if the address
4601 was reloaded into a register. However, the target may elect
4602 to disallow the original constant address, forcing it to be
4603 reloaded into a register instead. */
4604 for (; (c = *constraint) && c != ',' && c != '#';
4605 constraint += CONSTRAINT_LEN (c, constraint))
4606 {
4607 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4608 return true;
4609 #ifdef EXTRA_CONSTRAINT_STR
4610 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4611 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4612 return true;
4613 #endif
4614 }
4615 return false;
4616 }
4617 \f
4618 /* Scan X for memory references and scan the addresses for reloading.
4619 Also checks for references to "constant" regs that we want to eliminate
4620 and replaces them with the values they stand for.
4621 We may alter X destructively if it contains a reference to such.
4622 If X is just a constant reg, we return the equivalent value
4623 instead of X.
4624
4625 IND_LEVELS says how many levels of indirect addressing this machine
4626 supports.
4627
4628 OPNUM and TYPE identify the purpose of the reload.
4629
4630 IS_SET_DEST is true if X is the destination of a SET, which is not
4631 appropriate to be replaced by a constant.
4632
4633 INSN, if nonzero, is the insn in which we do the reload. It is used
4634 to determine if we may generate output reloads, and where to put USEs
4635 for pseudos that we have to replace with stack slots.
4636
4637 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4638 result of find_reloads_address. */
4639
4640 static rtx
4641 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4642 int ind_levels, int is_set_dest, rtx insn,
4643 int *address_reloaded)
4644 {
4645 RTX_CODE code = GET_CODE (x);
4646
4647 const char *fmt = GET_RTX_FORMAT (code);
4648 int i;
4649 int copied;
4650
4651 if (code == REG)
4652 {
4653 /* This code is duplicated for speed in find_reloads. */
4654 int regno = REGNO (x);
4655 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4656 x = reg_equiv_constant[regno];
4657 #if 0
4658 /* This creates (subreg (mem...)) which would cause an unnecessary
4659 reload of the mem. */
4660 else if (reg_equiv_mem[regno] != 0)
4661 x = reg_equiv_mem[regno];
4662 #endif
4663 else if (reg_equiv_memory_loc[regno]
4664 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4665 {
4666 rtx mem = make_memloc (x, regno);
4667 if (reg_equiv_address[regno]
4668 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4669 {
4670 /* If this is not a toplevel operand, find_reloads doesn't see
4671 this substitution. We have to emit a USE of the pseudo so
4672 that delete_output_reload can see it. */
4673 if (replace_reloads && recog_data.operand[opnum] != x)
4674 /* We mark the USE with QImode so that we recognize it
4675 as one that can be safely deleted at the end of
4676 reload. */
4677 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4678 QImode);
4679 x = mem;
4680 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4681 opnum, type, ind_levels, insn);
4682 if (!rtx_equal_p (x, mem))
4683 push_reg_equiv_alt_mem (regno, x);
4684 if (address_reloaded)
4685 *address_reloaded = i;
4686 }
4687 }
4688 return x;
4689 }
4690 if (code == MEM)
4691 {
4692 rtx tem = x;
4693
4694 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4695 opnum, type, ind_levels, insn);
4696 if (address_reloaded)
4697 *address_reloaded = i;
4698
4699 return tem;
4700 }
4701
4702 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4703 {
4704 /* Check for SUBREG containing a REG that's equivalent to a
4705 constant. If the constant has a known value, truncate it
4706 right now. Similarly if we are extracting a single-word of a
4707 multi-word constant. If the constant is symbolic, allow it
4708 to be substituted normally. push_reload will strip the
4709 subreg later. The constant must not be VOIDmode, because we
4710 will lose the mode of the register (this should never happen
4711 because one of the cases above should handle it). */
4712
4713 int regno = REGNO (SUBREG_REG (x));
4714 rtx tem;
4715
4716 if (regno >= FIRST_PSEUDO_REGISTER
4717 && reg_renumber[regno] < 0
4718 && reg_equiv_constant[regno] != 0)
4719 {
4720 tem =
4721 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4722 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4723 gcc_assert (tem);
4724 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4725 {
4726 tem = force_const_mem (GET_MODE (x), tem);
4727 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4728 &XEXP (tem, 0), opnum, type,
4729 ind_levels, insn);
4730 if (address_reloaded)
4731 *address_reloaded = i;
4732 }
4733 return tem;
4734 }
4735
4736 /* If the subreg contains a reg that will be converted to a mem,
4737 convert the subreg to a narrower memref now.
4738 Otherwise, we would get (subreg (mem ...) ...),
4739 which would force reload of the mem.
4740
4741 We also need to do this if there is an equivalent MEM that is
4742 not offsettable. In that case, alter_subreg would produce an
4743 invalid address on big-endian machines.
4744
4745 For machines that extend byte loads, we must not reload using
4746 a wider mode if we have a paradoxical SUBREG. find_reloads will
4747 force a reload in that case. So we should not do anything here. */
4748
4749 if (regno >= FIRST_PSEUDO_REGISTER
4750 #ifdef LOAD_EXTEND_OP
4751 && (GET_MODE_SIZE (GET_MODE (x))
4752 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4753 #endif
4754 && (reg_equiv_address[regno] != 0
4755 || (reg_equiv_mem[regno] != 0
4756 && (! strict_memory_address_addr_space_p
4757 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
4758 MEM_ADDR_SPACE (reg_equiv_mem[regno]))
4759 || ! offsettable_memref_p (reg_equiv_mem[regno])
4760 || num_not_at_initial_offset))))
4761 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4762 insn);
4763 }
4764
4765 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4766 {
4767 if (fmt[i] == 'e')
4768 {
4769 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4770 ind_levels, is_set_dest, insn,
4771 address_reloaded);
4772 /* If we have replaced a reg with it's equivalent memory loc -
4773 that can still be handled here e.g. if it's in a paradoxical
4774 subreg - we must make the change in a copy, rather than using
4775 a destructive change. This way, find_reloads can still elect
4776 not to do the change. */
4777 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4778 {
4779 x = shallow_copy_rtx (x);
4780 copied = 1;
4781 }
4782 XEXP (x, i) = new_part;
4783 }
4784 }
4785 return x;
4786 }
4787
4788 /* Return a mem ref for the memory equivalent of reg REGNO.
4789 This mem ref is not shared with anything. */
4790
4791 static rtx
4792 make_memloc (rtx ad, int regno)
4793 {
4794 /* We must rerun eliminate_regs, in case the elimination
4795 offsets have changed. */
4796 rtx tem
4797 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], VOIDmode, NULL_RTX),
4798 0);
4799
4800 /* If TEM might contain a pseudo, we must copy it to avoid
4801 modifying it when we do the substitution for the reload. */
4802 if (rtx_varies_p (tem, 0))
4803 tem = copy_rtx (tem);
4804
4805 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4806 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4807
4808 /* Copy the result if it's still the same as the equivalence, to avoid
4809 modifying it when we do the substitution for the reload. */
4810 if (tem == reg_equiv_memory_loc[regno])
4811 tem = copy_rtx (tem);
4812 return tem;
4813 }
4814
4815 /* Returns true if AD could be turned into a valid memory reference
4816 to mode MODE in address space AS by reloading the part pointed to
4817 by PART into a register. */
4818
4819 static int
4820 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4821 addr_space_t as, rtx *part)
4822 {
4823 int retv;
4824 rtx tem = *part;
4825 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4826
4827 *part = reg;
4828 retv = memory_address_addr_space_p (mode, ad, as);
4829 *part = tem;
4830
4831 return retv;
4832 }
4833
4834 /* Record all reloads needed for handling memory address AD
4835 which appears in *LOC in a memory reference to mode MODE
4836 which itself is found in location *MEMREFLOC.
4837 Note that we take shortcuts assuming that no multi-reg machine mode
4838 occurs as part of an address.
4839
4840 OPNUM and TYPE specify the purpose of this reload.
4841
4842 IND_LEVELS says how many levels of indirect addressing this machine
4843 supports.
4844
4845 INSN, if nonzero, is the insn in which we do the reload. It is used
4846 to determine if we may generate output reloads, and where to put USEs
4847 for pseudos that we have to replace with stack slots.
4848
4849 Value is one if this address is reloaded or replaced as a whole; it is
4850 zero if the top level of this address was not reloaded or replaced, and
4851 it is -1 if it may or may not have been reloaded or replaced.
4852
4853 Note that there is no verification that the address will be valid after
4854 this routine does its work. Instead, we rely on the fact that the address
4855 was valid when reload started. So we need only undo things that reload
4856 could have broken. These are wrong register types, pseudos not allocated
4857 to a hard register, and frame pointer elimination. */
4858
4859 static int
4860 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4861 rtx *loc, int opnum, enum reload_type type,
4862 int ind_levels, rtx insn)
4863 {
4864 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4865 : ADDR_SPACE_GENERIC;
4866 int regno;
4867 int removed_and = 0;
4868 int op_index;
4869 rtx tem;
4870
4871 /* If the address is a register, see if it is a legitimate address and
4872 reload if not. We first handle the cases where we need not reload
4873 or where we must reload in a non-standard way. */
4874
4875 if (REG_P (ad))
4876 {
4877 regno = REGNO (ad);
4878
4879 if (reg_equiv_constant[regno] != 0)
4880 {
4881 find_reloads_address_part (reg_equiv_constant[regno], loc,
4882 base_reg_class (mode, MEM, SCRATCH),
4883 GET_MODE (ad), opnum, type, ind_levels);
4884 return 1;
4885 }
4886
4887 tem = reg_equiv_memory_loc[regno];
4888 if (tem != 0)
4889 {
4890 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4891 {
4892 tem = make_memloc (ad, regno);
4893 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4894 XEXP (tem, 0),
4895 MEM_ADDR_SPACE (tem)))
4896 {
4897 rtx orig = tem;
4898
4899 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4900 &XEXP (tem, 0), opnum,
4901 ADDR_TYPE (type), ind_levels, insn);
4902 if (!rtx_equal_p (tem, orig))
4903 push_reg_equiv_alt_mem (regno, tem);
4904 }
4905 /* We can avoid a reload if the register's equivalent memory
4906 expression is valid as an indirect memory address.
4907 But not all addresses are valid in a mem used as an indirect
4908 address: only reg or reg+constant. */
4909
4910 if (ind_levels > 0
4911 && strict_memory_address_addr_space_p (mode, tem, as)
4912 && (REG_P (XEXP (tem, 0))
4913 || (GET_CODE (XEXP (tem, 0)) == PLUS
4914 && REG_P (XEXP (XEXP (tem, 0), 0))
4915 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4916 {
4917 /* TEM is not the same as what we'll be replacing the
4918 pseudo with after reload, put a USE in front of INSN
4919 in the final reload pass. */
4920 if (replace_reloads
4921 && num_not_at_initial_offset
4922 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4923 {
4924 *loc = tem;
4925 /* We mark the USE with QImode so that we
4926 recognize it as one that can be safely
4927 deleted at the end of reload. */
4928 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4929 insn), QImode);
4930
4931 /* This doesn't really count as replacing the address
4932 as a whole, since it is still a memory access. */
4933 }
4934 return 0;
4935 }
4936 ad = tem;
4937 }
4938 }
4939
4940 /* The only remaining case where we can avoid a reload is if this is a
4941 hard register that is valid as a base register and which is not the
4942 subject of a CLOBBER in this insn. */
4943
4944 else if (regno < FIRST_PSEUDO_REGISTER
4945 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4946 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4947 return 0;
4948
4949 /* If we do not have one of the cases above, we must do the reload. */
4950 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4951 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4952 return 1;
4953 }
4954
4955 if (strict_memory_address_addr_space_p (mode, ad, as))
4956 {
4957 /* The address appears valid, so reloads are not needed.
4958 But the address may contain an eliminable register.
4959 This can happen because a machine with indirect addressing
4960 may consider a pseudo register by itself a valid address even when
4961 it has failed to get a hard reg.
4962 So do a tree-walk to find and eliminate all such regs. */
4963
4964 /* But first quickly dispose of a common case. */
4965 if (GET_CODE (ad) == PLUS
4966 && CONST_INT_P (XEXP (ad, 1))
4967 && REG_P (XEXP (ad, 0))
4968 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4969 return 0;
4970
4971 subst_reg_equivs_changed = 0;
4972 *loc = subst_reg_equivs (ad, insn);
4973
4974 if (! subst_reg_equivs_changed)
4975 return 0;
4976
4977 /* Check result for validity after substitution. */
4978 if (strict_memory_address_addr_space_p (mode, ad, as))
4979 return 0;
4980 }
4981
4982 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4983 do
4984 {
4985 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4986 {
4987 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4988 ind_levels, win);
4989 }
4990 break;
4991 win:
4992 *memrefloc = copy_rtx (*memrefloc);
4993 XEXP (*memrefloc, 0) = ad;
4994 move_replacements (&ad, &XEXP (*memrefloc, 0));
4995 return -1;
4996 }
4997 while (0);
4998 #endif
4999
5000 /* The address is not valid. We have to figure out why. First see if
5001 we have an outer AND and remove it if so. Then analyze what's inside. */
5002
5003 if (GET_CODE (ad) == AND)
5004 {
5005 removed_and = 1;
5006 loc = &XEXP (ad, 0);
5007 ad = *loc;
5008 }
5009
5010 /* One possibility for why the address is invalid is that it is itself
5011 a MEM. This can happen when the frame pointer is being eliminated, a
5012 pseudo is not allocated to a hard register, and the offset between the
5013 frame and stack pointers is not its initial value. In that case the
5014 pseudo will have been replaced by a MEM referring to the
5015 stack pointer. */
5016 if (MEM_P (ad))
5017 {
5018 /* First ensure that the address in this MEM is valid. Then, unless
5019 indirect addresses are valid, reload the MEM into a register. */
5020 tem = ad;
5021 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5022 opnum, ADDR_TYPE (type),
5023 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5024
5025 /* If tem was changed, then we must create a new memory reference to
5026 hold it and store it back into memrefloc. */
5027 if (tem != ad && memrefloc)
5028 {
5029 *memrefloc = copy_rtx (*memrefloc);
5030 copy_replacements (tem, XEXP (*memrefloc, 0));
5031 loc = &XEXP (*memrefloc, 0);
5032 if (removed_and)
5033 loc = &XEXP (*loc, 0);
5034 }
5035
5036 /* Check similar cases as for indirect addresses as above except
5037 that we can allow pseudos and a MEM since they should have been
5038 taken care of above. */
5039
5040 if (ind_levels == 0
5041 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5042 || MEM_P (XEXP (tem, 0))
5043 || ! (REG_P (XEXP (tem, 0))
5044 || (GET_CODE (XEXP (tem, 0)) == PLUS
5045 && REG_P (XEXP (XEXP (tem, 0), 0))
5046 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5047 {
5048 /* Must use TEM here, not AD, since it is the one that will
5049 have any subexpressions reloaded, if needed. */
5050 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5051 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5052 VOIDmode, 0,
5053 0, opnum, type);
5054 return ! removed_and;
5055 }
5056 else
5057 return 0;
5058 }
5059
5060 /* If we have address of a stack slot but it's not valid because the
5061 displacement is too large, compute the sum in a register.
5062 Handle all base registers here, not just fp/ap/sp, because on some
5063 targets (namely SH) we can also get too large displacements from
5064 big-endian corrections. */
5065 else if (GET_CODE (ad) == PLUS
5066 && REG_P (XEXP (ad, 0))
5067 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5068 && CONST_INT_P (XEXP (ad, 1))
5069 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5070 CONST_INT)
5071 /* Similarly, if we were to reload the base register and the
5072 mem+offset address is still invalid, then we want to reload
5073 the whole address, not just the base register. */
5074 || ! maybe_memory_address_addr_space_p
5075 (mode, ad, as, &(XEXP (ad, 0)))))
5076
5077 {
5078 /* Unshare the MEM rtx so we can safely alter it. */
5079 if (memrefloc)
5080 {
5081 *memrefloc = copy_rtx (*memrefloc);
5082 loc = &XEXP (*memrefloc, 0);
5083 if (removed_and)
5084 loc = &XEXP (*loc, 0);
5085 }
5086
5087 if (double_reg_address_ok
5088 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode,
5089 PLUS, CONST_INT))
5090 {
5091 /* Unshare the sum as well. */
5092 *loc = ad = copy_rtx (ad);
5093
5094 /* Reload the displacement into an index reg.
5095 We assume the frame pointer or arg pointer is a base reg. */
5096 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5097 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5098 type, ind_levels);
5099 return 0;
5100 }
5101 else
5102 {
5103 /* If the sum of two regs is not necessarily valid,
5104 reload the sum into a base reg.
5105 That will at least work. */
5106 find_reloads_address_part (ad, loc,
5107 base_reg_class (mode, MEM, SCRATCH),
5108 GET_MODE (ad), opnum, type, ind_levels);
5109 }
5110 return ! removed_and;
5111 }
5112
5113 /* If we have an indexed stack slot, there are three possible reasons why
5114 it might be invalid: The index might need to be reloaded, the address
5115 might have been made by frame pointer elimination and hence have a
5116 constant out of range, or both reasons might apply.
5117
5118 We can easily check for an index needing reload, but even if that is the
5119 case, we might also have an invalid constant. To avoid making the
5120 conservative assumption and requiring two reloads, we see if this address
5121 is valid when not interpreted strictly. If it is, the only problem is
5122 that the index needs a reload and find_reloads_address_1 will take care
5123 of it.
5124
5125 Handle all base registers here, not just fp/ap/sp, because on some
5126 targets (namely SPARC) we can also get invalid addresses from preventive
5127 subreg big-endian corrections made by find_reloads_toplev. We
5128 can also get expressions involving LO_SUM (rather than PLUS) from
5129 find_reloads_subreg_address.
5130
5131 If we decide to do something, it must be that `double_reg_address_ok'
5132 is true. We generate a reload of the base register + constant and
5133 rework the sum so that the reload register will be added to the index.
5134 This is safe because we know the address isn't shared.
5135
5136 We check for the base register as both the first and second operand of
5137 the innermost PLUS and/or LO_SUM. */
5138
5139 for (op_index = 0; op_index < 2; ++op_index)
5140 {
5141 rtx operand, addend;
5142 enum rtx_code inner_code;
5143
5144 if (GET_CODE (ad) != PLUS)
5145 continue;
5146
5147 inner_code = GET_CODE (XEXP (ad, 0));
5148 if (!(GET_CODE (ad) == PLUS
5149 && CONST_INT_P (XEXP (ad, 1))
5150 && (inner_code == PLUS || inner_code == LO_SUM)))
5151 continue;
5152
5153 operand = XEXP (XEXP (ad, 0), op_index);
5154 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5155 continue;
5156
5157 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5158
5159 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5160 GET_CODE (addend))
5161 || operand == frame_pointer_rtx
5162 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5163 || operand == hard_frame_pointer_rtx
5164 #endif
5165 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5166 || operand == arg_pointer_rtx
5167 #endif
5168 || operand == stack_pointer_rtx)
5169 && ! maybe_memory_address_addr_space_p
5170 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5171 {
5172 rtx offset_reg;
5173 enum reg_class cls;
5174
5175 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5176
5177 /* Form the adjusted address. */
5178 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5179 ad = gen_rtx_PLUS (GET_MODE (ad),
5180 op_index == 0 ? offset_reg : addend,
5181 op_index == 0 ? addend : offset_reg);
5182 else
5183 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5184 op_index == 0 ? offset_reg : addend,
5185 op_index == 0 ? addend : offset_reg);
5186 *loc = ad;
5187
5188 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5189 find_reloads_address_part (XEXP (ad, op_index),
5190 &XEXP (ad, op_index), cls,
5191 GET_MODE (ad), opnum, type, ind_levels);
5192 find_reloads_address_1 (mode,
5193 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5194 GET_CODE (XEXP (ad, op_index)),
5195 &XEXP (ad, 1 - op_index), opnum,
5196 type, 0, insn);
5197
5198 return 0;
5199 }
5200 }
5201
5202 /* See if address becomes valid when an eliminable register
5203 in a sum is replaced. */
5204
5205 tem = ad;
5206 if (GET_CODE (ad) == PLUS)
5207 tem = subst_indexed_address (ad);
5208 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5209 {
5210 /* Ok, we win that way. Replace any additional eliminable
5211 registers. */
5212
5213 subst_reg_equivs_changed = 0;
5214 tem = subst_reg_equivs (tem, insn);
5215
5216 /* Make sure that didn't make the address invalid again. */
5217
5218 if (! subst_reg_equivs_changed
5219 || strict_memory_address_addr_space_p (mode, tem, as))
5220 {
5221 *loc = tem;
5222 return 0;
5223 }
5224 }
5225
5226 /* If constants aren't valid addresses, reload the constant address
5227 into a register. */
5228 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5229 {
5230 enum machine_mode address_mode = GET_MODE (ad);
5231 if (address_mode == VOIDmode)
5232 address_mode = targetm.addr_space.address_mode (as);
5233
5234 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5235 Unshare it so we can safely alter it. */
5236 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5237 && CONSTANT_POOL_ADDRESS_P (ad))
5238 {
5239 *memrefloc = copy_rtx (*memrefloc);
5240 loc = &XEXP (*memrefloc, 0);
5241 if (removed_and)
5242 loc = &XEXP (*loc, 0);
5243 }
5244
5245 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5246 address_mode, opnum, type, ind_levels);
5247 return ! removed_and;
5248 }
5249
5250 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5251 ind_levels, insn);
5252 }
5253 \f
5254 /* Find all pseudo regs appearing in AD
5255 that are eliminable in favor of equivalent values
5256 and do not have hard regs; replace them by their equivalents.
5257 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5258 front of it for pseudos that we have to replace with stack slots. */
5259
5260 static rtx
5261 subst_reg_equivs (rtx ad, rtx insn)
5262 {
5263 RTX_CODE code = GET_CODE (ad);
5264 int i;
5265 const char *fmt;
5266
5267 switch (code)
5268 {
5269 case HIGH:
5270 case CONST_INT:
5271 case CONST:
5272 case CONST_DOUBLE:
5273 case CONST_FIXED:
5274 case CONST_VECTOR:
5275 case SYMBOL_REF:
5276 case LABEL_REF:
5277 case PC:
5278 case CC0:
5279 return ad;
5280
5281 case REG:
5282 {
5283 int regno = REGNO (ad);
5284
5285 if (reg_equiv_constant[regno] != 0)
5286 {
5287 subst_reg_equivs_changed = 1;
5288 return reg_equiv_constant[regno];
5289 }
5290 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5291 {
5292 rtx mem = make_memloc (ad, regno);
5293 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5294 {
5295 subst_reg_equivs_changed = 1;
5296 /* We mark the USE with QImode so that we recognize it
5297 as one that can be safely deleted at the end of
5298 reload. */
5299 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5300 QImode);
5301 return mem;
5302 }
5303 }
5304 }
5305 return ad;
5306
5307 case PLUS:
5308 /* Quickly dispose of a common case. */
5309 if (XEXP (ad, 0) == frame_pointer_rtx
5310 && CONST_INT_P (XEXP (ad, 1)))
5311 return ad;
5312 break;
5313
5314 default:
5315 break;
5316 }
5317
5318 fmt = GET_RTX_FORMAT (code);
5319 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5320 if (fmt[i] == 'e')
5321 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5322 return ad;
5323 }
5324 \f
5325 /* Compute the sum of X and Y, making canonicalizations assumed in an
5326 address, namely: sum constant integers, surround the sum of two
5327 constants with a CONST, put the constant as the second operand, and
5328 group the constant on the outermost sum.
5329
5330 This routine assumes both inputs are already in canonical form. */
5331
5332 rtx
5333 form_sum (enum machine_mode mode, rtx x, rtx y)
5334 {
5335 rtx tem;
5336
5337 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5338 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5339
5340 if (CONST_INT_P (x))
5341 return plus_constant (y, INTVAL (x));
5342 else if (CONST_INT_P (y))
5343 return plus_constant (x, INTVAL (y));
5344 else if (CONSTANT_P (x))
5345 tem = x, x = y, y = tem;
5346
5347 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5348 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5349
5350 /* Note that if the operands of Y are specified in the opposite
5351 order in the recursive calls below, infinite recursion will occur. */
5352 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5353 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5354
5355 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5356 constant will have been placed second. */
5357 if (CONSTANT_P (x) && CONSTANT_P (y))
5358 {
5359 if (GET_CODE (x) == CONST)
5360 x = XEXP (x, 0);
5361 if (GET_CODE (y) == CONST)
5362 y = XEXP (y, 0);
5363
5364 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5365 }
5366
5367 return gen_rtx_PLUS (mode, x, y);
5368 }
5369 \f
5370 /* If ADDR is a sum containing a pseudo register that should be
5371 replaced with a constant (from reg_equiv_constant),
5372 return the result of doing so, and also apply the associative
5373 law so that the result is more likely to be a valid address.
5374 (But it is not guaranteed to be one.)
5375
5376 Note that at most one register is replaced, even if more are
5377 replaceable. Also, we try to put the result into a canonical form
5378 so it is more likely to be a valid address.
5379
5380 In all other cases, return ADDR. */
5381
5382 static rtx
5383 subst_indexed_address (rtx addr)
5384 {
5385 rtx op0 = 0, op1 = 0, op2 = 0;
5386 rtx tem;
5387 int regno;
5388
5389 if (GET_CODE (addr) == PLUS)
5390 {
5391 /* Try to find a register to replace. */
5392 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5393 if (REG_P (op0)
5394 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5395 && reg_renumber[regno] < 0
5396 && reg_equiv_constant[regno] != 0)
5397 op0 = reg_equiv_constant[regno];
5398 else if (REG_P (op1)
5399 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5400 && reg_renumber[regno] < 0
5401 && reg_equiv_constant[regno] != 0)
5402 op1 = reg_equiv_constant[regno];
5403 else if (GET_CODE (op0) == PLUS
5404 && (tem = subst_indexed_address (op0)) != op0)
5405 op0 = tem;
5406 else if (GET_CODE (op1) == PLUS
5407 && (tem = subst_indexed_address (op1)) != op1)
5408 op1 = tem;
5409 else
5410 return addr;
5411
5412 /* Pick out up to three things to add. */
5413 if (GET_CODE (op1) == PLUS)
5414 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5415 else if (GET_CODE (op0) == PLUS)
5416 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5417
5418 /* Compute the sum. */
5419 if (op2 != 0)
5420 op1 = form_sum (GET_MODE (addr), op1, op2);
5421 if (op1 != 0)
5422 op0 = form_sum (GET_MODE (addr), op0, op1);
5423
5424 return op0;
5425 }
5426 return addr;
5427 }
5428 \f
5429 /* Update the REG_INC notes for an insn. It updates all REG_INC
5430 notes for the instruction which refer to REGNO the to refer
5431 to the reload number.
5432
5433 INSN is the insn for which any REG_INC notes need updating.
5434
5435 REGNO is the register number which has been reloaded.
5436
5437 RELOADNUM is the reload number. */
5438
5439 static void
5440 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5441 int reloadnum ATTRIBUTE_UNUSED)
5442 {
5443 #ifdef AUTO_INC_DEC
5444 rtx link;
5445
5446 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5447 if (REG_NOTE_KIND (link) == REG_INC
5448 && (int) REGNO (XEXP (link, 0)) == regno)
5449 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5450 #endif
5451 }
5452 \f
5453 /* Record the pseudo registers we must reload into hard registers in a
5454 subexpression of a would-be memory address, X referring to a value
5455 in mode MODE. (This function is not called if the address we find
5456 is strictly valid.)
5457
5458 CONTEXT = 1 means we are considering regs as index regs,
5459 = 0 means we are considering them as base regs.
5460 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5461 or an autoinc code.
5462 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5463 is the code of the index part of the address. Otherwise, pass SCRATCH
5464 for this argument.
5465 OPNUM and TYPE specify the purpose of any reloads made.
5466
5467 IND_LEVELS says how many levels of indirect addressing are
5468 supported at this point in the address.
5469
5470 INSN, if nonzero, is the insn in which we do the reload. It is used
5471 to determine if we may generate output reloads.
5472
5473 We return nonzero if X, as a whole, is reloaded or replaced. */
5474
5475 /* Note that we take shortcuts assuming that no multi-reg machine mode
5476 occurs as part of an address.
5477 Also, this is not fully machine-customizable; it works for machines
5478 such as VAXen and 68000's and 32000's, but other possible machines
5479 could have addressing modes that this does not handle right.
5480 If you add push_reload calls here, you need to make sure gen_reload
5481 handles those cases gracefully. */
5482
5483 static int
5484 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5485 enum rtx_code outer_code, enum rtx_code index_code,
5486 rtx *loc, int opnum, enum reload_type type,
5487 int ind_levels, rtx insn)
5488 {
5489 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5490 ((CONTEXT) == 0 \
5491 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5492 : REGNO_OK_FOR_INDEX_P (REGNO))
5493
5494 enum reg_class context_reg_class;
5495 RTX_CODE code = GET_CODE (x);
5496
5497 if (context == 1)
5498 context_reg_class = INDEX_REG_CLASS;
5499 else
5500 context_reg_class = base_reg_class (mode, outer_code, index_code);
5501
5502 switch (code)
5503 {
5504 case PLUS:
5505 {
5506 rtx orig_op0 = XEXP (x, 0);
5507 rtx orig_op1 = XEXP (x, 1);
5508 RTX_CODE code0 = GET_CODE (orig_op0);
5509 RTX_CODE code1 = GET_CODE (orig_op1);
5510 rtx op0 = orig_op0;
5511 rtx op1 = orig_op1;
5512
5513 if (GET_CODE (op0) == SUBREG)
5514 {
5515 op0 = SUBREG_REG (op0);
5516 code0 = GET_CODE (op0);
5517 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5518 op0 = gen_rtx_REG (word_mode,
5519 (REGNO (op0) +
5520 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5521 GET_MODE (SUBREG_REG (orig_op0)),
5522 SUBREG_BYTE (orig_op0),
5523 GET_MODE (orig_op0))));
5524 }
5525
5526 if (GET_CODE (op1) == SUBREG)
5527 {
5528 op1 = SUBREG_REG (op1);
5529 code1 = GET_CODE (op1);
5530 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5531 /* ??? Why is this given op1's mode and above for
5532 ??? op0 SUBREGs we use word_mode? */
5533 op1 = gen_rtx_REG (GET_MODE (op1),
5534 (REGNO (op1) +
5535 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5536 GET_MODE (SUBREG_REG (orig_op1)),
5537 SUBREG_BYTE (orig_op1),
5538 GET_MODE (orig_op1))));
5539 }
5540 /* Plus in the index register may be created only as a result of
5541 register rematerialization for expression like &localvar*4. Reload it.
5542 It may be possible to combine the displacement on the outer level,
5543 but it is probably not worthwhile to do so. */
5544 if (context == 1)
5545 {
5546 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5547 opnum, ADDR_TYPE (type), ind_levels, insn);
5548 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5549 context_reg_class,
5550 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5551 return 1;
5552 }
5553
5554 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5555 || code0 == ZERO_EXTEND || code1 == MEM)
5556 {
5557 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5558 &XEXP (x, 0), opnum, type, ind_levels,
5559 insn);
5560 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5561 &XEXP (x, 1), opnum, type, ind_levels,
5562 insn);
5563 }
5564
5565 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5566 || code1 == ZERO_EXTEND || code0 == MEM)
5567 {
5568 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5569 &XEXP (x, 0), opnum, type, ind_levels,
5570 insn);
5571 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5572 &XEXP (x, 1), opnum, type, ind_levels,
5573 insn);
5574 }
5575
5576 else if (code0 == CONST_INT || code0 == CONST
5577 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5578 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5579 &XEXP (x, 1), opnum, type, ind_levels,
5580 insn);
5581
5582 else if (code1 == CONST_INT || code1 == CONST
5583 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5584 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5585 &XEXP (x, 0), opnum, type, ind_levels,
5586 insn);
5587
5588 else if (code0 == REG && code1 == REG)
5589 {
5590 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5591 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5592 return 0;
5593 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5594 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5595 return 0;
5596 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5597 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5598 &XEXP (x, 1), opnum, type, ind_levels,
5599 insn);
5600 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5601 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5602 &XEXP (x, 0), opnum, type, ind_levels,
5603 insn);
5604 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5605 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5606 &XEXP (x, 0), opnum, type, ind_levels,
5607 insn);
5608 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5609 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5610 &XEXP (x, 1), opnum, type, ind_levels,
5611 insn);
5612 else
5613 {
5614 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5618 &XEXP (x, 1), opnum, type, ind_levels,
5619 insn);
5620 }
5621 }
5622
5623 else if (code0 == REG)
5624 {
5625 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5626 &XEXP (x, 0), opnum, type, ind_levels,
5627 insn);
5628 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5629 &XEXP (x, 1), opnum, type, ind_levels,
5630 insn);
5631 }
5632
5633 else if (code1 == REG)
5634 {
5635 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5636 &XEXP (x, 1), opnum, type, ind_levels,
5637 insn);
5638 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 }
5642 }
5643
5644 return 0;
5645
5646 case POST_MODIFY:
5647 case PRE_MODIFY:
5648 {
5649 rtx op0 = XEXP (x, 0);
5650 rtx op1 = XEXP (x, 1);
5651 enum rtx_code index_code;
5652 int regno;
5653 int reloadnum;
5654
5655 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5656 return 0;
5657
5658 /* Currently, we only support {PRE,POST}_MODIFY constructs
5659 where a base register is {inc,dec}remented by the contents
5660 of another register or by a constant value. Thus, these
5661 operands must match. */
5662 gcc_assert (op0 == XEXP (op1, 0));
5663
5664 /* Require index register (or constant). Let's just handle the
5665 register case in the meantime... If the target allows
5666 auto-modify by a constant then we could try replacing a pseudo
5667 register with its equivalent constant where applicable.
5668
5669 We also handle the case where the register was eliminated
5670 resulting in a PLUS subexpression.
5671
5672 If we later decide to reload the whole PRE_MODIFY or
5673 POST_MODIFY, inc_for_reload might clobber the reload register
5674 before reading the index. The index register might therefore
5675 need to live longer than a TYPE reload normally would, so be
5676 conservative and class it as RELOAD_OTHER. */
5677 if ((REG_P (XEXP (op1, 1))
5678 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5679 || GET_CODE (XEXP (op1, 1)) == PLUS)
5680 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5681 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5682 ind_levels, insn);
5683
5684 gcc_assert (REG_P (XEXP (op1, 0)));
5685
5686 regno = REGNO (XEXP (op1, 0));
5687 index_code = GET_CODE (XEXP (op1, 1));
5688
5689 /* A register that is incremented cannot be constant! */
5690 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5691 || reg_equiv_constant[regno] == 0);
5692
5693 /* Handle a register that is equivalent to a memory location
5694 which cannot be addressed directly. */
5695 if (reg_equiv_memory_loc[regno] != 0
5696 && (reg_equiv_address[regno] != 0
5697 || num_not_at_initial_offset))
5698 {
5699 rtx tem = make_memloc (XEXP (x, 0), regno);
5700
5701 if (reg_equiv_address[regno]
5702 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5703 {
5704 rtx orig = tem;
5705
5706 /* First reload the memory location's address.
5707 We can't use ADDR_TYPE (type) here, because we need to
5708 write back the value after reading it, hence we actually
5709 need two registers. */
5710 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5711 &XEXP (tem, 0), opnum,
5712 RELOAD_OTHER,
5713 ind_levels, insn);
5714
5715 if (!rtx_equal_p (tem, orig))
5716 push_reg_equiv_alt_mem (regno, tem);
5717
5718 /* Then reload the memory location into a base
5719 register. */
5720 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5721 &XEXP (op1, 0),
5722 base_reg_class (mode, code,
5723 index_code),
5724 GET_MODE (x), GET_MODE (x), 0,
5725 0, opnum, RELOAD_OTHER);
5726
5727 update_auto_inc_notes (this_insn, regno, reloadnum);
5728 return 0;
5729 }
5730 }
5731
5732 if (reg_renumber[regno] >= 0)
5733 regno = reg_renumber[regno];
5734
5735 /* We require a base register here... */
5736 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5737 {
5738 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5739 &XEXP (op1, 0), &XEXP (x, 0),
5740 base_reg_class (mode, code, index_code),
5741 GET_MODE (x), GET_MODE (x), 0, 0,
5742 opnum, RELOAD_OTHER);
5743
5744 update_auto_inc_notes (this_insn, regno, reloadnum);
5745 return 0;
5746 }
5747 }
5748 return 0;
5749
5750 case POST_INC:
5751 case POST_DEC:
5752 case PRE_INC:
5753 case PRE_DEC:
5754 if (REG_P (XEXP (x, 0)))
5755 {
5756 int regno = REGNO (XEXP (x, 0));
5757 int value = 0;
5758 rtx x_orig = x;
5759
5760 /* A register that is incremented cannot be constant! */
5761 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5762 || reg_equiv_constant[regno] == 0);
5763
5764 /* Handle a register that is equivalent to a memory location
5765 which cannot be addressed directly. */
5766 if (reg_equiv_memory_loc[regno] != 0
5767 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5768 {
5769 rtx tem = make_memloc (XEXP (x, 0), regno);
5770 if (reg_equiv_address[regno]
5771 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5772 {
5773 rtx orig = tem;
5774
5775 /* First reload the memory location's address.
5776 We can't use ADDR_TYPE (type) here, because we need to
5777 write back the value after reading it, hence we actually
5778 need two registers. */
5779 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5780 &XEXP (tem, 0), opnum, type,
5781 ind_levels, insn);
5782 if (!rtx_equal_p (tem, orig))
5783 push_reg_equiv_alt_mem (regno, tem);
5784 /* Put this inside a new increment-expression. */
5785 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5786 /* Proceed to reload that, as if it contained a register. */
5787 }
5788 }
5789
5790 /* If we have a hard register that is ok in this incdec context,
5791 don't make a reload. If the register isn't nice enough for
5792 autoincdec, we can reload it. But, if an autoincrement of a
5793 register that we here verified as playing nice, still outside
5794 isn't "valid", it must be that no autoincrement is "valid".
5795 If that is true and something made an autoincrement anyway,
5796 this must be a special context where one is allowed.
5797 (For example, a "push" instruction.)
5798 We can't improve this address, so leave it alone. */
5799
5800 /* Otherwise, reload the autoincrement into a suitable hard reg
5801 and record how much to increment by. */
5802
5803 if (reg_renumber[regno] >= 0)
5804 regno = reg_renumber[regno];
5805 if (regno >= FIRST_PSEUDO_REGISTER
5806 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5807 index_code))
5808 {
5809 int reloadnum;
5810
5811 /* If we can output the register afterwards, do so, this
5812 saves the extra update.
5813 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5814 CALL_INSN - and it does not set CC0.
5815 But don't do this if we cannot directly address the
5816 memory location, since this will make it harder to
5817 reuse address reloads, and increases register pressure.
5818 Also don't do this if we can probably update x directly. */
5819 rtx equiv = (MEM_P (XEXP (x, 0))
5820 ? XEXP (x, 0)
5821 : reg_equiv_mem[regno]);
5822 int icode = (int) optab_handler (add_optab, GET_MODE (x));
5823 if (insn && NONJUMP_INSN_P (insn) && equiv
5824 && memory_operand (equiv, GET_MODE (equiv))
5825 #ifdef HAVE_cc0
5826 && ! sets_cc0_p (PATTERN (insn))
5827 #endif
5828 && ! (icode != CODE_FOR_nothing
5829 && ((*insn_data[icode].operand[0].predicate)
5830 (equiv, GET_MODE (x)))
5831 && ((*insn_data[icode].operand[1].predicate)
5832 (equiv, GET_MODE (x)))))
5833 {
5834 /* We use the original pseudo for loc, so that
5835 emit_reload_insns() knows which pseudo this
5836 reload refers to and updates the pseudo rtx, not
5837 its equivalent memory location, as well as the
5838 corresponding entry in reg_last_reload_reg. */
5839 loc = &XEXP (x_orig, 0);
5840 x = XEXP (x, 0);
5841 reloadnum
5842 = push_reload (x, x, loc, loc,
5843 context_reg_class,
5844 GET_MODE (x), GET_MODE (x), 0, 0,
5845 opnum, RELOAD_OTHER);
5846 }
5847 else
5848 {
5849 reloadnum
5850 = push_reload (x, x, loc, (rtx*) 0,
5851 context_reg_class,
5852 GET_MODE (x), GET_MODE (x), 0, 0,
5853 opnum, type);
5854 rld[reloadnum].inc
5855 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5856
5857 value = 1;
5858 }
5859
5860 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5861 reloadnum);
5862 }
5863 return value;
5864 }
5865 return 0;
5866
5867 case TRUNCATE:
5868 case SIGN_EXTEND:
5869 case ZERO_EXTEND:
5870 /* Look for parts to reload in the inner expression and reload them
5871 too, in addition to this operation. Reloading all inner parts in
5872 addition to this one shouldn't be necessary, but at this point,
5873 we don't know if we can possibly omit any part that *can* be
5874 reloaded. Targets that are better off reloading just either part
5875 (or perhaps even a different part of an outer expression), should
5876 define LEGITIMIZE_RELOAD_ADDRESS. */
5877 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5878 context, code, SCRATCH, &XEXP (x, 0), opnum,
5879 type, ind_levels, insn);
5880 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5881 context_reg_class,
5882 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5883 return 1;
5884
5885 case MEM:
5886 /* This is probably the result of a substitution, by eliminate_regs, of
5887 an equivalent address for a pseudo that was not allocated to a hard
5888 register. Verify that the specified address is valid and reload it
5889 into a register.
5890
5891 Since we know we are going to reload this item, don't decrement for
5892 the indirection level.
5893
5894 Note that this is actually conservative: it would be slightly more
5895 efficient to use the value of SPILL_INDIRECT_LEVELS from
5896 reload1.c here. */
5897
5898 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5899 opnum, ADDR_TYPE (type), ind_levels, insn);
5900 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5901 context_reg_class,
5902 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5903 return 1;
5904
5905 case REG:
5906 {
5907 int regno = REGNO (x);
5908
5909 if (reg_equiv_constant[regno] != 0)
5910 {
5911 find_reloads_address_part (reg_equiv_constant[regno], loc,
5912 context_reg_class,
5913 GET_MODE (x), opnum, type, ind_levels);
5914 return 1;
5915 }
5916
5917 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5918 that feeds this insn. */
5919 if (reg_equiv_mem[regno] != 0)
5920 {
5921 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5922 context_reg_class,
5923 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5924 return 1;
5925 }
5926 #endif
5927
5928 if (reg_equiv_memory_loc[regno]
5929 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5930 {
5931 rtx tem = make_memloc (x, regno);
5932 if (reg_equiv_address[regno] != 0
5933 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5934 {
5935 x = tem;
5936 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5937 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5938 ind_levels, insn);
5939 if (!rtx_equal_p (x, tem))
5940 push_reg_equiv_alt_mem (regno, x);
5941 }
5942 }
5943
5944 if (reg_renumber[regno] >= 0)
5945 regno = reg_renumber[regno];
5946
5947 if (regno >= FIRST_PSEUDO_REGISTER
5948 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5949 index_code))
5950 {
5951 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5952 context_reg_class,
5953 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5954 return 1;
5955 }
5956
5957 /* If a register appearing in an address is the subject of a CLOBBER
5958 in this insn, reload it into some other register to be safe.
5959 The CLOBBER is supposed to make the register unavailable
5960 from before this insn to after it. */
5961 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5962 {
5963 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5964 context_reg_class,
5965 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5966 return 1;
5967 }
5968 }
5969 return 0;
5970
5971 case SUBREG:
5972 if (REG_P (SUBREG_REG (x)))
5973 {
5974 /* If this is a SUBREG of a hard register and the resulting register
5975 is of the wrong class, reload the whole SUBREG. This avoids
5976 needless copies if SUBREG_REG is multi-word. */
5977 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5978 {
5979 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5980
5981 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5982 index_code))
5983 {
5984 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5985 context_reg_class,
5986 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5987 return 1;
5988 }
5989 }
5990 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5991 is larger than the class size, then reload the whole SUBREG. */
5992 else
5993 {
5994 enum reg_class rclass = context_reg_class;
5995 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5996 > reg_class_size[rclass])
5997 {
5998 x = find_reloads_subreg_address (x, 0, opnum,
5999 ADDR_TYPE (type),
6000 ind_levels, insn);
6001 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6002 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6003 return 1;
6004 }
6005 }
6006 }
6007 break;
6008
6009 default:
6010 break;
6011 }
6012
6013 {
6014 const char *fmt = GET_RTX_FORMAT (code);
6015 int i;
6016
6017 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6018 {
6019 if (fmt[i] == 'e')
6020 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6021 we get here. */
6022 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6023 &XEXP (x, i), opnum, type, ind_levels, insn);
6024 }
6025 }
6026
6027 #undef REG_OK_FOR_CONTEXT
6028 return 0;
6029 }
6030 \f
6031 /* X, which is found at *LOC, is a part of an address that needs to be
6032 reloaded into a register of class RCLASS. If X is a constant, or if
6033 X is a PLUS that contains a constant, check that the constant is a
6034 legitimate operand and that we are supposed to be able to load
6035 it into the register.
6036
6037 If not, force the constant into memory and reload the MEM instead.
6038
6039 MODE is the mode to use, in case X is an integer constant.
6040
6041 OPNUM and TYPE describe the purpose of any reloads made.
6042
6043 IND_LEVELS says how many levels of indirect addressing this machine
6044 supports. */
6045
6046 static void
6047 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6048 enum machine_mode mode, int opnum,
6049 enum reload_type type, int ind_levels)
6050 {
6051 if (CONSTANT_P (x)
6052 && (! LEGITIMATE_CONSTANT_P (x)
6053 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6054 {
6055 x = force_const_mem (mode, x);
6056 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6057 opnum, type, ind_levels, 0);
6058 }
6059
6060 else if (GET_CODE (x) == PLUS
6061 && CONSTANT_P (XEXP (x, 1))
6062 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6063 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6064 == NO_REGS))
6065 {
6066 rtx tem;
6067
6068 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6069 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6070 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6071 opnum, type, ind_levels, 0);
6072 }
6073
6074 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6075 mode, VOIDmode, 0, 0, opnum, type);
6076 }
6077 \f
6078 /* X, a subreg of a pseudo, is a part of an address that needs to be
6079 reloaded.
6080
6081 If the pseudo is equivalent to a memory location that cannot be directly
6082 addressed, make the necessary address reloads.
6083
6084 If address reloads have been necessary, or if the address is changed
6085 by register elimination, return the rtx of the memory location;
6086 otherwise, return X.
6087
6088 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6089 memory location.
6090
6091 OPNUM and TYPE identify the purpose of the reload.
6092
6093 IND_LEVELS says how many levels of indirect addressing are
6094 supported at this point in the address.
6095
6096 INSN, if nonzero, is the insn in which we do the reload. It is used
6097 to determine where to put USEs for pseudos that we have to replace with
6098 stack slots. */
6099
6100 static rtx
6101 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6102 enum reload_type type, int ind_levels, rtx insn)
6103 {
6104 int regno = REGNO (SUBREG_REG (x));
6105
6106 if (reg_equiv_memory_loc[regno])
6107 {
6108 /* If the address is not directly addressable, or if the address is not
6109 offsettable, then it must be replaced. */
6110 if (! force_replace
6111 && (reg_equiv_address[regno]
6112 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6113 force_replace = 1;
6114
6115 if (force_replace || num_not_at_initial_offset)
6116 {
6117 rtx tem = make_memloc (SUBREG_REG (x), regno);
6118
6119 /* If the address changes because of register elimination, then
6120 it must be replaced. */
6121 if (force_replace
6122 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6123 {
6124 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6125 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6126 int offset;
6127 rtx orig = tem;
6128 int reloaded;
6129
6130 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6131 hold the correct (negative) byte offset. */
6132 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6133 offset = inner_size - outer_size;
6134 else
6135 offset = SUBREG_BYTE (x);
6136
6137 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6138 PUT_MODE (tem, GET_MODE (x));
6139 if (MEM_OFFSET (tem))
6140 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6141 if (MEM_SIZE (tem)
6142 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6143 set_mem_size (tem, GEN_INT (outer_size));
6144
6145 /* If this was a paradoxical subreg that we replaced, the
6146 resulting memory must be sufficiently aligned to allow
6147 us to widen the mode of the memory. */
6148 if (outer_size > inner_size)
6149 {
6150 rtx base;
6151
6152 base = XEXP (tem, 0);
6153 if (GET_CODE (base) == PLUS)
6154 {
6155 if (CONST_INT_P (XEXP (base, 1))
6156 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6157 return x;
6158 base = XEXP (base, 0);
6159 }
6160 if (!REG_P (base)
6161 || (REGNO_POINTER_ALIGN (REGNO (base))
6162 < outer_size * BITS_PER_UNIT))
6163 return x;
6164 }
6165
6166 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6167 XEXP (tem, 0), &XEXP (tem, 0),
6168 opnum, type, ind_levels, insn);
6169 /* ??? Do we need to handle nonzero offsets somehow? */
6170 if (!offset && !rtx_equal_p (tem, orig))
6171 push_reg_equiv_alt_mem (regno, tem);
6172
6173 /* For some processors an address may be valid in the
6174 original mode but not in a smaller mode. For
6175 example, ARM accepts a scaled index register in
6176 SImode but not in HImode. Note that this is only
6177 a problem if the address in reg_equiv_mem is already
6178 invalid in the new mode; other cases would be fixed
6179 by find_reloads_address as usual.
6180
6181 ??? We attempt to handle such cases here by doing an
6182 additional reload of the full address after the
6183 usual processing by find_reloads_address. Note that
6184 this may not work in the general case, but it seems
6185 to cover the cases where this situation currently
6186 occurs. A more general fix might be to reload the
6187 *value* instead of the address, but this would not
6188 be expected by the callers of this routine as-is.
6189
6190 If find_reloads_address already completed replaced
6191 the address, there is nothing further to do. */
6192 if (reloaded == 0
6193 && reg_equiv_mem[regno] != 0
6194 && !strict_memory_address_addr_space_p
6195 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
6196 MEM_ADDR_SPACE (reg_equiv_mem[regno])))
6197 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6198 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6199 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6200 opnum, type);
6201
6202 /* If this is not a toplevel operand, find_reloads doesn't see
6203 this substitution. We have to emit a USE of the pseudo so
6204 that delete_output_reload can see it. */
6205 if (replace_reloads && recog_data.operand[opnum] != x)
6206 /* We mark the USE with QImode so that we recognize it
6207 as one that can be safely deleted at the end of
6208 reload. */
6209 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6210 SUBREG_REG (x)),
6211 insn), QImode);
6212 x = tem;
6213 }
6214 }
6215 }
6216 return x;
6217 }
6218 \f
6219 /* Substitute into the current INSN the registers into which we have reloaded
6220 the things that need reloading. The array `replacements'
6221 contains the locations of all pointers that must be changed
6222 and says what to replace them with.
6223
6224 Return the rtx that X translates into; usually X, but modified. */
6225
6226 void
6227 subst_reloads (rtx insn)
6228 {
6229 int i;
6230
6231 for (i = 0; i < n_replacements; i++)
6232 {
6233 struct replacement *r = &replacements[i];
6234 rtx reloadreg = rld[r->what].reg_rtx;
6235 if (reloadreg)
6236 {
6237 #ifdef DEBUG_RELOAD
6238 /* This checking takes a very long time on some platforms
6239 causing the gcc.c-torture/compile/limits-fnargs.c test
6240 to time out during testing. See PR 31850.
6241
6242 Internal consistency test. Check that we don't modify
6243 anything in the equivalence arrays. Whenever something from
6244 those arrays needs to be reloaded, it must be unshared before
6245 being substituted into; the equivalence must not be modified.
6246 Otherwise, if the equivalence is used after that, it will
6247 have been modified, and the thing substituted (probably a
6248 register) is likely overwritten and not a usable equivalence. */
6249 int check_regno;
6250
6251 for (check_regno = 0; check_regno < max_regno; check_regno++)
6252 {
6253 #define CHECK_MODF(ARRAY) \
6254 gcc_assert (!ARRAY[check_regno] \
6255 || !loc_mentioned_in_p (r->where, \
6256 ARRAY[check_regno]))
6257
6258 CHECK_MODF (reg_equiv_constant);
6259 CHECK_MODF (reg_equiv_memory_loc);
6260 CHECK_MODF (reg_equiv_address);
6261 CHECK_MODF (reg_equiv_mem);
6262 #undef CHECK_MODF
6263 }
6264 #endif /* DEBUG_RELOAD */
6265
6266 /* If we're replacing a LABEL_REF with a register, there must
6267 already be an indication (to e.g. flow) which label this
6268 register refers to. */
6269 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6270 || !JUMP_P (insn)
6271 || find_reg_note (insn,
6272 REG_LABEL_OPERAND,
6273 XEXP (*r->where, 0))
6274 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6275
6276 /* Encapsulate RELOADREG so its machine mode matches what
6277 used to be there. Note that gen_lowpart_common will
6278 do the wrong thing if RELOADREG is multi-word. RELOADREG
6279 will always be a REG here. */
6280 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6281 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6282
6283 /* If we are putting this into a SUBREG and RELOADREG is a
6284 SUBREG, we would be making nested SUBREGs, so we have to fix
6285 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6286
6287 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6288 {
6289 if (GET_MODE (*r->subreg_loc)
6290 == GET_MODE (SUBREG_REG (reloadreg)))
6291 *r->subreg_loc = SUBREG_REG (reloadreg);
6292 else
6293 {
6294 int final_offset =
6295 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6296
6297 /* When working with SUBREGs the rule is that the byte
6298 offset must be a multiple of the SUBREG's mode. */
6299 final_offset = (final_offset /
6300 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6301 final_offset = (final_offset *
6302 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6303
6304 *r->where = SUBREG_REG (reloadreg);
6305 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6306 }
6307 }
6308 else
6309 *r->where = reloadreg;
6310 }
6311 /* If reload got no reg and isn't optional, something's wrong. */
6312 else
6313 gcc_assert (rld[r->what].optional);
6314 }
6315 }
6316 \f
6317 /* Make a copy of any replacements being done into X and move those
6318 copies to locations in Y, a copy of X. */
6319
6320 void
6321 copy_replacements (rtx x, rtx y)
6322 {
6323 /* We can't support X being a SUBREG because we might then need to know its
6324 location if something inside it was replaced. */
6325 gcc_assert (GET_CODE (x) != SUBREG);
6326
6327 copy_replacements_1 (&x, &y, n_replacements);
6328 }
6329
6330 static void
6331 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6332 {
6333 int i, j;
6334 rtx x, y;
6335 struct replacement *r;
6336 enum rtx_code code;
6337 const char *fmt;
6338
6339 for (j = 0; j < orig_replacements; j++)
6340 {
6341 if (replacements[j].subreg_loc == px)
6342 {
6343 r = &replacements[n_replacements++];
6344 r->where = replacements[j].where;
6345 r->subreg_loc = py;
6346 r->what = replacements[j].what;
6347 r->mode = replacements[j].mode;
6348 }
6349 else if (replacements[j].where == px)
6350 {
6351 r = &replacements[n_replacements++];
6352 r->where = py;
6353 r->subreg_loc = 0;
6354 r->what = replacements[j].what;
6355 r->mode = replacements[j].mode;
6356 }
6357 }
6358
6359 x = *px;
6360 y = *py;
6361 code = GET_CODE (x);
6362 fmt = GET_RTX_FORMAT (code);
6363
6364 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6365 {
6366 if (fmt[i] == 'e')
6367 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6368 else if (fmt[i] == 'E')
6369 for (j = XVECLEN (x, i); --j >= 0; )
6370 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6371 orig_replacements);
6372 }
6373 }
6374
6375 /* Change any replacements being done to *X to be done to *Y. */
6376
6377 void
6378 move_replacements (rtx *x, rtx *y)
6379 {
6380 int i;
6381
6382 for (i = 0; i < n_replacements; i++)
6383 if (replacements[i].subreg_loc == x)
6384 replacements[i].subreg_loc = y;
6385 else if (replacements[i].where == x)
6386 {
6387 replacements[i].where = y;
6388 replacements[i].subreg_loc = 0;
6389 }
6390 }
6391 \f
6392 /* If LOC was scheduled to be replaced by something, return the replacement.
6393 Otherwise, return *LOC. */
6394
6395 rtx
6396 find_replacement (rtx *loc)
6397 {
6398 struct replacement *r;
6399
6400 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6401 {
6402 rtx reloadreg = rld[r->what].reg_rtx;
6403
6404 if (reloadreg && r->where == loc)
6405 {
6406 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6407 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6408
6409 return reloadreg;
6410 }
6411 else if (reloadreg && r->subreg_loc == loc)
6412 {
6413 /* RELOADREG must be either a REG or a SUBREG.
6414
6415 ??? Is it actually still ever a SUBREG? If so, why? */
6416
6417 if (REG_P (reloadreg))
6418 return gen_rtx_REG (GET_MODE (*loc),
6419 (REGNO (reloadreg) +
6420 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6421 GET_MODE (SUBREG_REG (*loc)),
6422 SUBREG_BYTE (*loc),
6423 GET_MODE (*loc))));
6424 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6425 return reloadreg;
6426 else
6427 {
6428 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6429
6430 /* When working with SUBREGs the rule is that the byte
6431 offset must be a multiple of the SUBREG's mode. */
6432 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6433 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6434 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6435 final_offset);
6436 }
6437 }
6438 }
6439
6440 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6441 what's inside and make a new rtl if so. */
6442 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6443 || GET_CODE (*loc) == MULT)
6444 {
6445 rtx x = find_replacement (&XEXP (*loc, 0));
6446 rtx y = find_replacement (&XEXP (*loc, 1));
6447
6448 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6449 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6450 }
6451
6452 return *loc;
6453 }
6454 \f
6455 /* Return nonzero if register in range [REGNO, ENDREGNO)
6456 appears either explicitly or implicitly in X
6457 other than being stored into (except for earlyclobber operands).
6458
6459 References contained within the substructure at LOC do not count.
6460 LOC may be zero, meaning don't ignore anything.
6461
6462 This is similar to refers_to_regno_p in rtlanal.c except that we
6463 look at equivalences for pseudos that didn't get hard registers. */
6464
6465 static int
6466 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6467 rtx x, rtx *loc)
6468 {
6469 int i;
6470 unsigned int r;
6471 RTX_CODE code;
6472 const char *fmt;
6473
6474 if (x == 0)
6475 return 0;
6476
6477 repeat:
6478 code = GET_CODE (x);
6479
6480 switch (code)
6481 {
6482 case REG:
6483 r = REGNO (x);
6484
6485 /* If this is a pseudo, a hard register must not have been allocated.
6486 X must therefore either be a constant or be in memory. */
6487 if (r >= FIRST_PSEUDO_REGISTER)
6488 {
6489 if (reg_equiv_memory_loc[r])
6490 return refers_to_regno_for_reload_p (regno, endregno,
6491 reg_equiv_memory_loc[r],
6492 (rtx*) 0);
6493
6494 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6495 return 0;
6496 }
6497
6498 return (endregno > r
6499 && regno < r + (r < FIRST_PSEUDO_REGISTER
6500 ? hard_regno_nregs[r][GET_MODE (x)]
6501 : 1));
6502
6503 case SUBREG:
6504 /* If this is a SUBREG of a hard reg, we can see exactly which
6505 registers are being modified. Otherwise, handle normally. */
6506 if (REG_P (SUBREG_REG (x))
6507 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6508 {
6509 unsigned int inner_regno = subreg_regno (x);
6510 unsigned int inner_endregno
6511 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6512 ? subreg_nregs (x) : 1);
6513
6514 return endregno > inner_regno && regno < inner_endregno;
6515 }
6516 break;
6517
6518 case CLOBBER:
6519 case SET:
6520 if (&SET_DEST (x) != loc
6521 /* Note setting a SUBREG counts as referring to the REG it is in for
6522 a pseudo but not for hard registers since we can
6523 treat each word individually. */
6524 && ((GET_CODE (SET_DEST (x)) == SUBREG
6525 && loc != &SUBREG_REG (SET_DEST (x))
6526 && REG_P (SUBREG_REG (SET_DEST (x)))
6527 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6528 && refers_to_regno_for_reload_p (regno, endregno,
6529 SUBREG_REG (SET_DEST (x)),
6530 loc))
6531 /* If the output is an earlyclobber operand, this is
6532 a conflict. */
6533 || ((!REG_P (SET_DEST (x))
6534 || earlyclobber_operand_p (SET_DEST (x)))
6535 && refers_to_regno_for_reload_p (regno, endregno,
6536 SET_DEST (x), loc))))
6537 return 1;
6538
6539 if (code == CLOBBER || loc == &SET_SRC (x))
6540 return 0;
6541 x = SET_SRC (x);
6542 goto repeat;
6543
6544 default:
6545 break;
6546 }
6547
6548 /* X does not match, so try its subexpressions. */
6549
6550 fmt = GET_RTX_FORMAT (code);
6551 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6552 {
6553 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6554 {
6555 if (i == 0)
6556 {
6557 x = XEXP (x, 0);
6558 goto repeat;
6559 }
6560 else
6561 if (refers_to_regno_for_reload_p (regno, endregno,
6562 XEXP (x, i), loc))
6563 return 1;
6564 }
6565 else if (fmt[i] == 'E')
6566 {
6567 int j;
6568 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6569 if (loc != &XVECEXP (x, i, j)
6570 && refers_to_regno_for_reload_p (regno, endregno,
6571 XVECEXP (x, i, j), loc))
6572 return 1;
6573 }
6574 }
6575 return 0;
6576 }
6577
6578 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6579 we check if any register number in X conflicts with the relevant register
6580 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6581 contains a MEM (we don't bother checking for memory addresses that can't
6582 conflict because we expect this to be a rare case.
6583
6584 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6585 that we look at equivalences for pseudos that didn't get hard registers. */
6586
6587 int
6588 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6589 {
6590 int regno, endregno;
6591
6592 /* Overly conservative. */
6593 if (GET_CODE (x) == STRICT_LOW_PART
6594 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6595 x = XEXP (x, 0);
6596
6597 /* If either argument is a constant, then modifying X can not affect IN. */
6598 if (CONSTANT_P (x) || CONSTANT_P (in))
6599 return 0;
6600 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6601 return refers_to_mem_for_reload_p (in);
6602 else if (GET_CODE (x) == SUBREG)
6603 {
6604 regno = REGNO (SUBREG_REG (x));
6605 if (regno < FIRST_PSEUDO_REGISTER)
6606 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6607 GET_MODE (SUBREG_REG (x)),
6608 SUBREG_BYTE (x),
6609 GET_MODE (x));
6610 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6611 ? subreg_nregs (x) : 1);
6612
6613 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6614 }
6615 else if (REG_P (x))
6616 {
6617 regno = REGNO (x);
6618
6619 /* If this is a pseudo, it must not have been assigned a hard register.
6620 Therefore, it must either be in memory or be a constant. */
6621
6622 if (regno >= FIRST_PSEUDO_REGISTER)
6623 {
6624 if (reg_equiv_memory_loc[regno])
6625 return refers_to_mem_for_reload_p (in);
6626 gcc_assert (reg_equiv_constant[regno]);
6627 return 0;
6628 }
6629
6630 endregno = END_HARD_REGNO (x);
6631
6632 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6633 }
6634 else if (MEM_P (x))
6635 return refers_to_mem_for_reload_p (in);
6636 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6637 || GET_CODE (x) == CC0)
6638 return reg_mentioned_p (x, in);
6639 else
6640 {
6641 gcc_assert (GET_CODE (x) == PLUS);
6642
6643 /* We actually want to know if X is mentioned somewhere inside IN.
6644 We must not say that (plus (sp) (const_int 124)) is in
6645 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6646 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6647 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6648 while (MEM_P (in))
6649 in = XEXP (in, 0);
6650 if (REG_P (in))
6651 return 0;
6652 else if (GET_CODE (in) == PLUS)
6653 return (rtx_equal_p (x, in)
6654 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6655 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6656 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6657 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6658 }
6659
6660 gcc_unreachable ();
6661 }
6662
6663 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6664 registers. */
6665
6666 static int
6667 refers_to_mem_for_reload_p (rtx x)
6668 {
6669 const char *fmt;
6670 int i;
6671
6672 if (MEM_P (x))
6673 return 1;
6674
6675 if (REG_P (x))
6676 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6677 && reg_equiv_memory_loc[REGNO (x)]);
6678
6679 fmt = GET_RTX_FORMAT (GET_CODE (x));
6680 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6681 if (fmt[i] == 'e'
6682 && (MEM_P (XEXP (x, i))
6683 || refers_to_mem_for_reload_p (XEXP (x, i))))
6684 return 1;
6685
6686 return 0;
6687 }
6688 \f
6689 /* Check the insns before INSN to see if there is a suitable register
6690 containing the same value as GOAL.
6691 If OTHER is -1, look for a register in class RCLASS.
6692 Otherwise, just see if register number OTHER shares GOAL's value.
6693
6694 Return an rtx for the register found, or zero if none is found.
6695
6696 If RELOAD_REG_P is (short *)1,
6697 we reject any hard reg that appears in reload_reg_rtx
6698 because such a hard reg is also needed coming into this insn.
6699
6700 If RELOAD_REG_P is any other nonzero value,
6701 it is a vector indexed by hard reg number
6702 and we reject any hard reg whose element in the vector is nonnegative
6703 as well as any that appears in reload_reg_rtx.
6704
6705 If GOAL is zero, then GOALREG is a register number; we look
6706 for an equivalent for that register.
6707
6708 MODE is the machine mode of the value we want an equivalence for.
6709 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6710
6711 This function is used by jump.c as well as in the reload pass.
6712
6713 If GOAL is the sum of the stack pointer and a constant, we treat it
6714 as if it were a constant except that sp is required to be unchanging. */
6715
6716 rtx
6717 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6718 short *reload_reg_p, int goalreg, enum machine_mode mode)
6719 {
6720 rtx p = insn;
6721 rtx goaltry, valtry, value, where;
6722 rtx pat;
6723 int regno = -1;
6724 int valueno;
6725 int goal_mem = 0;
6726 int goal_const = 0;
6727 int goal_mem_addr_varies = 0;
6728 int need_stable_sp = 0;
6729 int nregs;
6730 int valuenregs;
6731 int num = 0;
6732
6733 if (goal == 0)
6734 regno = goalreg;
6735 else if (REG_P (goal))
6736 regno = REGNO (goal);
6737 else if (MEM_P (goal))
6738 {
6739 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6740 if (MEM_VOLATILE_P (goal))
6741 return 0;
6742 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6743 return 0;
6744 /* An address with side effects must be reexecuted. */
6745 switch (code)
6746 {
6747 case POST_INC:
6748 case PRE_INC:
6749 case POST_DEC:
6750 case PRE_DEC:
6751 case POST_MODIFY:
6752 case PRE_MODIFY:
6753 return 0;
6754 default:
6755 break;
6756 }
6757 goal_mem = 1;
6758 }
6759 else if (CONSTANT_P (goal))
6760 goal_const = 1;
6761 else if (GET_CODE (goal) == PLUS
6762 && XEXP (goal, 0) == stack_pointer_rtx
6763 && CONSTANT_P (XEXP (goal, 1)))
6764 goal_const = need_stable_sp = 1;
6765 else if (GET_CODE (goal) == PLUS
6766 && XEXP (goal, 0) == frame_pointer_rtx
6767 && CONSTANT_P (XEXP (goal, 1)))
6768 goal_const = 1;
6769 else
6770 return 0;
6771
6772 num = 0;
6773 /* Scan insns back from INSN, looking for one that copies
6774 a value into or out of GOAL.
6775 Stop and give up if we reach a label. */
6776
6777 while (1)
6778 {
6779 p = PREV_INSN (p);
6780 if (p && DEBUG_INSN_P (p))
6781 continue;
6782 num++;
6783 if (p == 0 || LABEL_P (p)
6784 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6785 return 0;
6786
6787 if (NONJUMP_INSN_P (p)
6788 /* If we don't want spill regs ... */
6789 && (! (reload_reg_p != 0
6790 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6791 /* ... then ignore insns introduced by reload; they aren't
6792 useful and can cause results in reload_as_needed to be
6793 different from what they were when calculating the need for
6794 spills. If we notice an input-reload insn here, we will
6795 reject it below, but it might hide a usable equivalent.
6796 That makes bad code. It may even fail: perhaps no reg was
6797 spilled for this insn because it was assumed we would find
6798 that equivalent. */
6799 || INSN_UID (p) < reload_first_uid))
6800 {
6801 rtx tem;
6802 pat = single_set (p);
6803
6804 /* First check for something that sets some reg equal to GOAL. */
6805 if (pat != 0
6806 && ((regno >= 0
6807 && true_regnum (SET_SRC (pat)) == regno
6808 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6809 ||
6810 (regno >= 0
6811 && true_regnum (SET_DEST (pat)) == regno
6812 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6813 ||
6814 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6815 /* When looking for stack pointer + const,
6816 make sure we don't use a stack adjust. */
6817 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6818 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6819 || (goal_mem
6820 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6821 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6822 || (goal_mem
6823 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6824 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6825 /* If we are looking for a constant,
6826 and something equivalent to that constant was copied
6827 into a reg, we can use that reg. */
6828 || (goal_const && REG_NOTES (p) != 0
6829 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6830 && ((rtx_equal_p (XEXP (tem, 0), goal)
6831 && (valueno
6832 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6833 || (REG_P (SET_DEST (pat))
6834 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6835 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6836 && CONST_INT_P (goal)
6837 && 0 != (goaltry
6838 = operand_subword (XEXP (tem, 0), 0, 0,
6839 VOIDmode))
6840 && rtx_equal_p (goal, goaltry)
6841 && (valtry
6842 = operand_subword (SET_DEST (pat), 0, 0,
6843 VOIDmode))
6844 && (valueno = true_regnum (valtry)) >= 0)))
6845 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6846 NULL_RTX))
6847 && REG_P (SET_DEST (pat))
6848 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6849 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6850 && CONST_INT_P (goal)
6851 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6852 VOIDmode))
6853 && rtx_equal_p (goal, goaltry)
6854 && (valtry
6855 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6856 && (valueno = true_regnum (valtry)) >= 0)))
6857 {
6858 if (other >= 0)
6859 {
6860 if (valueno != other)
6861 continue;
6862 }
6863 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6864 continue;
6865 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6866 mode, valueno))
6867 continue;
6868 value = valtry;
6869 where = p;
6870 break;
6871 }
6872 }
6873 }
6874
6875 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6876 (or copying VALUE into GOAL, if GOAL is also a register).
6877 Now verify that VALUE is really valid. */
6878
6879 /* VALUENO is the register number of VALUE; a hard register. */
6880
6881 /* Don't try to re-use something that is killed in this insn. We want
6882 to be able to trust REG_UNUSED notes. */
6883 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6884 return 0;
6885
6886 /* If we propose to get the value from the stack pointer or if GOAL is
6887 a MEM based on the stack pointer, we need a stable SP. */
6888 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6889 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6890 goal)))
6891 need_stable_sp = 1;
6892
6893 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6894 if (GET_MODE (value) != mode)
6895 return 0;
6896
6897 /* Reject VALUE if it was loaded from GOAL
6898 and is also a register that appears in the address of GOAL. */
6899
6900 if (goal_mem && value == SET_DEST (single_set (where))
6901 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6902 goal, (rtx*) 0))
6903 return 0;
6904
6905 /* Reject registers that overlap GOAL. */
6906
6907 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6908 nregs = hard_regno_nregs[regno][mode];
6909 else
6910 nregs = 1;
6911 valuenregs = hard_regno_nregs[valueno][mode];
6912
6913 if (!goal_mem && !goal_const
6914 && regno + nregs > valueno && regno < valueno + valuenregs)
6915 return 0;
6916
6917 /* Reject VALUE if it is one of the regs reserved for reloads.
6918 Reload1 knows how to reuse them anyway, and it would get
6919 confused if we allocated one without its knowledge.
6920 (Now that insns introduced by reload are ignored above,
6921 this case shouldn't happen, but I'm not positive.) */
6922
6923 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6924 {
6925 int i;
6926 for (i = 0; i < valuenregs; ++i)
6927 if (reload_reg_p[valueno + i] >= 0)
6928 return 0;
6929 }
6930
6931 /* Reject VALUE if it is a register being used for an input reload
6932 even if it is not one of those reserved. */
6933
6934 if (reload_reg_p != 0)
6935 {
6936 int i;
6937 for (i = 0; i < n_reloads; i++)
6938 if (rld[i].reg_rtx != 0 && rld[i].in)
6939 {
6940 int regno1 = REGNO (rld[i].reg_rtx);
6941 int nregs1 = hard_regno_nregs[regno1]
6942 [GET_MODE (rld[i].reg_rtx)];
6943 if (regno1 < valueno + valuenregs
6944 && regno1 + nregs1 > valueno)
6945 return 0;
6946 }
6947 }
6948
6949 if (goal_mem)
6950 /* We must treat frame pointer as varying here,
6951 since it can vary--in a nonlocal goto as generated by expand_goto. */
6952 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6953
6954 /* Now verify that the values of GOAL and VALUE remain unaltered
6955 until INSN is reached. */
6956
6957 p = insn;
6958 while (1)
6959 {
6960 p = PREV_INSN (p);
6961 if (p == where)
6962 return value;
6963
6964 /* Don't trust the conversion past a function call
6965 if either of the two is in a call-clobbered register, or memory. */
6966 if (CALL_P (p))
6967 {
6968 int i;
6969
6970 if (goal_mem || need_stable_sp)
6971 return 0;
6972
6973 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6974 for (i = 0; i < nregs; ++i)
6975 if (call_used_regs[regno + i]
6976 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6977 return 0;
6978
6979 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6980 for (i = 0; i < valuenregs; ++i)
6981 if (call_used_regs[valueno + i]
6982 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6983 return 0;
6984 }
6985
6986 if (INSN_P (p))
6987 {
6988 pat = PATTERN (p);
6989
6990 /* Watch out for unspec_volatile, and volatile asms. */
6991 if (volatile_insn_p (pat))
6992 return 0;
6993
6994 /* If this insn P stores in either GOAL or VALUE, return 0.
6995 If GOAL is a memory ref and this insn writes memory, return 0.
6996 If GOAL is a memory ref and its address is not constant,
6997 and this insn P changes a register used in GOAL, return 0. */
6998
6999 if (GET_CODE (pat) == COND_EXEC)
7000 pat = COND_EXEC_CODE (pat);
7001 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7002 {
7003 rtx dest = SET_DEST (pat);
7004 while (GET_CODE (dest) == SUBREG
7005 || GET_CODE (dest) == ZERO_EXTRACT
7006 || GET_CODE (dest) == STRICT_LOW_PART)
7007 dest = XEXP (dest, 0);
7008 if (REG_P (dest))
7009 {
7010 int xregno = REGNO (dest);
7011 int xnregs;
7012 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7013 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7014 else
7015 xnregs = 1;
7016 if (xregno < regno + nregs && xregno + xnregs > regno)
7017 return 0;
7018 if (xregno < valueno + valuenregs
7019 && xregno + xnregs > valueno)
7020 return 0;
7021 if (goal_mem_addr_varies
7022 && reg_overlap_mentioned_for_reload_p (dest, goal))
7023 return 0;
7024 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7025 return 0;
7026 }
7027 else if (goal_mem && MEM_P (dest)
7028 && ! push_operand (dest, GET_MODE (dest)))
7029 return 0;
7030 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7031 && reg_equiv_memory_loc[regno] != 0)
7032 return 0;
7033 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7034 return 0;
7035 }
7036 else if (GET_CODE (pat) == PARALLEL)
7037 {
7038 int i;
7039 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7040 {
7041 rtx v1 = XVECEXP (pat, 0, i);
7042 if (GET_CODE (v1) == COND_EXEC)
7043 v1 = COND_EXEC_CODE (v1);
7044 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7045 {
7046 rtx dest = SET_DEST (v1);
7047 while (GET_CODE (dest) == SUBREG
7048 || GET_CODE (dest) == ZERO_EXTRACT
7049 || GET_CODE (dest) == STRICT_LOW_PART)
7050 dest = XEXP (dest, 0);
7051 if (REG_P (dest))
7052 {
7053 int xregno = REGNO (dest);
7054 int xnregs;
7055 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7056 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7057 else
7058 xnregs = 1;
7059 if (xregno < regno + nregs
7060 && xregno + xnregs > regno)
7061 return 0;
7062 if (xregno < valueno + valuenregs
7063 && xregno + xnregs > valueno)
7064 return 0;
7065 if (goal_mem_addr_varies
7066 && reg_overlap_mentioned_for_reload_p (dest,
7067 goal))
7068 return 0;
7069 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7070 return 0;
7071 }
7072 else if (goal_mem && MEM_P (dest)
7073 && ! push_operand (dest, GET_MODE (dest)))
7074 return 0;
7075 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7076 && reg_equiv_memory_loc[regno] != 0)
7077 return 0;
7078 else if (need_stable_sp
7079 && push_operand (dest, GET_MODE (dest)))
7080 return 0;
7081 }
7082 }
7083 }
7084
7085 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7086 {
7087 rtx link;
7088
7089 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7090 link = XEXP (link, 1))
7091 {
7092 pat = XEXP (link, 0);
7093 if (GET_CODE (pat) == CLOBBER)
7094 {
7095 rtx dest = SET_DEST (pat);
7096
7097 if (REG_P (dest))
7098 {
7099 int xregno = REGNO (dest);
7100 int xnregs
7101 = hard_regno_nregs[xregno][GET_MODE (dest)];
7102
7103 if (xregno < regno + nregs
7104 && xregno + xnregs > regno)
7105 return 0;
7106 else if (xregno < valueno + valuenregs
7107 && xregno + xnregs > valueno)
7108 return 0;
7109 else if (goal_mem_addr_varies
7110 && reg_overlap_mentioned_for_reload_p (dest,
7111 goal))
7112 return 0;
7113 }
7114
7115 else if (goal_mem && MEM_P (dest)
7116 && ! push_operand (dest, GET_MODE (dest)))
7117 return 0;
7118 else if (need_stable_sp
7119 && push_operand (dest, GET_MODE (dest)))
7120 return 0;
7121 }
7122 }
7123 }
7124
7125 #ifdef AUTO_INC_DEC
7126 /* If this insn auto-increments or auto-decrements
7127 either regno or valueno, return 0 now.
7128 If GOAL is a memory ref and its address is not constant,
7129 and this insn P increments a register used in GOAL, return 0. */
7130 {
7131 rtx link;
7132
7133 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7134 if (REG_NOTE_KIND (link) == REG_INC
7135 && REG_P (XEXP (link, 0)))
7136 {
7137 int incno = REGNO (XEXP (link, 0));
7138 if (incno < regno + nregs && incno >= regno)
7139 return 0;
7140 if (incno < valueno + valuenregs && incno >= valueno)
7141 return 0;
7142 if (goal_mem_addr_varies
7143 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7144 goal))
7145 return 0;
7146 }
7147 }
7148 #endif
7149 }
7150 }
7151 }
7152 \f
7153 /* Find a place where INCED appears in an increment or decrement operator
7154 within X, and return the amount INCED is incremented or decremented by.
7155 The value is always positive. */
7156
7157 static int
7158 find_inc_amount (rtx x, rtx inced)
7159 {
7160 enum rtx_code code = GET_CODE (x);
7161 const char *fmt;
7162 int i;
7163
7164 if (code == MEM)
7165 {
7166 rtx addr = XEXP (x, 0);
7167 if ((GET_CODE (addr) == PRE_DEC
7168 || GET_CODE (addr) == POST_DEC
7169 || GET_CODE (addr) == PRE_INC
7170 || GET_CODE (addr) == POST_INC)
7171 && XEXP (addr, 0) == inced)
7172 return GET_MODE_SIZE (GET_MODE (x));
7173 else if ((GET_CODE (addr) == PRE_MODIFY
7174 || GET_CODE (addr) == POST_MODIFY)
7175 && GET_CODE (XEXP (addr, 1)) == PLUS
7176 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7177 && XEXP (addr, 0) == inced
7178 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7179 {
7180 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7181 return i < 0 ? -i : i;
7182 }
7183 }
7184
7185 fmt = GET_RTX_FORMAT (code);
7186 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7187 {
7188 if (fmt[i] == 'e')
7189 {
7190 int tem = find_inc_amount (XEXP (x, i), inced);
7191 if (tem != 0)
7192 return tem;
7193 }
7194 if (fmt[i] == 'E')
7195 {
7196 int j;
7197 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7198 {
7199 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7200 if (tem != 0)
7201 return tem;
7202 }
7203 }
7204 }
7205
7206 return 0;
7207 }
7208 \f
7209 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7210 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7211
7212 #ifdef AUTO_INC_DEC
7213 static int
7214 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7215 rtx insn)
7216 {
7217 rtx link;
7218
7219 gcc_assert (insn);
7220
7221 if (! INSN_P (insn))
7222 return 0;
7223
7224 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7225 if (REG_NOTE_KIND (link) == REG_INC)
7226 {
7227 unsigned int test = (int) REGNO (XEXP (link, 0));
7228 if (test >= regno && test < endregno)
7229 return 1;
7230 }
7231 return 0;
7232 }
7233 #else
7234
7235 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7236
7237 #endif
7238
7239 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7240 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7241 REG_INC. REGNO must refer to a hard register. */
7242
7243 int
7244 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7245 int sets)
7246 {
7247 unsigned int nregs, endregno;
7248
7249 /* regno must be a hard register. */
7250 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7251
7252 nregs = hard_regno_nregs[regno][mode];
7253 endregno = regno + nregs;
7254
7255 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7256 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7257 && REG_P (XEXP (PATTERN (insn), 0)))
7258 {
7259 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7260
7261 return test >= regno && test < endregno;
7262 }
7263
7264 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7265 return 1;
7266
7267 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7268 {
7269 int i = XVECLEN (PATTERN (insn), 0) - 1;
7270
7271 for (; i >= 0; i--)
7272 {
7273 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7274 if ((GET_CODE (elt) == CLOBBER
7275 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7276 && REG_P (XEXP (elt, 0)))
7277 {
7278 unsigned int test = REGNO (XEXP (elt, 0));
7279
7280 if (test >= regno && test < endregno)
7281 return 1;
7282 }
7283 if (sets == 2
7284 && reg_inc_found_and_valid_p (regno, endregno, elt))
7285 return 1;
7286 }
7287 }
7288
7289 return 0;
7290 }
7291
7292 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7293 rtx
7294 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7295 {
7296 int regno;
7297
7298 if (GET_MODE (reloadreg) == mode)
7299 return reloadreg;
7300
7301 regno = REGNO (reloadreg);
7302
7303 if (WORDS_BIG_ENDIAN)
7304 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7305 - (int) hard_regno_nregs[regno][mode];
7306
7307 return gen_rtx_REG (mode, regno);
7308 }
7309
7310 static const char *const reload_when_needed_name[] =
7311 {
7312 "RELOAD_FOR_INPUT",
7313 "RELOAD_FOR_OUTPUT",
7314 "RELOAD_FOR_INSN",
7315 "RELOAD_FOR_INPUT_ADDRESS",
7316 "RELOAD_FOR_INPADDR_ADDRESS",
7317 "RELOAD_FOR_OUTPUT_ADDRESS",
7318 "RELOAD_FOR_OUTADDR_ADDRESS",
7319 "RELOAD_FOR_OPERAND_ADDRESS",
7320 "RELOAD_FOR_OPADDR_ADDR",
7321 "RELOAD_OTHER",
7322 "RELOAD_FOR_OTHER_ADDRESS"
7323 };
7324
7325 /* These functions are used to print the variables set by 'find_reloads' */
7326
7327 DEBUG_FUNCTION void
7328 debug_reload_to_stream (FILE *f)
7329 {
7330 int r;
7331 const char *prefix;
7332
7333 if (! f)
7334 f = stderr;
7335 for (r = 0; r < n_reloads; r++)
7336 {
7337 fprintf (f, "Reload %d: ", r);
7338
7339 if (rld[r].in != 0)
7340 {
7341 fprintf (f, "reload_in (%s) = ",
7342 GET_MODE_NAME (rld[r].inmode));
7343 print_inline_rtx (f, rld[r].in, 24);
7344 fprintf (f, "\n\t");
7345 }
7346
7347 if (rld[r].out != 0)
7348 {
7349 fprintf (f, "reload_out (%s) = ",
7350 GET_MODE_NAME (rld[r].outmode));
7351 print_inline_rtx (f, rld[r].out, 24);
7352 fprintf (f, "\n\t");
7353 }
7354
7355 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7356
7357 fprintf (f, "%s (opnum = %d)",
7358 reload_when_needed_name[(int) rld[r].when_needed],
7359 rld[r].opnum);
7360
7361 if (rld[r].optional)
7362 fprintf (f, ", optional");
7363
7364 if (rld[r].nongroup)
7365 fprintf (f, ", nongroup");
7366
7367 if (rld[r].inc != 0)
7368 fprintf (f, ", inc by %d", rld[r].inc);
7369
7370 if (rld[r].nocombine)
7371 fprintf (f, ", can't combine");
7372
7373 if (rld[r].secondary_p)
7374 fprintf (f, ", secondary_reload_p");
7375
7376 if (rld[r].in_reg != 0)
7377 {
7378 fprintf (f, "\n\treload_in_reg: ");
7379 print_inline_rtx (f, rld[r].in_reg, 24);
7380 }
7381
7382 if (rld[r].out_reg != 0)
7383 {
7384 fprintf (f, "\n\treload_out_reg: ");
7385 print_inline_rtx (f, rld[r].out_reg, 24);
7386 }
7387
7388 if (rld[r].reg_rtx != 0)
7389 {
7390 fprintf (f, "\n\treload_reg_rtx: ");
7391 print_inline_rtx (f, rld[r].reg_rtx, 24);
7392 }
7393
7394 prefix = "\n\t";
7395 if (rld[r].secondary_in_reload != -1)
7396 {
7397 fprintf (f, "%ssecondary_in_reload = %d",
7398 prefix, rld[r].secondary_in_reload);
7399 prefix = ", ";
7400 }
7401
7402 if (rld[r].secondary_out_reload != -1)
7403 fprintf (f, "%ssecondary_out_reload = %d\n",
7404 prefix, rld[r].secondary_out_reload);
7405
7406 prefix = "\n\t";
7407 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7408 {
7409 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7410 insn_data[rld[r].secondary_in_icode].name);
7411 prefix = ", ";
7412 }
7413
7414 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7415 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7416 insn_data[rld[r].secondary_out_icode].name);
7417
7418 fprintf (f, "\n");
7419 }
7420 }
7421
7422 DEBUG_FUNCTION void
7423 debug_reload (void)
7424 {
7425 debug_reload_to_stream (stderr);
7426 }