1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
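/* A rough sketch of the sequence described above (illustrative only; the
   actual driver lives in reload1.c and is considerably more involved):

       init_reload ();
       ...
       for each insn INSN to be processed:
         find_reloads (INSN, replace, ind_levels, live_known, reload_reg_p);
         ... choose a hard reg for each rld[j] that lacks one, record it in
             rld[j].reg_rtx, and emit the input copies before INSN and the
             output copies after INSN ...
         subst_reloads (INSN);
*/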
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "toplev.h"
112 #include "params.h"
113 #include "target.h"
114 #include "ira.h"
115
116 /* True if X is a constant that can be forced into the constant pool. */
117 #define CONST_POOL_OK_P(X) \
118 (CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (X))
121
122 /* True if C is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
124 #define SMALL_REGISTER_CLASS_P(C) \
125 (reg_class_size [(C)] == 1 \
126 || (reg_class_size [(C)] >= 1 && CLASS_LIKELY_SPILLED_P (C)))
127
128 \f
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
133
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
138
139 int reload_n_operands;
140
141 /* Replacing reloads.
142
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
147
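/* For instance, when an input reload is recorded for the rtx at *INLOC,
   an entry of roughly the form { where = INLOC, what = i, mode = INMODE }
   is noted here, and subst_reloads later stores a copy of rld[i].reg_rtx
   (in that mode) into *INLOC.  This is only an outline; see the
   replacement handling below for the details.  */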
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
150
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
153 {
154 rtx *where; /* Location to store in */
155 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
156 a SUBREG; 0 otherwise. */
157 int what; /* which reload this is for */
158 enum machine_mode mode; /* mode it must have */
159 };
160
161 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162
163 /* Number of replacements currently recorded. */
164 static int n_replacements;
165
166 /* Used to track what is modified by an operand. */
167 struct decomposition
168 {
169 int reg_flag; /* Nonzero if referencing a register. */
170 int safe; /* Nonzero if this can't conflict with anything. */
171 rtx base; /* Base address for MEM. */
172 HOST_WIDE_INT start; /* Starting offset or register number. */
173 HOST_WIDE_INT end; /* Ending offset or register number. */
174 };
175
176 #ifdef SECONDARY_MEMORY_NEEDED
177
178 /* Save MEMs needed to copy from one class of registers to another. One MEM
179 is used per mode, but normally only one or two modes are ever used.
180
181 We keep two versions, before and after register elimination. The one
182 after register elimination is recorded separately for each operand. This
183 is done in case the address is not valid, to be sure that we reload
184 each one separately. */
185
186 static rtx secondary_memlocs[NUM_MACHINE_MODES];
187 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
188 static int secondary_memlocs_elim_used = 0;
189 #endif
190
191 /* The instruction we are doing reloads for;
192 so we can test whether a register dies in it. */
193 static rtx this_insn;
194
195 /* Nonzero if this instruction is a user-specified asm with operands. */
196 static int this_insn_is_asm;
197
198 /* If hard_regs_live_known is nonzero,
199 we can tell which hard regs are currently live,
200 at least enough to succeed in choosing dummy reloads. */
201 static int hard_regs_live_known;
202
203 /* Indexed by hard reg number,
204 element is nonnegative if hard reg has been spilled.
205 This vector is passed to `find_reloads' as an argument
206 and is not changed here. */
207 static short *static_reload_reg_p;
208
209 /* Set to 1 in subst_reg_equivs if it changes anything. */
210 static int subst_reg_equivs_changed;
211
212 /* On return from push_reload, holds the reload-number for the OUT
213 operand, which can differ from the reload-number for the input operand. */
214 static int output_reloadnum;
215
216 /* Compare two RTX's. */
217 #define MATCHES(x, y) \
218 (x == y || (x != 0 && (REG_P (x) \
219 ? REG_P (y) && REGNO (x) == REGNO (y) \
220 : rtx_equal_p (x, y) && ! side_effects_p (x))))
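/* For example, MATCHES ((reg 65), (reg 65)) and
   MATCHES ((mem (reg 3)), (mem (reg 3))) both hold, while two distinct MEM
   rtx whose addresses have side effects (e.g. autoincrement) do not match. */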
221
222 /* Indicates if two reload purposes are for similar enough things that we
223 can merge their reloads. */
224 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
225 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
226 || ((when1) == (when2) && (op1) == (op2)) \
227 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
228 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
229 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
230 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
231 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
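/* For example, any reload merges with a RELOAD_OTHER reload, and two
   RELOAD_FOR_INPUT (or two RELOAD_FOR_OPERAND_ADDRESS) reloads merge even
   for different operands; a RELOAD_FOR_INPUT_ADDRESS reload, by contrast,
   merges only with RELOAD_OTHER or with one of the same type for the same
   operand. */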
232
233 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
234 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
235 ((when1) != (when2) \
236 || ! ((op1) == (op2) \
237 || (when1) == RELOAD_FOR_INPUT \
238 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
239 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
240
241 /* If we are going to reload an address, compute the reload type to
242 use. */
243 #define ADDR_TYPE(type) \
244 ((type) == RELOAD_FOR_INPUT_ADDRESS \
245 ? RELOAD_FOR_INPADDR_ADDRESS \
246 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
247 ? RELOAD_FOR_OUTADDR_ADDRESS \
248 : (type)))
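/* Thus ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is RELOAD_FOR_INPADDR_ADDRESS,
   ADDR_TYPE (RELOAD_FOR_OUTPUT_ADDRESS) is RELOAD_FOR_OUTADDR_ADDRESS, and
   any other type is returned unchanged. */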
249
250 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
251 enum machine_mode, enum reload_type,
252 enum insn_code *, secondary_reload_info *);
253 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
254 int, unsigned int);
255 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, enum reg_class, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx);
274 static rtx subst_reg_equivs (rtx, rtx);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx, int, int);
277 static int find_reloads_address_1 (enum machine_mode, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type,int, rtx);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
284 int, rtx);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
290
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
293
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
296 {
297 rtx it;
298
299 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
302
303 reg_equiv_alt_mem_list [regno]
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list [regno]);
306 }
307 \f
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
312
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
316
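/* As an illustration (hypothetical target): if targetm.secondary_reload
   says that loading X into a register of RELOAD_CLASS needs an
   intermediate register of some class C, a new reload of class C is
   pushed and its number is returned; if the hook instead supplies an
   insn code in sri.icode, the secondary reload is for that pattern's
   scratch operand (operand 2), as handled below.  */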
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
322 {
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 char letter;
332 secondary_reload_info sri;
333
334 if (type == RELOAD_FOR_INPUT_ADDRESS
335 || type == RELOAD_FOR_OUTPUT_ADDRESS
336 || type == RELOAD_FOR_INPADDR_ADDRESS
337 || type == RELOAD_FOR_OUTADDR_ADDRESS)
338 secondary_type = type;
339 else
340 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341
342 *picode = CODE_FOR_nothing;
343
344 /* If X is a paradoxical SUBREG, use the inner value to determine both the
345 mode and object being reloaded. */
346 if (GET_CODE (x) == SUBREG
347 && (GET_MODE_SIZE (GET_MODE (x))
348 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
349 {
350 x = SUBREG_REG (x);
351 reload_mode = GET_MODE (x);
352 }
353
354 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
355 is still a pseudo-register by now, it *must* have an equivalent MEM
356 but we don't want to assume that), use that equivalent when seeing if
357 a secondary reload is needed since whether or not a reload is needed
358 might be sensitive to the form of the MEM. */
359
360 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
361 && reg_equiv_mem[REGNO (x)] != 0)
362 x = reg_equiv_mem[REGNO (x)];
363
364 sri.icode = CODE_FOR_nothing;
365 sri.prev_sri = prev_sri;
366 rclass = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
367 icode = (enum insn_code) sri.icode;
368
369 /* If we don't need any secondary registers, done. */
370 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
371 return -1;
372
373 if (rclass != NO_REGS)
374 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
375 reload_mode, type, &t_icode, &sri);
376
377 /* If we will be using an insn, the secondary reload is for a
378 scratch register. */
379
380 if (icode != CODE_FOR_nothing)
381 {
382 /* If IN_P is nonzero, the reload register will be the output in
383 operand 0. If IN_P is zero, the reload register will be the input
384 in operand 1. Outputs should have an initial "=", which we must
385 skip. */
386
387 /* ??? It would be useful to be able to handle only two, or more than
388 three, operands, but for now we can only handle the case of having
389 exactly three: output, input and one temp/scratch. */
390 gcc_assert (insn_data[(int) icode].n_operands == 3);
391
392 /* ??? We currently have no way to represent a reload that needs
393 an icode to reload from an intermediate tertiary reload register.
394 We should probably have a new field in struct reload to tag a
395 chain of scratch operand reloads onto. */
396 gcc_assert (rclass == NO_REGS);
397
398 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
399 gcc_assert (*scratch_constraint == '=');
400 scratch_constraint++;
401 if (*scratch_constraint == '&')
402 scratch_constraint++;
403 letter = *scratch_constraint;
404 scratch_class = (letter == 'r' ? GENERAL_REGS
405 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
406 scratch_constraint));
407
408 rclass = scratch_class;
409 mode = insn_data[(int) icode].operand[2].mode;
410 }
411
412 /* This case isn't valid, so fail. Reload is allowed to use the same
413 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
414 in the case of a secondary register, we actually need two different
415 registers for correct code. We fail here to prevent the possibility of
416 silently generating incorrect code later.
417
418 The convention is that secondary input reloads are valid only if the
419 secondary_class is different from class. If you have such a case, you
420 can not use secondary reloads, you must work around the problem some
421 other way.
422
423 Allow this when a reload_in/out pattern is being used. I.e. assume
424 that the generated code handles this case. */
425
426 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
427 || t_icode != CODE_FOR_nothing);
428
429 /* See if we can reuse an existing secondary reload. */
430 for (s_reload = 0; s_reload < n_reloads; s_reload++)
431 if (rld[s_reload].secondary_p
432 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
433 || reg_class_subset_p (rld[s_reload].rclass, rclass))
434 && ((in_p && rld[s_reload].inmode == mode)
435 || (! in_p && rld[s_reload].outmode == mode))
436 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
437 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
438 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
439 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
440 && (SMALL_REGISTER_CLASS_P (rclass)
441 || targetm.small_register_classes_for_mode_p (VOIDmode))
442 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
443 opnum, rld[s_reload].opnum))
444 {
445 if (in_p)
446 rld[s_reload].inmode = mode;
447 if (! in_p)
448 rld[s_reload].outmode = mode;
449
450 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
451 rld[s_reload].rclass = rclass;
452
453 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
454 rld[s_reload].optional &= optional;
455 rld[s_reload].secondary_p = 1;
456 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
457 opnum, rld[s_reload].opnum))
458 rld[s_reload].when_needed = RELOAD_OTHER;
459
460 break;
461 }
462
463 if (s_reload == n_reloads)
464 {
465 #ifdef SECONDARY_MEMORY_NEEDED
466 /* If we need a memory location to copy between the two reload regs,
467 set it up now. Note that we do the input case before making
468 the reload and the output case after. This is due to the
469 way reloads are output. */
470
471 if (in_p && icode == CODE_FOR_nothing
472 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
473 {
474 get_secondary_mem (x, reload_mode, opnum, type);
475
476 /* We may have just added new reloads. Make sure we add
477 the new reload at the end. */
478 s_reload = n_reloads;
479 }
480 #endif
481
482 /* We need to make a new secondary reload for this register class. */
483 rld[s_reload].in = rld[s_reload].out = 0;
484 rld[s_reload].rclass = rclass;
485
486 rld[s_reload].inmode = in_p ? mode : VOIDmode;
487 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
488 rld[s_reload].reg_rtx = 0;
489 rld[s_reload].optional = optional;
490 rld[s_reload].inc = 0;
491 /* Maybe we could combine these, but it seems too tricky. */
492 rld[s_reload].nocombine = 1;
493 rld[s_reload].in_reg = 0;
494 rld[s_reload].out_reg = 0;
495 rld[s_reload].opnum = opnum;
496 rld[s_reload].when_needed = secondary_type;
497 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
498 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
499 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
500 rld[s_reload].secondary_out_icode
501 = ! in_p ? t_icode : CODE_FOR_nothing;
502 rld[s_reload].secondary_p = 1;
503
504 n_reloads++;
505
506 #ifdef SECONDARY_MEMORY_NEEDED
507 if (! in_p && icode == CODE_FOR_nothing
508 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
509 get_secondary_mem (x, mode, opnum, type);
510 #endif
511 }
512
513 *picode = icode;
514 return s_reload;
515 }
516
517 /* If a secondary reload is needed, return its class. If both an intermediate
518 register and a scratch register are needed, we return the class of the
519 intermediate register. */
520 enum reg_class
521 secondary_reload_class (bool in_p, enum reg_class rclass,
522 enum machine_mode mode, rtx x)
523 {
524 enum insn_code icode;
525 secondary_reload_info sri;
526
527 sri.icode = CODE_FOR_nothing;
528 sri.prev_sri = NULL;
529 rclass = targetm.secondary_reload (in_p, x, rclass, mode, &sri);
530 icode = (enum insn_code) sri.icode;
531
532 /* If there are no secondary reloads at all, we return NO_REGS.
533 If an intermediate register is needed, we return its class. */
534 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
535 return rclass;
536
537 /* No intermediate register is needed, but we have a special reload
538 pattern, which we assume for now needs a scratch register. */
539 return scratch_reload_class (icode);
540 }
541
542 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
543 three operands, verify that operand 2 is an output operand, and return
544 its register class.
545 ??? We'd like to be able to handle any pattern with at least 2 operands,
546 for zero or more scratch registers, but that needs more infrastructure. */
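/* For example, for a reload pattern whose operand 2 constraint is "=&r"
   this returns GENERAL_REGS; a class-specific constraint letter is mapped
   through REG_CLASS_FROM_CONSTRAINT instead.  */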
547 enum reg_class
548 scratch_reload_class (enum insn_code icode)
549 {
550 const char *scratch_constraint;
551 char scratch_letter;
552 enum reg_class rclass;
553
554 gcc_assert (insn_data[(int) icode].n_operands == 3);
555 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
556 gcc_assert (*scratch_constraint == '=');
557 scratch_constraint++;
558 if (*scratch_constraint == '&')
559 scratch_constraint++;
560 scratch_letter = *scratch_constraint;
561 if (scratch_letter == 'r')
562 return GENERAL_REGS;
563 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
564 scratch_constraint);
565 gcc_assert (rclass != NO_REGS);
566 return rclass;
567 }
568 \f
569 #ifdef SECONDARY_MEMORY_NEEDED
570
571 /* Return a memory location that will be used to copy X in mode MODE.
572 If we haven't already made a location for this mode in this insn,
573 call find_reloads_address on the location being returned. */
574
575 rtx
576 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
577 int opnum, enum reload_type type)
578 {
579 rtx loc;
580 int mem_valid;
581
582 /* By default, if MODE is narrower than a word, widen it to a word.
583 This is required because most machines that require these memory
584 locations do not support short loads and stores from all registers
585 (e.g., FP registers). */
586
587 #ifdef SECONDARY_MEMORY_NEEDED_MODE
588 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
589 #else
590 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
591 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
592 #endif
593
594 /* If we already have made a MEM for this operand in MODE, return it. */
595 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
596 return secondary_memlocs_elim[(int) mode][opnum];
597
598 /* If this is the first time we've tried to get a MEM for this mode,
599 allocate a new one. `something_changed' in reload will get set
600 by noticing that the frame size has changed. */
601
602 if (secondary_memlocs[(int) mode] == 0)
603 {
604 #ifdef SECONDARY_MEMORY_NEEDED_RTX
605 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
606 #else
607 secondary_memlocs[(int) mode]
608 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
609 #endif
610 }
611
612 /* Get a version of the address doing any eliminations needed. If that
613 didn't give us a new MEM, make a new one if it isn't valid. */
614
615 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
616 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
617 MEM_ADDR_SPACE (loc));
618
619 if (! mem_valid && loc == secondary_memlocs[(int) mode])
620 loc = copy_rtx (loc);
621
622 /* The only time the call below will do anything is if the stack
623 offset is too large. In that case IND_LEVELS doesn't matter, so we
624 can just pass a zero. Adjust the type to be the address of the
625 corresponding object. If the address was valid, save the eliminated
626 address. If it wasn't valid, we need to make a reload each time, so
627 don't save it. */
628
629 if (! mem_valid)
630 {
631 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
632 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
633 : RELOAD_OTHER);
634
635 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
636 opnum, type, 0, 0);
637 }
638
639 secondary_memlocs_elim[(int) mode][opnum] = loc;
640 if (secondary_memlocs_elim_used <= (int)mode)
641 secondary_memlocs_elim_used = (int)mode + 1;
642 return loc;
643 }
644
645 /* Clear any secondary memory locations we've made. */
646
647 void
648 clear_secondary_mem (void)
649 {
650 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
651 }
652 #endif /* SECONDARY_MEMORY_NEEDED */
653 \f
654
655 /* Find the largest class which has at least one register valid in
656 mode INNER, and which for every such register, that register number
657 plus N is also valid in OUTER (if in range) and is cheap to move
658 into DEST_REGNO. Such a class must exist. */
659
660 static enum reg_class
661 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
662 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
663 unsigned int dest_regno ATTRIBUTE_UNUSED)
664 {
665 int best_cost = -1;
666 int rclass;
667 int regno;
668 enum reg_class best_class = NO_REGS;
669 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
670 unsigned int best_size = 0;
671 int cost;
672
673 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
674 {
675 int bad = 0;
676 int good = 0;
677 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
678 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
679 {
680 if (HARD_REGNO_MODE_OK (regno, inner))
681 {
682 good = 1;
683 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
684 || ! HARD_REGNO_MODE_OK (regno + n, outer))
685 bad = 1;
686 }
687 }
688
689 if (bad || !good)
690 continue;
691 cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass, dest_class);
692
693 if ((reg_class_size[rclass] > best_size
694 && (best_cost < 0 || best_cost >= cost))
695 || best_cost > cost)
696 {
697 best_class = (enum reg_class) rclass;
698 best_size = reg_class_size[rclass];
699 best_cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass,
700 dest_class);
701 }
702 }
703
704 gcc_assert (best_size != 0);
705
706 return best_class;
707 }
708 \f
709 /* Return the number of a previously made reload that can be combined with
710 a new one, or n_reloads if none of the existing reloads can be used.
711 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
712 push_reload; they determine the kind of the new reload that we try to
713 combine. P_IN points to the corresponding value of IN, which can be
714 modified by this function.
715 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
716
717 static int
718 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
719 enum reload_type type, int opnum, int dont_share)
720 {
721 rtx in = *p_in;
722 int i;
723 /* We can't merge two reloads if the output of either one is
724 earlyclobbered. */
725
726 if (earlyclobber_operand_p (out))
727 return n_reloads;
728
729 /* We can use an existing reload if the class is right
730 and at least one of IN and OUT is a match
731 and the other is at worst neutral.
732 (A zero compared against anything is neutral.)
733
734 For targets with small register classes, don't use existing reloads
735 unless they are for the same thing since that can cause us to need
736 more reload registers than we otherwise would. */
737
738 for (i = 0; i < n_reloads; i++)
739 if ((reg_class_subset_p (rclass, rld[i].rclass)
740 || reg_class_subset_p (rld[i].rclass, rclass))
741 /* If the existing reload has a register, it must fit our class. */
742 && (rld[i].reg_rtx == 0
743 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
744 true_regnum (rld[i].reg_rtx)))
745 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
746 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
747 || (out != 0 && MATCHES (rld[i].out, out)
748 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
749 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
750 && (SMALL_REGISTER_CLASS_P (rclass)
751 || targetm.small_register_classes_for_mode_p (VOIDmode))
752 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
753 return i;
754
755 /* Reloading a plain reg for input can match a reload to postincrement
756 that reg, since the postincrement's value is the right value.
757 Likewise, it can match a preincrement reload, since we regard
758 the preincrementation as happening before any ref in this insn
759 to that register. */
760 for (i = 0; i < n_reloads; i++)
761 if ((reg_class_subset_p (rclass, rld[i].rclass)
762 || reg_class_subset_p (rld[i].rclass, rclass))
763 /* If the existing reload has a register, it must fit our
764 class. */
765 && (rld[i].reg_rtx == 0
766 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
767 true_regnum (rld[i].reg_rtx)))
768 && out == 0 && rld[i].out == 0 && rld[i].in != 0
769 && ((REG_P (in)
770 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
771 && MATCHES (XEXP (rld[i].in, 0), in))
772 || (REG_P (rld[i].in)
773 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
774 && MATCHES (XEXP (in, 0), rld[i].in)))
775 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
776 && (SMALL_REGISTER_CLASS_P (rclass)
777 || targetm.small_register_classes_for_mode_p (VOIDmode))
778 && MERGABLE_RELOADS (type, rld[i].when_needed,
779 opnum, rld[i].opnum))
780 {
781 /* Make sure reload_in ultimately has the increment,
782 not the plain register. */
783 if (REG_P (in))
784 *p_in = rld[i].in;
785 return i;
786 }
787 return n_reloads;
788 }
789
790 /* Return nonzero if X is a SUBREG which will require reloading of its
791 SUBREG_REG expression. */
792
793 static int
794 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
795 {
796 rtx inner;
797
798 /* Only SUBREGs are problematical. */
799 if (GET_CODE (x) != SUBREG)
800 return 0;
801
802 inner = SUBREG_REG (x);
803
804 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
805 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
806 return 1;
807
808 /* If INNER is not a hard register, then INNER will not need to
809 be reloaded. */
810 if (!REG_P (inner)
811 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
812 return 0;
813
814 /* If INNER is not ok for MODE, then INNER will need reloading. */
815 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
816 return 1;
817
818 /* If the outer part is a word or smaller, INNER larger than a
819 word and the number of regs for INNER is not the same as the
820 number of words in INNER, then INNER will need reloading. */
821 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
822 && output
823 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
824 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
825 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
826 }
827
828 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
829 requiring an extra reload register. The caller has already found that
830 IN contains some reference to REGNO, so check that we can produce the
831 new value in a single step. E.g. if we have
832 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
833 instruction that adds one to a register, this should succeed.
834 However, if we have something like
835 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
836 needs to be loaded into a register first, we need a separate reload
837 register.
838 Such PLUS reloads are generated by find_reloads_address_part.
839 The out-of-range PLUS expressions are usually introduced in the instruction
840 patterns by register elimination and substituting pseudos without a home
841 by their function-invariant equivalences. */
842 static int
843 can_reload_into (rtx in, int regno, enum machine_mode mode)
844 {
845 rtx dst, test_insn;
846 int r = 0;
847 struct recog_data save_recog_data;
848
849 /* For matching constraints, we often get notional input reloads where
850 we want to use the original register as the reload register. I.e.
851 technically this is a non-optional input-output reload, but IN is
852 already a valid register, and has been chosen as the reload register.
853 Speed this up, since it trivially works. */
854 if (REG_P (in))
855 return 1;
856
857 /* To test MEMs properly, we'd have to take into account all the reloads
858 that are already scheduled, which can become quite complicated.
859 And since we've already handled address reloads for this MEM, it
860 should always succeed anyway. */
861 if (MEM_P (in))
862 return 1;
863
864 /* If we can make a simple SET insn that does the job, everything should
865 be fine. */
866 dst = gen_rtx_REG (mode, regno);
867 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
868 save_recog_data = recog_data;
869 if (recog_memoized (test_insn) >= 0)
870 {
871 extract_insn (test_insn);
872 r = constrain_operands (1);
873 }
874 recog_data = save_recog_data;
875 return r;
876 }
877
878 /* Record one reload that needs to be performed.
879 IN is an rtx saying where the data are to be found before this instruction.
880 OUT says where they must be stored after the instruction.
881 (IN is zero for data not read, and OUT is zero for data not written.)
882 INLOC and OUTLOC point to the places in the instructions where
883 IN and OUT were found.
884 If IN and OUT are both nonzero, it means the same register must be used
885 to reload both IN and OUT.
886
887 RCLASS is a register class required for the reloaded data.
888 INMODE is the machine mode that the instruction requires
889 for the reg that replaces IN and OUTMODE is likewise for OUT.
890
891 If IN is zero, then OUT's location and mode should be passed as
892 INLOC and INMODE.
893
894 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
895
896 OPTIONAL nonzero means this reload does not need to be performed:
897 it can be discarded if that is more convenient.
898
899 OPNUM and TYPE say what the purpose of this reload is.
900
901 The return value is the reload-number for this reload.
902
903 If both IN and OUT are nonzero, in some rare cases we might
904 want to make two separate reloads. (Actually we never do this now.)
905 Therefore, the reload-number for OUT is stored in
906 output_reloadnum when we return; the return value applies to IN.
907 Usually (presently always), when IN and OUT are nonzero,
908 the two reload-numbers are equal, but the caller should be careful to
909 distinguish them. */
910
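/* A typical call for a simple input operand might look like this (a
   sketch, not a quote from find_reloads):

       push_reload (recog_data.operand[i], NULL_RTX,
                    recog_data.operand_loc[i], (rtx *) 0,
                    rclass, operand_mode[i], VOIDmode,
                    0, optional, i, RELOAD_FOR_INPUT);
*/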
911 int
912 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
913 enum reg_class rclass, enum machine_mode inmode,
914 enum machine_mode outmode, int strict_low, int optional,
915 int opnum, enum reload_type type)
916 {
917 int i;
918 int dont_share = 0;
919 int dont_remove_subreg = 0;
920 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
921 int secondary_in_reload = -1, secondary_out_reload = -1;
922 enum insn_code secondary_in_icode = CODE_FOR_nothing;
923 enum insn_code secondary_out_icode = CODE_FOR_nothing;
924
925 /* INMODE and/or OUTMODE could be VOIDmode if no mode
926 has been specified for the operand. In that case,
927 use the operand's mode as the mode to reload. */
928 if (inmode == VOIDmode && in != 0)
929 inmode = GET_MODE (in);
930 if (outmode == VOIDmode && out != 0)
931 outmode = GET_MODE (out);
932
933 /* If find_reloads and friends have so far failed to replace a pseudo
934 with its reg_equiv_constant, something went wrong
935 beforehand.
936 Note that it can't simply be done here if we missed it earlier
937 since the constant might need to be pushed into the literal pool
938 and the resulting memref would probably need further
939 reloading. */
940 if (in != 0 && REG_P (in))
941 {
942 int regno = REGNO (in);
943
944 gcc_assert (regno < FIRST_PSEUDO_REGISTER
945 || reg_renumber[regno] >= 0
946 || reg_equiv_constant[regno] == NULL_RTX);
947 }
948
949 /* reg_equiv_constant only contains constants which are obviously
950 not appropriate as destination. So if we would need to replace
951 the destination pseudo with a constant we are in real
952 trouble. */
953 if (out != 0 && REG_P (out))
954 {
955 int regno = REGNO (out);
956
957 gcc_assert (regno < FIRST_PSEUDO_REGISTER
958 || reg_renumber[regno] >= 0
959 || reg_equiv_constant[regno] == NULL_RTX);
960 }
961
962 /* If we have a read-write operand with an address side-effect,
963 change either IN or OUT so the side-effect happens only once. */
964 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
965 switch (GET_CODE (XEXP (in, 0)))
966 {
967 case POST_INC: case POST_DEC: case POST_MODIFY:
968 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
969 break;
970
971 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
972 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
973 break;
974
975 default:
976 break;
977 }
978
979 /* If we are reloading a (SUBREG constant ...), really reload just the
980 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
981 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
982 a pseudo and hence will become a MEM) with M1 wider than M2 and the
983 register is a pseudo, also reload the inside expression.
984 For machines that extend byte loads, do this for any SUBREG of a pseudo
985 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
986 M2 is an integral mode that gets extended when loaded.
987 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
988 either M1 is not valid for R or M2 is wider than a word but we only
989 need one word to store an M2-sized quantity in R.
990 (However, if OUT is nonzero, we need to reload the reg *and*
991 the subreg, so do nothing here, and let following statement handle it.)
992
993 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
994 we can't handle it here because CONST_INT does not indicate a mode.
995
996 Similarly, we must reload the inside expression if we have a
997 STRICT_LOW_PART (presumably, in == out in this case).
998
999 Also reload the inner expression if it does not require a secondary
1000 reload but the SUBREG does.
1001
1002 Finally, reload the inner expression if it is a register that is in
1003 the class whose registers cannot be referenced in a different size
1004 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1005 cannot reload just the inside since we might end up with the wrong
1006 register class. But if it is inside a STRICT_LOW_PART, we have
1007 no choice, so we hope we do get the right register class there. */
1008
1009 if (in != 0 && GET_CODE (in) == SUBREG
1010 && (subreg_lowpart_p (in) || strict_low)
1011 #ifdef CANNOT_CHANGE_MODE_CLASS
1012 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1013 #endif
1014 && (CONSTANT_P (SUBREG_REG (in))
1015 || GET_CODE (SUBREG_REG (in)) == PLUS
1016 || strict_low
1017 || (((REG_P (SUBREG_REG (in))
1018 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1019 || MEM_P (SUBREG_REG (in)))
1020 && ((GET_MODE_SIZE (inmode)
1021 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1022 #ifdef LOAD_EXTEND_OP
1023 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1024 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1025 <= UNITS_PER_WORD)
1026 && (GET_MODE_SIZE (inmode)
1027 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1028 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1029 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1030 #endif
1031 #ifdef WORD_REGISTER_OPERATIONS
1032 || ((GET_MODE_SIZE (inmode)
1033 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1034 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1035 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1036 / UNITS_PER_WORD)))
1037 #endif
1038 ))
1039 || (REG_P (SUBREG_REG (in))
1040 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1041 /* The case where out is nonzero
1042 is handled differently in the following statement. */
1043 && (out == 0 || subreg_lowpart_p (in))
1044 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1045 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1046 > UNITS_PER_WORD)
1047 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1048 / UNITS_PER_WORD)
1049 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1050 [GET_MODE (SUBREG_REG (in))]))
1051 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1052 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1053 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1054 SUBREG_REG (in))
1055 == NO_REGS))
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 || (REG_P (SUBREG_REG (in))
1058 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1059 && REG_CANNOT_CHANGE_MODE_P
1060 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1061 #endif
1062 ))
1063 {
1064 in_subreg_loc = inloc;
1065 inloc = &SUBREG_REG (in);
1066 in = *inloc;
1067 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1068 if (MEM_P (in))
1069 /* This is supposed to happen only for paradoxical subregs made by
1070 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1071 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1072 #endif
1073 inmode = GET_MODE (in);
1074 }
1075
1076 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1077 either M1 is not valid for R or M2 is wider than a word but we only
1078 need one word to store an M2-sized quantity in R.
1079
1080 However, we must reload the inner reg *as well as* the subreg in
1081 that case. */
1082
1083 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1084 code above. This can happen if SUBREG_BYTE != 0. */
1085
1086 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1087 {
1088 enum reg_class in_class = rclass;
1089
1090 if (REG_P (SUBREG_REG (in)))
1091 in_class
1092 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1093 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1094 GET_MODE (SUBREG_REG (in)),
1095 SUBREG_BYTE (in),
1096 GET_MODE (in)),
1097 REGNO (SUBREG_REG (in)));
1098
1099 /* This relies on the fact that emit_reload_insns outputs the
1100 instructions for input reloads of type RELOAD_OTHER in the same
1101 order as the reloads. Thus if the outer reload is also of type
1102 RELOAD_OTHER, we are guaranteed that this inner reload will be
1103 output before the outer reload. */
1104 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1105 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1106 dont_remove_subreg = 1;
1107 }
1108
1109 /* Similarly for paradoxical and problematical SUBREGs on the output.
1110 Note that there is no reason we need worry about the previous value
1111 of SUBREG_REG (out); even if wider than out,
1112 storing in a subreg is entitled to clobber it all
1113 (except in the case of STRICT_LOW_PART,
1114 and in that case the constraint should label it input-output.) */
1115 if (out != 0 && GET_CODE (out) == SUBREG
1116 && (subreg_lowpart_p (out) || strict_low)
1117 #ifdef CANNOT_CHANGE_MODE_CLASS
1118 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1119 #endif
1120 && (CONSTANT_P (SUBREG_REG (out))
1121 || strict_low
1122 || (((REG_P (SUBREG_REG (out))
1123 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1124 || MEM_P (SUBREG_REG (out)))
1125 && ((GET_MODE_SIZE (outmode)
1126 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1127 #ifdef WORD_REGISTER_OPERATIONS
1128 || ((GET_MODE_SIZE (outmode)
1129 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1130 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1131 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1132 / UNITS_PER_WORD)))
1133 #endif
1134 ))
1135 || (REG_P (SUBREG_REG (out))
1136 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1137 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1138 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1139 > UNITS_PER_WORD)
1140 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1141 / UNITS_PER_WORD)
1142 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1143 [GET_MODE (SUBREG_REG (out))]))
1144 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1145 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1146 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1147 SUBREG_REG (out))
1148 == NO_REGS))
1149 #ifdef CANNOT_CHANGE_MODE_CLASS
1150 || (REG_P (SUBREG_REG (out))
1151 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1152 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1153 GET_MODE (SUBREG_REG (out)),
1154 outmode))
1155 #endif
1156 ))
1157 {
1158 out_subreg_loc = outloc;
1159 outloc = &SUBREG_REG (out);
1160 out = *outloc;
1161 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1162 gcc_assert (!MEM_P (out)
1163 || GET_MODE_SIZE (GET_MODE (out))
1164 <= GET_MODE_SIZE (outmode));
1165 #endif
1166 outmode = GET_MODE (out);
1167 }
1168
1169 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1170 either M1 is not valid for R or M2 is wider than a word but we only
1171 need one word to store an M2-sized quantity in R.
1172
1173 However, we must reload the inner reg *as well as* the subreg in
1174 that case. In this case, the inner reg is an in-out reload. */
1175
1176 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1177 {
1178 /* This relies on the fact that emit_reload_insns outputs the
1179 instructions for output reloads of type RELOAD_OTHER in reverse
1180 order of the reloads. Thus if the outer reload is also of type
1181 RELOAD_OTHER, we are guaranteed that this inner reload will be
1182 output after the outer reload. */
1183 dont_remove_subreg = 1;
1184 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1185 &SUBREG_REG (out),
1186 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1187 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1188 GET_MODE (SUBREG_REG (out)),
1189 SUBREG_BYTE (out),
1190 GET_MODE (out)),
1191 REGNO (SUBREG_REG (out))),
1192 VOIDmode, VOIDmode, 0, 0,
1193 opnum, RELOAD_OTHER);
1194 }
1195
1196 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1197 if (in != 0 && out != 0 && MEM_P (out)
1198 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1199 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1200 dont_share = 1;
1201
1202 /* If IN is a SUBREG of a hard register, make a new REG. This
1203 simplifies some of the cases below. */
1204
1205 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1206 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1207 && ! dont_remove_subreg)
1208 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1209
1210 /* Similarly for OUT. */
1211 if (out != 0 && GET_CODE (out) == SUBREG
1212 && REG_P (SUBREG_REG (out))
1213 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1214 && ! dont_remove_subreg)
1215 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1216
1217 /* Narrow down the class of register wanted if that is
1218 desirable on this machine for efficiency. */
1219 {
1220 enum reg_class preferred_class = rclass;
1221
1222 if (in != 0)
1223 preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1224
1225 /* Output reloads may need analogous treatment, different in detail. */
1226 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1227 if (out != 0)
1228 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1229 #endif
1230
1231 /* Discard what the target said if we cannot do it. */
1232 if (preferred_class != NO_REGS
1233 || (optional && type == RELOAD_FOR_OUTPUT))
1234 rclass = preferred_class;
1235 }
1236
1237 /* Make sure we use a class that can handle the actual pseudo
1238 inside any subreg. For example, on the 386, QImode regs
1239 can appear within SImode subregs. Although GENERAL_REGS
1240 can handle SImode, QImode needs a smaller class. */
1241 #ifdef LIMIT_RELOAD_CLASS
1242 if (in_subreg_loc)
1243 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1244 else if (in != 0 && GET_CODE (in) == SUBREG)
1245 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1246
1247 if (out_subreg_loc)
1248 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1249 if (out != 0 && GET_CODE (out) == SUBREG)
1250 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1251 #endif
1252
1253 /* Verify that this class is at least possible for the mode that
1254 is specified. */
1255 if (this_insn_is_asm)
1256 {
1257 enum machine_mode mode;
1258 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1259 mode = inmode;
1260 else
1261 mode = outmode;
1262 if (mode == VOIDmode)
1263 {
1264 error_for_asm (this_insn, "cannot reload integer constant "
1265 "operand in %<asm%>");
1266 mode = word_mode;
1267 if (in != 0)
1268 inmode = word_mode;
1269 if (out != 0)
1270 outmode = word_mode;
1271 }
1272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1273 if (HARD_REGNO_MODE_OK (i, mode)
1274 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1275 break;
1276 if (i == FIRST_PSEUDO_REGISTER)
1277 {
1278 error_for_asm (this_insn, "impossible register constraint "
1279 "in %<asm%>");
1280 /* Avoid further trouble with this insn. */
1281 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1282 /* We used to continue here setting class to ALL_REGS, but it triggers
1283 a sanity check on i386 for:
1284 void foo(long double d)
1285 {
1286 asm("" :: "a" (d));
1287 }
1288 Returning zero here ought to be safe as we take care in
1289 find_reloads not to process the reloads when the instruction was
1290 replaced by a USE. */
1291
1292 return 0;
1293 }
1294 }
1295
1296 /* Optional output reloads are always OK even if we have no register class,
1297 since the function of these reloads is only to have spill_reg_store etc.
1298 set, so that the storing insn can be deleted later. */
1299 gcc_assert (rclass != NO_REGS
1300 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1301
1302 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1303
1304 if (i == n_reloads)
1305 {
1306 /* See if we need a secondary reload register to move between CLASS
1307 and IN or CLASS and OUT. Get the icode and push any required reloads
1308 needed for each of them if so. */
1309
1310 if (in != 0)
1311 secondary_in_reload
1312 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1313 &secondary_in_icode, NULL);
1314 if (out != 0 && GET_CODE (out) != SCRATCH)
1315 secondary_out_reload
1316 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1317 type, &secondary_out_icode, NULL);
1318
1319 /* We found no existing reload suitable for re-use.
1320 So add an additional reload. */
1321
1322 #ifdef SECONDARY_MEMORY_NEEDED
1323 /* If a memory location is needed for the copy, make one. */
1324 if (in != 0
1325 && (REG_P (in)
1326 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1327 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1328 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1329 rclass, inmode))
1330 get_secondary_mem (in, inmode, opnum, type);
1331 #endif
1332
1333 i = n_reloads;
1334 rld[i].in = in;
1335 rld[i].out = out;
1336 rld[i].rclass = rclass;
1337 rld[i].inmode = inmode;
1338 rld[i].outmode = outmode;
1339 rld[i].reg_rtx = 0;
1340 rld[i].optional = optional;
1341 rld[i].inc = 0;
1342 rld[i].nocombine = 0;
1343 rld[i].in_reg = inloc ? *inloc : 0;
1344 rld[i].out_reg = outloc ? *outloc : 0;
1345 rld[i].opnum = opnum;
1346 rld[i].when_needed = type;
1347 rld[i].secondary_in_reload = secondary_in_reload;
1348 rld[i].secondary_out_reload = secondary_out_reload;
1349 rld[i].secondary_in_icode = secondary_in_icode;
1350 rld[i].secondary_out_icode = secondary_out_icode;
1351 rld[i].secondary_p = 0;
1352
1353 n_reloads++;
1354
1355 #ifdef SECONDARY_MEMORY_NEEDED
1356 if (out != 0
1357 && (REG_P (out)
1358 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1359 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1360 && SECONDARY_MEMORY_NEEDED (rclass,
1361 REGNO_REG_CLASS (reg_or_subregno (out)),
1362 outmode))
1363 get_secondary_mem (out, outmode, opnum, type);
1364 #endif
1365 }
1366 else
1367 {
1368 /* We are reusing an existing reload,
1369 but we may have additional information for it.
1370 For example, we may now have both IN and OUT
1371 while the old one may have just one of them. */
1372
1373 /* The modes can be different. If they are, we want to reload in
1374 the larger mode, so that the value is valid for both modes. */
1375 if (inmode != VOIDmode
1376 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1377 rld[i].inmode = inmode;
1378 if (outmode != VOIDmode
1379 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1380 rld[i].outmode = outmode;
1381 if (in != 0)
1382 {
1383 rtx in_reg = inloc ? *inloc : 0;
1384 /* If we merge reloads for two distinct rtl expressions that
1385 are identical in content, there might be duplicate address
1386 reloads. Remove the extra set now, so that if we later find
1387 that we can inherit this reload, we can get rid of the
1388 address reloads altogether.
1389
1390 Do not do this if both reloads are optional since the result
1391 would be an optional reload which could potentially leave
1392 unresolved address replacements.
1393
1394 It is not sufficient to call transfer_replacements since
1395 choose_reload_regs will remove the replacements for address
1396 reloads of inherited reloads which results in the same
1397 problem. */
1398 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1399 && ! (rld[i].optional && optional))
1400 {
1401 /* We must keep the address reload with the lower operand
1402 number alive. */
1403 if (opnum > rld[i].opnum)
1404 {
1405 remove_address_replacements (in);
1406 in = rld[i].in;
1407 in_reg = rld[i].in_reg;
1408 }
1409 else
1410 remove_address_replacements (rld[i].in);
1411 }
1412 /* When emitting reloads we don't necessarily look at the in-
1413 and outmode, but also directly at the operands (in and out).
1414 So we can't simply overwrite them with whatever we have found
1415 for this (to-be-merged) reload; we have to "merge" that too.
1416 Reusing another reload already verified that we deal with the
1417 same operands, just possibly in different modes. So we
1418 overwrite the operands only when the new mode is larger.
1419 See also PR33613. */
1420 if (!rld[i].in
1421 || GET_MODE_SIZE (GET_MODE (in))
1422 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1423 rld[i].in = in;
1424 if (!rld[i].in_reg
1425 || (in_reg
1426 && GET_MODE_SIZE (GET_MODE (in_reg))
1427 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1428 rld[i].in_reg = in_reg;
1429 }
1430 if (out != 0)
1431 {
1432 if (!rld[i].out
1433 || (out
1434 && GET_MODE_SIZE (GET_MODE (out))
1435 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1436 rld[i].out = out;
1437 if (outloc
1438 && (!rld[i].out_reg
1439 || GET_MODE_SIZE (GET_MODE (*outloc))
1440 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1441 rld[i].out_reg = *outloc;
1442 }
1443 if (reg_class_subset_p (rclass, rld[i].rclass))
1444 rld[i].rclass = rclass;
1445 rld[i].optional &= optional;
1446 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1447 opnum, rld[i].opnum))
1448 rld[i].when_needed = RELOAD_OTHER;
1449 rld[i].opnum = MIN (rld[i].opnum, opnum);
1450 }
1451
1452 /* If the ostensible rtx being reloaded differs from the rtx found
1453 in the location to substitute, this reload is not safe to combine
1454 because we cannot reliably tell whether it appears in the insn. */
1455
1456 if (in != 0 && in != *inloc)
1457 rld[i].nocombine = 1;
1458
1459 #if 0
1460 /* This was replaced by changes in find_reloads_address_1 and the new
1461 function inc_for_reload, which go with a new meaning of reload_inc. */
1462
1463 /* If this is an IN/OUT reload in an insn that sets the CC,
1464 it must be for an autoincrement. It doesn't work to store
1465 the incremented value after the insn because that would clobber the CC.
1466 So we must do the increment of the value reloaded from,
1467 increment it, store it back, then decrement again. */
1468 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1469 {
1470 out = 0;
1471 rld[i].out = 0;
1472 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1473 /* If we did not find a nonzero amount-to-increment-by,
1474 that contradicts the belief that IN is being incremented
1475 in an address in this insn. */
1476 gcc_assert (rld[i].inc != 0);
1477 }
1478 #endif
1479
1480 /* If we will replace IN and OUT with the reload-reg,
1481 record where they are located so that substitution need
1482 not do a tree walk. */
1483
1484 if (replace_reloads)
1485 {
1486 if (inloc != 0)
1487 {
1488 struct replacement *r = &replacements[n_replacements++];
1489 r->what = i;
1490 r->subreg_loc = in_subreg_loc;
1491 r->where = inloc;
1492 r->mode = inmode;
1493 }
1494 if (outloc != 0 && outloc != inloc)
1495 {
1496 struct replacement *r = &replacements[n_replacements++];
1497 r->what = i;
1498 r->where = outloc;
1499 r->subreg_loc = out_subreg_loc;
1500 r->mode = outmode;
1501 }
1502 }
1503
1504 /* If this reload is just being introduced and it has both
1505 an incoming quantity and an outgoing quantity that are
1506 supposed to be made to match, see if either one of the two
1507 can serve as the place to reload into.
1508
1509 If one of them is acceptable, set rld[i].reg_rtx
1510 to that one. */
1511
1512 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1513 {
1514 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1515 inmode, outmode,
1516 rld[i].rclass, i,
1517 earlyclobber_operand_p (out));
1518
1519 /* If the outgoing register already contains the same value
1520 as the incoming one, we can dispense with loading it.
1521 The easiest way to tell the caller that is to give a phony
1522 value for the incoming operand (same as outgoing one). */
1523 if (rld[i].reg_rtx == out
1524 && (REG_P (in) || CONSTANT_P (in))
1525 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1526 static_reload_reg_p, i, inmode))
1527 rld[i].in = out;
1528 }
1529
1530 /* If this is an input reload and the operand contains a register that
1531 dies in this insn and is used nowhere else, see if it is the right class
1532 to be used for this reload. Use it if so. (This occurs most commonly
1533 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1534 this if it is also an output reload that mentions the register unless
1535 the output is a SUBREG that clobbers an entire register.
1536
1537 Note that the operand might be one of the spill regs, if it is a
1538 pseudo reg and we are in a block where spilling has not taken place.
1539 But if there is no spilling in this block, that is OK.
1540 An explicitly used hard reg cannot be a spill reg. */
1541
1542 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1543 {
1544 rtx note;
1545 int regno;
1546 enum machine_mode rel_mode = inmode;
1547
1548 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1549 rel_mode = outmode;
1550
1551 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1552 if (REG_NOTE_KIND (note) == REG_DEAD
1553 && REG_P (XEXP (note, 0))
1554 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1555 && reg_mentioned_p (XEXP (note, 0), in)
1556 /* Check that a former pseudo is valid; see find_dummy_reload. */
1557 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1558 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1559 ORIGINAL_REGNO (XEXP (note, 0)))
1560 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1561 && ! refers_to_regno_for_reload_p (regno,
1562 end_hard_regno (rel_mode,
1563 regno),
1564 PATTERN (this_insn), inloc)
1565 /* If this is also an output reload, IN cannot be used as
1566 the reload register if it is set in this insn unless IN
1567 is also OUT. */
1568 && (out == 0 || in == out
1569 || ! hard_reg_set_here_p (regno,
1570 end_hard_regno (rel_mode, regno),
1571 PATTERN (this_insn)))
1572 /* ??? Why is this code so different from the previous?
1573 Is there any simple coherent way to describe the two together?
1574 What's going on here? */
1575 && (in != out
1576 || (GET_CODE (in) == SUBREG
1577 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1578 / UNITS_PER_WORD)
1579 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1580 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1581 /* Make sure the operand fits in the reg that dies. */
1582 && (GET_MODE_SIZE (rel_mode)
1583 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1584 && HARD_REGNO_MODE_OK (regno, inmode)
1585 && HARD_REGNO_MODE_OK (regno, outmode))
1586 {
1587 unsigned int offs;
1588 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1589 hard_regno_nregs[regno][outmode]);
1590
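/* Every hard reg the candidate spans must be usable: not fixed and
contained in the class required for this reload. */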
1591 for (offs = 0; offs < nregs; offs++)
1592 if (fixed_regs[regno + offs]
1593 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1594 regno + offs))
1595 break;
1596
1597 if (offs == nregs
1598 && (! (refers_to_regno_for_reload_p
1599 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1600 || can_reload_into (in, regno, inmode)))
1601 {
1602 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1603 break;
1604 }
1605 }
1606 }
1607
1608 if (out)
1609 output_reloadnum = i;
1610
1611 return i;
1612 }
1613
1614 /* Record an additional place we must replace a value
1615 for which we have already recorded a reload.
1616 RELOADNUM is the value returned by push_reload
1617 when the reload was recorded.
1618 This is used in insn patterns that use match_dup. */
1619
1620 static void
1621 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1622 {
1623 if (replace_reloads)
1624 {
1625 struct replacement *r = &replacements[n_replacements++];
1626 r->what = reloadnum;
1627 r->where = loc;
1628 r->subreg_loc = 0;
1629 r->mode = mode;
1630 }
1631 }
1632
1633 /* Duplicate any replacement we have recorded to apply at
1634 location ORIG_LOC to also be performed at DUP_LOC.
1635 This is used in insn patterns that use match_dup. */
1636
1637 static void
1638 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1639 {
1640 int i, n = n_replacements;
1641
1642 for (i = 0; i < n; i++)
1643 {
1644 struct replacement *r = &replacements[i];
1645 if (r->where == orig_loc)
1646 push_replacement (dup_loc, r->what, r->mode);
1647 }
1648 }
1649 \f
1650 /* Transfer all replacements that used to be in reload FROM to be in
1651 reload TO. */
1652
1653 void
1654 transfer_replacements (int to, int from)
1655 {
1656 int i;
1657
1658 for (i = 0; i < n_replacements; i++)
1659 if (replacements[i].what == from)
1660 replacements[i].what = to;
1661 }
1662 \f
1663 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1664 or a subpart of it. If we have any replacements registered for IN_RTX,
1665 cancel the reloads that were supposed to load them.
1666 Return nonzero if we canceled any reloads. */
1667 int
1668 remove_address_replacements (rtx in_rtx)
1669 {
1670 int i, j;
1671 char reload_flags[MAX_RELOADS];
1672 int something_changed = 0;
1673
1674 memset (reload_flags, 0, sizeof reload_flags);
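/* reload_flags[R] accumulates: bit 0 - some replacement of reload R lies
inside IN_RTX (and is dropped); bit 1 - some replacement of R survives. */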
1675 for (i = 0, j = 0; i < n_replacements; i++)
1676 {
1677 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1678 reload_flags[replacements[i].what] |= 1;
1679 else
1680 {
1681 replacements[j++] = replacements[i];
1682 reload_flags[replacements[i].what] |= 2;
1683 }
1684 }
1685 /* Note that the following store must be done before the recursive calls. */
1686 n_replacements = j;
1687
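/* Cancel any reload all of whose replacements were dropped above
(bit 0 set, bit 1 clear), together with its own address reloads. */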
1688 for (i = n_reloads - 1; i >= 0; i--)
1689 {
1690 if (reload_flags[i] == 1)
1691 {
1692 deallocate_reload_reg (i);
1693 remove_address_replacements (rld[i].in);
1694 rld[i].in = 0;
1695 something_changed = 1;
1696 }
1697 }
1698 return something_changed;
1699 }
1700 \f
1701 /* If there is only one output reload, and it is not for an earlyclobber
1702 operand, try to combine it with a (logically unrelated) input reload
1703 to reduce the number of reload registers needed.
1704
1705 This is safe if the input reload does not appear in
1706 the value being output-reloaded, because this implies
1707 it is not needed any more once the original insn completes.
1708
1709 If that doesn't work, see if we can use any of the registers that
1710 die in this insn as a reload register. We can if it is of the right
1711 class and does not appear in the value being output-reloaded. */
1712
1713 static void
1714 combine_reloads (void)
1715 {
1716 int i, regno;
1717 int output_reload = -1;
1718 int secondary_out = -1;
1719 rtx note;
1720
1721 /* Find the output reload; return unless there is exactly one
1722 and that one is mandatory. */
1723
1724 for (i = 0; i < n_reloads; i++)
1725 if (rld[i].out != 0)
1726 {
1727 if (output_reload >= 0)
1728 return;
1729 output_reload = i;
1730 }
1731
1732 if (output_reload < 0 || rld[output_reload].optional)
1733 return;
1734
1735 /* An input-output reload isn't combinable. */
1736
1737 if (rld[output_reload].in != 0)
1738 return;
1739
1740 /* If this reload is for an earlyclobber operand, we can't do anything. */
1741 if (earlyclobber_operand_p (rld[output_reload].out))
1742 return;
1743
1744 /* If there is a reload for part of the address of this operand, we would
1745 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1746 its life to the point where doing this combine would not lower the
1747 number of spill registers needed. */
1748 for (i = 0; i < n_reloads; i++)
1749 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1750 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1751 && rld[i].opnum == rld[output_reload].opnum)
1752 return;
1753
1754 /* Check each input reload; can we combine it? */
1755
1756 for (i = 0; i < n_reloads; i++)
1757 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1758 /* Life span of this reload must not extend past main insn. */
1759 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1760 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1761 && rld[i].when_needed != RELOAD_OTHER
1762 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1763 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1764 rld[output_reload].outmode))
1765 && rld[i].inc == 0
1766 && rld[i].reg_rtx == 0
1767 #ifdef SECONDARY_MEMORY_NEEDED
1768 /* Don't combine two reloads with different secondary
1769 memory locations. */
1770 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1771 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1772 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1773 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1774 #endif
1775 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1776 ? (rld[i].rclass == rld[output_reload].rclass)
1777 : (reg_class_subset_p (rld[i].rclass,
1778 rld[output_reload].rclass)
1779 || reg_class_subset_p (rld[output_reload].rclass,
1780 rld[i].rclass)))
1781 && (MATCHES (rld[i].in, rld[output_reload].out)
1782 /* Args reversed because the first arg seems to be
1783 the one that we imagine being modified
1784 while the second is the one that might be affected. */
1785 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1786 rld[i].in)
1787 /* However, if the input is a register that appears inside
1788 the output, then we also can't share.
1789 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1790 If the same reload reg is used for both reg 69 and the
1791 result to be stored in memory, then that result
1792 will clobber the address of the memory ref. */
1793 && ! (REG_P (rld[i].in)
1794 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1795 rld[output_reload].out))))
1796 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1797 rld[i].when_needed != RELOAD_FOR_INPUT)
1798 && (reg_class_size[(int) rld[i].rclass]
1799 || targetm.small_register_classes_for_mode_p (VOIDmode))
1800 /* We will allow making things slightly worse by combining an
1801 input and an output, but no worse than that. */
1802 && (rld[i].when_needed == RELOAD_FOR_INPUT
1803 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1804 {
1805 int j;
1806
1807 /* We have found a reload to combine with! */
1808 rld[i].out = rld[output_reload].out;
1809 rld[i].out_reg = rld[output_reload].out_reg;
1810 rld[i].outmode = rld[output_reload].outmode;
1811 /* Mark the old output reload as inoperative. */
1812 rld[output_reload].out = 0;
1813 /* The combined reload is needed for the entire insn. */
1814 rld[i].when_needed = RELOAD_OTHER;
1815 /* If the output reload had a secondary reload, copy it. */
1816 if (rld[output_reload].secondary_out_reload != -1)
1817 {
1818 rld[i].secondary_out_reload
1819 = rld[output_reload].secondary_out_reload;
1820 rld[i].secondary_out_icode
1821 = rld[output_reload].secondary_out_icode;
1822 }
1823
1824 #ifdef SECONDARY_MEMORY_NEEDED
1825 /* Copy any secondary MEM. */
1826 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1827 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1828 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1829 #endif
1830 /* If required, minimize the register class. */
1831 if (reg_class_subset_p (rld[output_reload].rclass,
1832 rld[i].rclass))
1833 rld[i].rclass = rld[output_reload].rclass;
1834
1835 /* Transfer all replacements from the old reload to the combined. */
1836 for (j = 0; j < n_replacements; j++)
1837 if (replacements[j].what == output_reload)
1838 replacements[j].what = i;
1839
1840 return;
1841 }
1842
1843 /* If this insn has only one operand that is modified or written (assumed
1844 to be the first), it must be the one corresponding to this reload. It
1845 is safe to use anything that dies in this insn for that output provided
1846 that it does not occur in the output (we already know it isn't an
1847 earlyclobber). If this is an asm insn, give up. */
1848
1849 if (INSN_CODE (this_insn) == -1)
1850 return;
1851
1852 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1853 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1854 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1855 return;
1856
1857 /* See if some hard register that dies in this insn and is not used in
1858 the output is of the right class. Only works if the register we pick
1859 up can fully hold our output reload. */
1860 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1861 if (REG_NOTE_KIND (note) == REG_DEAD
1862 && REG_P (XEXP (note, 0))
1863 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1864 rld[output_reload].out)
1865 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1866 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1867 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1868 regno)
1869 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1870 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1871 /* Ensure that a secondary or tertiary reload for this output
1872 won't want this register. */
1873 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1874 || (!(TEST_HARD_REG_BIT
1875 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1876 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1877 || !(TEST_HARD_REG_BIT
1878 (reg_class_contents[(int) rld[secondary_out].rclass],
1879 regno)))))
1880 && !fixed_regs[regno]
1881 /* Check that a former pseudo is valid; see find_dummy_reload. */
1882 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1883 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1884 ORIGINAL_REGNO (XEXP (note, 0)))
1885 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1886 {
1887 rld[output_reload].reg_rtx
1888 = gen_rtx_REG (rld[output_reload].outmode, regno);
1889 return;
1890 }
1891 }
1892 \f
1893 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1894 See if one of IN and OUT is a register that may be used;
1895 this is desirable since a spill-register won't be needed.
1896 If so, return the register rtx that proves acceptable.
1897
1898 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1899 RCLASS is the register class required for the reload.
1900
1901 If FOR_REAL is >= 0, it is the number of the reload,
1902 and in some cases when it can be discovered that OUT doesn't need
1903 to be computed, clear out rld[FOR_REAL].out.
1904
1905 If FOR_REAL is -1, this should not be done, because this call
1906 is just to see if a register can be found, not to find and install it.
1907
1908 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1909 puts an additional constraint on being able to use IN for OUT since
1910 IN must not appear elsewhere in the insn (it is assumed that IN itself
1911 is safe from the earlyclobber). */
1912
1913 static rtx
1914 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1915 enum machine_mode inmode, enum machine_mode outmode,
1916 enum reg_class rclass, int for_real, int earlyclobber)
1917 {
1918 rtx in = real_in;
1919 rtx out = real_out;
1920 int in_offset = 0;
1921 int out_offset = 0;
1922 rtx value = 0;
1923
1924 /* If operands exceed a word, we can't use either of them
1925 unless they have the same size. */
1926 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1927 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1928 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1929 return 0;
1930
1931 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1932 respectively refers to a hard register. */
1933
1934 /* Find the inside of any subregs. */
1935 while (GET_CODE (out) == SUBREG)
1936 {
1937 if (REG_P (SUBREG_REG (out))
1938 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1939 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1940 GET_MODE (SUBREG_REG (out)),
1941 SUBREG_BYTE (out),
1942 GET_MODE (out));
1943 out = SUBREG_REG (out);
1944 }
1945 while (GET_CODE (in) == SUBREG)
1946 {
1947 if (REG_P (SUBREG_REG (in))
1948 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1949 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1950 GET_MODE (SUBREG_REG (in)),
1951 SUBREG_BYTE (in),
1952 GET_MODE (in));
1953 in = SUBREG_REG (in);
1954 }
1955
1956 /* Narrow down the reg class, the same way push_reload will;
1957 otherwise we might find a dummy now, but push_reload won't. */
1958 {
1959 enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1960 if (preferred_class != NO_REGS)
1961 rclass = preferred_class;
1962 }
1963
1964 /* See if OUT will do. */
1965 if (REG_P (out)
1966 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1967 {
1968 unsigned int regno = REGNO (out) + out_offset;
1969 unsigned int nwords = hard_regno_nregs[regno][outmode];
1970 rtx saved_rtx;
1971
1972 /* When we consider whether the insn uses OUT,
1973 ignore references within IN. They don't prevent us
1974 from copying IN into OUT, because those refs would
1975 move into the insn that reloads IN.
1976
1977 However, we only ignore IN in its role as this reload.
1978 If the insn uses IN elsewhere and it contains OUT,
1979 that counts. We can't be sure it's the "same" operand
1980 so it might not go through this reload. */
1981 saved_rtx = *inloc;
1982 *inloc = const0_rtx;
1983
1984 if (regno < FIRST_PSEUDO_REGISTER
1985 && HARD_REGNO_MODE_OK (regno, outmode)
1986 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1987 PATTERN (this_insn), outloc))
1988 {
1989 unsigned int i;
1990
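/* Check that every hard reg making up OUT in OUTMODE is in the
required class. */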
1991 for (i = 0; i < nwords; i++)
1992 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1993 regno + i))
1994 break;
1995
1996 if (i == nwords)
1997 {
1998 if (REG_P (real_out))
1999 value = real_out;
2000 else
2001 value = gen_rtx_REG (outmode, regno);
2002 }
2003 }
2004
2005 *inloc = saved_rtx;
2006 }
2007
2008 /* Consider using IN if OUT was not acceptable
2009 or if OUT dies in this insn (like the quotient in a divmod insn).
2010 We can't use IN unless it dies in this insn,
2011 which means we must know accurately which hard regs are live.
2012 Also, the result can't go in IN if IN is used within OUT,
2013 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2014 if (hard_regs_live_known
2015 && REG_P (in)
2016 && REGNO (in) < FIRST_PSEUDO_REGISTER
2017 && (value == 0
2018 || find_reg_note (this_insn, REG_UNUSED, real_out))
2019 && find_reg_note (this_insn, REG_DEAD, real_in)
2020 && !fixed_regs[REGNO (in)]
2021 && HARD_REGNO_MODE_OK (REGNO (in),
2022 /* The only case where out and real_out might
2023 have different modes is where real_out
2024 is a subreg, and in that case, out
2025 has a real mode. */
2026 (GET_MODE (out) != VOIDmode
2027 ? GET_MODE (out) : outmode))
2028 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2029 /* However only do this if we can be sure that this input
2030 operand doesn't correspond with an uninitialized pseudo.
2031 global can assign some hardreg to it that is the same as
2032 the one assigned to a different, also live pseudo (as it
2033 can ignore the conflict). We must never introduce writes
2034 to such hardregs, as they would clobber the other live
2035 pseudo. See PR 20973. */
2036 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2037 ORIGINAL_REGNO (in))
2038 /* Similarly, only do this if we can be sure that the death
2039 note is still valid. global can assign some hardreg to
2040 the pseudo referenced in the note and simultaneously a
2041 subword of this hardreg to a different, also live pseudo,
2042 because only another subword of the hardreg is actually
2043 used in the insn. This cannot happen if the pseudo has
2044 been assigned exactly one hardreg. See PR 33732. */
2045 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2046 {
2047 unsigned int regno = REGNO (in) + in_offset;
2048 unsigned int nwords = hard_regno_nregs[regno][inmode];
2049
2050 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2051 && ! hard_reg_set_here_p (regno, regno + nwords,
2052 PATTERN (this_insn))
2053 && (! earlyclobber
2054 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2055 PATTERN (this_insn), inloc)))
2056 {
2057 unsigned int i;
2058
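/* Likewise check that every hard reg making up IN in INMODE is in the
required class. */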
2059 for (i = 0; i < nwords; i++)
2060 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2061 regno + i))
2062 break;
2063
2064 if (i == nwords)
2065 {
2066 /* If we were going to use OUT as the reload reg
2067 and changed our mind, it means OUT is a dummy that
2068 dies here. So don't bother copying value to it. */
2069 if (for_real >= 0 && value == real_out)
2070 rld[for_real].out = 0;
2071 if (REG_P (real_in))
2072 value = real_in;
2073 else
2074 value = gen_rtx_REG (inmode, regno);
2075 }
2076 }
2077 }
2078
2079 return value;
2080 }
2081 \f
2082 /* This page contains subroutines used mainly for determining
2083 whether the IN or an OUT of a reload can serve as the
2084 reload register. */
2085
2086 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2087
2088 int
2089 earlyclobber_operand_p (rtx x)
2090 {
2091 int i;
2092
2093 for (i = 0; i < n_earlyclobbers; i++)
2094 if (reload_earlyclobbers[i] == x)
2095 return 1;
2096
2097 return 0;
2098 }
2099
2100 /* Return 1 if expression X alters a hard reg in the range
2101 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2102 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2103 X should be the body of an instruction. */
2104
2105 static int
2106 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2107 {
2108 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2109 {
2110 rtx op0 = SET_DEST (x);
2111
2112 while (GET_CODE (op0) == SUBREG)
2113 op0 = SUBREG_REG (op0);
2114 if (REG_P (op0))
2115 {
2116 unsigned int r = REGNO (op0);
2117
2118 /* See if this reg overlaps range under consideration. */
2119 if (r < end_regno
2120 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2121 return 1;
2122 }
2123 }
2124 else if (GET_CODE (x) == PARALLEL)
2125 {
2126 int i = XVECLEN (x, 0) - 1;
2127
2128 for (; i >= 0; i--)
2129 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2130 return 1;
2131 }
2132
2133 return 0;
2134 }
2135
2136 /* Return 1 if ADDR is a valid memory address for mode MODE
2137 in address space AS, and check that each pseudo reg has the
2138 proper kind of hard reg. */
2139
2140 int
2141 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2142 rtx addr, addr_space_t as)
2143 {
2144 #ifdef GO_IF_LEGITIMATE_ADDRESS
2145 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2146 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2147 return 0;
2148
2149 win:
2150 return 1;
2151 #else
2152 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2153 #endif
2154 }
2155 \f
2156 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2157 if they are the same hard reg, and has special hacks for
2158 autoincrement and autodecrement.
2159 This is specifically intended for find_reloads to use
2160 in determining whether two operands match.
2161 X is the operand whose number is the lower of the two.
2162
2163 The value is 2 if Y contains a pre-increment that matches
2164 a non-incrementing address in X. */
2165
2166 /* ??? To be completely correct, we should arrange to pass
2167 for X the output operand and for Y the input operand.
2168 For now, we assume that the output operand has the lower number
2169 because that is natural in (SET output (... input ...)). */
2170
2171 int
2172 operands_match_p (rtx x, rtx y)
2173 {
2174 int i;
2175 RTX_CODE code = GET_CODE (x);
2176 const char *fmt;
2177 int success_2;
2178
2179 if (x == y)
2180 return 1;
2181 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2182 && (REG_P (y) || (GET_CODE (y) == SUBREG
2183 && REG_P (SUBREG_REG (y)))))
2184 {
2185 int j;
2186
2187 if (code == SUBREG)
2188 {
2189 i = REGNO (SUBREG_REG (x));
2190 if (i >= FIRST_PSEUDO_REGISTER)
2191 goto slow;
2192 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2193 GET_MODE (SUBREG_REG (x)),
2194 SUBREG_BYTE (x),
2195 GET_MODE (x));
2196 }
2197 else
2198 i = REGNO (x);
2199
2200 if (GET_CODE (y) == SUBREG)
2201 {
2202 j = REGNO (SUBREG_REG (y));
2203 if (j >= FIRST_PSEUDO_REGISTER)
2204 goto slow;
2205 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2206 GET_MODE (SUBREG_REG (y)),
2207 SUBREG_BYTE (y),
2208 GET_MODE (y));
2209 }
2210 else
2211 j = REGNO (y);
2212
2213 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2214 multiple hard register group of scalar integer registers, so that
2215 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2216 register. */
2217 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2218 && SCALAR_INT_MODE_P (GET_MODE (x))
2219 && i < FIRST_PSEUDO_REGISTER)
2220 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2221 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2222 && SCALAR_INT_MODE_P (GET_MODE (y))
2223 && j < FIRST_PSEUDO_REGISTER)
2224 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2225
2226 return i == j;
2227 }
2228 /* If two operands must match, because they are really a single
2229 operand of an assembler insn, then two postincrements are invalid
2230 because the assembler insn would increment only once.
2231 On the other hand, a postincrement matches ordinary indexing
2232 if the postincrement is the output operand. */
2233 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2234 return operands_match_p (XEXP (x, 0), y);
2235 /* Two preincrements are invalid
2236 because the assembler insn would increment only once.
2237 On the other hand, a preincrement matches ordinary indexing
2238 if the preincrement is the input operand.
2239 In this case, return 2, since some callers need to do special
2240 things when this happens. */
2241 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2242 || GET_CODE (y) == PRE_MODIFY)
2243 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2244
2245 slow:
2246
2247 /* Now we have disposed of all the cases in which different rtx codes
2248 can match. */
2249 if (code != GET_CODE (y))
2250 return 0;
2251
2252 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2253 if (GET_MODE (x) != GET_MODE (y))
2254 return 0;
2255
2256 /* MEMs referring to different address spaces are not equivalent. */
2257 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2258 return 0;
2259
2260 switch (code)
2261 {
2262 case CONST_INT:
2263 case CONST_DOUBLE:
2264 case CONST_FIXED:
2265 return 0;
2266
2267 case LABEL_REF:
2268 return XEXP (x, 0) == XEXP (y, 0);
2269 case SYMBOL_REF:
2270 return XSTR (x, 0) == XSTR (y, 0);
2271
2272 default:
2273 break;
2274 }
2275
2276 /* Compare the elements. If any pair of corresponding elements
2277 fails to match, return 0 for the whole thing. */
2278
2279 success_2 = 0;
2280 fmt = GET_RTX_FORMAT (code);
2281 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2282 {
2283 int val, j;
2284 switch (fmt[i])
2285 {
2286 case 'w':
2287 if (XWINT (x, i) != XWINT (y, i))
2288 return 0;
2289 break;
2290
2291 case 'i':
2292 if (XINT (x, i) != XINT (y, i))
2293 return 0;
2294 break;
2295
2296 case 'e':
2297 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2298 if (val == 0)
2299 return 0;
2300 /* If any subexpression returns 2,
2301 we should return 2 if we are successful. */
2302 if (val == 2)
2303 success_2 = 1;
2304 break;
2305
2306 case '0':
2307 break;
2308
2309 case 'E':
2310 if (XVECLEN (x, i) != XVECLEN (y, i))
2311 return 0;
2312 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2313 {
2314 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2315 if (val == 0)
2316 return 0;
2317 if (val == 2)
2318 success_2 = 1;
2319 }
2320 break;
2321
2322 /* It is believed that rtx's at this level will never
2323 contain anything but integers and other rtx's,
2324 except for within LABEL_REFs and SYMBOL_REFs. */
2325 default:
2326 gcc_unreachable ();
2327 }
2328 }
2329 return 1 + success_2;
2330 }
2331 \f
2332 /* Describe the range of registers or memory referenced by X.
2333 If X is a register, set REG_FLAG and put the first register
2334 number into START and the last plus one into END.
2335 If X is a memory reference, put a base address into BASE
2336 and a range of integer offsets into START and END.
2337 If X is pushing on the stack, we can assume it causes no trouble,
2338 so we set the SAFE field. */
2339
2340 static struct decomposition
2341 decompose (rtx x)
2342 {
2343 struct decomposition val;
2344 int all_const = 0;
2345
2346 memset (&val, 0, sizeof (val));
2347
2348 switch (GET_CODE (x))
2349 {
2350 case MEM:
2351 {
2352 rtx base = NULL_RTX, offset = 0;
2353 rtx addr = XEXP (x, 0);
2354
2355 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2356 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2357 {
2358 val.base = XEXP (addr, 0);
2359 val.start = -GET_MODE_SIZE (GET_MODE (x));
2360 val.end = GET_MODE_SIZE (GET_MODE (x));
2361 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2362 return val;
2363 }
2364
2365 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2366 {
2367 if (GET_CODE (XEXP (addr, 1)) == PLUS
2368 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2369 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2370 {
2371 val.base = XEXP (addr, 0);
2372 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2373 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2374 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2375 return val;
2376 }
2377 }
2378
2379 if (GET_CODE (addr) == CONST)
2380 {
2381 addr = XEXP (addr, 0);
2382 all_const = 1;
2383 }
2384 if (GET_CODE (addr) == PLUS)
2385 {
2386 if (CONSTANT_P (XEXP (addr, 0)))
2387 {
2388 base = XEXP (addr, 1);
2389 offset = XEXP (addr, 0);
2390 }
2391 else if (CONSTANT_P (XEXP (addr, 1)))
2392 {
2393 base = XEXP (addr, 0);
2394 offset = XEXP (addr, 1);
2395 }
2396 }
2397
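/* If no constant term was found, treat the whole address as the base.
Below, fold things so that OFFSET ends up as a plain CONST_INT and
BASE absorbs everything else. */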
2398 if (offset == 0)
2399 {
2400 base = addr;
2401 offset = const0_rtx;
2402 }
2403 if (GET_CODE (offset) == CONST)
2404 offset = XEXP (offset, 0);
2405 if (GET_CODE (offset) == PLUS)
2406 {
2407 if (CONST_INT_P (XEXP (offset, 0)))
2408 {
2409 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2410 offset = XEXP (offset, 0);
2411 }
2412 else if (CONST_INT_P (XEXP (offset, 1)))
2413 {
2414 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2415 offset = XEXP (offset, 1);
2416 }
2417 else
2418 {
2419 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2420 offset = const0_rtx;
2421 }
2422 }
2423 else if (!CONST_INT_P (offset))
2424 {
2425 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2426 offset = const0_rtx;
2427 }
2428
2429 if (all_const && GET_CODE (base) == PLUS)
2430 base = gen_rtx_CONST (GET_MODE (base), base);
2431
2432 gcc_assert (CONST_INT_P (offset));
2433
2434 val.start = INTVAL (offset);
2435 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2436 val.base = base;
2437 }
2438 break;
2439
2440 case REG:
2441 val.reg_flag = 1;
2442 val.start = true_regnum (x);
2443 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2444 {
2445 /* A pseudo with no hard reg. */
2446 val.start = REGNO (x);
2447 val.end = val.start + 1;
2448 }
2449 else
2450 /* A hard reg. */
2451 val.end = end_hard_regno (GET_MODE (x), val.start);
2452 break;
2453
2454 case SUBREG:
2455 if (!REG_P (SUBREG_REG (x)))
2456 /* This could be more precise, but it's good enough. */
2457 return decompose (SUBREG_REG (x));
2458 val.reg_flag = 1;
2459 val.start = true_regnum (x);
2460 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2461 return decompose (SUBREG_REG (x));
2462 else
2463 /* A hard reg. */
2464 val.end = val.start + subreg_nregs (x);
2465 break;
2466
2467 case SCRATCH:
2468 /* This hasn't been assigned yet, so it can't conflict yet. */
2469 val.safe = 1;
2470 break;
2471
2472 default:
2473 gcc_assert (CONSTANT_P (x));
2474 val.safe = 1;
2475 break;
2476 }
2477 return val;
2478 }
2479
2480 /* Return 1 if altering Y will not modify the value of X.
2481 Y is also described by YDATA, which should be decompose (Y). */
2482
2483 static int
2484 immune_p (rtx x, rtx y, struct decomposition ydata)
2485 {
2486 struct decomposition xdata;
2487
2488 if (ydata.reg_flag)
2489 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2490 if (ydata.safe)
2491 return 1;
2492
2493 gcc_assert (MEM_P (y));
2494 /* If Y is memory and X is not, Y can't affect X. */
2495 if (!MEM_P (x))
2496 return 1;
2497
2498 xdata = decompose (x);
2499
2500 if (! rtx_equal_p (xdata.base, ydata.base))
2501 {
2502 /* If bases are distinct symbolic constants, there is no overlap. */
2503 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2504 return 1;
2505 /* Constants and stack slots never overlap. */
2506 if (CONSTANT_P (xdata.base)
2507 && (ydata.base == frame_pointer_rtx
2508 || ydata.base == hard_frame_pointer_rtx
2509 || ydata.base == stack_pointer_rtx))
2510 return 1;
2511 if (CONSTANT_P (ydata.base)
2512 && (xdata.base == frame_pointer_rtx
2513 || xdata.base == hard_frame_pointer_rtx
2514 || xdata.base == stack_pointer_rtx))
2515 return 1;
2516 /* If either base is variable, we don't know anything. */
2517 return 0;
2518 }
2519
2520 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2521 }
2522
2523 /* Similar, but calls decompose. */
2524
2525 int
2526 safe_from_earlyclobber (rtx op, rtx clobber)
2527 {
2528 struct decomposition early_data;
2529
2530 early_data = decompose (clobber);
2531 return immune_p (op, clobber, early_data);
2532 }
2533 \f
2534 /* Main entry point of this file: search the body of INSN
2535 for values that need reloading and record them with push_reload.
2536 REPLACE nonzero means record also where the values occur
2537 so that subst_reloads can be used.
2538
2539 IND_LEVELS says how many levels of indirection are supported by this
2540 machine; a value of zero means that a memory reference is not a valid
2541 memory address.
2542
2543 LIVE_KNOWN says we have valid information about which hard
2544 regs are live at each point in the program; this is true when
2545 we are called from global_alloc but false when stupid register
2546 allocation has been done.
2547
2548 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2549 an element is nonnegative if that reg has been commandeered for reloading into.
2550 It is copied into STATIC_RELOAD_REG_P and referenced from there
2551 by various subroutines.
2552
2553 Return TRUE if some operands need to be changed, because of swapping
2554 commutative operands, reg_equiv_address substitution, or whatever. */
2555
2556 int
2557 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2558 short *reload_reg_p)
2559 {
2560 int insn_code_number;
2561 int i, j;
2562 int noperands;
2563 /* These start out as the constraints for the insn
2564 and they are chewed up as we consider alternatives. */
2565 const char *constraints[MAX_RECOG_OPERANDS];
2566 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2567 a register. */
2568 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2569 char pref_or_nothing[MAX_RECOG_OPERANDS];
2570 /* Nonzero for a MEM operand whose entire address needs a reload.
2571 May be -1 to indicate the entire address may or may not need a reload. */
2572 int address_reloaded[MAX_RECOG_OPERANDS];
2573 /* Nonzero for an address operand that needs to be completely reloaded.
2574 May be -1 to indicate the entire operand may or may not need a reload. */
2575 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2576 /* Value of enum reload_type to use for operand. */
2577 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2578 /* Value of enum reload_type to use within address of operand. */
2579 enum reload_type address_type[MAX_RECOG_OPERANDS];
2580 /* Save the usage of each operand. */
2581 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2582 int no_input_reloads = 0, no_output_reloads = 0;
2583 int n_alternatives;
2584 enum reg_class this_alternative[MAX_RECOG_OPERANDS];
2585 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2586 char this_alternative_win[MAX_RECOG_OPERANDS];
2587 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2588 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2589 int this_alternative_matches[MAX_RECOG_OPERANDS];
2590 int swapped;
2591 int goal_alternative[MAX_RECOG_OPERANDS];
2592 int this_alternative_number;
2593 int goal_alternative_number = 0;
2594 int operand_reloadnum[MAX_RECOG_OPERANDS];
2595 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2596 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2597 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2598 char goal_alternative_win[MAX_RECOG_OPERANDS];
2599 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2600 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2601 int goal_alternative_swapped;
2602 int best;
2603 int best_small_class_operands_num;
2604 int commutative;
2605 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2606 rtx substed_operand[MAX_RECOG_OPERANDS];
2607 rtx body = PATTERN (insn);
2608 rtx set = single_set (insn);
2609 int goal_earlyclobber = 0, this_earlyclobber;
2610 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2611 int retval = 0;
2612
2613 this_insn = insn;
2614 n_reloads = 0;
2615 n_replacements = 0;
2616 n_earlyclobbers = 0;
2617 replace_reloads = replace;
2618 hard_regs_live_known = live_known;
2619 static_reload_reg_p = reload_reg_p;
2620
2621 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2622 neither are insns that SET cc0. Insns that use CC0 are not allowed
2623 to have any input reloads. */
2624 if (JUMP_P (insn) || CALL_P (insn))
2625 no_output_reloads = 1;
2626
2627 #ifdef HAVE_cc0
2628 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2629 no_input_reloads = 1;
2630 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2631 no_output_reloads = 1;
2632 #endif
2633
2634 #ifdef SECONDARY_MEMORY_NEEDED
2635 /* The eliminated forms of any secondary memory locations are per-insn, so
2636 clear them out here. */
2637
2638 if (secondary_memlocs_elim_used)
2639 {
2640 memset (secondary_memlocs_elim, 0,
2641 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2642 secondary_memlocs_elim_used = 0;
2643 }
2644 #endif
2645
2646 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2647 is cheap to move between them. If it is not, there may not be an insn
2648 to do the copy, so we may need a reload. */
2649 if (GET_CODE (body) == SET
2650 && REG_P (SET_DEST (body))
2651 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2652 && REG_P (SET_SRC (body))
2653 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2654 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2655 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2656 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2657 return 0;
2658
2659 extract_insn (insn);
2660
2661 noperands = reload_n_operands = recog_data.n_operands;
2662 n_alternatives = recog_data.n_alternatives;
2663
2664 /* Just return "no reloads" if insn has no operands with constraints. */
2665 if (noperands == 0 || n_alternatives == 0)
2666 return 0;
2667
2668 insn_code_number = INSN_CODE (insn);
2669 this_insn_is_asm = insn_code_number < 0;
2670
2671 memcpy (operand_mode, recog_data.operand_mode,
2672 noperands * sizeof (enum machine_mode));
2673 memcpy (constraints, recog_data.constraints,
2674 noperands * sizeof (const char *));
2675
2676 commutative = -1;
2677
2678 /* If we will need to know, later, whether some pair of operands
2679 are the same, we must compare them now and save the result.
2680 Reloading the base and index registers will clobber them
2681 and afterward they will fail to match. */
2682
2683 for (i = 0; i < noperands; i++)
2684 {
2685 const char *p;
2686 int c;
2687 char *end;
2688
2689 substed_operand[i] = recog_data.operand[i];
2690 p = constraints[i];
2691
2692 modified[i] = RELOAD_READ;
2693
2694 /* Scan this operand's constraint to see if it is an output operand,
2695 an in-out operand, is commutative, or should match another. */
2696
2697 while ((c = *p))
2698 {
2699 p += CONSTRAINT_LEN (c, p);
2700 switch (c)
2701 {
2702 case '=':
2703 modified[i] = RELOAD_WRITE;
2704 break;
2705 case '+':
2706 modified[i] = RELOAD_READ_WRITE;
2707 break;
2708 case '%':
2709 {
2710 /* The last operand should not be marked commutative. */
2711 gcc_assert (i != noperands - 1);
2712
2713 /* We currently only support one commutative pair of
2714 operands. Some existing asm code currently uses more
2715 than one pair. Previously, that would usually work,
2716 but sometimes it would crash the compiler. We
2717 continue supporting that case as well as we can by
2718 silently ignoring all but the first pair. In the
2719 future we may handle it correctly. */
2720 if (commutative < 0)
2721 commutative = i;
2722 else
2723 gcc_assert (this_insn_is_asm);
2724 }
2725 break;
2726 /* Use of ISDIGIT is tempting here, but it may get expensive because
2727 of locale support we don't want. */
2728 case '0': case '1': case '2': case '3': case '4':
2729 case '5': case '6': case '7': case '8': case '9':
2730 {
2731 c = strtoul (p - 1, &end, 10);
2732 p = end;
2733
2734 operands_match[c][i]
2735 = operands_match_p (recog_data.operand[c],
2736 recog_data.operand[i]);
2737
2738 /* An operand may not match itself. */
2739 gcc_assert (c != i);
2740
2741 /* If C can be commuted with C+1, and C might need to match I,
2742 then C+1 might also need to match I. */
2743 if (commutative >= 0)
2744 {
2745 if (c == commutative || c == commutative + 1)
2746 {
2747 int other = c + (c == commutative ? 1 : -1);
2748 operands_match[other][i]
2749 = operands_match_p (recog_data.operand[other],
2750 recog_data.operand[i]);
2751 }
2752 if (i == commutative || i == commutative + 1)
2753 {
2754 int other = i + (i == commutative ? 1 : -1);
2755 operands_match[c][other]
2756 = operands_match_p (recog_data.operand[c],
2757 recog_data.operand[other]);
2758 }
2759 /* Note that C is supposed to be less than I.
2760 No need to consider altering both C and I because in
2761 that case we would alter one into the other. */
2762 }
2763 }
2764 }
2765 }
2766 }
2767
2768 /* Examine each operand that is a memory reference or memory address
2769 and reload parts of the addresses into index registers.
2770 Also here any references to pseudo regs that didn't get hard regs
2771 but are equivalent to constants get replaced in the insn itself
2772 with those constants. Nobody will ever see them again.
2773
2774 Finally, set up the preferred classes of each operand. */
2775
2776 for (i = 0; i < noperands; i++)
2777 {
2778 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2779
2780 address_reloaded[i] = 0;
2781 address_operand_reloaded[i] = 0;
2782 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2783 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2784 : RELOAD_OTHER);
2785 address_type[i]
2786 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2787 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2788 : RELOAD_OTHER);
2789
2790 if (*constraints[i] == 0)
2791 /* Ignore things like match_operator operands. */
2792 ;
2793 else if (constraints[i][0] == 'p'
2794 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2795 {
2796 address_operand_reloaded[i]
2797 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2798 recog_data.operand[i],
2799 recog_data.operand_loc[i],
2800 i, operand_type[i], ind_levels, insn);
2801
2802 /* If we now have a simple operand where we used to have a
2803 PLUS or MULT, re-recognize and try again. */
2804 if ((OBJECT_P (*recog_data.operand_loc[i])
2805 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2806 && (GET_CODE (recog_data.operand[i]) == MULT
2807 || GET_CODE (recog_data.operand[i]) == PLUS))
2808 {
2809 INSN_CODE (insn) = -1;
2810 retval = find_reloads (insn, replace, ind_levels, live_known,
2811 reload_reg_p);
2812 return retval;
2813 }
2814
2815 recog_data.operand[i] = *recog_data.operand_loc[i];
2816 substed_operand[i] = recog_data.operand[i];
2817
2818 /* Address operands are reloaded in their existing mode,
2819 no matter what is specified in the machine description. */
2820 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2821 }
2822 else if (code == MEM)
2823 {
2824 address_reloaded[i]
2825 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2826 recog_data.operand_loc[i],
2827 XEXP (recog_data.operand[i], 0),
2828 &XEXP (recog_data.operand[i], 0),
2829 i, address_type[i], ind_levels, insn);
2830 recog_data.operand[i] = *recog_data.operand_loc[i];
2831 substed_operand[i] = recog_data.operand[i];
2832 }
2833 else if (code == SUBREG)
2834 {
2835 rtx reg = SUBREG_REG (recog_data.operand[i]);
2836 rtx op
2837 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2838 ind_levels,
2839 set != 0
2840 && &SET_DEST (set) == recog_data.operand_loc[i],
2841 insn,
2842 &address_reloaded[i]);
2843
2844 /* If we made a MEM to load (a part of) the stack slot of a pseudo
2845 that didn't get a hard register, emit a USE with a REG_EQUAL
2846 note in front so that we might inherit a previous, possibly
2847 wider reload. */
2848
2849 if (replace
2850 && MEM_P (op)
2851 && REG_P (reg)
2852 && (GET_MODE_SIZE (GET_MODE (reg))
2853 >= GET_MODE_SIZE (GET_MODE (op)))
2854 && reg_equiv_constant[REGNO (reg)] == 0)
2855 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2856 insn),
2857 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2858
2859 substed_operand[i] = recog_data.operand[i] = op;
2860 }
2861 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2862 /* We can get a PLUS as an "operand" as a result of register
2863 elimination. See eliminate_regs and gen_reload. We handle
2864 a unary operator by reloading the operand. */
2865 substed_operand[i] = recog_data.operand[i]
2866 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2867 ind_levels, 0, insn,
2868 &address_reloaded[i]);
2869 else if (code == REG)
2870 {
2871 /* This is equivalent to calling find_reloads_toplev.
2872 The code is duplicated for speed.
2873 When we find a pseudo always equivalent to a constant,
2874 we replace it by the constant. We must be sure, however,
2875 that we don't try to replace it in the insn in which it
2876 is being set. */
2877 int regno = REGNO (recog_data.operand[i]);
2878 if (reg_equiv_constant[regno] != 0
2879 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2880 {
2881 /* Record the existing mode so that the check whether constants are
2882 allowed will work when operand_mode isn't specified. */
2883
2884 if (operand_mode[i] == VOIDmode)
2885 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2886
2887 substed_operand[i] = recog_data.operand[i]
2888 = reg_equiv_constant[regno];
2889 }
2890 if (reg_equiv_memory_loc[regno] != 0
2891 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2892 /* We need not give a valid is_set_dest argument since the case
2893 of a constant equivalence was checked above. */
2894 substed_operand[i] = recog_data.operand[i]
2895 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2896 ind_levels, 0, insn,
2897 &address_reloaded[i]);
2898 }
2899 /* If the operand is still a register (we didn't replace it with an
2900 equivalent), get the preferred class to reload it into. */
2901 code = GET_CODE (recog_data.operand[i]);
2902 preferred_class[i]
2903 = ((code == REG && REGNO (recog_data.operand[i])
2904 >= FIRST_PSEUDO_REGISTER)
2905 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2906 : NO_REGS);
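/* pref_or_nothing is set when the pseudo's alternate class is NO_REGS,
i.e. reloading it into anything but its preferred class is pointless. */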
2907 pref_or_nothing[i]
2908 = (code == REG
2909 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2910 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2911 }
2912
2913 /* If this is simply a copy from operand 1 to operand 0, merge the
2914 preferred classes for the operands. */
2915 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2916 && recog_data.operand[1] == SET_SRC (set))
2917 {
2918 preferred_class[0] = preferred_class[1]
2919 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2920 pref_or_nothing[0] |= pref_or_nothing[1];
2921 pref_or_nothing[1] |= pref_or_nothing[0];
2922 }
2923
2924 /* Now see what we need for pseudo-regs that didn't get hard regs
2925 or got the wrong kind of hard reg. For this, we must consider
2926 all the operands together against the register constraints. */
2927
2928 best = MAX_RECOG_OPERANDS * 2 + 600;
2929 best_small_class_operands_num = 0;
2930
2931 swapped = 0;
2932 goal_alternative_swapped = 0;
2933 try_swapped:
2934
2935 /* The constraints are made of several alternatives.
2936 Each operand's constraint looks like foo,bar,... with commas
2937 separating the alternatives. The first alternatives for all
2938 operands go together, the second alternatives go together, etc.
2939
2940 First loop over alternatives. */
2941
2942 for (this_alternative_number = 0;
2943 this_alternative_number < n_alternatives;
2944 this_alternative_number++)
2945 {
2946 /* Loop over operands for one constraint alternative. */
2947 /* LOSERS counts those that don't fit this alternative
2948 and would require loading. */
2949 int losers = 0;
2950 /* BAD is set to 1 if some operand can't fit this alternative
2951 even after reloading. */
2952 int bad = 0;
2953 /* REJECT is a count of how undesirable this alternative says it is
2954 if any reloading is required. If the alternative matches exactly
2955 then REJECT is ignored, but otherwise it gets this much
2956 counted against it in addition to the reloading needed. Each
2957 ? counts three times here since we want the disparaging caused by
2958 a bad register class to only count 1/3 as much. */
2959 int reject = 0;
2960
2961 if (!recog_data.alternative_enabled_p[this_alternative_number])
2962 {
2963 int i;
2964
2965 for (i = 0; i < recog_data.n_operands; i++)
2966 constraints[i] = skip_alternative (constraints[i]);
2967
2968 continue;
2969 }
2970
2971 this_earlyclobber = 0;
2972
2973 for (i = 0; i < noperands; i++)
2974 {
2975 const char *p = constraints[i];
2976 char *end;
2977 int len;
2978 int win = 0;
2979 int did_match = 0;
2980 /* 0 => this operand can be reloaded somehow for this alternative. */
2981 int badop = 1;
2982 /* 0 => this operand can be reloaded if the alternative allows regs. */
2983 int winreg = 0;
2984 int c;
2985 int m;
2986 rtx operand = recog_data.operand[i];
2987 int offset = 0;
2988 /* Nonzero means this is a MEM that must be reloaded into a reg
2989 regardless of what the constraint says. */
2990 int force_reload = 0;
2991 int offmemok = 0;
2992 /* Nonzero if a constant forced into memory would be OK for this
2993 operand. */
2994 int constmemok = 0;
2995 int earlyclobber = 0;
2996
2997 /* If the predicate accepts a unary operator, it means that
2998 we need to reload the operand, but do not do this for
2999 match_operator and friends. */
3000 if (UNARY_P (operand) && *p != 0)
3001 operand = XEXP (operand, 0);
3002
3003 /* If the operand is a SUBREG, extract
3004 the REG or MEM (or maybe even a constant) within.
3005 (Constants can occur as a result of reg_equiv_constant.) */
3006
3007 while (GET_CODE (operand) == SUBREG)
3008 {
3009 /* Offset only matters when operand is a REG and
3010 it is a hard reg. This is because it is passed
3011 to reg_fits_class_p if it is a REG and all pseudos
3012 return 0 from that function. */
3013 if (REG_P (SUBREG_REG (operand))
3014 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3015 {
3016 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3017 GET_MODE (SUBREG_REG (operand)),
3018 SUBREG_BYTE (operand),
3019 GET_MODE (operand)) < 0)
3020 force_reload = 1;
3021 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3022 GET_MODE (SUBREG_REG (operand)),
3023 SUBREG_BYTE (operand),
3024 GET_MODE (operand));
3025 }
3026 operand = SUBREG_REG (operand);
3027 /* Force reload if this is a constant or PLUS or if there may
3028 be a problem accessing OPERAND in the outer mode. */
3029 if (CONSTANT_P (operand)
3030 || GET_CODE (operand) == PLUS
3031 /* We must force a reload of paradoxical SUBREGs
3032 of a MEM because the alignment of the inner value
3033 may not be enough to do the outer reference. On
3034 big-endian machines, it may also reference outside
3035 the object.
3036
3037 On machines that extend byte operations and we have a
3038 SUBREG where both the inner and outer modes are no wider
3039 than a word and the inner mode is narrower, is integral,
3040 and gets extended when loaded from memory, combine.c has
3041 made assumptions about the behavior of the machine in such
3042 register access. If the data is, in fact, in memory we
3043 must always load using the size assumed to be in the
3044 register and let the insn do the different-sized
3045 accesses.
3046
3047 This is doubly true if WORD_REGISTER_OPERATIONS. In
3048 this case eliminate_regs has left non-paradoxical
3049 subregs for push_reload to see. Make sure it does
3050 by forcing the reload.
3051
3052 ??? When is it right at this stage to have a subreg
3053 of a mem that is _not_ to be handled specially? IMO
3054 those should have been reduced to just a mem. */
3055 || ((MEM_P (operand)
3056 || (REG_P (operand)
3057 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3058 #ifndef WORD_REGISTER_OPERATIONS
3059 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3060 < BIGGEST_ALIGNMENT)
3061 && (GET_MODE_SIZE (operand_mode[i])
3062 > GET_MODE_SIZE (GET_MODE (operand))))
3063 || BYTES_BIG_ENDIAN
3064 #ifdef LOAD_EXTEND_OP
3065 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3066 && (GET_MODE_SIZE (GET_MODE (operand))
3067 <= UNITS_PER_WORD)
3068 && (GET_MODE_SIZE (operand_mode[i])
3069 > GET_MODE_SIZE (GET_MODE (operand)))
3070 && INTEGRAL_MODE_P (GET_MODE (operand))
3071 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3072 #endif
3073 )
3074 #endif
3075 )
3076 )
3077 force_reload = 1;
3078 }
3079
3080 this_alternative[i] = NO_REGS;
3081 this_alternative_win[i] = 0;
3082 this_alternative_match_win[i] = 0;
3083 this_alternative_offmemok[i] = 0;
3084 this_alternative_earlyclobber[i] = 0;
3085 this_alternative_matches[i] = -1;
3086
3087 /* An empty constraint or empty alternative
3088 allows anything which matched the pattern. */
3089 if (*p == 0 || *p == ',')
3090 win = 1, badop = 0;
3091
3092 /* Scan this alternative's specs for this operand;
3093 set WIN if the operand fits any letter in this alternative.
3094 Otherwise, clear BADOP if this operand could
3095 fit some letter after reloads,
3096 or set WINREG if this operand could fit after reloads
3097 provided the constraint allows some registers. */
3098
3099 do
3100 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3101 {
3102 case '\0':
3103 len = 0;
3104 break;
3105 case ',':
3106 c = '\0';
3107 break;
3108
3109 case '=': case '+': case '*':
3110 break;
3111
3112 case '%':
3113 /* We only support one commutative marker, the first
3114 one. We already set commutative above. */
3115 break;
3116
3117 case '?':
3118 reject += 6;
3119 break;
3120
3121 case '!':
3122 reject = 600;
3123 break;
3124
3125 case '#':
3126 /* Ignore rest of this alternative as far as
3127 reloading is concerned. */
3128 do
3129 p++;
3130 while (*p && *p != ',');
3131 len = 0;
3132 break;
3133
3134 case '0': case '1': case '2': case '3': case '4':
3135 case '5': case '6': case '7': case '8': case '9':
3136 m = strtoul (p, &end, 10);
3137 p = end;
3138 len = 0;
3139
3140 this_alternative_matches[i] = m;
3141 /* We are supposed to match a previous operand.
3142 If we do, we win if that one did.
3143 If we do not, count both of the operands as losers.
3144 (This is too conservative, since most of the time
3145 only a single reload insn will be needed to make
3146 the two operands win. As a result, this alternative
3147 may be rejected when it is actually desirable.) */
3148 if ((swapped && (m != commutative || i != commutative + 1))
3149 /* If we are matching as if two operands were swapped,
3150 also pretend that operands_match had been computed with
3151 the operands swapped.
3152 But if I is the second of those and C is the first,
3153 don't exchange them, because operands_match is valid
3154 only on one side of its diagonal. */
3155 ? (operands_match
3156 [(m == commutative || m == commutative + 1)
3157 ? 2 * commutative + 1 - m : m]
3158 [(i == commutative || i == commutative + 1)
3159 ? 2 * commutative + 1 - i : i])
3160 : operands_match[m][i])
3161 {
3162 /* If we are matching a non-offsettable address where an
3163 offsettable address was expected, then we must reject
3164 this combination, because we can't reload it. */
3165 if (this_alternative_offmemok[m]
3166 && MEM_P (recog_data.operand[m])
3167 && this_alternative[m] == NO_REGS
3168 && ! this_alternative_win[m])
3169 bad = 1;
3170
3171 did_match = this_alternative_win[m];
3172 }
3173 else
3174 {
3175 /* Operands don't match. */
3176 rtx value;
3177 int loc1, loc2;
3178 /* Retroactively mark the operand we had to match
3179 as a loser, if it wasn't already. */
3180 if (this_alternative_win[m])
3181 losers++;
3182 this_alternative_win[m] = 0;
3183 if (this_alternative[m] == NO_REGS)
3184 bad = 1;
3185 /* But count the pair only once in the total badness of
3186 this alternative, if the pair can be a dummy reload.
3187 The pointers in operand_loc are not swapped; swap
3188 them by hand if necessary. */
3189 if (swapped && i == commutative)
3190 loc1 = commutative + 1;
3191 else if (swapped && i == commutative + 1)
3192 loc1 = commutative;
3193 else
3194 loc1 = i;
3195 if (swapped && m == commutative)
3196 loc2 = commutative + 1;
3197 else if (swapped && m == commutative + 1)
3198 loc2 = commutative;
3199 else
3200 loc2 = m;
3201 value
3202 = find_dummy_reload (recog_data.operand[i],
3203 recog_data.operand[m],
3204 recog_data.operand_loc[loc1],
3205 recog_data.operand_loc[loc2],
3206 operand_mode[i], operand_mode[m],
3207 this_alternative[m], -1,
3208 this_alternative_earlyclobber[m]);
3209
3210 if (value != 0)
3211 losers--;
3212 }
3213 /* This can be fixed with reloads if the operand
3214 we are supposed to match can be fixed with reloads. */
3215 badop = 0;
3216 this_alternative[i] = this_alternative[m];
3217
3218 /* If we have to reload this operand and some previous
3219 operand also had to match the same thing as this
3220 operand, we don't know how to do that. So reject this
3221 alternative. */
3222 if (! did_match || force_reload)
3223 for (j = 0; j < i; j++)
3224 if (this_alternative_matches[j]
3225 == this_alternative_matches[i])
3226 badop = 1;
3227 break;
3228
3229 case 'p':
3230 /* All necessary reloads for an address_operand
3231 were handled in find_reloads_address. */
3232 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3233 SCRATCH);
3234 win = 1;
3235 badop = 0;
3236 break;
3237
3238 case TARGET_MEM_CONSTRAINT:
3239 if (force_reload)
3240 break;
3241 if (MEM_P (operand)
3242 || (REG_P (operand)
3243 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3244 && reg_renumber[REGNO (operand)] < 0))
3245 win = 1;
3246 if (CONST_POOL_OK_P (operand))
3247 badop = 0;
3248 constmemok = 1;
3249 break;
3250
3251 case '<':
3252 if (MEM_P (operand)
3253 && ! address_reloaded[i]
3254 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3255 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3256 win = 1;
3257 break;
3258
3259 case '>':
3260 if (MEM_P (operand)
3261 && ! address_reloaded[i]
3262 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3263 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3264 win = 1;
3265 break;
3266
3267 /* Memory operand whose address is not offsettable. */
3268 case 'V':
3269 if (force_reload)
3270 break;
3271 if (MEM_P (operand)
3272 && ! (ind_levels ? offsettable_memref_p (operand)
3273 : offsettable_nonstrict_memref_p (operand))
3274 /* Certain mem addresses will become offsettable
3275 after they themselves are reloaded. This is important;
3276 we don't want our own handling of unoffsettables
3277 to override the handling of reg_equiv_address. */
3278 && !(REG_P (XEXP (operand, 0))
3279 && (ind_levels == 0
3280 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3281 win = 1;
3282 break;
3283
3284 /* Memory operand whose address is offsettable. */
3285 case 'o':
3286 if (force_reload)
3287 break;
3288 if ((MEM_P (operand)
3289 /* If IND_LEVELS, find_reloads_address won't reload a
3290 pseudo that didn't get a hard reg, so we have to
3291 reject that case. */
3292 && ((ind_levels ? offsettable_memref_p (operand)
3293 : offsettable_nonstrict_memref_p (operand))
3294 /* A reloaded address is offsettable because it is now
3295 just a simple register indirect. */
3296 || address_reloaded[i] == 1))
3297 || (REG_P (operand)
3298 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3299 && reg_renumber[REGNO (operand)] < 0
3300 /* If reg_equiv_address is nonzero, we will be
3301 loading it into a register; hence it will be
3302 offsettable, but we cannot say that reg_equiv_mem
3303 is offsettable without checking. */
3304 && ((reg_equiv_mem[REGNO (operand)] != 0
3305 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3306 || (reg_equiv_address[REGNO (operand)] != 0))))
3307 win = 1;
3308 if (CONST_POOL_OK_P (operand)
3309 || MEM_P (operand))
3310 badop = 0;
3311 constmemok = 1;
3312 offmemok = 1;
3313 break;
3314
3315 case '&':
3316 /* Output operand that is stored before the need for the
3317 input operands (and their index registers) is over. */
3318 earlyclobber = 1, this_earlyclobber = 1;
3319 break;
3320
3321 case 'E':
3322 case 'F':
3323 if (GET_CODE (operand) == CONST_DOUBLE
3324 || (GET_CODE (operand) == CONST_VECTOR
3325 && (GET_MODE_CLASS (GET_MODE (operand))
3326 == MODE_VECTOR_FLOAT)))
3327 win = 1;
3328 break;
3329
3330 case 'G':
3331 case 'H':
3332 if (GET_CODE (operand) == CONST_DOUBLE
3333 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3334 win = 1;
3335 break;
3336
3337 case 's':
3338 if (CONST_INT_P (operand)
3339 || (GET_CODE (operand) == CONST_DOUBLE
3340 && GET_MODE (operand) == VOIDmode))
3341 break;
3342 case 'i':
3343 if (CONSTANT_P (operand)
3344 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3345 win = 1;
3346 break;
3347
3348 case 'n':
3349 if (CONST_INT_P (operand)
3350 || (GET_CODE (operand) == CONST_DOUBLE
3351 && GET_MODE (operand) == VOIDmode))
3352 win = 1;
3353 break;
3354
3355 case 'I':
3356 case 'J':
3357 case 'K':
3358 case 'L':
3359 case 'M':
3360 case 'N':
3361 case 'O':
3362 case 'P':
3363 if (CONST_INT_P (operand)
3364 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3365 win = 1;
3366 break;
3367
3368 case 'X':
3369 force_reload = 0;
3370 win = 1;
3371 break;
3372
3373 case 'g':
3374 if (! force_reload
3375 /* A PLUS is never a valid operand, but reload can make
3376 it from a register when eliminating registers. */
3377 && GET_CODE (operand) != PLUS
3378 /* A SCRATCH is not a valid operand. */
3379 && GET_CODE (operand) != SCRATCH
3380 && (! CONSTANT_P (operand)
3381 || ! flag_pic
3382 || LEGITIMATE_PIC_OPERAND_P (operand))
3383 && (GENERAL_REGS == ALL_REGS
3384 || !REG_P (operand)
3385 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3386 && reg_renumber[REGNO (operand)] < 0)))
3387 win = 1;
3388 /* Drop through into 'r' case. */
3389
3390 case 'r':
3391 this_alternative[i]
3392 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3393 goto reg;
3394
3395 default:
3396 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3397 {
3398 #ifdef EXTRA_CONSTRAINT_STR
3399 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3400 {
3401 if (force_reload)
3402 break;
3403 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3404 win = 1;
3405 /* If the address was already reloaded,
3406 we win as well. */
3407 else if (MEM_P (operand)
3408 && address_reloaded[i] == 1)
3409 win = 1;
3410 /* Likewise if the address will be reloaded because
3411 reg_equiv_address is nonzero. For reg_equiv_mem
3412 we have to check. */
3413 else if (REG_P (operand)
3414 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3415 && reg_renumber[REGNO (operand)] < 0
3416 && ((reg_equiv_mem[REGNO (operand)] != 0
3417 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3418 || (reg_equiv_address[REGNO (operand)] != 0)))
3419 win = 1;
3420
3421 /* If we didn't already win, we can reload
3422 constants via force_const_mem, and other
3423 MEMs by reloading the address like for 'o'. */
3424 if (CONST_POOL_OK_P (operand)
3425 || MEM_P (operand))
3426 badop = 0;
3427 constmemok = 1;
3428 offmemok = 1;
3429 break;
3430 }
3431 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3432 {
3433 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3434 win = 1;
3435
3436 /* If we didn't already win, we can reload
3437 the address into a base register. */
3438 this_alternative[i] = base_reg_class (VOIDmode,
3439 ADDRESS,
3440 SCRATCH);
3441 badop = 0;
3442 break;
3443 }
3444
3445 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3446 win = 1;
3447 #endif
3448 break;
3449 }
3450
3451 this_alternative[i]
3452 = (reg_class_subunion
3453 [this_alternative[i]]
3454 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3455 reg:
3456 if (GET_MODE (operand) == BLKmode)
3457 break;
3458 winreg = 1;
3459 if (REG_P (operand)
3460 && reg_fits_class_p (operand, this_alternative[i],
3461 offset, GET_MODE (recog_data.operand[i])))
3462 win = 1;
3463 break;
3464 }
3465 while ((p += len), c);
3466
3467 constraints[i] = p;
3468
3469 /* If this operand could be handled with a reg,
3470 and some reg is allowed, then this operand can be handled. */
3471 if (winreg && this_alternative[i] != NO_REGS)
3472 badop = 0;
3473
3474 /* Record which operands fit this alternative. */
3475 this_alternative_earlyclobber[i] = earlyclobber;
3476 if (win && ! force_reload)
3477 this_alternative_win[i] = 1;
3478 else if (did_match && ! force_reload)
3479 this_alternative_match_win[i] = 1;
3480 else
3481 {
3482 int const_to_mem = 0;
3483
3484 this_alternative_offmemok[i] = offmemok;
3485 losers++;
3486 if (badop)
3487 bad = 1;
3488 /* Alternative loses if it has no regs for a reg operand. */
3489 if (REG_P (operand)
3490 && this_alternative[i] == NO_REGS
3491 && this_alternative_matches[i] < 0)
3492 bad = 1;
3493
3494 /* If this is a constant that is reloaded into the desired
3495 class by copying it to memory first, count that as another
3496 reload. This is consistent with other code and is
3497 required to avoid choosing another alternative when
3498 the constant is moved into memory by this function on
3499 an early reload pass. Note that the test here is
3500 precisely the same as in the code below that calls
3501 force_const_mem. */
3502 if (CONST_POOL_OK_P (operand)
3503 && ((PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3504 == NO_REGS)
3505 || no_input_reloads)
3506 && operand_mode[i] != VOIDmode)
3507 {
3508 const_to_mem = 1;
3509 if (this_alternative[i] != NO_REGS)
3510 losers++;
3511 }
3512
3513 /* Alternative loses if it requires a type of reload not
3514 permitted for this insn. We can always reload SCRATCH
3515 and objects with a REG_UNUSED note. */
3516 if (GET_CODE (operand) != SCRATCH
3517 && modified[i] != RELOAD_READ && no_output_reloads
3518 && ! find_reg_note (insn, REG_UNUSED, operand))
3519 bad = 1;
3520 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3521 && ! const_to_mem)
3522 bad = 1;
3523
3524 /* If we can't reload this value at all, reject this
3525 alternative. Note that we could also lose due to
3526 LIMIT_RELOAD_CLASS, but we don't check that
3527 here. */
3528
3529 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3530 {
3531 if (PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3532 == NO_REGS)
3533 reject = 600;
3534
3535 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3536 if (operand_type[i] == RELOAD_FOR_OUTPUT
3537 && (PREFERRED_OUTPUT_RELOAD_CLASS (operand,
3538 this_alternative[i])
3539 == NO_REGS))
3540 reject = 600;
3541 #endif
3542 }
3543
3544 /* We prefer to reload pseudos over reloading other things,
3545 since such reloads may be able to be eliminated later.
3546 If we are reloading a SCRATCH, we won't be generating any
3547 insns, just using a register, so it is also preferred.
3548 So bump REJECT in other cases. Don't do this in the
3549 case where we are forcing a constant into memory and
3550 it will then win, since we don't want a different
3551 alternative to match in that case. */
3552 if (! (REG_P (operand)
3553 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3554 && GET_CODE (operand) != SCRATCH
3555 && ! (const_to_mem && constmemok))
3556 reject += 2;
3557
3558 /* Input reloads can be inherited more often than output
3559 reloads can be removed, so penalize output reloads. */
3560 if (operand_type[i] != RELOAD_FOR_INPUT
3561 && GET_CODE (operand) != SCRATCH)
3562 reject++;
3563 }
3564
3565 /* If this operand is a pseudo register that didn't get a hard
3566 reg and this alternative accepts some register, see if the
3567 class that we want is a subset of the preferred class for this
3568 register. If not, but it intersects that class, use the
3569 preferred class instead. If it does not intersect the preferred
3570 class, show that usage of this alternative should be discouraged;
3571 it will be discouraged more still if the register is `preferred
3572 or nothing'. We do this because it increases the chance of
3573 reusing our spill register in a later insn and avoiding a pair
3574 of memory stores and loads.
3575
3576 Don't bother with this if this alternative will accept this
3577 operand.
3578
3579 Don't do this for a multiword operand, since it is only a
3580 small win and has the risk of requiring more spill registers,
3581 which could cause a large loss.
3582
3583 Don't do this if the preferred class has only one register
3584 because we might otherwise exhaust the class. */
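/* For instance (hypothetical classes): if this_alternative[i] is
   GENERAL_REGS and the pseudo's preferred class is a strict subset such
   as a base-register class, the code below narrows this_alternative[i]
   to that preferred class; if neither class contains the other, REJECT
   is bumped by 2, or by 4 when the register is `preferred or nothing'.  */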
3585
3586 if (! win && ! did_match
3587 && this_alternative[i] != NO_REGS
3588 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3589 && reg_class_size [(int) preferred_class[i]] > 0
3590 && ! SMALL_REGISTER_CLASS_P (preferred_class[i]))
3591 {
3592 if (! reg_class_subset_p (this_alternative[i],
3593 preferred_class[i]))
3594 {
3595 /* Since we don't have a way of forming the intersection,
3596 we just do something special if the preferred class
3597 is a subset of the class we have; that's the most
3598 common case anyway. */
3599 if (reg_class_subset_p (preferred_class[i],
3600 this_alternative[i]))
3601 this_alternative[i] = preferred_class[i];
3602 else
3603 reject += (2 + 2 * pref_or_nothing[i]);
3604 }
3605 }
3606 }
3607
3608 /* Now see if any output operands that are marked "earlyclobber"
3609 in this alternative conflict with any input operands
3610 or any memory addresses. */
3611
3612 for (i = 0; i < noperands; i++)
3613 if (this_alternative_earlyclobber[i]
3614 && (this_alternative_win[i] || this_alternative_match_win[i]))
3615 {
3616 struct decomposition early_data;
3617
3618 early_data = decompose (recog_data.operand[i]);
3619
3620 gcc_assert (modified[i] != RELOAD_READ);
3621
3622 if (this_alternative[i] == NO_REGS)
3623 {
3624 this_alternative_earlyclobber[i] = 0;
3625 gcc_assert (this_insn_is_asm);
3626 error_for_asm (this_insn,
3627 "%<&%> constraint used with no register class");
3628 }
3629
3630 for (j = 0; j < noperands; j++)
3631 /* Is this an input operand or a memory ref? */
3632 if ((MEM_P (recog_data.operand[j])
3633 || modified[j] != RELOAD_WRITE)
3634 && j != i
3635 /* Ignore things like match_operator operands. */
3636 && !recog_data.is_operator[j]
3637 /* Don't count an input operand that is constrained to match
3638 the early clobber operand. */
3639 && ! (this_alternative_matches[j] == i
3640 && rtx_equal_p (recog_data.operand[i],
3641 recog_data.operand[j]))
3642 /* Is it altered by storing the earlyclobber operand? */
3643 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3644 early_data))
3645 {
3646 /* If the output is in a non-empty few-regs class,
3647 it's costly to reload it, so reload the input instead. */
3648 if (SMALL_REGISTER_CLASS_P (this_alternative[i])
3649 && (REG_P (recog_data.operand[j])
3650 || GET_CODE (recog_data.operand[j]) == SUBREG))
3651 {
3652 losers++;
3653 this_alternative_win[j] = 0;
3654 this_alternative_match_win[j] = 0;
3655 }
3656 else
3657 break;
3658 }
3659 /* If an earlyclobber operand conflicts with something,
3660 it must be reloaded, so request this and count the cost. */
3661 if (j != noperands)
3662 {
3663 losers++;
3664 this_alternative_win[i] = 0;
3665 this_alternative_match_win[j] = 0;
3666 for (j = 0; j < noperands; j++)
3667 if (this_alternative_matches[j] == i
3668 && this_alternative_match_win[j])
3669 {
3670 this_alternative_win[j] = 0;
3671 this_alternative_match_win[j] = 0;
3672 losers++;
3673 }
3674 }
3675 }
3676
3677 /* If one alternative accepts all the operands, no reload required,
3678 choose that alternative; don't consider the remaining ones. */
3679 if (losers == 0)
3680 {
3681 /* Unswap these so that they are never swapped at `finish'. */
3682 if (commutative >= 0)
3683 {
3684 recog_data.operand[commutative] = substed_operand[commutative];
3685 recog_data.operand[commutative + 1]
3686 = substed_operand[commutative + 1];
3687 }
3688 for (i = 0; i < noperands; i++)
3689 {
3690 goal_alternative_win[i] = this_alternative_win[i];
3691 goal_alternative_match_win[i] = this_alternative_match_win[i];
3692 goal_alternative[i] = this_alternative[i];
3693 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3694 goal_alternative_matches[i] = this_alternative_matches[i];
3695 goal_alternative_earlyclobber[i]
3696 = this_alternative_earlyclobber[i];
3697 }
3698 goal_alternative_number = this_alternative_number;
3699 goal_alternative_swapped = swapped;
3700 goal_earlyclobber = this_earlyclobber;
3701 goto finish;
3702 }
3703
3704 /* REJECT, set by the ! and ? constraint characters and when a register
3705 would be reloaded into a non-preferred class, discourages the use of
3706 this alternative for a reload goal. REJECT is incremented by six
3707 for each ? and two for each non-preferred class. */
3708 losers = losers * 6 + reject;
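/* Illustration (hypothetical counts): an alternative needing two reloads
   with one '?' scores 2 * 6 + 6 = 18, the same as one needing three
   reloads with no '?'; a '!' sets REJECT to 600, so such an alternative
   is taken only when no cheaper alternative can be made to work.  */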
3709
3710 /* If this alternative can be made to work by reloading,
3711 and it needs less reloading than the others checked so far,
3712 record it as the chosen goal for reloading. */
3713 if (! bad)
3714 {
3715 bool change_p = false;
3716 int small_class_operands_num = 0;
3717
3718 if (best >= losers)
3719 {
3720 for (i = 0; i < noperands; i++)
3721 small_class_operands_num
3722 += SMALL_REGISTER_CLASS_P (this_alternative[i]) ? 1 : 0;
3723 if (best > losers
3724 || (best == losers
3725 /* If the cost of the reloads is the same,
3726 prefer the alternative that requires the
3727 minimal number of small register classes for
3728 the operands. This improves the chances of
3729 reloads succeeding for insns that require
3730 small register classes. */
3731 && (small_class_operands_num
3732 < best_small_class_operands_num)))
3733 change_p = true;
3734 }
3735 if (change_p)
3736 {
3737 for (i = 0; i < noperands; i++)
3738 {
3739 goal_alternative[i] = this_alternative[i];
3740 goal_alternative_win[i] = this_alternative_win[i];
3741 goal_alternative_match_win[i]
3742 = this_alternative_match_win[i];
3743 goal_alternative_offmemok[i]
3744 = this_alternative_offmemok[i];
3745 goal_alternative_matches[i] = this_alternative_matches[i];
3746 goal_alternative_earlyclobber[i]
3747 = this_alternative_earlyclobber[i];
3748 }
3749 goal_alternative_swapped = swapped;
3750 best = losers;
3751 best_small_class_operands_num = small_class_operands_num;
3752 goal_alternative_number = this_alternative_number;
3753 goal_earlyclobber = this_earlyclobber;
3754 }
3755 }
3756 }
3757
3758 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3759 then we need to try each alternative twice,
3760 the second time matching those two operands
3761 as if we had exchanged them.
3762 To do this, really exchange them in operands.
3763
3764 If we have just tried the alternatives the second time,
3765 return operands to normal and drop through. */
3766
3767 if (commutative >= 0)
3768 {
3769 swapped = !swapped;
3770 if (swapped)
3771 {
3772 enum reg_class tclass;
3773 int t;
3774
3775 recog_data.operand[commutative] = substed_operand[commutative + 1];
3776 recog_data.operand[commutative + 1] = substed_operand[commutative];
3777 /* Swap the duplicates too. */
3778 for (i = 0; i < recog_data.n_dups; i++)
3779 if (recog_data.dup_num[i] == commutative
3780 || recog_data.dup_num[i] == commutative + 1)
3781 *recog_data.dup_loc[i]
3782 = recog_data.operand[(int) recog_data.dup_num[i]];
3783
3784 tclass = preferred_class[commutative];
3785 preferred_class[commutative] = preferred_class[commutative + 1];
3786 preferred_class[commutative + 1] = tclass;
3787
3788 t = pref_or_nothing[commutative];
3789 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3790 pref_or_nothing[commutative + 1] = t;
3791
3792 t = address_reloaded[commutative];
3793 address_reloaded[commutative] = address_reloaded[commutative + 1];
3794 address_reloaded[commutative + 1] = t;
3795
3796 memcpy (constraints, recog_data.constraints,
3797 noperands * sizeof (const char *));
3798 goto try_swapped;
3799 }
3800 else
3801 {
3802 recog_data.operand[commutative] = substed_operand[commutative];
3803 recog_data.operand[commutative + 1]
3804 = substed_operand[commutative + 1];
3805 /* Unswap the duplicates too. */
3806 for (i = 0; i < recog_data.n_dups; i++)
3807 if (recog_data.dup_num[i] == commutative
3808 || recog_data.dup_num[i] == commutative + 1)
3809 *recog_data.dup_loc[i]
3810 = recog_data.operand[(int) recog_data.dup_num[i]];
3811 }
3812 }
3813
3814 /* The operands don't meet the constraints.
3815 goal_alternative describes the alternative
3816 that we could reach by reloading the fewest operands.
3817 Reload so as to fit it. */
3818
3819 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3820 {
3821 /* No alternative works with reloads?? */
3822 if (insn_code_number >= 0)
3823 fatal_insn ("unable to generate reloads for:", insn);
3824 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3825 /* Avoid further trouble with this insn. */
3826 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3827 n_reloads = 0;
3828 return 0;
3829 }
3830
3831 /* Jump to `finish' from above if all operands are valid already.
3832 In that case, goal_alternative_win is all 1. */
3833 finish:
3834
3835 /* Right now, for any pair of operands I and J that are required to match,
3836 with I < J,
3837 goal_alternative_matches[J] is I.
3838 Set up goal_alternative_matched as the inverse function:
3839 goal_alternative_matched[I] = J. */
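/* For example (hypothetical operand numbers): if operand 2 had to match
   operand 0, goal_alternative_matches[2] is 0; when operand 2 did not win
   outright, the code below records goal_alternative_matched[0] = 2.  */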
3840
3841 for (i = 0; i < noperands; i++)
3842 goal_alternative_matched[i] = -1;
3843
3844 for (i = 0; i < noperands; i++)
3845 if (! goal_alternative_win[i]
3846 && goal_alternative_matches[i] >= 0)
3847 goal_alternative_matched[goal_alternative_matches[i]] = i;
3848
3849 for (i = 0; i < noperands; i++)
3850 goal_alternative_win[i] |= goal_alternative_match_win[i];
3851
3852 /* If the best alternative is with operands 1 and 2 swapped,
3853 consider them swapped before reporting the reloads. Update the
3854 operand numbers of any reloads already pushed. */
3855
3856 if (goal_alternative_swapped)
3857 {
3858 rtx tem;
3859
3860 tem = substed_operand[commutative];
3861 substed_operand[commutative] = substed_operand[commutative + 1];
3862 substed_operand[commutative + 1] = tem;
3863 tem = recog_data.operand[commutative];
3864 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3865 recog_data.operand[commutative + 1] = tem;
3866 tem = *recog_data.operand_loc[commutative];
3867 *recog_data.operand_loc[commutative]
3868 = *recog_data.operand_loc[commutative + 1];
3869 *recog_data.operand_loc[commutative + 1] = tem;
3870
3871 for (i = 0; i < n_reloads; i++)
3872 {
3873 if (rld[i].opnum == commutative)
3874 rld[i].opnum = commutative + 1;
3875 else if (rld[i].opnum == commutative + 1)
3876 rld[i].opnum = commutative;
3877 }
3878 }
3879
3880 for (i = 0; i < noperands; i++)
3881 {
3882 operand_reloadnum[i] = -1;
3883
3884 /* If this is an earlyclobber operand, we need to widen the scope.
3885 The reload must remain valid from the start of the insn being
3886 reloaded until after the operand is stored into its destination.
3887 We approximate this with RELOAD_OTHER even though we know that we
3888 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3889
3890 One special case that is worth checking is when we have an
3891 output that is earlyclobber but isn't used past the insn (typically
3892 a SCRATCH). In this case, we only need to have the reload live
3893 through the insn itself, but not for any of our input or output
3894 reloads.
3895 But we must not accidentally narrow the scope of an existing
3896 RELOAD_OTHER reload - leave these alone.
3897
3898 In any case, anything needed to address this operand can remain
3899 however it was previously categorized. */
3900
3901 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3902 operand_type[i]
3903 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3904 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3905 }
3906
3907 /* Any constants that aren't allowed and can't be reloaded
3908 into registers are here changed into memory references. */
3909 for (i = 0; i < noperands; i++)
3910 if (! goal_alternative_win[i])
3911 {
3912 rtx op = recog_data.operand[i];
3913 rtx subreg = NULL_RTX;
3914 rtx plus = NULL_RTX;
3915 enum machine_mode mode = operand_mode[i];
3916
3917 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3918 push_reload so we have to let them pass here. */
3919 if (GET_CODE (op) == SUBREG)
3920 {
3921 subreg = op;
3922 op = SUBREG_REG (op);
3923 mode = GET_MODE (op);
3924 }
3925
3926 if (GET_CODE (op) == PLUS)
3927 {
3928 plus = op;
3929 op = XEXP (op, 1);
3930 }
3931
3932 if (CONST_POOL_OK_P (op)
3933 && ((PREFERRED_RELOAD_CLASS (op,
3934 (enum reg_class) goal_alternative[i])
3935 == NO_REGS)
3936 || no_input_reloads)
3937 && mode != VOIDmode)
3938 {
3939 int this_address_reloaded;
3940 rtx tem = force_const_mem (mode, op);
3941
3942 /* If we stripped a SUBREG or a PLUS above, add it back. */
3943 if (plus != NULL_RTX)
3944 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3945
3946 if (subreg != NULL_RTX)
3947 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3948
3949 this_address_reloaded = 0;
3950 substed_operand[i] = recog_data.operand[i]
3951 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3952 0, insn, &this_address_reloaded);
3953
3954 /* If the alternative accepts constant pool refs directly,
3955 there will be no reload needed at all. */
3956 if (plus == NULL_RTX
3957 && subreg == NULL_RTX
3958 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3959 ? substed_operand[i]
3960 : NULL,
3961 recog_data.constraints[i],
3962 goal_alternative_number))
3963 goal_alternative_win[i] = 1;
3964 }
3965 }
3966
3967 /* Record the values of the earlyclobber operands for the caller. */
3968 if (goal_earlyclobber)
3969 for (i = 0; i < noperands; i++)
3970 if (goal_alternative_earlyclobber[i])
3971 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3972
3973 /* Now record reloads for all the operands that need them. */
3974 for (i = 0; i < noperands; i++)
3975 if (! goal_alternative_win[i])
3976 {
3977 /* Operands that match previous ones have already been handled. */
3978 if (goal_alternative_matches[i] >= 0)
3979 ;
3980 /* Handle an operand with a nonoffsettable address
3981 appearing where an offsettable address will do
3982 by reloading the address into a base register.
3983
3984 ??? We can also do this when the operand is a register and
3985 reg_equiv_mem is not offsettable, but this is a bit tricky,
3986 so we don't bother with it. It may not be worth doing. */
3987 else if (goal_alternative_matched[i] == -1
3988 && goal_alternative_offmemok[i]
3989 && MEM_P (recog_data.operand[i]))
3990 {
3991 /* If the address to be reloaded is a VOIDmode constant,
3992 use the default address mode as the mode of the reload register,
3993 as would have been done by find_reloads_address. */
3994 enum machine_mode address_mode;
3995 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3996 if (address_mode == VOIDmode)
3997 {
3998 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3999 address_mode = targetm.addr_space.address_mode (as);
4000 }
4001
4002 operand_reloadnum[i]
4003 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4004 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4005 base_reg_class (VOIDmode, MEM, SCRATCH),
4006 address_mode,
4007 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4008 rld[operand_reloadnum[i]].inc
4009 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4010
4011 /* If this operand is an output, we will have made any
4012 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4013 now we are treating part of the operand as an input, so
4014 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4015
4016 if (modified[i] == RELOAD_WRITE)
4017 {
4018 for (j = 0; j < n_reloads; j++)
4019 {
4020 if (rld[j].opnum == i)
4021 {
4022 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4023 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4024 else if (rld[j].when_needed
4025 == RELOAD_FOR_OUTADDR_ADDRESS)
4026 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4027 }
4028 }
4029 }
4030 }
4031 else if (goal_alternative_matched[i] == -1)
4032 {
4033 operand_reloadnum[i]
4034 = push_reload ((modified[i] != RELOAD_WRITE
4035 ? recog_data.operand[i] : 0),
4036 (modified[i] != RELOAD_READ
4037 ? recog_data.operand[i] : 0),
4038 (modified[i] != RELOAD_WRITE
4039 ? recog_data.operand_loc[i] : 0),
4040 (modified[i] != RELOAD_READ
4041 ? recog_data.operand_loc[i] : 0),
4042 (enum reg_class) goal_alternative[i],
4043 (modified[i] == RELOAD_WRITE
4044 ? VOIDmode : operand_mode[i]),
4045 (modified[i] == RELOAD_READ
4046 ? VOIDmode : operand_mode[i]),
4047 (insn_code_number < 0 ? 0
4048 : insn_data[insn_code_number].operand[i].strict_low),
4049 0, i, operand_type[i]);
4050 }
4051 /* In a matching pair of operands, one must be input only
4052 and the other must be output only.
4053 Pass the input operand as IN and the other as OUT. */
4054 else if (modified[i] == RELOAD_READ
4055 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4056 {
4057 operand_reloadnum[i]
4058 = push_reload (recog_data.operand[i],
4059 recog_data.operand[goal_alternative_matched[i]],
4060 recog_data.operand_loc[i],
4061 recog_data.operand_loc[goal_alternative_matched[i]],
4062 (enum reg_class) goal_alternative[i],
4063 operand_mode[i],
4064 operand_mode[goal_alternative_matched[i]],
4065 0, 0, i, RELOAD_OTHER);
4066 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4067 }
4068 else if (modified[i] == RELOAD_WRITE
4069 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4070 {
4071 operand_reloadnum[goal_alternative_matched[i]]
4072 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4073 recog_data.operand[i],
4074 recog_data.operand_loc[goal_alternative_matched[i]],
4075 recog_data.operand_loc[i],
4076 (enum reg_class) goal_alternative[i],
4077 operand_mode[goal_alternative_matched[i]],
4078 operand_mode[i],
4079 0, 0, i, RELOAD_OTHER);
4080 operand_reloadnum[i] = output_reloadnum;
4081 }
4082 else
4083 {
4084 gcc_assert (insn_code_number < 0);
4085 error_for_asm (insn, "inconsistent operand constraints "
4086 "in an %<asm%>");
4087 /* Avoid further trouble with this insn. */
4088 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4089 n_reloads = 0;
4090 return 0;
4091 }
4092 }
4093 else if (goal_alternative_matched[i] < 0
4094 && goal_alternative_matches[i] < 0
4095 && address_operand_reloaded[i] != 1
4096 && optimize)
4097 {
4098 /* For each non-matching operand that's a MEM or a pseudo-register
4099 that didn't get a hard register, make an optional reload.
4100 This may get done even if the insn needs no reloads otherwise. */
4101
4102 rtx operand = recog_data.operand[i];
4103
4104 while (GET_CODE (operand) == SUBREG)
4105 operand = SUBREG_REG (operand);
4106 if ((MEM_P (operand)
4107 || (REG_P (operand)
4108 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4109 /* If this is only for an output, the optional reload would not
4110 actually cause us to use a register now, just note that
4111 something is stored here. */
4112 && ((enum reg_class) goal_alternative[i] != NO_REGS
4113 || modified[i] == RELOAD_WRITE)
4114 && ! no_input_reloads
4115 /* An optional output reload might allow us to delete INSN later.
4116 We mustn't make in-out reloads on insns that are not permitted
4117 to have output reloads.
4118 If this is an asm, we can't delete it; we must not even call
4119 push_reload for an optional output reload in this case,
4120 because we can't be sure that the constraint allows a register,
4121 and push_reload verifies the constraints for asms. */
4122 && (modified[i] == RELOAD_READ
4123 || (! no_output_reloads && ! this_insn_is_asm)))
4124 operand_reloadnum[i]
4125 = push_reload ((modified[i] != RELOAD_WRITE
4126 ? recog_data.operand[i] : 0),
4127 (modified[i] != RELOAD_READ
4128 ? recog_data.operand[i] : 0),
4129 (modified[i] != RELOAD_WRITE
4130 ? recog_data.operand_loc[i] : 0),
4131 (modified[i] != RELOAD_READ
4132 ? recog_data.operand_loc[i] : 0),
4133 (enum reg_class) goal_alternative[i],
4134 (modified[i] == RELOAD_WRITE
4135 ? VOIDmode : operand_mode[i]),
4136 (modified[i] == RELOAD_READ
4137 ? VOIDmode : operand_mode[i]),
4138 (insn_code_number < 0 ? 0
4139 : insn_data[insn_code_number].operand[i].strict_low),
4140 1, i, operand_type[i]);
4141 /* If a memory reference remains (either as a MEM or a pseudo that
4142 did not get a hard register), yet we can't make an optional
4143 reload, check if this is actually a pseudo register reference;
4144 we then need to emit a USE and/or a CLOBBER so that reload
4145 inheritance will do the right thing. */
4146 else if (replace
4147 && (MEM_P (operand)
4148 || (REG_P (operand)
4149 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4150 && reg_renumber [REGNO (operand)] < 0)))
4151 {
4152 operand = *recog_data.operand_loc[i];
4153
4154 while (GET_CODE (operand) == SUBREG)
4155 operand = SUBREG_REG (operand);
4156 if (REG_P (operand))
4157 {
4158 if (modified[i] != RELOAD_WRITE)
4159 /* We mark the USE with QImode so that we recognize
4160 it as one that can be safely deleted at the end
4161 of reload. */
4162 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4163 insn), QImode);
4164 if (modified[i] != RELOAD_READ)
4165 emit_insn_after (gen_clobber (operand), insn);
4166 }
4167 }
4168 }
4169 else if (goal_alternative_matches[i] >= 0
4170 && goal_alternative_win[goal_alternative_matches[i]]
4171 && modified[i] == RELOAD_READ
4172 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4173 && ! no_input_reloads && ! no_output_reloads
4174 && optimize)
4175 {
4176 /* Similarly, make an optional reload for a pair of matching
4177 objects that are in MEM or a pseudo that didn't get a hard reg. */
4178
4179 rtx operand = recog_data.operand[i];
4180
4181 while (GET_CODE (operand) == SUBREG)
4182 operand = SUBREG_REG (operand);
4183 if ((MEM_P (operand)
4184 || (REG_P (operand)
4185 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4186 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4187 != NO_REGS))
4188 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4189 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4190 recog_data.operand[i],
4191 recog_data.operand_loc[goal_alternative_matches[i]],
4192 recog_data.operand_loc[i],
4193 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4194 operand_mode[goal_alternative_matches[i]],
4195 operand_mode[i],
4196 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4197 }
4198
4199 /* Perform whatever substitutions on the operands we are supposed
4200 to make due to commutativity or replacement of registers
4201 with equivalent constants or memory slots. */
4202
4203 for (i = 0; i < noperands; i++)
4204 {
4205 /* We only do this on the last pass through reload, because it is
4206 possible for some data (like reg_equiv_address) to be changed during
4207 later passes. Moreover, we lose the opportunity to get a useful
4208 reload_{in,out}_reg when we do these replacements. */
4209
4210 if (replace)
4211 {
4212 rtx substitution = substed_operand[i];
4213
4214 *recog_data.operand_loc[i] = substitution;
4215
4216 /* If we're replacing an operand with a LABEL_REF, we need to
4217 make sure that there's a REG_LABEL_OPERAND note attached to
4218 this instruction. */
4219 if (GET_CODE (substitution) == LABEL_REF
4220 && !find_reg_note (insn, REG_LABEL_OPERAND,
4221 XEXP (substitution, 0))
4222 /* For a JUMP_P, if it was a branch target it must have
4223 already been recorded as such. */
4224 && (!JUMP_P (insn)
4225 || !label_is_jump_target_p (XEXP (substitution, 0),
4226 insn)))
4227 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4228 }
4229 else
4230 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4231 }
4232
4233 /* If this insn pattern contains any MATCH_DUP's, make sure that
4234 they will be substituted if the operands they match are substituted.
4235 Also do now any substitutions we already did on the operands.
4236
4237 Don't do this if we aren't making replacements because we might be
4238 propagating things allocated by frame pointer elimination into places
4239 it doesn't expect. */
4240
4241 if (insn_code_number >= 0 && replace)
4242 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4243 {
4244 int opno = recog_data.dup_num[i];
4245 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4246 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4247 }
4248
4249 #if 0
4250 /* This loses because reloading of prior insns can invalidate the equivalence
4251 (or at least find_equiv_reg isn't smart enough to find it any more),
4252 causing this insn to need more reload regs than it needed before.
4253 It may be too late to make the reload regs available.
4254 Now this optimization is done safely in choose_reload_regs. */
4255
4256 /* For each reload of a reg into some other class of reg,
4257 search for an existing equivalent reg (same value now) in the right class.
4258 We can use it as long as we don't need to change its contents. */
4259 for (i = 0; i < n_reloads; i++)
4260 if (rld[i].reg_rtx == 0
4261 && rld[i].in != 0
4262 && REG_P (rld[i].in)
4263 && rld[i].out == 0)
4264 {
4265 rld[i].reg_rtx
4266 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4267 static_reload_reg_p, 0, rld[i].inmode);
4268 /* Prevent generation of insn to load the value
4269 because the one we found already has the value. */
4270 if (rld[i].reg_rtx)
4271 rld[i].in = rld[i].reg_rtx;
4272 }
4273 #endif
4274
4275 /* If we detected an error and replaced the asm instruction with a USE, forget
4276 about the reloads. */
4277 if (GET_CODE (PATTERN (insn)) == USE
4278 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4279 n_reloads = 0;
4280
4281 /* Perhaps an output reload can be combined with another
4282 to reduce needs by one. */
4283 if (!goal_earlyclobber)
4284 combine_reloads ();
4285
4286 /* If we have a pair of reloads for parts of an address, both reloading
4287 the same object, where the operands themselves were not reloaded and the
4288 two operands are supposed to match, merge the reloads and change the
4289 type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
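/* Illustration (hypothetical): operands 0 and 1 are constrained to match
   and refer to the same MEM; each got its own RELOAD_FOR_INPUT_ADDRESS
   reload of the identical address and neither operand itself was
   reloaded.  The loop below redirects replacements from reload J to
   reload I, retypes reload I as RELOAD_FOR_OPERAND_ADDRESS, and clears
   rld[j].in so that reload J is dropped.  */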
4290
4291 for (i = 0; i < n_reloads; i++)
4292 {
4293 int k;
4294
4295 for (j = i + 1; j < n_reloads; j++)
4296 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4297 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4299 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4300 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4301 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4302 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4303 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4304 && rtx_equal_p (rld[i].in, rld[j].in)
4305 && (operand_reloadnum[rld[i].opnum] < 0
4306 || rld[operand_reloadnum[rld[i].opnum]].optional)
4307 && (operand_reloadnum[rld[j].opnum] < 0
4308 || rld[operand_reloadnum[rld[j].opnum]].optional)
4309 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4310 || (goal_alternative_matches[rld[j].opnum]
4311 == rld[i].opnum)))
4312 {
4313 for (k = 0; k < n_replacements; k++)
4314 if (replacements[k].what == j)
4315 replacements[k].what = i;
4316
4317 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4318 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4319 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4320 else
4321 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4322 rld[j].in = 0;
4323 }
4324 }
4325
4326 /* Scan all the reloads and update their type.
4327 If a reload is for the address of an operand and we didn't reload
4328 that operand, change the type. Similarly, change the operand number
4329 of a reload when two operands match. If a reload is optional, treat it
4330 as though the operand isn't reloaded.
4331
4332 ??? This latter case is somewhat odd because if we do the optional
4333 reload, it means the object is hanging around. Thus we need only
4334 do the address reload if the optional reload was NOT done.
4335
4336 Change secondary reloads to be the address type of their operand, not
4337 the normal type.
4338
4339 If an operand's reload is now RELOAD_OTHER, change any
4340 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4341 RELOAD_FOR_OTHER_ADDRESS. */
4342
4343 for (i = 0; i < n_reloads; i++)
4344 {
4345 if (rld[i].secondary_p
4346 && rld[i].when_needed == operand_type[rld[i].opnum])
4347 rld[i].when_needed = address_type[rld[i].opnum];
4348
4349 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4352 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4353 && (operand_reloadnum[rld[i].opnum] < 0
4354 || rld[operand_reloadnum[rld[i].opnum]].optional))
4355 {
4356 /* If we have a secondary reload to go along with this reload,
4357 change its type to RELOAD_FOR_OPADDR_ADDR. */
4358
4359 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4360 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4361 && rld[i].secondary_in_reload != -1)
4362 {
4363 int secondary_in_reload = rld[i].secondary_in_reload;
4364
4365 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4366
4367 /* If there's a tertiary reload we have to change it also. */
4368 if (secondary_in_reload > 0
4369 && rld[secondary_in_reload].secondary_in_reload != -1)
4370 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4371 = RELOAD_FOR_OPADDR_ADDR;
4372 }
4373
4374 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4375 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4376 && rld[i].secondary_out_reload != -1)
4377 {
4378 int secondary_out_reload = rld[i].secondary_out_reload;
4379
4380 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381
4382 /* If there's a tertiary reload we have to change it also. */
4383 if (secondary_out_reload
4384 && rld[secondary_out_reload].secondary_out_reload != -1)
4385 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4386 = RELOAD_FOR_OPADDR_ADDR;
4387 }
4388
4389 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4390 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4391 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4392 else
4393 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4394 }
4395
4396 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4397 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4398 && operand_reloadnum[rld[i].opnum] >= 0
4399 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4400 == RELOAD_OTHER))
4401 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4402
4403 if (goal_alternative_matches[rld[i].opnum] >= 0)
4404 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4405 }
4406
4407 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4408 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4409 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4410
4411 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4412 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4413 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4414 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4415 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4416 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4417 This is complicated by the fact that a single operand can have more
4418 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4419 choose_reload_regs without affecting code quality, and cases that
4420 actually fail are extremely rare, so it turns out to be better to fix
4421 the problem here by not generating cases that choose_reload_regs will
4422 fail for. */
4423 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4424 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4425 a single operand.
4426 We can reduce the register pressure by exploiting that a
4427 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4428 does not conflict with any of them, if it is only used for the first of
4429 the RELOAD_FOR_X_ADDRESS reloads. */
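/* Illustration (hypothetical): if operand 0 ends up with two
   RELOAD_FOR_INPUT_ADDRESS reloads plus a RELOAD_FOR_INPADDR_ADDRESS
   reload, the code below promotes the RELOAD_FOR_INPADDR_ADDRESS reload
   to RELOAD_FOR_INPUT_ADDRESS, unless it comes before both and is used
   only by the first of them.  */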
4430 {
4431 int first_op_addr_num = -2;
4432 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4433 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4434 int need_change = 0;
4435 /* We use first_op_addr_num and the contents of the above arrays
4436 first as flags: -2 means no instance encountered, -1 means exactly
4437 one instance encountered.
4438 If more than one instance has been encountered, we store the reload
4439 number of the first reload of the kind in question; reload numbers
4440 are known to be non-negative. */
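/* E.g. (hypothetical): the downward loop below takes first_inpaddr_num[0]
   from -2 to -1 at the first RELOAD_FOR_INPUT_ADDRESS reload it sees for
   operand 0; at the second one the incremented value reaches 0, so it is
   overwritten with that reload's (lower) number and NEED_CHANGE is set.  */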
4441 for (i = 0; i < noperands; i++)
4442 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4443 for (i = n_reloads - 1; i >= 0; i--)
4444 {
4445 switch (rld[i].when_needed)
4446 {
4447 case RELOAD_FOR_OPERAND_ADDRESS:
4448 if (++first_op_addr_num >= 0)
4449 {
4450 first_op_addr_num = i;
4451 need_change = 1;
4452 }
4453 break;
4454 case RELOAD_FOR_INPUT_ADDRESS:
4455 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4456 {
4457 first_inpaddr_num[rld[i].opnum] = i;
4458 need_change = 1;
4459 }
4460 break;
4461 case RELOAD_FOR_OUTPUT_ADDRESS:
4462 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4463 {
4464 first_outpaddr_num[rld[i].opnum] = i;
4465 need_change = 1;
4466 }
4467 break;
4468 default:
4469 break;
4470 }
4471 }
4472
4473 if (need_change)
4474 {
4475 for (i = 0; i < n_reloads; i++)
4476 {
4477 int first_num;
4478 enum reload_type type;
4479
4480 switch (rld[i].when_needed)
4481 {
4482 case RELOAD_FOR_OPADDR_ADDR:
4483 first_num = first_op_addr_num;
4484 type = RELOAD_FOR_OPERAND_ADDRESS;
4485 break;
4486 case RELOAD_FOR_INPADDR_ADDRESS:
4487 first_num = first_inpaddr_num[rld[i].opnum];
4488 type = RELOAD_FOR_INPUT_ADDRESS;
4489 break;
4490 case RELOAD_FOR_OUTADDR_ADDRESS:
4491 first_num = first_outpaddr_num[rld[i].opnum];
4492 type = RELOAD_FOR_OUTPUT_ADDRESS;
4493 break;
4494 default:
4495 continue;
4496 }
4497 if (first_num < 0)
4498 continue;
4499 else if (i > first_num)
4500 rld[i].when_needed = type;
4501 else
4502 {
4503 /* Check if the only TYPE reload that uses reload I is
4504 reload FIRST_NUM. */
4505 for (j = n_reloads - 1; j > first_num; j--)
4506 {
4507 if (rld[j].when_needed == type
4508 && (rld[i].secondary_p
4509 ? rld[j].secondary_in_reload == i
4510 : reg_mentioned_p (rld[i].in, rld[j].in)))
4511 {
4512 rld[i].when_needed = type;
4513 break;
4514 }
4515 }
4516 }
4517 }
4518 }
4519 }
4520
4521 /* See if we have any reloads that are now allowed to be merged
4522 because we've changed when the reload is needed to
4523 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4524 check for the most common cases. */
4525
4526 for (i = 0; i < n_reloads; i++)
4527 if (rld[i].in != 0 && rld[i].out == 0
4528 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4529 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4530 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4531 for (j = 0; j < n_reloads; j++)
4532 if (i != j && rld[j].in != 0 && rld[j].out == 0
4533 && rld[j].when_needed == rld[i].when_needed
4534 && MATCHES (rld[i].in, rld[j].in)
4535 && rld[i].rclass == rld[j].rclass
4536 && !rld[i].nocombine && !rld[j].nocombine
4537 && rld[i].reg_rtx == rld[j].reg_rtx)
4538 {
4539 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4540 transfer_replacements (i, j);
4541 rld[j].in = 0;
4542 }
4543
4544 #ifdef HAVE_cc0
4545 /* If we made any reloads for addresses, see if they violate a
4546 "no input reloads" requirement for this insn. But loads that we
4547 do after the insn (such as for output addresses) are fine. */
4548 if (no_input_reloads)
4549 for (i = 0; i < n_reloads; i++)
4550 gcc_assert (rld[i].in == 0
4551 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4552 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4553 #endif
4554
4555 /* Compute reload_mode and reload_nregs. */
4556 for (i = 0; i < n_reloads; i++)
4557 {
4558 rld[i].mode
4559 = (rld[i].inmode == VOIDmode
4560 || (GET_MODE_SIZE (rld[i].outmode)
4561 > GET_MODE_SIZE (rld[i].inmode)))
4562 ? rld[i].outmode : rld[i].inmode;
4563
4564 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4565 }
4566
4567 /* Special case a simple move with an input reload and a
4568 destination of a hard reg: if the hard reg is ok, use it. */
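/* E.g. (hypothetical regno): for (set (reg:SI 3) (reg:SI 117)) where
   pseudo 117 needs an input reload, if hard register 3 belongs to the
   reload's class, is valid in SImode, and is not an elimination target,
   the loop below simply uses it as the reload register.  */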
4569 for (i = 0; i < n_reloads; i++)
4570 if (rld[i].when_needed == RELOAD_FOR_INPUT
4571 && GET_CODE (PATTERN (insn)) == SET
4572 && REG_P (SET_DEST (PATTERN (insn)))
4573 && (SET_SRC (PATTERN (insn)) == rld[i].in
4574 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4575 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4576 {
4577 rtx dest = SET_DEST (PATTERN (insn));
4578 unsigned int regno = REGNO (dest);
4579
4580 if (regno < FIRST_PSEUDO_REGISTER
4581 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4582 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4583 {
4584 int nr = hard_regno_nregs[regno][rld[i].mode];
4585 int ok = 1, nri;
4586
4587 for (nri = 1; nri < nr; nri ++)
4588 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4589 ok = 0;
4590
4591 if (ok)
4592 rld[i].reg_rtx = dest;
4593 }
4594 }
4595
4596 return retval;
4597 }
4598
4599 /* Return true if alternative number ALTNUM in constraint-string
4600 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4601 MEM gives the reference if it didn't need any reloads, otherwise it
4602 is null. */
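/* Illustrative call (hypothetical constraint string): with CONSTRAINT
   "r,o" and ALTNUM 1, the loop below skips past the first ',' and then
   finds 'o', so the function returns true whether or not MEM is null.  */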
4603
4604 static bool
4605 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4606 const char *constraint, int altnum)
4607 {
4608 int c;
4609
4610 /* Skip alternatives before the one requested. */
4611 while (altnum > 0)
4612 {
4613 while (*constraint++ != ',');
4614 altnum--;
4615 }
4616 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4617 If one of them is present, this alternative accepts the result of
4618 passing a constant-pool reference through find_reloads_toplev.
4619
4620 The same is true of extra memory constraints if the address
4621 was reloaded into a register. However, the target may elect
4622 to disallow the original constant address, forcing it to be
4623 reloaded into a register instead. */
4624 for (; (c = *constraint) && c != ',' && c != '#';
4625 constraint += CONSTRAINT_LEN (c, constraint))
4626 {
4627 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4628 return true;
4629 #ifdef EXTRA_CONSTRAINT_STR
4630 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4631 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4632 return true;
4633 #endif
4634 }
4635 return false;
4636 }
4637 \f
4638 /* Scan X for memory references and scan the addresses for reloading.
4639 Also checks for references to "constant" regs that we want to eliminate
4640 and replaces them with the values they stand for.
4641 We may alter X destructively if it contains a reference to such.
4642 If X is just a constant reg, we return the equivalent value
4643 instead of X.
4644
4645 IND_LEVELS says how many levels of indirect addressing this machine
4646 supports.
4647
4648 OPNUM and TYPE identify the purpose of the reload.
4649
4650 IS_SET_DEST is true if X is the destination of a SET, which is not
4651 appropriate to be replaced by a constant.
4652
4653 INSN, if nonzero, is the insn in which we do the reload. It is used
4654 to determine if we may generate output reloads, and where to put USEs
4655 for pseudos that we have to replace with stack slots.
4656
4657 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4658 result of find_reloads_address. */
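/* For example (hypothetical pseudo number): if X is (reg 117),
   reg_equiv_constant[117] is (const_int 4), and X is not a SET
   destination, the code below simply returns (const_int 4) in place
   of X.  */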
4659
4660 static rtx
4661 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4662 int ind_levels, int is_set_dest, rtx insn,
4663 int *address_reloaded)
4664 {
4665 RTX_CODE code = GET_CODE (x);
4666
4667 const char *fmt = GET_RTX_FORMAT (code);
4668 int i;
4669 int copied;
4670
4671 if (code == REG)
4672 {
4673 /* This code is duplicated for speed in find_reloads. */
4674 int regno = REGNO (x);
4675 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4676 x = reg_equiv_constant[regno];
4677 #if 0
4678 /* This creates (subreg (mem...)) which would cause an unnecessary
4679 reload of the mem. */
4680 else if (reg_equiv_mem[regno] != 0)
4681 x = reg_equiv_mem[regno];
4682 #endif
4683 else if (reg_equiv_memory_loc[regno]
4684 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4685 {
4686 rtx mem = make_memloc (x, regno);
4687 if (reg_equiv_address[regno]
4688 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4689 {
4690 /* If this is not a toplevel operand, find_reloads doesn't see
4691 this substitution. We have to emit a USE of the pseudo so
4692 that delete_output_reload can see it. */
4693 if (replace_reloads && recog_data.operand[opnum] != x)
4694 /* We mark the USE with QImode so that we recognize it
4695 as one that can be safely deleted at the end of
4696 reload. */
4697 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4698 QImode);
4699 x = mem;
4700 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4701 opnum, type, ind_levels, insn);
4702 if (!rtx_equal_p (x, mem))
4703 push_reg_equiv_alt_mem (regno, x);
4704 if (address_reloaded)
4705 *address_reloaded = i;
4706 }
4707 }
4708 return x;
4709 }
4710 if (code == MEM)
4711 {
4712 rtx tem = x;
4713
4714 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4715 opnum, type, ind_levels, insn);
4716 if (address_reloaded)
4717 *address_reloaded = i;
4718
4719 return tem;
4720 }
4721
4722 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4723 {
4724 /* Check for SUBREG containing a REG that's equivalent to a
4725 constant. If the constant has a known value, truncate it
4726 right now. Similarly if we are extracting a single word of a
4727 multi-word constant. If the constant is symbolic, allow it
4728 to be substituted normally. push_reload will strip the
4729 subreg later. The constant must not be VOIDmode, because we
4730 will lose the mode of the register (this should never happen
4731 because one of the cases above should handle it). */
4732
4733 int regno = REGNO (SUBREG_REG (x));
4734 rtx tem;
4735
4736 if (regno >= FIRST_PSEUDO_REGISTER
4737 && reg_renumber[regno] < 0
4738 && reg_equiv_constant[regno] != 0)
4739 {
4740 tem =
4741 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4742 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4743 gcc_assert (tem);
4744 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4745 {
4746 tem = force_const_mem (GET_MODE (x), tem);
4747 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4748 &XEXP (tem, 0), opnum, type,
4749 ind_levels, insn);
4750 if (address_reloaded)
4751 *address_reloaded = i;
4752 }
4753 return tem;
4754 }
4755
4756 /* If the subreg contains a reg that will be converted to a mem,
4757 convert the subreg to a narrower memref now.
4758 Otherwise, we would get (subreg (mem ...) ...),
4759 which would force reload of the mem.
4760
4761 We also need to do this if there is an equivalent MEM that is
4762 not offsettable. In that case, alter_subreg would produce an
4763 invalid address on big-endian machines.
4764
4765 For machines that extend byte loads, we must not reload using
4766 a wider mode if we have a paradoxical SUBREG. find_reloads will
4767 force a reload in that case. So we should not do anything here. */
4768
4769 if (regno >= FIRST_PSEUDO_REGISTER
4770 #ifdef LOAD_EXTEND_OP
4771 && (GET_MODE_SIZE (GET_MODE (x))
4772 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4773 #endif
4774 && (reg_equiv_address[regno] != 0
4775 || (reg_equiv_mem[regno] != 0
4776 && (! strict_memory_address_addr_space_p
4777 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
4778 MEM_ADDR_SPACE (reg_equiv_mem[regno]))
4779 || ! offsettable_memref_p (reg_equiv_mem[regno])
4780 || num_not_at_initial_offset))))
4781 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4782 insn);
4783 }
4784
4785 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4786 {
4787 if (fmt[i] == 'e')
4788 {
4789 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4790 ind_levels, is_set_dest, insn,
4791 address_reloaded);
4792 /* If we have replaced a reg with its equivalent memory loc -
4793 that can still be handled here e.g. if it's in a paradoxical
4794 subreg - we must make the change in a copy, rather than using
4795 a destructive change. This way, find_reloads can still elect
4796 not to do the change. */
4797 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4798 {
4799 x = shallow_copy_rtx (x);
4800 copied = 1;
4801 }
4802 XEXP (x, i) = new_part;
4803 }
4804 }
4805 return x;
4806 }
4807
4808 /* Return a mem ref for the memory equivalent of reg REGNO.
4809 This mem ref is not shared with anything. */
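/* Hypothetical example: if reg_equiv_memory_loc[REGNO] is
   (mem:SI (plus (reg fp) (const_int -16))) and the frame pointer is being
   eliminated into the stack pointer, the returned MEM might be
   (mem:SI (plus (reg sp) (const_int 24))), with the elimination offset
   recomputed by eliminate_regs; the exact offset depends on the target.  */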
4810
4811 static rtx
4812 make_memloc (rtx ad, int regno)
4813 {
4814 /* We must rerun eliminate_regs, in case the elimination
4815 offsets have changed. */
4816 rtx tem
4817 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], VOIDmode, NULL_RTX),
4818 0);
4819
4820 /* If TEM might contain a pseudo, we must copy it to avoid
4821 modifying it when we do the substitution for the reload. */
4822 if (rtx_varies_p (tem, 0))
4823 tem = copy_rtx (tem);
4824
4825 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4826 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4827
4828 /* Copy the result if it's still the same as the equivalence, to avoid
4829 modifying it when we do the substitution for the reload. */
4830 if (tem == reg_equiv_memory_loc[regno])
4831 tem = copy_rtx (tem);
4832 return tem;
4833 }
4834
4835 /* Returns true if AD could be turned into a valid memory reference
4836 to mode MODE in address space AS by reloading the part pointed to
4837 by PART into a register. */
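/* For example (hypothetical address): given AD = (plus (reg fp)
   (symbol_ref "x")) and PART pointing at the SYMBOL_REF, this asks whether
   (plus (reg fp) (reg NEW)) would be a valid address, i.e. whether
   reloading just the symbolic part into a register can fix AD.  */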
4838
4839 static int
4840 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4841 addr_space_t as, rtx *part)
4842 {
4843 int retv;
4844 rtx tem = *part;
4845 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4846
4847 *part = reg;
4848 retv = memory_address_addr_space_p (mode, ad, as);
4849 *part = tem;
4850
4851 return retv;
4852 }
4853
4854 /* Record all reloads needed for handling memory address AD
4855 which appears in *LOC in a memory reference to mode MODE
4856 which itself is found in location *MEMREFLOC.
4857 Note that we take shortcuts assuming that no multi-reg machine mode
4858 occurs as part of an address.
4859
4860 OPNUM and TYPE specify the purpose of this reload.
4861
4862 IND_LEVELS says how many levels of indirect addressing this machine
4863 supports.
4864
4865 INSN, if nonzero, is the insn in which we do the reload. It is used
4866 to determine if we may generate output reloads, and where to put USEs
4867 for pseudos that we have to replace with stack slots.
4868
4869 Value is one if this address is reloaded or replaced as a whole; it is
4870 zero if the top level of this address was not reloaded or replaced, and
4871 it is -1 if it may or may not have been reloaded or replaced.
4872
4873 Note that there is no verification that the address will be valid after
4874 this routine does its work. Instead, we rely on the fact that the address
4875 was valid when reload started. So we need only undo things that reload
4876 could have broken. These are wrong register types, pseudos not allocated
4877 to a hard register, and frame pointer elimination. */
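/* A sketch of the simplest cases (hypothetical pseudo number): if AD is
   (reg 100) and reg_equiv_constant[100] is set, the constant is reloaded
   into a base register and 1 is returned; if AD is already a hard register
   that is valid as a base register and not clobbered in this insn, nothing
   is done and 0 is returned.  */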
4878
4879 static int
4880 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4881 rtx *loc, int opnum, enum reload_type type,
4882 int ind_levels, rtx insn)
4883 {
4884 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4885 : ADDR_SPACE_GENERIC;
4886 int regno;
4887 int removed_and = 0;
4888 int op_index;
4889 rtx tem;
4890
4891 /* If the address is a register, see if it is a legitimate address and
4892 reload if not. We first handle the cases where we need not reload
4893 or where we must reload in a non-standard way. */
4894
4895 if (REG_P (ad))
4896 {
4897 regno = REGNO (ad);
4898
4899 if (reg_equiv_constant[regno] != 0)
4900 {
4901 find_reloads_address_part (reg_equiv_constant[regno], loc,
4902 base_reg_class (mode, MEM, SCRATCH),
4903 GET_MODE (ad), opnum, type, ind_levels);
4904 return 1;
4905 }
4906
4907 tem = reg_equiv_memory_loc[regno];
4908 if (tem != 0)
4909 {
4910 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4911 {
4912 tem = make_memloc (ad, regno);
4913 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4914 XEXP (tem, 0),
4915 MEM_ADDR_SPACE (tem)))
4916 {
4917 rtx orig = tem;
4918
4919 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4920 &XEXP (tem, 0), opnum,
4921 ADDR_TYPE (type), ind_levels, insn);
4922 if (!rtx_equal_p (tem, orig))
4923 push_reg_equiv_alt_mem (regno, tem);
4924 }
4925 /* We can avoid a reload if the register's equivalent memory
4926 expression is valid as an indirect memory address.
4927 But not all addresses are valid in a mem used as an indirect
4928 address: only reg or reg+constant. */
4929
4930 if (ind_levels > 0
4931 && strict_memory_address_addr_space_p (mode, tem, as)
4932 && (REG_P (XEXP (tem, 0))
4933 || (GET_CODE (XEXP (tem, 0)) == PLUS
4934 && REG_P (XEXP (XEXP (tem, 0), 0))
4935 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4936 {
4937 /* If TEM is not the same as what we'll be replacing the
4938 pseudo with after reload, put a USE in front of INSN
4939 in the final reload pass. */
4940 if (replace_reloads
4941 && num_not_at_initial_offset
4942 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4943 {
4944 *loc = tem;
4945 /* We mark the USE with QImode so that we
4946 recognize it as one that can be safely
4947 deleted at the end of reload. */
4948 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4949 insn), QImode);
4950
4951 /* This doesn't really count as replacing the address
4952 as a whole, since it is still a memory access. */
4953 }
4954 return 0;
4955 }
4956 ad = tem;
4957 }
4958 }
4959
4960 /* The only remaining case where we can avoid a reload is if this is a
4961 hard register that is valid as a base register and which is not the
4962 subject of a CLOBBER in this insn. */
4963
4964 else if (regno < FIRST_PSEUDO_REGISTER
4965 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4966 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4967 return 0;
4968
4969 /* If we do not have one of the cases above, we must do the reload. */
4970 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4971 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4972 return 1;
4973 }
4974
4975 if (strict_memory_address_addr_space_p (mode, ad, as))
4976 {
4977 /* The address appears valid, so reloads are not needed.
4978 But the address may contain an eliminable register.
4979 This can happen because a machine with indirect addressing
4980 may consider a pseudo register by itself a valid address even when
4981 it has failed to get a hard reg.
4982 So do a tree-walk to find and eliminate all such regs. */
4983
4984 /* But first quickly dispose of a common case. */
4985 if (GET_CODE (ad) == PLUS
4986 && CONST_INT_P (XEXP (ad, 1))
4987 && REG_P (XEXP (ad, 0))
4988 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4989 return 0;
4990
4991 subst_reg_equivs_changed = 0;
4992 *loc = subst_reg_equivs (ad, insn);
4993
4994 if (! subst_reg_equivs_changed)
4995 return 0;
4996
4997 /* Check result for validity after substitution. */
4998 if (strict_memory_address_addr_space_p (mode, ad, as))
4999 return 0;
5000 }
5001
5002 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5003 do
5004 {
5005 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5006 {
5007 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5008 ind_levels, win);
5009 }
5010 break;
5011 win:
5012 *memrefloc = copy_rtx (*memrefloc);
5013 XEXP (*memrefloc, 0) = ad;
5014 move_replacements (&ad, &XEXP (*memrefloc, 0));
5015 return -1;
5016 }
5017 while (0);
5018 #endif
5019
5020 /* The address is not valid. We have to figure out why. First see if
5021 we have an outer AND and remove it if so. Then analyze what's inside. */
5022
5023 if (GET_CODE (ad) == AND)
5024 {
5025 removed_and = 1;
5026 loc = &XEXP (ad, 0);
5027 ad = *loc;
5028 }
5029
5030 /* One possibility for why the address is invalid is that it is itself
5031 a MEM. This can happen when the frame pointer is being eliminated, a
5032 pseudo is not allocated to a hard register, and the offset between the
5033 frame and stack pointers is not its initial value. In that case the
5034 pseudo will have been replaced by a MEM referring to the
5035 stack pointer. */
5036 if (MEM_P (ad))
5037 {
5038 /* First ensure that the address in this MEM is valid. Then, unless
5039 indirect addresses are valid, reload the MEM into a register. */
5040 tem = ad;
5041 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5042 opnum, ADDR_TYPE (type),
5043 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5044
5045 /* If tem was changed, then we must create a new memory reference to
5046 hold it and store it back into memrefloc. */
5047 if (tem != ad && memrefloc)
5048 {
5049 *memrefloc = copy_rtx (*memrefloc);
5050 copy_replacements (tem, XEXP (*memrefloc, 0));
5051 loc = &XEXP (*memrefloc, 0);
5052 if (removed_and)
5053 loc = &XEXP (*loc, 0);
5054 }
5055
5056 /* Check similar cases as for indirect addresses as above except
5057 that we can allow pseudos and a MEM since they should have been
5058 taken care of above. */
5059
5060 if (ind_levels == 0
5061 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5062 || MEM_P (XEXP (tem, 0))
5063 || ! (REG_P (XEXP (tem, 0))
5064 || (GET_CODE (XEXP (tem, 0)) == PLUS
5065 && REG_P (XEXP (XEXP (tem, 0), 0))
5066 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5067 {
5068 /* Must use TEM here, not AD, since it is the one that will
5069 have any subexpressions reloaded, if needed. */
5070 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5071 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5072 VOIDmode, 0,
5073 0, opnum, type);
5074 return ! removed_and;
5075 }
5076 else
5077 return 0;
5078 }
5079
5080 /* If we have address of a stack slot but it's not valid because the
5081 displacement is too large, compute the sum in a register.
5082 Handle all base registers here, not just fp/ap/sp, because on some
5083 targets (namely SH) we can also get too large displacements from
5084 big-endian corrections. */
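/* Hypothetical example: AD = (plus (reg fp) (const_int 100000)) on a
   target that only accepts small displacements.  If double_reg_address_ok,
   the constant is reloaded into an index register, giving
   (plus (reg fp) (reg index)); otherwise the whole sum is reloaded into a
   base register.  */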
5085 else if (GET_CODE (ad) == PLUS
5086 && REG_P (XEXP (ad, 0))
5087 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5088 && CONST_INT_P (XEXP (ad, 1))
5089 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5090 CONST_INT))
5091
5092 {
5093 /* Unshare the MEM rtx so we can safely alter it. */
5094 if (memrefloc)
5095 {
5096 *memrefloc = copy_rtx (*memrefloc);
5097 loc = &XEXP (*memrefloc, 0);
5098 if (removed_and)
5099 loc = &XEXP (*loc, 0);
5100 }
5101
5102 if (double_reg_address_ok)
5103 {
5104 /* Unshare the sum as well. */
5105 *loc = ad = copy_rtx (ad);
5106
5107 /* Reload the displacement into an index reg.
5108 We assume the frame pointer or arg pointer is a base reg. */
5109 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5110 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5111 type, ind_levels);
5112 return 0;
5113 }
5114 else
5115 {
5116 /* If the sum of two regs is not necessarily valid,
5117 reload the sum into a base reg.
5118 That will at least work. */
5119 find_reloads_address_part (ad, loc,
5120 base_reg_class (mode, MEM, SCRATCH),
5121 GET_MODE (ad), opnum, type, ind_levels);
5122 }
5123 return ! removed_and;
5124 }
5125
5126 /* If we have an indexed stack slot, there are three possible reasons why
5127 it might be invalid: The index might need to be reloaded, the address
5128 might have been made by frame pointer elimination and hence have a
5129 constant out of range, or both reasons might apply.
5130
5131 We can easily check for an index needing reload, but even if that is the
5132 case, we might also have an invalid constant. To avoid making the
5133 conservative assumption and requiring two reloads, we see if this address
5134 is valid when not interpreted strictly. If it is, the only problem is
5135 that the index needs a reload and find_reloads_address_1 will take care
5136 of it.
5137
5138 Handle all base registers here, not just fp/ap/sp, because on some
5139 targets (namely SPARC) we can also get invalid addresses from preventive
5140 subreg big-endian corrections made by find_reloads_toplev. We
5141 can also get expressions involving LO_SUM (rather than PLUS) from
5142 find_reloads_subreg_address.
5143
5144 If we decide to do something, it must be that `double_reg_address_ok'
5145 is true. We generate a reload of the base register + constant and
5146 rework the sum so that the reload register will be added to the index.
5147 This is safe because we know the address isn't shared.
5148
5149 We check for the base register as both the first and second operand of
5150 the innermost PLUS and/or LO_SUM. */
5151
5152 for (op_index = 0; op_index < 2; ++op_index)
5153 {
5154 rtx operand, addend;
5155 enum rtx_code inner_code;
5156
5157 if (GET_CODE (ad) != PLUS)
5158 continue;
5159
5160 inner_code = GET_CODE (XEXP (ad, 0));
5161 if (!(GET_CODE (ad) == PLUS
5162 && CONST_INT_P (XEXP (ad, 1))
5163 && (inner_code == PLUS || inner_code == LO_SUM)))
5164 continue;
5165
5166 operand = XEXP (XEXP (ad, 0), op_index);
5167 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5168 continue;
5169
5170 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5171
5172 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5173 GET_CODE (addend))
5174 || operand == frame_pointer_rtx
5175 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5176 || operand == hard_frame_pointer_rtx
5177 #endif
5178 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5179 || operand == arg_pointer_rtx
5180 #endif
5181 || operand == stack_pointer_rtx)
5182 && ! maybe_memory_address_addr_space_p
5183 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5184 {
5185 rtx offset_reg;
5186 enum reg_class cls;
5187
5188 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5189
5190 /* Form the adjusted address. */
5191 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5192 ad = gen_rtx_PLUS (GET_MODE (ad),
5193 op_index == 0 ? offset_reg : addend,
5194 op_index == 0 ? addend : offset_reg);
5195 else
5196 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5197 op_index == 0 ? offset_reg : addend,
5198 op_index == 0 ? addend : offset_reg);
5199 *loc = ad;
5200
5201 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5202 find_reloads_address_part (XEXP (ad, op_index),
5203 &XEXP (ad, op_index), cls,
5204 GET_MODE (ad), opnum, type, ind_levels);
5205 find_reloads_address_1 (mode,
5206 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5207 GET_CODE (XEXP (ad, op_index)),
5208 &XEXP (ad, 1 - op_index), opnum,
5209 type, 0, insn);
5210
5211 return 0;
5212 }
5213 }
5214
5215 /* See if address becomes valid when an eliminable register
5216 in a sum is replaced. */
5217
5218 tem = ad;
5219 if (GET_CODE (ad) == PLUS)
5220 tem = subst_indexed_address (ad);
5221 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5222 {
5223 /* Ok, we win that way. Replace any additional eliminable
5224 registers. */
5225
5226 subst_reg_equivs_changed = 0;
5227 tem = subst_reg_equivs (tem, insn);
5228
5229 /* Make sure that didn't make the address invalid again. */
5230
5231 if (! subst_reg_equivs_changed
5232 || strict_memory_address_addr_space_p (mode, tem, as))
5233 {
5234 *loc = tem;
5235 return 0;
5236 }
5237 }
5238
5239 /* If constants aren't valid addresses, reload the constant address
5240 into a register. */
5241 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5242 {
5243 enum machine_mode address_mode = GET_MODE (ad);
5244 if (address_mode == VOIDmode)
5245 address_mode = targetm.addr_space.address_mode (as);
5246
5247 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5248 Unshare it so we can safely alter it. */
5249 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5250 && CONSTANT_POOL_ADDRESS_P (ad))
5251 {
5252 *memrefloc = copy_rtx (*memrefloc);
5253 loc = &XEXP (*memrefloc, 0);
5254 if (removed_and)
5255 loc = &XEXP (*loc, 0);
5256 }
5257
5258 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5259 address_mode, opnum, type, ind_levels);
5260 return ! removed_and;
5261 }
5262
5263 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5264 ind_levels, insn);
5265 }
5266 \f
5267 /* Find all pseudo regs appearing in AD
5268 that are eliminable in favor of equivalent values
5269 and do not have hard regs; replace them by their equivalents.
5270 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5271 front of it for pseudos that we have to replace with stack slots. */
5272
5273 static rtx
5274 subst_reg_equivs (rtx ad, rtx insn)
5275 {
5276 RTX_CODE code = GET_CODE (ad);
5277 int i;
5278 const char *fmt;
5279
5280 switch (code)
5281 {
5282 case HIGH:
5283 case CONST_INT:
5284 case CONST:
5285 case CONST_DOUBLE:
5286 case CONST_FIXED:
5287 case CONST_VECTOR:
5288 case SYMBOL_REF:
5289 case LABEL_REF:
5290 case PC:
5291 case CC0:
5292 return ad;
5293
5294 case REG:
5295 {
5296 int regno = REGNO (ad);
5297
5298 if (reg_equiv_constant[regno] != 0)
5299 {
5300 subst_reg_equivs_changed = 1;
5301 return reg_equiv_constant[regno];
5302 }
5303 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5304 {
5305 rtx mem = make_memloc (ad, regno);
5306 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5307 {
5308 subst_reg_equivs_changed = 1;
5309 /* We mark the USE with QImode so that we recognize it
5310 as one that can be safely deleted at the end of
5311 reload. */
5312 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5313 QImode);
5314 return mem;
5315 }
5316 }
5317 }
5318 return ad;
5319
5320 case PLUS:
5321 /* Quickly dispose of a common case. */
5322 if (XEXP (ad, 0) == frame_pointer_rtx
5323 && CONST_INT_P (XEXP (ad, 1)))
5324 return ad;
5325 break;
5326
5327 default:
5328 break;
5329 }
5330
5331 fmt = GET_RTX_FORMAT (code);
5332 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5333 if (fmt[i] == 'e')
5334 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5335 return ad;
5336 }
5337 \f
5338 /* Compute the sum of X and Y, making canonicalizations assumed in an
5339 address, namely: sum constant integers, surround the sum of two
5340 constants with a CONST, put the constant as the second operand, and
5341 group the constant on the outermost sum.
5342
5343 This routine assumes both inputs are already in canonical form. */
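/* Examples (hypothetical operands):
     form_sum (Pmode, (plus (reg X) (const_int 4)), (const_int 8))
       => (plus (reg X) (const_int 12))
     form_sum (Pmode, (symbol_ref "a"), (const_int 4))
       would typically yield (const (plus (symbol_ref "a") (const_int 4))),
   with the constant part kept outermost and second.  */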
5344
5345 rtx
5346 form_sum (enum machine_mode mode, rtx x, rtx y)
5347 {
5348 rtx tem;
5349
5350 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5351 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5352
5353 if (CONST_INT_P (x))
5354 return plus_constant (y, INTVAL (x));
5355 else if (CONST_INT_P (y))
5356 return plus_constant (x, INTVAL (y));
5357 else if (CONSTANT_P (x))
5358 tem = x, x = y, y = tem;
5359
5360 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5361 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5362
5363 /* Note that if the operands of Y are specified in the opposite
5364 order in the recursive calls below, infinite recursion will occur. */
5365 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5366 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5367
5368 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5369 constant will have been placed second. */
5370 if (CONSTANT_P (x) && CONSTANT_P (y))
5371 {
5372 if (GET_CODE (x) == CONST)
5373 x = XEXP (x, 0);
5374 if (GET_CODE (y) == CONST)
5375 y = XEXP (y, 0);
5376
5377 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5378 }
5379
5380 return gen_rtx_PLUS (mode, x, y);
5381 }
5382 \f
5383 /* If ADDR is a sum containing a pseudo register that should be
5384 replaced with a constant (from reg_equiv_constant),
5385 return the result of doing so, and also apply the associative
5386 law so that the result is more likely to be a valid address.
5387 (But it is not guaranteed to be one.)
5388
5389 Note that at most one register is replaced, even if more are
5390 replaceable. Also, we try to put the result into a canonical form
5391 so it is more likely to be a valid address.
5392
5393 In all other cases, return ADDR. */
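/* Worked example (hypothetical pseudo numbers): with pseudo 100 equivalent
   to (const_int 16), ADDR = (plus (plus (reg 100) (reg 101)) (const_int 4))
   becomes (plus (reg 101) (const_int 20)): the constant equivalent is
   substituted and the two constants are regrouped by form_sum.  */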
5394
5395 static rtx
5396 subst_indexed_address (rtx addr)
5397 {
5398 rtx op0 = 0, op1 = 0, op2 = 0;
5399 rtx tem;
5400 int regno;
5401
5402 if (GET_CODE (addr) == PLUS)
5403 {
5404 /* Try to find a register to replace. */
5405 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5406 if (REG_P (op0)
5407 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5408 && reg_renumber[regno] < 0
5409 && reg_equiv_constant[regno] != 0)
5410 op0 = reg_equiv_constant[regno];
5411 else if (REG_P (op1)
5412 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5413 && reg_renumber[regno] < 0
5414 && reg_equiv_constant[regno] != 0)
5415 op1 = reg_equiv_constant[regno];
5416 else if (GET_CODE (op0) == PLUS
5417 && (tem = subst_indexed_address (op0)) != op0)
5418 op0 = tem;
5419 else if (GET_CODE (op1) == PLUS
5420 && (tem = subst_indexed_address (op1)) != op1)
5421 op1 = tem;
5422 else
5423 return addr;
5424
5425 /* Pick out up to three things to add. */
5426 if (GET_CODE (op1) == PLUS)
5427 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5428 else if (GET_CODE (op0) == PLUS)
5429 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5430
5431 /* Compute the sum. */
5432 if (op2 != 0)
5433 op1 = form_sum (GET_MODE (addr), op1, op2);
5434 if (op1 != 0)
5435 op0 = form_sum (GET_MODE (addr), op0, op1);
5436
5437 return op0;
5438 }
5439 return addr;
5440 }
5441 \f
5442 /* Update the REG_INC notes for an insn. It updates all REG_INC
5443 notes for the instruction which refer to REGNO so that they refer
5444 to the reload number.
5445
5446 INSN is the insn for which any REG_INC notes need updating.
5447
5448 REGNO is the register number which has been reloaded.
5449
5450 RELOADNUM is the reload number. */
5451
5452 static void
5453 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5454 int reloadnum ATTRIBUTE_UNUSED)
5455 {
5456 #ifdef AUTO_INC_DEC
5457 rtx link;
5458
5459 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5460 if (REG_NOTE_KIND (link) == REG_INC
5461 && (int) REGNO (XEXP (link, 0)) == regno)
5462 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5463 #endif
5464 }
5465 \f
5466 /* Record the pseudo registers we must reload into hard registers in a
5467 subexpression of a would-be memory address, X referring to a value
5468 in mode MODE. (This function is not called if the address we find
5469 is strictly valid.)
5470
5471 CONTEXT = 1 means we are considering regs as index regs,
5472 = 0 means we are considering them as base regs.
5473 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5474 or an autoinc code.
5475 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5476 is the code of the index part of the address. Otherwise, pass SCRATCH
5477 for this argument.
5478 OPNUM and TYPE specify the purpose of any reloads made.
5479
5480 IND_LEVELS says how many levels of indirect addressing are
5481 supported at this point in the address.
5482
5483 INSN, if nonzero, is the insn in which we do the reload. It is used
5484 to determine if we may generate output reloads.
5485
5486 We return nonzero if X, as a whole, is reloaded or replaced. */
5487
5488 /* Note that we take shortcuts assuming that no multi-reg machine mode
5489 occurs as part of an address.
5490 Also, this is not fully machine-customizable; it works for machines
5491 such as VAXen and 68000's and 32000's, but other possible machines
5492 could have addressing modes that this does not handle right.
5493 If you add push_reload calls here, you need to make sure gen_reload
5494 handles those cases gracefully. */
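/* For instance (hypothetical address), in
   (mem:SI (plus (mult (reg B) (const_int 4)) (reg A)))
   the MULT part is processed with CONTEXT = 1 (index) and reg A with
   CONTEXT = 0 (base), so each register is checked against the register
   class appropriate for its role in the address.  */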
5495
5496 static int
5497 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5498 enum rtx_code outer_code, enum rtx_code index_code,
5499 rtx *loc, int opnum, enum reload_type type,
5500 int ind_levels, rtx insn)
5501 {
5502 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5503 ((CONTEXT) == 0 \
5504 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5505 : REGNO_OK_FOR_INDEX_P (REGNO))
5506
5507 enum reg_class context_reg_class;
5508 RTX_CODE code = GET_CODE (x);
5509
5510 if (context == 1)
5511 context_reg_class = INDEX_REG_CLASS;
5512 else
5513 context_reg_class = base_reg_class (mode, outer_code, index_code);
5514
5515 switch (code)
5516 {
5517 case PLUS:
5518 {
5519 rtx orig_op0 = XEXP (x, 0);
5520 rtx orig_op1 = XEXP (x, 1);
5521 RTX_CODE code0 = GET_CODE (orig_op0);
5522 RTX_CODE code1 = GET_CODE (orig_op1);
5523 rtx op0 = orig_op0;
5524 rtx op1 = orig_op1;
5525
5526 if (GET_CODE (op0) == SUBREG)
5527 {
5528 op0 = SUBREG_REG (op0);
5529 code0 = GET_CODE (op0);
5530 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5531 op0 = gen_rtx_REG (word_mode,
5532 (REGNO (op0) +
5533 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5534 GET_MODE (SUBREG_REG (orig_op0)),
5535 SUBREG_BYTE (orig_op0),
5536 GET_MODE (orig_op0))));
5537 }
5538
5539 if (GET_CODE (op1) == SUBREG)
5540 {
5541 op1 = SUBREG_REG (op1);
5542 code1 = GET_CODE (op1);
5543 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5544 /* ??? Why is this given op1's mode and above for
5545 ??? op0 SUBREGs we use word_mode? */
5546 op1 = gen_rtx_REG (GET_MODE (op1),
5547 (REGNO (op1) +
5548 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5549 GET_MODE (SUBREG_REG (orig_op1)),
5550 SUBREG_BYTE (orig_op1),
5551 GET_MODE (orig_op1))));
5552 }
5553 /* A PLUS in the index register may be created only as a result of
5554 register rematerialization for an expression like &localvar*4. Reload it.
5555 It may be possible to combine the displacement on the outer level,
5556 but it is probably not worthwhile to do so. */
5557 if (context == 1)
5558 {
5559 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5560 opnum, ADDR_TYPE (type), ind_levels, insn);
5561 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5562 context_reg_class,
5563 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5564 return 1;
5565 }
5566
5567 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5568 || code0 == ZERO_EXTEND || code1 == MEM)
5569 {
5570 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5571 &XEXP (x, 0), opnum, type, ind_levels,
5572 insn);
5573 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5576 }
5577
5578 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5579 || code1 == ZERO_EXTEND || code0 == MEM)
5580 {
5581 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5582 &XEXP (x, 0), opnum, type, ind_levels,
5583 insn);
5584 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5585 &XEXP (x, 1), opnum, type, ind_levels,
5586 insn);
5587 }
5588
5589 else if (code0 == CONST_INT || code0 == CONST
5590 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5591 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5592 &XEXP (x, 1), opnum, type, ind_levels,
5593 insn);
5594
5595 else if (code1 == CONST_INT || code1 == CONST
5596 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5597 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5598 &XEXP (x, 0), opnum, type, ind_levels,
5599 insn);
5600
5601 else if (code0 == REG && code1 == REG)
5602 {
5603 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5604 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5605 return 0;
5606 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5607 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5608 return 0;
5609 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5610 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5611 &XEXP (x, 1), opnum, type, ind_levels,
5612 insn);
5613 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5614 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5618 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5622 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5623 &XEXP (x, 1), opnum, type, ind_levels,
5624 insn);
5625 else
5626 {
5627 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5633 }
5634 }
5635
5636 else if (code0 == REG)
5637 {
5638 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5644 }
5645
5646 else if (code1 == REG)
5647 {
5648 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5649 &XEXP (x, 1), opnum, type, ind_levels,
5650 insn);
5651 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5652 &XEXP (x, 0), opnum, type, ind_levels,
5653 insn);
5654 }
5655 }
5656
5657 return 0;
5658
5659 case POST_MODIFY:
5660 case PRE_MODIFY:
5661 {
5662 rtx op0 = XEXP (x, 0);
5663 rtx op1 = XEXP (x, 1);
5664 enum rtx_code index_code;
5665 int regno;
5666 int reloadnum;
5667
5668 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5669 return 0;
5670
5671 /* Currently, we only support {PRE,POST}_MODIFY constructs
5672 where a base register is {inc,dec}remented by the contents
5673 of another register or by a constant value. Thus, these
5674 operands must match. */
5675 gcc_assert (op0 == XEXP (op1, 0));
5676
5677 /* Require index register (or constant). Let's just handle the
5678 register case in the meantime... If the target allows
5679 auto-modify by a constant then we could try replacing a pseudo
5680 register with its equivalent constant where applicable.
5681
5682 We also handle the case where the register was eliminated
5683 resulting in a PLUS subexpression.
5684
5685 If we later decide to reload the whole PRE_MODIFY or
5686 POST_MODIFY, inc_for_reload might clobber the reload register
5687 before reading the index. The index register might therefore
5688 need to live longer than a TYPE reload normally would, so be
5689 conservative and class it as RELOAD_OTHER. */
5690 if ((REG_P (XEXP (op1, 1))
5691 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5692 || GET_CODE (XEXP (op1, 1)) == PLUS)
5693 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5694 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5695 ind_levels, insn);
5696
5697 gcc_assert (REG_P (XEXP (op1, 0)));
5698
5699 regno = REGNO (XEXP (op1, 0));
5700 index_code = GET_CODE (XEXP (op1, 1));
5701
5702 /* A register that is incremented cannot be constant! */
5703 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5704 || reg_equiv_constant[regno] == 0);
5705
5706 /* Handle a register that is equivalent to a memory location
5707 which cannot be addressed directly. */
5708 if (reg_equiv_memory_loc[regno] != 0
5709 && (reg_equiv_address[regno] != 0
5710 || num_not_at_initial_offset))
5711 {
5712 rtx tem = make_memloc (XEXP (x, 0), regno);
5713
5714 if (reg_equiv_address[regno]
5715 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5716 {
5717 rtx orig = tem;
5718
5719 /* First reload the memory location's address.
5720 We can't use ADDR_TYPE (type) here, because we need to
5721 write back the value after reading it, hence we actually
5722 need two registers. */
5723 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5724 &XEXP (tem, 0), opnum,
5725 RELOAD_OTHER,
5726 ind_levels, insn);
5727
5728 if (!rtx_equal_p (tem, orig))
5729 push_reg_equiv_alt_mem (regno, tem);
5730
5731 /* Then reload the memory location into a base
5732 register. */
5733 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5734 &XEXP (op1, 0),
5735 base_reg_class (mode, code,
5736 index_code),
5737 GET_MODE (x), GET_MODE (x), 0,
5738 0, opnum, RELOAD_OTHER);
5739
5740 update_auto_inc_notes (this_insn, regno, reloadnum);
5741 return 0;
5742 }
5743 }
5744
5745 if (reg_renumber[regno] >= 0)
5746 regno = reg_renumber[regno];
5747
5748 /* We require a base register here... */
5749 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5750 {
5751 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5752 &XEXP (op1, 0), &XEXP (x, 0),
5753 base_reg_class (mode, code, index_code),
5754 GET_MODE (x), GET_MODE (x), 0, 0,
5755 opnum, RELOAD_OTHER);
5756
5757 update_auto_inc_notes (this_insn, regno, reloadnum);
5758 return 0;
5759 }
5760 }
5761 return 0;
5762
5763 case POST_INC:
5764 case POST_DEC:
5765 case PRE_INC:
5766 case PRE_DEC:
5767 if (REG_P (XEXP (x, 0)))
5768 {
5769 int regno = REGNO (XEXP (x, 0));
5770 int value = 0;
5771 rtx x_orig = x;
5772
5773 /* A register that is incremented cannot be constant! */
5774 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5775 || reg_equiv_constant[regno] == 0);
5776
5777 /* Handle a register that is equivalent to a memory location
5778 which cannot be addressed directly. */
5779 if (reg_equiv_memory_loc[regno] != 0
5780 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5781 {
5782 rtx tem = make_memloc (XEXP (x, 0), regno);
5783 if (reg_equiv_address[regno]
5784 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5785 {
5786 rtx orig = tem;
5787
5788 /* First reload the memory location's address.
5789 We can't use ADDR_TYPE (type) here, because we need to
5790 write back the value after reading it, hence we actually
5791 need two registers. */
5792 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5793 &XEXP (tem, 0), opnum, type,
5794 ind_levels, insn);
5795 if (!rtx_equal_p (tem, orig))
5796 push_reg_equiv_alt_mem (regno, tem);
5797 /* Put this inside a new increment-expression. */
5798 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5799 /* Proceed to reload that, as if it contained a register. */
5800 }
5801 }
5802
5803 /* If we have a hard register that is ok in this incdec context,
5804 don't make a reload. If the register isn't nice enough for
5805 autoincdec, we can reload it. But if an autoincrement of a
5806 register that we have verified here as acceptable is still not
5807 "valid" in the surrounding context, then no autoincrement is "valid".
5808 If that is true and something made an autoincrement anyway,
5809 this must be a special context where one is allowed.
5810 (For example, a "push" instruction.)
5811 We can't improve this address, so leave it alone. */
5812
5813 /* Otherwise, reload the autoincrement into a suitable hard reg
5814 and record how much to increment by. */
5815
5816 if (reg_renumber[regno] >= 0)
5817 regno = reg_renumber[regno];
5818 if (regno >= FIRST_PSEUDO_REGISTER
5819 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5820 index_code))
5821 {
5822 int reloadnum;
5823
5824 /* If we can output the register afterwards, do so; this
5825 saves the extra update.
5826 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5827 CALL_INSN - and it does not set CC0.
5828 But don't do this if we cannot directly address the
5829 memory location, since this will make it harder to
5830 reuse address reloads, and increases register pressure.
5831 Also don't do this if we can probably update x directly. */
5832 rtx equiv = (MEM_P (XEXP (x, 0))
5833 ? XEXP (x, 0)
5834 : reg_equiv_mem[regno]);
5835 int icode
5836 = (int) optab_handler (add_optab, GET_MODE (x))->insn_code;
5837 if (insn && NONJUMP_INSN_P (insn) && equiv
5838 && memory_operand (equiv, GET_MODE (equiv))
5839 #ifdef HAVE_cc0
5840 && ! sets_cc0_p (PATTERN (insn))
5841 #endif
5842 && ! (icode != CODE_FOR_nothing
5843 && ((*insn_data[icode].operand[0].predicate)
5844 (equiv, GET_MODE (x)))
5845 && ((*insn_data[icode].operand[1].predicate)
5846 (equiv, GET_MODE (x)))))
5847 {
5848 /* We use the original pseudo for loc, so that
5849 emit_reload_insns() knows which pseudo this
5850 reload refers to and updates the pseudo rtx, not
5851 its equivalent memory location, as well as the
5852 corresponding entry in reg_last_reload_reg. */
5853 loc = &XEXP (x_orig, 0);
5854 x = XEXP (x, 0);
5855 reloadnum
5856 = push_reload (x, x, loc, loc,
5857 context_reg_class,
5858 GET_MODE (x), GET_MODE (x), 0, 0,
5859 opnum, RELOAD_OTHER);
5860 }
5861 else
5862 {
5863 reloadnum
5864 = push_reload (x, x, loc, (rtx*) 0,
5865 context_reg_class,
5866 GET_MODE (x), GET_MODE (x), 0, 0,
5867 opnum, type);
5868 rld[reloadnum].inc
5869 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5870
5871 value = 1;
5872 }
5873
5874 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5875 reloadnum);
5876 }
5877 return value;
5878 }
5879 return 0;
5880
5881 case TRUNCATE:
5882 case SIGN_EXTEND:
5883 case ZERO_EXTEND:
5884 /* Look for parts to reload in the inner expression and reload them
5885 too, in addition to this operation. Reloading all inner parts in
5886 addition to this one shouldn't be necessary, but at this point,
5887 we don't know if we can possibly omit any part that *can* be
5888 reloaded. Targets that are better off reloading just either part
5889 (or perhaps even a different part of an outer expression), should
5890 define LEGITIMIZE_RELOAD_ADDRESS. */
5891 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5892 context, code, SCRATCH, &XEXP (x, 0), opnum,
5893 type, ind_levels, insn);
5894 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5895 context_reg_class,
5896 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5897 return 1;
5898
5899 case MEM:
5900 /* This is probably the result of a substitution, by eliminate_regs, of
5901 an equivalent address for a pseudo that was not allocated to a hard
5902 register. Verify that the specified address is valid and reload it
5903 into a register.
5904
5905 Since we know we are going to reload this item, don't decrement for
5906 the indirection level.
5907
5908 Note that this is actually conservative: it would be slightly more
5909 efficient to use the value of SPILL_INDIRECT_LEVELS from
5910 reload1.c here. */
5911
5912 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5913 opnum, ADDR_TYPE (type), ind_levels, insn);
5914 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5915 context_reg_class,
5916 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5917 return 1;
5918
5919 case REG:
5920 {
5921 int regno = REGNO (x);
5922
5923 if (reg_equiv_constant[regno] != 0)
5924 {
5925 find_reloads_address_part (reg_equiv_constant[regno], loc,
5926 context_reg_class,
5927 GET_MODE (x), opnum, type, ind_levels);
5928 return 1;
5929 }
5930
5931 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5932 that feeds this insn. */
5933 if (reg_equiv_mem[regno] != 0)
5934 {
5935 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5936 context_reg_class,
5937 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5938 return 1;
5939 }
5940 #endif
5941
5942 if (reg_equiv_memory_loc[regno]
5943 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5944 {
5945 rtx tem = make_memloc (x, regno);
5946 if (reg_equiv_address[regno] != 0
5947 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5948 {
5949 x = tem;
5950 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5951 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5952 ind_levels, insn);
5953 if (!rtx_equal_p (x, tem))
5954 push_reg_equiv_alt_mem (regno, x);
5955 }
5956 }
5957
5958 if (reg_renumber[regno] >= 0)
5959 regno = reg_renumber[regno];
5960
5961 if (regno >= FIRST_PSEUDO_REGISTER
5962 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5963 index_code))
5964 {
5965 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5966 context_reg_class,
5967 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5968 return 1;
5969 }
5970
5971 /* If a register appearing in an address is the subject of a CLOBBER
5972 in this insn, reload it into some other register to be safe.
5973 The CLOBBER is supposed to make the register unavailable
5974 from before this insn to after it. */
5975 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5976 {
5977 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5978 context_reg_class,
5979 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5980 return 1;
5981 }
5982 }
5983 return 0;
5984
5985 case SUBREG:
5986 if (REG_P (SUBREG_REG (x)))
5987 {
5988 /* If this is a SUBREG of a hard register and the resulting register
5989 is of the wrong class, reload the whole SUBREG. This avoids
5990 needless copies if SUBREG_REG is multi-word. */
5991 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5992 {
5993 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5994
5995 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5996 index_code))
5997 {
5998 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5999 context_reg_class,
6000 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6001 return 1;
6002 }
6003 }
6004 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6005 is larger than the class size, then reload the whole SUBREG. */
6006 else
6007 {
6008 enum reg_class rclass = context_reg_class;
6009 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
6010 > reg_class_size[rclass])
6011 {
6012 x = find_reloads_subreg_address (x, 0, opnum,
6013 ADDR_TYPE (type),
6014 ind_levels, insn);
6015 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6016 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6017 return 1;
6018 }
6019 }
6020 }
6021 break;
6022
6023 default:
6024 break;
6025 }
6026
6027 {
6028 const char *fmt = GET_RTX_FORMAT (code);
6029 int i;
6030
6031 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6032 {
6033 if (fmt[i] == 'e')
6034 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6035 we get here. */
6036 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6037 &XEXP (x, i), opnum, type, ind_levels, insn);
6038 }
6039 }
6040
6041 #undef REG_OK_FOR_CONTEXT
6042 return 0;
6043 }
6044 \f
6045 /* X, which is found at *LOC, is a part of an address that needs to be
6046 reloaded into a register of class RCLASS. If X is a constant, or if
6047 X is a PLUS that contains a constant, check that the constant is a
6048 legitimate operand and that we are supposed to be able to load
6049 it into the register.
6050
6051 If not, force the constant into memory and reload the MEM instead.
6052
6053 MODE is the mode to use, in case X is an integer constant.
6054
6055 OPNUM and TYPE describe the purpose of any reloads made.
6056
6057 IND_LEVELS says how many levels of indirect addressing this machine
6058 supports. */
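/* Hypothetical example: if X is a CONST_DOUBLE that is not a legitimate
   constant on the target, it is forced into the constant pool here; the
   address of the resulting MEM is itself processed by find_reloads_address,
   and then the MEM is reloaded into a register of class RCLASS.  */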
6059
6060 static void
6061 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6062 enum machine_mode mode, int opnum,
6063 enum reload_type type, int ind_levels)
6064 {
6065 if (CONSTANT_P (x)
6066 && (! LEGITIMATE_CONSTANT_P (x)
6067 || PREFERRED_RELOAD_CLASS (x, rclass) == NO_REGS))
6068 {
6069 x = force_const_mem (mode, x);
6070 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6071 opnum, type, ind_levels, 0);
6072 }
6073
6074 else if (GET_CODE (x) == PLUS
6075 && CONSTANT_P (XEXP (x, 1))
6076 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6077 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), rclass) == NO_REGS))
6078 {
6079 rtx tem;
6080
6081 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6082 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6083 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6084 opnum, type, ind_levels, 0);
6085 }
6086
6087 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6088 mode, VOIDmode, 0, 0, opnum, type);
6089 }
6090 \f
6091 /* X, a subreg of a pseudo, is a part of an address that needs to be
6092 reloaded.
6093
6094 If the pseudo is equivalent to a memory location that cannot be directly
6095 addressed, make the necessary address reloads.
6096
6097 If address reloads have been necessary, or if the address is changed
6098 by register elimination, return the rtx of the memory location;
6099 otherwise, return X.
6100
6101 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6102 memory location.
6103
6104 OPNUM and TYPE identify the purpose of the reload.
6105
6106 IND_LEVELS says how many levels of indirect addressing are
6107 supported at this point in the address.
6108
6109 INSN, if nonzero, is the insn in which we do the reload. It is used
6110 to determine where to put USEs for pseudos that we have to replace with
6111 stack slots. */
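/* Hypothetical little-endian example: if pseudo 100 has mode DImode and is
   equivalent to the stack slot (mem:DI (plus (reg fp) (const_int -8))),
   then (subreg:SI (reg:DI 100) 4) can be rewritten here as
   (mem:SI (plus (reg fp) (const_int -4))): the SUBREG_BYTE is added to the
   address and the MEM is narrowed to SImode, with address reloads made if
   the adjusted address is not valid.  */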
6112
6113 static rtx
6114 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6115 enum reload_type type, int ind_levels, rtx insn)
6116 {
6117 int regno = REGNO (SUBREG_REG (x));
6118
6119 if (reg_equiv_memory_loc[regno])
6120 {
6121 /* If the address is not directly addressable, or if the address is not
6122 offsettable, then it must be replaced. */
6123 if (! force_replace
6124 && (reg_equiv_address[regno]
6125 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6126 force_replace = 1;
6127
6128 if (force_replace || num_not_at_initial_offset)
6129 {
6130 rtx tem = make_memloc (SUBREG_REG (x), regno);
6131
6132 /* If the address changes because of register elimination, then
6133 it must be replaced. */
6134 if (force_replace
6135 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6136 {
6137 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6138 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6139 int offset;
6140 rtx orig = tem;
6141 int reloaded;
6142
6143 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6144 hold the correct (negative) byte offset. */
6145 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6146 offset = inner_size - outer_size;
6147 else
6148 offset = SUBREG_BYTE (x);
6149
6150 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6151 PUT_MODE (tem, GET_MODE (x));
6152 if (MEM_OFFSET (tem))
6153 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6154 if (MEM_SIZE (tem)
6155 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6156 set_mem_size (tem, GEN_INT (outer_size));
6157
6158 /* If this was a paradoxical subreg that we replaced, the
6159 resulting memory must be sufficiently aligned to allow
6160 us to widen the mode of the memory. */
6161 if (outer_size > inner_size)
6162 {
6163 rtx base;
6164
6165 base = XEXP (tem, 0);
6166 if (GET_CODE (base) == PLUS)
6167 {
6168 if (CONST_INT_P (XEXP (base, 1))
6169 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6170 return x;
6171 base = XEXP (base, 0);
6172 }
6173 if (!REG_P (base)
6174 || (REGNO_POINTER_ALIGN (REGNO (base))
6175 < outer_size * BITS_PER_UNIT))
6176 return x;
6177 }
6178
6179 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6180 XEXP (tem, 0), &XEXP (tem, 0),
6181 opnum, type, ind_levels, insn);
6182 /* ??? Do we need to handle nonzero offsets somehow? */
6183 if (!offset && !rtx_equal_p (tem, orig))
6184 push_reg_equiv_alt_mem (regno, tem);
6185
6186 /* For some processors an address may be valid in the
6187 original mode but not in a smaller mode. For
6188 example, ARM accepts a scaled index register in
6189 SImode but not in HImode. Note that this is only
6190 a problem if the address in reg_equiv_mem is already
6191 invalid in the new mode; other cases would be fixed
6192 by find_reloads_address as usual.
6193
6194 ??? We attempt to handle such cases here by doing an
6195 additional reload of the full address after the
6196 usual processing by find_reloads_address. Note that
6197 this may not work in the general case, but it seems
6198 to cover the cases where this situation currently
6199 occurs. A more general fix might be to reload the
6200 *value* instead of the address, but this would not
6201 be expected by the callers of this routine as-is.
6202
6203 If find_reloads_address already completely replaced
6204 the address, there is nothing further to do. */
6205 if (reloaded == 0
6206 && reg_equiv_mem[regno] != 0
6207 && !strict_memory_address_addr_space_p
6208 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
6209 MEM_ADDR_SPACE (reg_equiv_mem[regno])))
6210 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6211 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6212 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6213 opnum, type);
6214
6215 /* If this is not a toplevel operand, find_reloads doesn't see
6216 this substitution. We have to emit a USE of the pseudo so
6217 that delete_output_reload can see it. */
6218 if (replace_reloads && recog_data.operand[opnum] != x)
6219 /* We mark the USE with QImode so that we recognize it
6220 as one that can be safely deleted at the end of
6221 reload. */
6222 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6223 SUBREG_REG (x)),
6224 insn), QImode);
6225 x = tem;
6226 }
6227 }
6228 }
6229 return x;
6230 }
6231 \f
6232 /* Substitute into the current INSN the registers into which we have reloaded
6233 the things that need reloading. The array `replacements'
6234 contains the locations of all pointers that must be changed
6235 and says what to replace them with.
6236
6237 Return the rtx that X translates into; usually X, but modified. */
6238
6239 void
6240 subst_reloads (rtx insn)
6241 {
6242 int i;
6243
6244 for (i = 0; i < n_replacements; i++)
6245 {
6246 struct replacement *r = &replacements[i];
6247 rtx reloadreg = rld[r->what].reg_rtx;
6248 if (reloadreg)
6249 {
6250 #ifdef DEBUG_RELOAD
6251 /* This checking takes a very long time on some platforms
6252 causing the gcc.c-torture/compile/limits-fnargs.c test
6253 to time out during testing. See PR 31850.
6254
6255 Internal consistency test. Check that we don't modify
6256 anything in the equivalence arrays. Whenever something from
6257 those arrays needs to be reloaded, it must be unshared before
6258 being substituted into; the equivalence must not be modified.
6259 Otherwise, if the equivalence is used after that, it will
6260 have been modified, and the thing substituted (probably a
6261 register) is likely overwritten and not a usable equivalence. */
6262 int check_regno;
6263
6264 for (check_regno = 0; check_regno < max_regno; check_regno++)
6265 {
6266 #define CHECK_MODF(ARRAY) \
6267 gcc_assert (!ARRAY[check_regno] \
6268 || !loc_mentioned_in_p (r->where, \
6269 ARRAY[check_regno]))
6270
6271 CHECK_MODF (reg_equiv_constant);
6272 CHECK_MODF (reg_equiv_memory_loc);
6273 CHECK_MODF (reg_equiv_address);
6274 CHECK_MODF (reg_equiv_mem);
6275 #undef CHECK_MODF
6276 }
6277 #endif /* DEBUG_RELOAD */
6278
6279 /* If we're replacing a LABEL_REF with a register, there must
6280 already be an indication (to e.g. flow) of which label this
6281 register refers to. */
6282 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6283 || !JUMP_P (insn)
6284 || find_reg_note (insn,
6285 REG_LABEL_OPERAND,
6286 XEXP (*r->where, 0))
6287 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6288
6289 /* Encapsulate RELOADREG so its machine mode matches what
6290 used to be there. Note that gen_lowpart_common will
6291 do the wrong thing if RELOADREG is multi-word. RELOADREG
6292 will always be a REG here. */
6293 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6294 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6295
6296 /* If we are putting this into a SUBREG and RELOADREG is a
6297 SUBREG, we would be making nested SUBREGs, so we have to fix
6298 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6299
6300 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6301 {
6302 if (GET_MODE (*r->subreg_loc)
6303 == GET_MODE (SUBREG_REG (reloadreg)))
6304 *r->subreg_loc = SUBREG_REG (reloadreg);
6305 else
6306 {
6307 int final_offset =
6308 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6309
6310 /* When working with SUBREGs the rule is that the byte
6311 offset must be a multiple of the size of the SUBREG's mode. */
6312 final_offset = (final_offset /
6313 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6314 final_offset = (final_offset *
6315 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6316
6317 *r->where = SUBREG_REG (reloadreg);
6318 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6319 }
6320 }
6321 else
6322 *r->where = reloadreg;
6323 }
6324 /* If reload got no reg and isn't optional, something's wrong. */
6325 else
6326 gcc_assert (rld[r->what].optional);
6327 }
6328 }
6329 \f
6330 /* Make a copy of any replacements being done into X and move those
6331 copies to locations in Y, a copy of X. */
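/* For instance, if a replacement was recorded at &XEXP (x, 0) and Y is a
   copy of X, a new, identical replacement entry is recorded at
   &XEXP (y, 0), so that substitution later updates both copies.  */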
6332
6333 void
6334 copy_replacements (rtx x, rtx y)
6335 {
6336 /* We can't support X being a SUBREG because we might then need to know its
6337 location if something inside it was replaced. */
6338 gcc_assert (GET_CODE (x) != SUBREG);
6339
6340 copy_replacements_1 (&x, &y, n_replacements);
6341 }
6342
6343 static void
6344 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6345 {
6346 int i, j;
6347 rtx x, y;
6348 struct replacement *r;
6349 enum rtx_code code;
6350 const char *fmt;
6351
6352 for (j = 0; j < orig_replacements; j++)
6353 {
6354 if (replacements[j].subreg_loc == px)
6355 {
6356 r = &replacements[n_replacements++];
6357 r->where = replacements[j].where;
6358 r->subreg_loc = py;
6359 r->what = replacements[j].what;
6360 r->mode = replacements[j].mode;
6361 }
6362 else if (replacements[j].where == px)
6363 {
6364 r = &replacements[n_replacements++];
6365 r->where = py;
6366 r->subreg_loc = 0;
6367 r->what = replacements[j].what;
6368 r->mode = replacements[j].mode;
6369 }
6370 }
6371
6372 x = *px;
6373 y = *py;
6374 code = GET_CODE (x);
6375 fmt = GET_RTX_FORMAT (code);
6376
6377 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6378 {
6379 if (fmt[i] == 'e')
6380 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6381 else if (fmt[i] == 'E')
6382 for (j = XVECLEN (x, i); --j >= 0; )
6383 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6384 orig_replacements);
6385 }
6386 }
6387
6388 /* Change any replacements being done to *X to be done to *Y. */
6389
6390 void
6391 move_replacements (rtx *x, rtx *y)
6392 {
6393 int i;
6394
6395 for (i = 0; i < n_replacements; i++)
6396 if (replacements[i].subreg_loc == x)
6397 replacements[i].subreg_loc = y;
6398 else if (replacements[i].where == x)
6399 {
6400 replacements[i].where = y;
6401 replacements[i].subreg_loc = 0;
6402 }
6403 }
6404 \f
6405 /* If LOC was scheduled to be replaced by something, return the replacement.
6406 Otherwise, return *LOC. */
6407
6408 rtx
6409 find_replacement (rtx *loc)
6410 {
6411 struct replacement *r;
6412
6413 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6414 {
6415 rtx reloadreg = rld[r->what].reg_rtx;
6416
6417 if (reloadreg && r->where == loc)
6418 {
6419 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6420 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6421
6422 return reloadreg;
6423 }
6424 else if (reloadreg && r->subreg_loc == loc)
6425 {
6426 /* RELOADREG must be either a REG or a SUBREG.
6427
6428 ??? Is it actually still ever a SUBREG? If so, why? */
6429
6430 if (REG_P (reloadreg))
6431 return gen_rtx_REG (GET_MODE (*loc),
6432 (REGNO (reloadreg) +
6433 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6434 GET_MODE (SUBREG_REG (*loc)),
6435 SUBREG_BYTE (*loc),
6436 GET_MODE (*loc))));
6437 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6438 return reloadreg;
6439 else
6440 {
6441 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6442
6443 /* When working with SUBREGs the rule is that the byte
6444 offset must be a multiple of the size of the SUBREG's mode. */
6445 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6446 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6447 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6448 final_offset);
6449 }
6450 }
6451 }
6452
6453 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6454 what's inside and make a new rtl if so. */
6455 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6456 || GET_CODE (*loc) == MULT)
6457 {
6458 rtx x = find_replacement (&XEXP (*loc, 0));
6459 rtx y = find_replacement (&XEXP (*loc, 1));
6460
6461 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6462 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6463 }
6464
6465 return *loc;
6466 }
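/* Illustrative use, roughly in the style of callers such as gen_reload in
   reload1.c (a sketch only; the real call sites differ in detail): when
   emitting the move for a reload whose source is a PLUS, each operand is
   first run through find_replacement so that any operand scheduled to be
   replaced by its reload register is used in its replaced form:

	rtx op0 = find_replacement (&XEXP (in, 0));
	rtx op1 = find_replacement (&XEXP (in, 1));

   Operands with no scheduled replacement come back unchanged.  */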
6467 \f
6468 /* Return nonzero if register in range [REGNO, ENDREGNO)
6469 appears either explicitly or implicitly in X
6470 other than being stored into (except for earlyclobber operands).
6471
6472 References contained within the substructure at LOC do not count.
6473 LOC may be zero, meaning don't ignore anything.
6474
6475 This is similar to refers_to_regno_p in rtlanal.c except that we
6476 look at equivalences for pseudos that didn't get hard registers. */
6477
6478 static int
6479 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6480 rtx x, rtx *loc)
6481 {
6482 int i;
6483 unsigned int r;
6484 RTX_CODE code;
6485 const char *fmt;
6486
6487 if (x == 0)
6488 return 0;
6489
6490 repeat:
6491 code = GET_CODE (x);
6492
6493 switch (code)
6494 {
6495 case REG:
6496 r = REGNO (x);
6497
6498 /* If this is a pseudo, a hard register must not have been allocated.
6499 X must therefore either be a constant or be in memory. */
6500 if (r >= FIRST_PSEUDO_REGISTER)
6501 {
6502 if (reg_equiv_memory_loc[r])
6503 return refers_to_regno_for_reload_p (regno, endregno,
6504 reg_equiv_memory_loc[r],
6505 (rtx*) 0);
6506
6507 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6508 return 0;
6509 }
6510
6511 return (endregno > r
6512 && regno < r + (r < FIRST_PSEUDO_REGISTER
6513 ? hard_regno_nregs[r][GET_MODE (x)]
6514 : 1));
6515
6516 case SUBREG:
6517 /* If this is a SUBREG of a hard reg, we can see exactly which
6518 registers are being modified. Otherwise, handle normally. */
6519 if (REG_P (SUBREG_REG (x))
6520 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6521 {
6522 unsigned int inner_regno = subreg_regno (x);
6523 unsigned int inner_endregno
6524 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6525 ? subreg_nregs (x) : 1);
6526
6527 return endregno > inner_regno && regno < inner_endregno;
6528 }
6529 break;
6530
6531 case CLOBBER:
6532 case SET:
6533 if (&SET_DEST (x) != loc
6534 /* Note that setting a SUBREG counts as referring to the REG it is in
6535 for a pseudo, but not for hard registers, since we can
6536 treat each word individually. */
6537 && ((GET_CODE (SET_DEST (x)) == SUBREG
6538 && loc != &SUBREG_REG (SET_DEST (x))
6539 && REG_P (SUBREG_REG (SET_DEST (x)))
6540 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6541 && refers_to_regno_for_reload_p (regno, endregno,
6542 SUBREG_REG (SET_DEST (x)),
6543 loc))
6544 /* If the output is an earlyclobber operand, this is
6545 a conflict. */
6546 || ((!REG_P (SET_DEST (x))
6547 || earlyclobber_operand_p (SET_DEST (x)))
6548 && refers_to_regno_for_reload_p (regno, endregno,
6549 SET_DEST (x), loc))))
6550 return 1;
6551
6552 if (code == CLOBBER || loc == &SET_SRC (x))
6553 return 0;
6554 x = SET_SRC (x);
6555 goto repeat;
6556
6557 default:
6558 break;
6559 }
6560
6561 /* X does not match, so try its subexpressions. */
6562
6563 fmt = GET_RTX_FORMAT (code);
6564 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6565 {
6566 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6567 {
6568 if (i == 0)
6569 {
6570 x = XEXP (x, 0);
6571 goto repeat;
6572 }
6573 else
6574 if (refers_to_regno_for_reload_p (regno, endregno,
6575 XEXP (x, i), loc))
6576 return 1;
6577 }
6578 else if (fmt[i] == 'E')
6579 {
6580 int j;
6581 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6582 if (loc != &XVECEXP (x, i, j)
6583 && refers_to_regno_for_reload_p (regno, endregno,
6584 XVECEXP (x, i, j), loc))
6585 return 1;
6586 }
6587 }
6588 return 0;
6589 }
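/* A worked example of the pseudo-equivalence handling above (the register
   numbers are invented, and the frame pointer is assumed to occupy a single
   hard register): suppose pseudo 200 received no hard register and
   reg_equiv_memory_loc[200] is a frame-pointer-relative slot such as
   (mem:SI (plus:SI (reg:SI fp) (const_int -8))).  Then

	refers_to_regno_for_reload_p (FRAME_POINTER_REGNUM,
				      FRAME_POINTER_REGNUM + 1,
				      gen_rtx_REG (SImode, 200), (rtx *) 0)

   returns 1, because the reference to the pseudo is treated as a reference
   to its memory equivalent, which mentions the frame pointer.  */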
6590
6591 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6592 we check if any register number in X conflicts with the relevant register
6593 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6594 contains a MEM (we don't bother checking for memory addresses that can't
6595 conflict because we expect this to be a rare case).
6596
6597 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6598 that we look at equivalences for pseudos that didn't get hard registers. */
6599
6600 int
6601 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6602 {
6603 int regno, endregno;
6604
6605 /* Overly conservative. */
6606 if (GET_CODE (x) == STRICT_LOW_PART
6607 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6608 x = XEXP (x, 0);
6609
6610 /* If either argument is a constant, then modifying X cannot affect IN. */
6611 if (CONSTANT_P (x) || CONSTANT_P (in))
6612 return 0;
6613 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6614 return refers_to_mem_for_reload_p (in);
6615 else if (GET_CODE (x) == SUBREG)
6616 {
6617 regno = REGNO (SUBREG_REG (x));
6618 if (regno < FIRST_PSEUDO_REGISTER)
6619 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6620 GET_MODE (SUBREG_REG (x)),
6621 SUBREG_BYTE (x),
6622 GET_MODE (x));
6623 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6624 ? subreg_nregs (x) : 1);
6625
6626 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6627 }
6628 else if (REG_P (x))
6629 {
6630 regno = REGNO (x);
6631
6632 /* If this is a pseudo, it must not have been assigned a hard register.
6633 Therefore, it must either be in memory or be a constant. */
6634
6635 if (regno >= FIRST_PSEUDO_REGISTER)
6636 {
6637 if (reg_equiv_memory_loc[regno])
6638 return refers_to_mem_for_reload_p (in);
6639 gcc_assert (reg_equiv_constant[regno]);
6640 return 0;
6641 }
6642
6643 endregno = END_HARD_REGNO (x);
6644
6645 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6646 }
6647 else if (MEM_P (x))
6648 return refers_to_mem_for_reload_p (in);
6649 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6650 || GET_CODE (x) == CC0)
6651 return reg_mentioned_p (x, in);
6652 else
6653 {
6654 gcc_assert (GET_CODE (x) == PLUS);
6655
6656 /* We actually want to know if X is mentioned somewhere inside IN.
6657 We must not say that (plus (sp) (const_int 124)) is in
6658 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6659 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6660 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6661 while (MEM_P (in))
6662 in = XEXP (in, 0);
6663 if (REG_P (in))
6664 return 0;
6665 else if (GET_CODE (in) == PLUS)
6666 return (rtx_equal_p (x, in)
6667 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6668 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6669 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6670 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6671 }
6672
6673 gcc_unreachable ();
6674 }
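/* To make the PLUS case above concrete (the offsets are invented for the
   example): with X = (plus:SI (reg:SI sp) (const_int 124)) and
   IN = (mem:SI (plus:SI (reg:SI sp) (const_int 64))), the MEM is stripped,
   the two PLUS expressions compare unequal under rtx_equal_p, and the
   recursive calls then see only a bare register and a constant, so the
   result is 0: the stack pointer being mentioned in both expressions is
   not, by itself, treated as an overlap.  */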
6675
6676 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6677 registers. */
6678
6679 static int
6680 refers_to_mem_for_reload_p (rtx x)
6681 {
6682 const char *fmt;
6683 int i;
6684
6685 if (MEM_P (x))
6686 return 1;
6687
6688 if (REG_P (x))
6689 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6690 && reg_equiv_memory_loc[REGNO (x)]);
6691
6692 fmt = GET_RTX_FORMAT (GET_CODE (x));
6693 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6694 if (fmt[i] == 'e'
6695 && (MEM_P (XEXP (x, i))
6696 || refers_to_mem_for_reload_p (XEXP (x, i))))
6697 return 1;
6698
6699 return 0;
6700 }
6701 \f
6702 /* Check the insns before INSN to see if there is a suitable register
6703 containing the same value as GOAL.
6704 If OTHER is -1, look for a register in class RCLASS.
6705 Otherwise, just see if register number OTHER shares GOAL's value.
6706
6707 Return an rtx for the register found, or zero if none is found.
6708
6709 If RELOAD_REG_P is (short *)1,
6710 we reject any hard reg that appears in reload_reg_rtx
6711 because such a hard reg is also needed coming into this insn.
6712
6713 If RELOAD_REG_P is any other nonzero value,
6714 it is a vector indexed by hard reg number
6715 and we reject any hard reg whose element in the vector is nonnegative
6716 as well as any that appears in reload_reg_rtx.
6717
6718 If GOAL is zero, then GOALREG is a register number; we look
6719 for an equivalent for that register.
6720
6721 MODE is the machine mode of the value we want an equivalence for.
6722 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6723
6724 This function is used by jump.c as well as in the reload pass.
6725
6726 If GOAL is the sum of the stack pointer and a constant, we treat it
6727 as if it were a constant except that sp is required to be unchanging. */
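/* For instance, the reload pass asks questions of roughly this shape
   (a sketch only; the real call sites in reload1.c and jump.c differ in
   detail, and EQUIV is an invented local name):

	equiv = find_equiv_reg (rld[r].in, insn, rld[r].rclass, -1,
				NULL, 0, rld[r].inmode);

   i.e. "is there already a hard register of class rclass holding the same
   value as rld[r].in just before INSN?".  Passing NULL for RELOAD_REG_P
   excludes nothing; passing (short *) 1 instead would also reject every
   register that appears in reload_reg_rtx.  */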
6728
6729 rtx
6730 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6731 short *reload_reg_p, int goalreg, enum machine_mode mode)
6732 {
6733 rtx p = insn;
6734 rtx goaltry, valtry, value, where;
6735 rtx pat;
6736 int regno = -1;
6737 int valueno;
6738 int goal_mem = 0;
6739 int goal_const = 0;
6740 int goal_mem_addr_varies = 0;
6741 int need_stable_sp = 0;
6742 int nregs;
6743 int valuenregs;
6744 int num = 0;
6745
6746 if (goal == 0)
6747 regno = goalreg;
6748 else if (REG_P (goal))
6749 regno = REGNO (goal);
6750 else if (MEM_P (goal))
6751 {
6752 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6753 if (MEM_VOLATILE_P (goal))
6754 return 0;
6755 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6756 return 0;
6757 /* An address with side effects must be reexecuted. */
6758 switch (code)
6759 {
6760 case POST_INC:
6761 case PRE_INC:
6762 case POST_DEC:
6763 case PRE_DEC:
6764 case POST_MODIFY:
6765 case PRE_MODIFY:
6766 return 0;
6767 default:
6768 break;
6769 }
6770 goal_mem = 1;
6771 }
6772 else if (CONSTANT_P (goal))
6773 goal_const = 1;
6774 else if (GET_CODE (goal) == PLUS
6775 && XEXP (goal, 0) == stack_pointer_rtx
6776 && CONSTANT_P (XEXP (goal, 1)))
6777 goal_const = need_stable_sp = 1;
6778 else if (GET_CODE (goal) == PLUS
6779 && XEXP (goal, 0) == frame_pointer_rtx
6780 && CONSTANT_P (XEXP (goal, 1)))
6781 goal_const = 1;
6782 else
6783 return 0;
6784
6785 num = 0;
6786 /* Scan insns back from INSN, looking for one that copies
6787 a value into or out of GOAL.
6788 Stop and give up if we reach a label. */
6789
6790 while (1)
6791 {
6792 p = PREV_INSN (p);
6793 if (p && DEBUG_INSN_P (p))
6794 continue;
6795 num++;
6796 if (p == 0 || LABEL_P (p)
6797 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6798 return 0;
6799
6800 if (NONJUMP_INSN_P (p)
6801 /* If we don't want spill regs ... */
6802 && (! (reload_reg_p != 0
6803 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6804 /* ... then ignore insns introduced by reload; they aren't
6805 useful and can cause results in reload_as_needed to be
6806 different from what they were when calculating the need for
6807 spills. If we notice an input-reload insn here, we will
6808 reject it below, but it might hide a usable equivalent.
6809 That makes bad code. It may even fail: perhaps no reg was
6810 spilled for this insn because it was assumed we would find
6811 that equivalent. */
6812 || INSN_UID (p) < reload_first_uid))
6813 {
6814 rtx tem;
6815 pat = single_set (p);
6816
6817 /* First check for something that sets some reg equal to GOAL. */
6818 if (pat != 0
6819 && ((regno >= 0
6820 && true_regnum (SET_SRC (pat)) == regno
6821 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6822 ||
6823 (regno >= 0
6824 && true_regnum (SET_DEST (pat)) == regno
6825 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6826 ||
6827 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6828 /* When looking for stack pointer + const,
6829 make sure we don't use a stack adjust. */
6830 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6831 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6832 || (goal_mem
6833 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6834 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6835 || (goal_mem
6836 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6837 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6838 /* If we are looking for a constant,
6839 and something equivalent to that constant was copied
6840 into a reg, we can use that reg. */
6841 || (goal_const && REG_NOTES (p) != 0
6842 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6843 && ((rtx_equal_p (XEXP (tem, 0), goal)
6844 && (valueno
6845 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6846 || (REG_P (SET_DEST (pat))
6847 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6848 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6849 && CONST_INT_P (goal)
6850 && 0 != (goaltry
6851 = operand_subword (XEXP (tem, 0), 0, 0,
6852 VOIDmode))
6853 && rtx_equal_p (goal, goaltry)
6854 && (valtry
6855 = operand_subword (SET_DEST (pat), 0, 0,
6856 VOIDmode))
6857 && (valueno = true_regnum (valtry)) >= 0)))
6858 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6859 NULL_RTX))
6860 && REG_P (SET_DEST (pat))
6861 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6862 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6863 && CONST_INT_P (goal)
6864 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6865 VOIDmode))
6866 && rtx_equal_p (goal, goaltry)
6867 && (valtry
6868 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6869 && (valueno = true_regnum (valtry)) >= 0)))
6870 {
6871 if (other >= 0)
6872 {
6873 if (valueno != other)
6874 continue;
6875 }
6876 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6877 continue;
6878 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6879 mode, valueno))
6880 continue;
6881 value = valtry;
6882 where = p;
6883 break;
6884 }
6885 }
6886 }
6887
6888 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6889 (or copying VALUE into GOAL, if GOAL is also a register).
6890 Now verify that VALUE is really valid. */
6891
6892 /* VALUENO is the register number of VALUE; a hard register. */
6893
6894 /* Don't try to re-use something that is killed in this insn. We want
6895 to be able to trust REG_UNUSED notes. */
6896 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6897 return 0;
6898
6899 /* If we propose to get the value from the stack pointer or if GOAL is
6900 a MEM based on the stack pointer, we need a stable SP. */
6901 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6902 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6903 goal)))
6904 need_stable_sp = 1;
6905
6906 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6907 if (GET_MODE (value) != mode)
6908 return 0;
6909
6910 /* Reject VALUE if it was loaded from GOAL
6911 and is also a register that appears in the address of GOAL. */
6912
6913 if (goal_mem && value == SET_DEST (single_set (where))
6914 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6915 goal, (rtx*) 0))
6916 return 0;
6917
6918 /* Reject registers that overlap GOAL. */
6919
6920 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6921 nregs = hard_regno_nregs[regno][mode];
6922 else
6923 nregs = 1;
6924 valuenregs = hard_regno_nregs[valueno][mode];
6925
6926 if (!goal_mem && !goal_const
6927 && regno + nregs > valueno && regno < valueno + valuenregs)
6928 return 0;
6929
6930 /* Reject VALUE if it is one of the regs reserved for reloads.
6931 Reload1 knows how to reuse them anyway, and it would get
6932 confused if we allocated one without its knowledge.
6933 (Now that insns introduced by reload are ignored above,
6934 this case shouldn't happen, but I'm not positive.) */
6935
6936 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6937 {
6938 int i;
6939 for (i = 0; i < valuenregs; ++i)
6940 if (reload_reg_p[valueno + i] >= 0)
6941 return 0;
6942 }
6943
6944 /* Reject VALUE if it is a register being used for an input reload
6945 even if it is not one of those reserved. */
6946
6947 if (reload_reg_p != 0)
6948 {
6949 int i;
6950 for (i = 0; i < n_reloads; i++)
6951 if (rld[i].reg_rtx != 0 && rld[i].in)
6952 {
6953 int regno1 = REGNO (rld[i].reg_rtx);
6954 int nregs1 = hard_regno_nregs[regno1]
6955 [GET_MODE (rld[i].reg_rtx)];
6956 if (regno1 < valueno + valuenregs
6957 && regno1 + nregs1 > valueno)
6958 return 0;
6959 }
6960 }
6961
6962 if (goal_mem)
6963 /* We must treat frame pointer as varying here,
6964 since it can vary--in a nonlocal goto as generated by expand_goto. */
6965 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6966
6967 /* Now verify that the values of GOAL and VALUE remain unaltered
6968 until INSN is reached. */
6969
6970 p = insn;
6971 while (1)
6972 {
6973 p = PREV_INSN (p);
6974 if (p == where)
6975 return value;
6976
6977 /* Don't trust the conversion past a function call
6978 if either of the two is in a call-clobbered register or in memory. */
6979 if (CALL_P (p))
6980 {
6981 int i;
6982
6983 if (goal_mem || need_stable_sp)
6984 return 0;
6985
6986 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6987 for (i = 0; i < nregs; ++i)
6988 if (call_used_regs[regno + i]
6989 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6990 return 0;
6991
6992 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6993 for (i = 0; i < valuenregs; ++i)
6994 if (call_used_regs[valueno + i]
6995 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6996 return 0;
6997 }
6998
6999 if (INSN_P (p))
7000 {
7001 pat = PATTERN (p);
7002
7003 /* Watch out for unspec_volatile and volatile asms. */
7004 if (volatile_insn_p (pat))
7005 return 0;
7006
7007 /* If this insn P stores in either GOAL or VALUE, return 0.
7008 If GOAL is a memory ref and this insn writes memory, return 0.
7009 If GOAL is a memory ref and its address is not constant,
7010 and this insn P changes a register used in GOAL, return 0. */
7011
7012 if (GET_CODE (pat) == COND_EXEC)
7013 pat = COND_EXEC_CODE (pat);
7014 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7015 {
7016 rtx dest = SET_DEST (pat);
7017 while (GET_CODE (dest) == SUBREG
7018 || GET_CODE (dest) == ZERO_EXTRACT
7019 || GET_CODE (dest) == STRICT_LOW_PART)
7020 dest = XEXP (dest, 0);
7021 if (REG_P (dest))
7022 {
7023 int xregno = REGNO (dest);
7024 int xnregs;
7025 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7026 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7027 else
7028 xnregs = 1;
7029 if (xregno < regno + nregs && xregno + xnregs > regno)
7030 return 0;
7031 if (xregno < valueno + valuenregs
7032 && xregno + xnregs > valueno)
7033 return 0;
7034 if (goal_mem_addr_varies
7035 && reg_overlap_mentioned_for_reload_p (dest, goal))
7036 return 0;
7037 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7038 return 0;
7039 }
7040 else if (goal_mem && MEM_P (dest)
7041 && ! push_operand (dest, GET_MODE (dest)))
7042 return 0;
7043 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7044 && reg_equiv_memory_loc[regno] != 0)
7045 return 0;
7046 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7047 return 0;
7048 }
7049 else if (GET_CODE (pat) == PARALLEL)
7050 {
7051 int i;
7052 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7053 {
7054 rtx v1 = XVECEXP (pat, 0, i);
7055 if (GET_CODE (v1) == COND_EXEC)
7056 v1 = COND_EXEC_CODE (v1);
7057 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7058 {
7059 rtx dest = SET_DEST (v1);
7060 while (GET_CODE (dest) == SUBREG
7061 || GET_CODE (dest) == ZERO_EXTRACT
7062 || GET_CODE (dest) == STRICT_LOW_PART)
7063 dest = XEXP (dest, 0);
7064 if (REG_P (dest))
7065 {
7066 int xregno = REGNO (dest);
7067 int xnregs;
7068 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7069 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7070 else
7071 xnregs = 1;
7072 if (xregno < regno + nregs
7073 && xregno + xnregs > regno)
7074 return 0;
7075 if (xregno < valueno + valuenregs
7076 && xregno + xnregs > valueno)
7077 return 0;
7078 if (goal_mem_addr_varies
7079 && reg_overlap_mentioned_for_reload_p (dest,
7080 goal))
7081 return 0;
7082 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7083 return 0;
7084 }
7085 else if (goal_mem && MEM_P (dest)
7086 && ! push_operand (dest, GET_MODE (dest)))
7087 return 0;
7088 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7089 && reg_equiv_memory_loc[regno] != 0)
7090 return 0;
7091 else if (need_stable_sp
7092 && push_operand (dest, GET_MODE (dest)))
7093 return 0;
7094 }
7095 }
7096 }
7097
7098 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7099 {
7100 rtx link;
7101
7102 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7103 link = XEXP (link, 1))
7104 {
7105 pat = XEXP (link, 0);
7106 if (GET_CODE (pat) == CLOBBER)
7107 {
7108 rtx dest = SET_DEST (pat);
7109
7110 if (REG_P (dest))
7111 {
7112 int xregno = REGNO (dest);
7113 int xnregs
7114 = hard_regno_nregs[xregno][GET_MODE (dest)];
7115
7116 if (xregno < regno + nregs
7117 && xregno + xnregs > regno)
7118 return 0;
7119 else if (xregno < valueno + valuenregs
7120 && xregno + xnregs > valueno)
7121 return 0;
7122 else if (goal_mem_addr_varies
7123 && reg_overlap_mentioned_for_reload_p (dest,
7124 goal))
7125 return 0;
7126 }
7127
7128 else if (goal_mem && MEM_P (dest)
7129 && ! push_operand (dest, GET_MODE (dest)))
7130 return 0;
7131 else if (need_stable_sp
7132 && push_operand (dest, GET_MODE (dest)))
7133 return 0;
7134 }
7135 }
7136 }
7137
7138 #ifdef AUTO_INC_DEC
7139 /* If this insn auto-increments or auto-decrements
7140 either regno or valueno, return 0 now.
7141 If GOAL is a memory ref and its address is not constant,
7142 and this insn P increments a register used in GOAL, return 0. */
7143 {
7144 rtx link;
7145
7146 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7147 if (REG_NOTE_KIND (link) == REG_INC
7148 && REG_P (XEXP (link, 0)))
7149 {
7150 int incno = REGNO (XEXP (link, 0));
7151 if (incno < regno + nregs && incno >= regno)
7152 return 0;
7153 if (incno < valueno + valuenregs && incno >= valueno)
7154 return 0;
7155 if (goal_mem_addr_varies
7156 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7157 goal))
7158 return 0;
7159 }
7160 }
7161 #endif
7162 }
7163 }
7164 }
7165 \f
7166 /* Find a place where INCED appears in an increment or decrement operator
7167 within X, and return the amount INCED is incremented or decremented by.
7168 The value is always positive. */
7169
7170 static int
7171 find_inc_amount (rtx x, rtx inced)
7172 {
7173 enum rtx_code code = GET_CODE (x);
7174 const char *fmt;
7175 int i;
7176
7177 if (code == MEM)
7178 {
7179 rtx addr = XEXP (x, 0);
7180 if ((GET_CODE (addr) == PRE_DEC
7181 || GET_CODE (addr) == POST_DEC
7182 || GET_CODE (addr) == PRE_INC
7183 || GET_CODE (addr) == POST_INC)
7184 && XEXP (addr, 0) == inced)
7185 return GET_MODE_SIZE (GET_MODE (x));
7186 else if ((GET_CODE (addr) == PRE_MODIFY
7187 || GET_CODE (addr) == POST_MODIFY)
7188 && GET_CODE (XEXP (addr, 1)) == PLUS
7189 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7190 && XEXP (addr, 0) == inced
7191 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7192 {
7193 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7194 return i < 0 ? -i : i;
7195 }
7196 }
7197
7198 fmt = GET_RTX_FORMAT (code);
7199 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7200 {
7201 if (fmt[i] == 'e')
7202 {
7203 int tem = find_inc_amount (XEXP (x, i), inced);
7204 if (tem != 0)
7205 return tem;
7206 }
7207 if (fmt[i] == 'E')
7208 {
7209 int j;
7210 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7211 {
7212 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7213 if (tem != 0)
7214 return tem;
7215 }
7216 }
7217 }
7218
7219 return 0;
7220 }
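/* Two worked examples (hard register numbers invented for illustration):
   with INCED = (reg:SI 4),

	X = (set (mem:SI (post_inc:SI (reg:SI 4))) (reg:SI 0))

   returns GET_MODE_SIZE (SImode), i.e. 4 on a typical 32-bit target, while

	X = (mem:SI (pre_modify:SI (reg:SI 4)
				   (plus:SI (reg:SI 4) (const_int -16))))

   returns 16, the absolute value of the constant adjustment.  */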
7221 \f
7222 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7223 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7224
7225 #ifdef AUTO_INC_DEC
7226 static int
7227 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7228 rtx insn)
7229 {
7230 rtx link;
7231
7232 gcc_assert (insn);
7233
7234 if (! INSN_P (insn))
7235 return 0;
7236
7237 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7238 if (REG_NOTE_KIND (link) == REG_INC)
7239 {
7240 unsigned int test = (int) REGNO (XEXP (link, 0));
7241 if (test >= regno && test < endregno)
7242 return 1;
7243 }
7244 return 0;
7245 }
7246 #else
7247
7248 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7249
7250 #endif
7251
7252 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7253 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7254 REG_INC. REGNO must refer to a hard register. */
7255
7256 int
7257 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7258 int sets)
7259 {
7260 unsigned int nregs, endregno;
7261
7262 /* regno must be a hard register. */
7263 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7264
7265 nregs = hard_regno_nregs[regno][mode];
7266 endregno = regno + nregs;
7267
7268 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7269 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7270 && REG_P (XEXP (PATTERN (insn), 0)))
7271 {
7272 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7273
7274 return test >= regno && test < endregno;
7275 }
7276
7277 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7278 return 1;
7279
7280 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7281 {
7282 int i = XVECLEN (PATTERN (insn), 0) - 1;
7283
7284 for (; i >= 0; i--)
7285 {
7286 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7287 if ((GET_CODE (elt) == CLOBBER
7288 || (sets == 1 && GET_CODE (elt) == SET))
7289 && REG_P (XEXP (elt, 0)))
7290 {
7291 unsigned int test = REGNO (XEXP (elt, 0));
7292
7293 if (test >= regno && test < endregno)
7294 return 1;
7295 }
7296 if (sets == 2
7297 && reg_inc_found_and_valid_p (regno, endregno, elt))
7298 return 1;
7299 }
7300 }
7301
7302 return 0;
7303 }
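/* A small worked example (hard register numbers invented, and SImode is
   assumed to occupy a single hard register): for an insn whose pattern is

	(parallel [(set (reg:SI 0) (reg:SI 1))
		   (clobber (reg:SI 2))])

   regno_clobbered_p (2, insn, SImode, 0) returns 1 because of the CLOBBER,
   while regno_clobbered_p (1, insn, SImode, 0) returns 0, since register 1
   is only used as a source, never clobbered or set.  */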
7304
7305 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7306 rtx
7307 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7308 {
7309 int regno;
7310
7311 if (GET_MODE (reloadreg) == mode)
7312 return reloadreg;
7313
7314 regno = REGNO (reloadreg);
7315
7316 if (WORDS_BIG_ENDIAN)
7317 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7318 - (int) hard_regno_nregs[regno][mode];
7319
7320 return gen_rtx_REG (mode, regno);
7321 }
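/* Example (register numbering invented): on a 32-bit WORDS_BIG_ENDIAN
   target where (reg:DI 10) occupies hard registers 10 and 11, the SImode
   low part lives in the second, higher-numbered register, so

	reload_adjust_reg_for_mode (gen_rtx_REG (DImode, 10), SImode)

   returns (reg:SI 11); on a little-endian target it returns (reg:SI 10).  */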
7322
7323 static const char *const reload_when_needed_name[] =
7324 {
7325 "RELOAD_FOR_INPUT",
7326 "RELOAD_FOR_OUTPUT",
7327 "RELOAD_FOR_INSN",
7328 "RELOAD_FOR_INPUT_ADDRESS",
7329 "RELOAD_FOR_INPADDR_ADDRESS",
7330 "RELOAD_FOR_OUTPUT_ADDRESS",
7331 "RELOAD_FOR_OUTADDR_ADDRESS",
7332 "RELOAD_FOR_OPERAND_ADDRESS",
7333 "RELOAD_FOR_OPADDR_ADDR",
7334 "RELOAD_OTHER",
7335 "RELOAD_FOR_OTHER_ADDRESS"
7336 };
7337
7338 /* These functions are used to print the variables set by 'find_reloads'. */
7339
7340 DEBUG_FUNCTION void
7341 debug_reload_to_stream (FILE *f)
7342 {
7343 int r;
7344 const char *prefix;
7345
7346 if (! f)
7347 f = stderr;
7348 for (r = 0; r < n_reloads; r++)
7349 {
7350 fprintf (f, "Reload %d: ", r);
7351
7352 if (rld[r].in != 0)
7353 {
7354 fprintf (f, "reload_in (%s) = ",
7355 GET_MODE_NAME (rld[r].inmode));
7356 print_inline_rtx (f, rld[r].in, 24);
7357 fprintf (f, "\n\t");
7358 }
7359
7360 if (rld[r].out != 0)
7361 {
7362 fprintf (f, "reload_out (%s) = ",
7363 GET_MODE_NAME (rld[r].outmode));
7364 print_inline_rtx (f, rld[r].out, 24);
7365 fprintf (f, "\n\t");
7366 }
7367
7368 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7369
7370 fprintf (f, "%s (opnum = %d)",
7371 reload_when_needed_name[(int) rld[r].when_needed],
7372 rld[r].opnum);
7373
7374 if (rld[r].optional)
7375 fprintf (f, ", optional");
7376
7377 if (rld[r].nongroup)
7378 fprintf (f, ", nongroup");
7379
7380 if (rld[r].inc != 0)
7381 fprintf (f, ", inc by %d", rld[r].inc);
7382
7383 if (rld[r].nocombine)
7384 fprintf (f, ", can't combine");
7385
7386 if (rld[r].secondary_p)
7387 fprintf (f, ", secondary_reload_p");
7388
7389 if (rld[r].in_reg != 0)
7390 {
7391 fprintf (f, "\n\treload_in_reg: ");
7392 print_inline_rtx (f, rld[r].in_reg, 24);
7393 }
7394
7395 if (rld[r].out_reg != 0)
7396 {
7397 fprintf (f, "\n\treload_out_reg: ");
7398 print_inline_rtx (f, rld[r].out_reg, 24);
7399 }
7400
7401 if (rld[r].reg_rtx != 0)
7402 {
7403 fprintf (f, "\n\treload_reg_rtx: ");
7404 print_inline_rtx (f, rld[r].reg_rtx, 24);
7405 }
7406
7407 prefix = "\n\t";
7408 if (rld[r].secondary_in_reload != -1)
7409 {
7410 fprintf (f, "%ssecondary_in_reload = %d",
7411 prefix, rld[r].secondary_in_reload);
7412 prefix = ", ";
7413 }
7414
7415 if (rld[r].secondary_out_reload != -1)
7416 fprintf (f, "%ssecondary_out_reload = %d\n",
7417 prefix, rld[r].secondary_out_reload);
7418
7419 prefix = "\n\t";
7420 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7421 {
7422 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7423 insn_data[rld[r].secondary_in_icode].name);
7424 prefix = ", ";
7425 }
7426
7427 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7428 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7429 insn_data[rld[r].secondary_out_icode].name);
7430
7431 fprintf (f, "\n");
7432 }
7433 }
7434
7435 DEBUG_FUNCTION void
7436 debug_reload (void)
7437 {
7438 debug_reload_to_stream (stderr);
7439 }
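/* Both entry points above are intended for interactive use, e.g. from a
   debugger session after find_reloads has run on an insn:

	(gdb) call debug_reload ()

   which writes one "Reload N: ..." block per element of rld[] to stderr.
   debug_reload_to_stream can be handed any open FILE *, for example a dump
   file, to capture the same text.  */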