1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
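
/* Illustrative only, not part of the original sources: a minimal sketch of
   the calling protocol described above, assuming the usual reload1.c driver
   and the entry points declared in reload.h (argument lists abbreviated):

	init_reload ();
	for each INSN in the function:
	  find_reloads (INSN, ...);
	  ... choose a hard reg for each entry in rld[], set rld[r].reg_rtx,
	      and emit the needed load insns before INSN (and store insns
	      after it) ...
	  subst_reloads (INSN);  */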
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "function.h"
108 #include "params.h"
109 #include "target.h"
110 #include "ira.h"
111
112 /* True if X is a constant that can be forced into the constant pool.
113 MODE is the mode of the operand, or VOIDmode if not known. */
114 #define CONST_POOL_OK_P(MODE, X) \
115 ((MODE) != VOIDmode \
116 && CONSTANT_P (X) \
117 && GET_CODE (X) != HIGH \
118 && !targetm.cannot_force_const_mem (MODE, X))
119
120 /* True if RCLASS is a non-empty register class that has too few registers
121 to be safely used as a reload target class. */
122
123 static inline bool
124 small_register_class_p (reg_class_t rclass)
125 {
126 return (reg_class_size [(int) rclass] == 1
127 || (reg_class_size [(int) rclass] >= 1
128 && targetm.class_likely_spilled_p (rclass)));
129 }
130
131 \f
132 /* All reloads of the current insn are recorded here. See reload.h for
133 comments. */
134 int n_reloads;
135 struct reload rld[MAX_RELOADS];
136
137 /* All the "earlyclobber" operands of the current insn
138 are recorded here. */
139 int n_earlyclobbers;
140 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
141
142 int reload_n_operands;
143
144 /* Replacing reloads.
145
146 If `replace_reloads' is nonzero, then as each reload is recorded
147 an entry is made for it in the table `replacements'.
148 Then later `subst_reloads' can look through that table and
149 perform all the replacements needed. */
150
151 /* Nonzero means record the places to replace. */
152 static int replace_reloads;
153
154 /* Each replacement is recorded with a structure like this. */
155 struct replacement
156 {
157 rtx *where; /* Location to store in. */
158 int what; /* Which reload this is for. */
159 enum machine_mode mode; /* Mode it must have. */
160 };
161
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
163
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
166
167 /* Used to track what is modified by an operand. */
168 struct decomposition
169 {
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
175 };
176
177 #ifdef SECONDARY_MEMORY_NEEDED
178
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
181
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that we reload
185 each one separately. */
186
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
191
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx this_insn;
195
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
198
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
203
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
209
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
212
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can differ from the reload-number for the input operand. */
215 static int output_reloadnum;
216
217 /* Compare two RTX's. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
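
/* Illustrative only, not part of the original file: two distinct rtxes for
   the same register, e.g. two copies of (reg:SI 3), are considered to match
   even though they are not pointer-equal, while a memory reference with a
   side effect such as (mem:SI (post_inc:SI (reg:SI 3))) never matches a
   separate copy of itself, because side_effects_p is nonzero for it. */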
222
223 /* Indicates if two reload purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
233
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
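
/* Illustrative only, not part of the original file: with the two macros
   above, two input reloads for different operands merge and keep their type,
   while anything merged with a RELOAD_OTHER reload becomes RELOAD_OTHER:

     MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  -> nonzero
     MERGE_TO_OTHER   (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)  -> zero

     MERGABLE_RELOADS (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 0, 0)     -> nonzero
     MERGE_TO_OTHER   (RELOAD_OTHER, RELOAD_FOR_OUTPUT, 0, 0)     -> nonzero  */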
241
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
250
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, reg_class_t, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx);
274 static rtx subst_reg_equivs (rtx, rtx);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx, int, int);
277 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type, int, rtx);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
284 int, rtx, int *);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
290
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
293
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
296 {
297 rtx it;
298
299 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
302
303 reg_equiv_alt_mem_list (regno)
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list (regno));
306 }
307 \f
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
312
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
316
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
322 {
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 char letter;
332 secondary_reload_info sri;
333
334 if (type == RELOAD_FOR_INPUT_ADDRESS
335 || type == RELOAD_FOR_OUTPUT_ADDRESS
336 || type == RELOAD_FOR_INPADDR_ADDRESS
337 || type == RELOAD_FOR_OUTADDR_ADDRESS)
338 secondary_type = type;
339 else
340 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341
342 *picode = CODE_FOR_nothing;
343
344 /* If X is a paradoxical SUBREG, use the inner value to determine both the
345 mode and object being reloaded. */
346 if (paradoxical_subreg_p (x))
347 {
348 x = SUBREG_REG (x);
349 reload_mode = GET_MODE (x);
350 }
351
352 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
353 is still a pseudo-register by now, it *must* have an equivalent MEM
354 but we don't want to assume that), use that equivalent when seeing if
355 a secondary reload is needed since whether or not a reload is needed
356 might be sensitive to the form of the MEM. */
357
358 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
359 && reg_equiv_mem (REGNO (x)))
360 x = reg_equiv_mem (REGNO (x));
361
362 sri.icode = CODE_FOR_nothing;
363 sri.prev_sri = prev_sri;
364 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
365 reload_mode, &sri);
366 icode = (enum insn_code) sri.icode;
367
368 /* If we don't need any secondary registers, done. */
369 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
371
372 if (rclass != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
374 reload_mode, type, &t_icode, &sri);
375
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
378
379 if (icode != CODE_FOR_nothing)
380 {
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
385
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
390
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (rclass == NO_REGS);
396
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : (reg_class_for_constraint
405 (lookup_constraint (scratch_constraint))));
406
407 rclass = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
409 }
410
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
416
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from class. If you have such a case, you
419 cannot use secondary reloads; you must work around the problem some
420 other way.
421
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
424
425 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
427
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
432 || reg_class_subset_p (rld[s_reload].rclass, rclass))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (small_register_class_p (rclass)
440 || targetm.small_register_classes_for_mode_p (VOIDmode))
441 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
442 opnum, rld[s_reload].opnum))
443 {
444 if (in_p)
445 rld[s_reload].inmode = mode;
446 if (! in_p)
447 rld[s_reload].outmode = mode;
448
449 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
450 rld[s_reload].rclass = rclass;
451
452 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
453 rld[s_reload].optional &= optional;
454 rld[s_reload].secondary_p = 1;
455 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
456 opnum, rld[s_reload].opnum))
457 rld[s_reload].when_needed = RELOAD_OTHER;
458
459 break;
460 }
461
462 if (s_reload == n_reloads)
463 {
464 #ifdef SECONDARY_MEMORY_NEEDED
465 /* If we need a memory location to copy between the two reload regs,
466 set it up now. Note that we do the input case before making
467 the reload and the output case after. This is due to the
468 way reloads are output. */
469
470 if (in_p && icode == CODE_FOR_nothing
471 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
472 {
473 get_secondary_mem (x, reload_mode, opnum, type);
474
475 /* We may have just added new reloads. Make sure we add
476 the new reload at the end. */
477 s_reload = n_reloads;
478 }
479 #endif
480
481 /* We need to make a new secondary reload for this register class. */
482 rld[s_reload].in = rld[s_reload].out = 0;
483 rld[s_reload].rclass = rclass;
484
485 rld[s_reload].inmode = in_p ? mode : VOIDmode;
486 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
487 rld[s_reload].reg_rtx = 0;
488 rld[s_reload].optional = optional;
489 rld[s_reload].inc = 0;
490 /* Maybe we could combine these, but it seems too tricky. */
491 rld[s_reload].nocombine = 1;
492 rld[s_reload].in_reg = 0;
493 rld[s_reload].out_reg = 0;
494 rld[s_reload].opnum = opnum;
495 rld[s_reload].when_needed = secondary_type;
496 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
497 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
498 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
499 rld[s_reload].secondary_out_icode
500 = ! in_p ? t_icode : CODE_FOR_nothing;
501 rld[s_reload].secondary_p = 1;
502
503 n_reloads++;
504
505 #ifdef SECONDARY_MEMORY_NEEDED
506 if (! in_p && icode == CODE_FOR_nothing
507 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
508 get_secondary_mem (x, mode, opnum, type);
509 #endif
510 }
511
512 *picode = icode;
513 return s_reload;
514 }
515
516 /* If a secondary reload is needed, return its class. If both an intermediate
517 register and a scratch register are needed, we return the class of the
518 intermediate register. */
519 reg_class_t
520 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
521 rtx x)
522 {
523 enum insn_code icode;
524 secondary_reload_info sri;
525
526 sri.icode = CODE_FOR_nothing;
527 sri.prev_sri = NULL;
528 rclass
529 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
530 icode = (enum insn_code) sri.icode;
531
532 /* If there are no secondary reloads at all, we return NO_REGS.
533 If an intermediate register is needed, we return its class. */
534 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
535 return rclass;
536
537 /* No intermediate register is needed, but we have a special reload
538 pattern, which we assume for now needs a scratch register. */
539 return scratch_reload_class (icode);
540 }
541
542 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
543 three operands, verify that operand 2 is an output operand, and return
544 its register class.
545 ??? We'd like to be able to handle any pattern with at least 2 operands,
546 for zero or more scratch registers, but that needs more infrastructure. */
547 enum reg_class
548 scratch_reload_class (enum insn_code icode)
549 {
550 const char *scratch_constraint;
551 char scratch_letter;
552 enum reg_class rclass;
553
554 gcc_assert (insn_data[(int) icode].n_operands == 3);
555 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
556 gcc_assert (*scratch_constraint == '=');
557 scratch_constraint++;
558 if (*scratch_constraint == '&')
559 scratch_constraint++;
560 scratch_letter = *scratch_constraint;
561 if (scratch_letter == 'r')
562 return GENERAL_REGS;
563 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
564 gcc_assert (rclass != NO_REGS);
565 return rclass;
566 }
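
/* Illustrative only, not part of the original file: the kind of reload
   pattern this function inspects looks, in a machine description, something
   like the following (the pattern name, modes and constraints here are
   hypothetical):

     (define_expand "reload_insi"
       [(set (match_operand:SI 0 "register_operand" "=d")
	     (match_operand:SI 1 "memory_operand" "m"))
	(clobber (match_operand:SI 2 "register_operand" "=&a"))]
       ""
       "")

   Operand 2 is the scratch; its constraint ("=&a" above) determines the
   class that scratch_reload_class returns. */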
567 \f
568 #ifdef SECONDARY_MEMORY_NEEDED
569
570 /* Return a memory location that will be used to copy X in mode MODE.
571 If we haven't already made a location for this mode in this insn,
572 call find_reloads_address on the location being returned. */
573
574 rtx
575 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
576 int opnum, enum reload_type type)
577 {
578 rtx loc;
579 int mem_valid;
580
581 /* By default, if MODE is narrower than a word, widen it to a word.
582 This is required because most machines that require these memory
583 locations do not support short loads and stores from all registers
584 (e.g., FP registers). */
585
586 #ifdef SECONDARY_MEMORY_NEEDED_MODE
587 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
588 #else
589 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
590 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
591 #endif
592
593 /* If we already have made a MEM for this operand in MODE, return it. */
594 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
595 return secondary_memlocs_elim[(int) mode][opnum];
596
597 /* If this is the first time we've tried to get a MEM for this mode,
598 allocate a new one. `something_changed' in reload will get set
599 by noticing that the frame size has changed. */
600
601 if (secondary_memlocs[(int) mode] == 0)
602 {
603 #ifdef SECONDARY_MEMORY_NEEDED_RTX
604 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
605 #else
606 secondary_memlocs[(int) mode]
607 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
608 #endif
609 }
610
611 /* Get a version of the address doing any eliminations needed. If that
612 didn't give us a new MEM, make a new one if it isn't valid. */
613
614 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
615 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
616 MEM_ADDR_SPACE (loc));
617
618 if (! mem_valid && loc == secondary_memlocs[(int) mode])
619 loc = copy_rtx (loc);
620
621 /* The only time the call below will do anything is if the stack
622 offset is too large. In that case IND_LEVELS doesn't matter, so we
623 can just pass a zero. Adjust the type to be the address of the
624 corresponding object. If the address was valid, save the eliminated
625 address. If it wasn't valid, we need to make a reload each time, so
626 don't save it. */
627
628 if (! mem_valid)
629 {
630 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
631 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
632 : RELOAD_OTHER);
633
634 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
635 opnum, type, 0, 0);
636 }
637
638 secondary_memlocs_elim[(int) mode][opnum] = loc;
639 if (secondary_memlocs_elim_used <= (int)mode)
640 secondary_memlocs_elim_used = (int)mode + 1;
641 return loc;
642 }
643
644 /* Clear any secondary memory locations we've made. */
645
646 void
647 clear_secondary_mem (void)
648 {
649 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
650 }
651 #endif /* SECONDARY_MEMORY_NEEDED */
652 \f
653
654 /* Find the largest class which has at least one register valid in
655 mode INNER, and which for every such register, that register number
656 plus N is also valid in OUTER (if in range) and is cheap to move
657 into DEST_REGNO. Such a class must exist. */
658
659 static enum reg_class
660 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
661 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
662 unsigned int dest_regno ATTRIBUTE_UNUSED)
663 {
664 int best_cost = -1;
665 int rclass;
666 int regno;
667 enum reg_class best_class = NO_REGS;
668 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
669 unsigned int best_size = 0;
670 int cost;
671
672 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
673 {
674 int bad = 0;
675 int good = 0;
676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
677 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
678 {
679 if (HARD_REGNO_MODE_OK (regno, inner))
680 {
681 good = 1;
682 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
683 && ! HARD_REGNO_MODE_OK (regno + n, outer))
684 bad = 1;
685 }
686 }
687
688 if (bad || !good)
689 continue;
690 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
691
692 if ((reg_class_size[rclass] > best_size
693 && (best_cost < 0 || best_cost >= cost))
694 || best_cost > cost)
695 {
696 best_class = (enum reg_class) rclass;
697 best_size = reg_class_size[rclass];
698 best_cost = register_move_cost (outer, (enum reg_class) rclass,
699 dest_class);
700 }
701 }
702
703 gcc_assert (best_size != 0);
704
705 return best_class;
706 }
707
708 /* We are trying to reload a subreg of something that is not a register.
709 Find the largest class which contains only registers valid in
710 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
711 which we would eventually like to obtain the object. */
712
713 static enum reg_class
714 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
715 enum machine_mode mode ATTRIBUTE_UNUSED,
716 enum reg_class dest_class ATTRIBUTE_UNUSED)
717 {
718 int best_cost = -1;
719 int rclass;
720 int regno;
721 enum reg_class best_class = NO_REGS;
722 unsigned int best_size = 0;
723 int cost;
724
725 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
726 {
727 int bad = 0;
728 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
729 {
730 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
731 && !HARD_REGNO_MODE_OK (regno, mode))
732 bad = 1;
733 }
734
735 if (bad)
736 continue;
737
738 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
739
740 if ((reg_class_size[rclass] > best_size
741 && (best_cost < 0 || best_cost >= cost))
742 || best_cost > cost)
743 {
744 best_class = (enum reg_class) rclass;
745 best_size = reg_class_size[rclass];
746 best_cost = register_move_cost (outer, (enum reg_class) rclass,
747 dest_class);
748 }
749 }
750
751 gcc_assert (best_size != 0);
752
753 #ifdef LIMIT_RELOAD_CLASS
754 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
755 #endif
756 return best_class;
757 }
758 \f
759 /* Return the number of a previously made reload that can be combined with
760 a new one, or n_reloads if none of the existing reloads can be used.
761 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
762 push_reload; they determine the kind of the new reload that we try to
763 combine. P_IN points to the corresponding value of IN, which can be
764 modified by this function.
765 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
766
767 static int
768 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
769 enum reload_type type, int opnum, int dont_share)
770 {
771 rtx in = *p_in;
772 int i;
773 /* We can't merge two reloads if the output of either one is
774 earlyclobbered. */
775
776 if (earlyclobber_operand_p (out))
777 return n_reloads;
778
779 /* We can use an existing reload if the class is right
780 and at least one of IN and OUT is a match
781 and the other is at worst neutral.
782 (A zero compared against anything is neutral.)
783
784 For targets with small register classes, don't use existing reloads
785 unless they are for the same thing since that can cause us to need
786 more reload registers than we otherwise would. */
787
788 for (i = 0; i < n_reloads; i++)
789 if ((reg_class_subset_p (rclass, rld[i].rclass)
790 || reg_class_subset_p (rld[i].rclass, rclass))
791 /* If the existing reload has a register, it must fit our class. */
792 && (rld[i].reg_rtx == 0
793 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
794 true_regnum (rld[i].reg_rtx)))
795 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
796 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
797 || (out != 0 && MATCHES (rld[i].out, out)
798 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
803 return i;
804
805 /* Reloading a plain reg for input can match a reload to postincrement
806 that reg, since the postincrement's value is the right value.
807 Likewise, it can match a preincrement reload, since we regard
808 the preincrementation as happening before any ref in this insn
809 to that register. */
810 for (i = 0; i < n_reloads; i++)
811 if ((reg_class_subset_p (rclass, rld[i].rclass)
812 || reg_class_subset_p (rld[i].rclass, rclass))
813 /* If the existing reload has a register, it must fit our
814 class. */
815 && (rld[i].reg_rtx == 0
816 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
817 true_regnum (rld[i].reg_rtx)))
818 && out == 0 && rld[i].out == 0 && rld[i].in != 0
819 && ((REG_P (in)
820 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
821 && MATCHES (XEXP (rld[i].in, 0), in))
822 || (REG_P (rld[i].in)
823 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
824 && MATCHES (XEXP (in, 0), rld[i].in)))
825 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
826 && (small_register_class_p (rclass)
827 || targetm.small_register_classes_for_mode_p (VOIDmode))
828 && MERGABLE_RELOADS (type, rld[i].when_needed,
829 opnum, rld[i].opnum))
830 {
831 /* Make sure reload_in ultimately has the increment,
832 not the plain register. */
833 if (REG_P (in))
834 *p_in = rld[i].in;
835 return i;
836 }
837 return n_reloads;
838 }
839
840 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
841 expression. MODE is the mode that X will be used in. OUTPUT is true if
842 the function is invoked for the output part of an enclosing reload. */
843
844 static bool
845 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
846 {
847 rtx inner;
848
849 /* Only SUBREGs are problematical. */
850 if (GET_CODE (x) != SUBREG)
851 return false;
852
853 inner = SUBREG_REG (x);
854
855 /* If INNER is a constant or PLUS, then INNER will need reloading. */
856 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
857 return true;
858
859 /* If INNER is not a hard register, then INNER will not need reloading. */
860 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
861 return false;
862
863 /* If INNER is not ok for MODE, then INNER will need reloading. */
864 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
865 return true;
866
867 /* If this is for an output, and the outer part is a word or smaller,
868 INNER is larger than a word and the number of registers in INNER is
869 not the same as the number of words in INNER, then INNER will need
870 reloading (with an in-out reload). */
871 return (output
872 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
873 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
874 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
875 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
876 }
877
878 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
879 requiring an extra reload register. The caller has already found that
880 IN contains some reference to REGNO, so check that we can produce the
881 new value in a single step. E.g. if we have
882 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
883 instruction that adds one to a register, this should succeed.
884 However, if we have something like
885 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
886 needs to be loaded into a register first, we need a separate reload
887 register.
888 Such PLUS reloads are generated by find_reloads_address_part.
889 The out-of-range PLUS expressions are usually introduced in the instruction
890 patterns by register elimination and substituting pseudos without a home
891 by their function-invariant equivalences. */
892 static int
893 can_reload_into (rtx in, int regno, enum machine_mode mode)
894 {
895 rtx dst, test_insn;
896 int r = 0;
897 struct recog_data_d save_recog_data;
898
899 /* For matching constraints, we often get notional input reloads where
900 we want to use the original register as the reload register. I.e.
901 technically this is a non-optional input-output reload, but IN is
902 already a valid register, and has been chosen as the reload register.
903 Speed this up, since it trivially works. */
904 if (REG_P (in))
905 return 1;
906
907 /* To test MEMs properly, we'd have to take into account all the reloads
908 that are already scheduled, which can become quite complicated.
909 And since we've already handled address reloads for this MEM, it
910 should always succeed anyway. */
911 if (MEM_P (in))
912 return 1;
913
914 /* If we can make a simple SET insn that does the job, everything should
915 be fine. */
916 dst = gen_rtx_REG (mode, regno);
917 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
918 save_recog_data = recog_data;
919 if (recog_memoized (test_insn) >= 0)
920 {
921 extract_insn (test_insn);
922 r = constrain_operands (1);
923 }
924 recog_data = save_recog_data;
925 return r;
926 }
927
928 /* Record one reload that needs to be performed.
929 IN is an rtx saying where the data are to be found before this instruction.
930 OUT says where they must be stored after the instruction.
931 (IN is zero for data not read, and OUT is zero for data not written.)
932 INLOC and OUTLOC point to the places in the instructions where
933 IN and OUT were found.
934 If IN and OUT are both nonzero, it means the same register must be used
935 to reload both IN and OUT.
936
937 RCLASS is a register class required for the reloaded data.
938 INMODE is the machine mode that the instruction requires
939 for the reg that replaces IN and OUTMODE is likewise for OUT.
940
941 If IN is zero, then OUT's location and mode should be passed as
942 INLOC and INMODE.
943
944 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
945
946 OPTIONAL nonzero means this reload does not need to be performed:
947 it can be discarded if that is more convenient.
948
949 OPNUM and TYPE say what the purpose of this reload is.
950
951 The return value is the reload-number for this reload.
952
953 If both IN and OUT are nonzero, in some rare cases we might
954 want to make two separate reloads. (Actually we never do this now.)
955 Therefore, the reload-number for OUT is stored in
956 output_reloadnum when we return; the return value applies to IN.
957 Usually (presently always), when IN and OUT are nonzero,
958 the two reload-numbers are equal, but the caller should be careful to
959 distinguish them. */
960
961 int
962 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
963 enum reg_class rclass, enum machine_mode inmode,
964 enum machine_mode outmode, int strict_low, int optional,
965 int opnum, enum reload_type type)
966 {
967 int i;
968 int dont_share = 0;
969 int dont_remove_subreg = 0;
970 #ifdef LIMIT_RELOAD_CLASS
971 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
972 #endif
973 int secondary_in_reload = -1, secondary_out_reload = -1;
974 enum insn_code secondary_in_icode = CODE_FOR_nothing;
975 enum insn_code secondary_out_icode = CODE_FOR_nothing;
976 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
977 subreg_in_class = NO_REGS;
978
979 /* INMODE and/or OUTMODE could be VOIDmode if no mode
980 has been specified for the operand. In that case,
981 use the operand's mode as the mode to reload. */
982 if (inmode == VOIDmode && in != 0)
983 inmode = GET_MODE (in);
984 if (outmode == VOIDmode && out != 0)
985 outmode = GET_MODE (out);
986
987 /* If find_reloads and friends have so far failed to replace a pseudo
988 with its constant from reg_equiv_constant, something went wrong
989 beforehand.
990 Note that it can't simply be done here if we missed it earlier
991 since the constant might need to be pushed into the literal pool
992 and the resulting memref would probably need further
993 reloading. */
994 if (in != 0 && REG_P (in))
995 {
996 int regno = REGNO (in);
997
998 gcc_assert (regno < FIRST_PSEUDO_REGISTER
999 || reg_renumber[regno] >= 0
1000 || reg_equiv_constant (regno) == NULL_RTX);
1001 }
1002
1003 /* reg_equiv_constant only contains constants which are obviously
1004 not appropriate as destination. So if we would need to replace
1005 the destination pseudo with a constant, we are in real
1006 trouble. */
1007 if (out != 0 && REG_P (out))
1008 {
1009 int regno = REGNO (out);
1010
1011 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1012 || reg_renumber[regno] >= 0
1013 || reg_equiv_constant (regno) == NULL_RTX);
1014 }
1015
1016 /* If we have a read-write operand with an address side-effect,
1017 change either IN or OUT so the side-effect happens only once. */
1018 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1019 switch (GET_CODE (XEXP (in, 0)))
1020 {
1021 case POST_INC: case POST_DEC: case POST_MODIFY:
1022 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1023 break;
1024
1025 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1026 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1027 break;
1028
1029 default:
1030 break;
1031 }
1032
1033 /* If we are reloading a (SUBREG constant ...), really reload just the
1034 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1035 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1036 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1037 register is a pseudo, also reload the inside expression.
1038 For machines that extend byte loads, do this for any SUBREG of a pseudo
1039 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1040 M2 is an integral mode that gets extended when loaded.
1041 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1042 where either M1 is not valid for R or M2 is wider than a word but we
1043 only need one register to store an M2-sized quantity in R.
1044 (However, if OUT is nonzero, we need to reload the reg *and*
1045 the subreg, so do nothing here, and let the following statement handle it.)
1046
1047 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1048 we can't handle it here because CONST_INT does not indicate a mode.
1049
1050 Similarly, we must reload the inside expression if we have a
1051 STRICT_LOW_PART (presumably, in == out in this case).
1052
1053 Also reload the inner expression if it does not require a secondary
1054 reload but the SUBREG does.
1055
1056 Finally, reload the inner expression if it is a register that is in
1057 the class whose registers cannot be referenced in a different size
1058 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1059 cannot reload just the inside since we might end up with the wrong
1060 register class. But if it is inside a STRICT_LOW_PART, we have
1061 no choice, so we hope we do get the right register class there. */
1062
1063 if (in != 0 && GET_CODE (in) == SUBREG
1064 && (subreg_lowpart_p (in) || strict_low)
1065 #ifdef CANNOT_CHANGE_MODE_CLASS
1066 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1067 #endif
1068 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1069 && (CONSTANT_P (SUBREG_REG (in))
1070 || GET_CODE (SUBREG_REG (in)) == PLUS
1071 || strict_low
1072 || (((REG_P (SUBREG_REG (in))
1073 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1074 || MEM_P (SUBREG_REG (in)))
1075 && ((GET_MODE_PRECISION (inmode)
1076 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1077 #ifdef LOAD_EXTEND_OP
1078 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1079 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1080 <= UNITS_PER_WORD)
1081 && (GET_MODE_PRECISION (inmode)
1082 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1083 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1084 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1085 #endif
1086 #ifdef WORD_REGISTER_OPERATIONS
1087 || ((GET_MODE_PRECISION (inmode)
1088 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1089 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1090 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1091 / UNITS_PER_WORD)))
1092 #endif
1093 ))
1094 || (REG_P (SUBREG_REG (in))
1095 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1096 /* The case where out is nonzero
1097 is handled differently in the following statement. */
1098 && (out == 0 || subreg_lowpart_p (in))
1099 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1100 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1101 > UNITS_PER_WORD)
1102 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1103 / UNITS_PER_WORD)
1104 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1105 [GET_MODE (SUBREG_REG (in))]))
1106 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1107 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1108 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1109 SUBREG_REG (in))
1110 == NO_REGS))
1111 #ifdef CANNOT_CHANGE_MODE_CLASS
1112 || (REG_P (SUBREG_REG (in))
1113 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1114 && REG_CANNOT_CHANGE_MODE_P
1115 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1116 #endif
1117 ))
1118 {
1119 #ifdef LIMIT_RELOAD_CLASS
1120 in_subreg_loc = inloc;
1121 #endif
1122 inloc = &SUBREG_REG (in);
1123 in = *inloc;
1124 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1125 if (MEM_P (in))
1126 /* This is supposed to happen only for paradoxical subregs made by
1127 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1128 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1129 #endif
1130 inmode = GET_MODE (in);
1131 }
1132
1133 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1134 where M1 is not valid for R if it was not handled by the code above.
1135
1136 Similar issue for (SUBREG constant ...) if it was not handled by the
1137 code above. This can happen if SUBREG_BYTE != 0.
1138
1139 However, we must reload the inner reg *as well as* the subreg in
1140 that case. */
1141
1142 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1143 {
1144 if (REG_P (SUBREG_REG (in)))
1145 subreg_in_class
1146 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1147 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1148 GET_MODE (SUBREG_REG (in)),
1149 SUBREG_BYTE (in),
1150 GET_MODE (in)),
1151 REGNO (SUBREG_REG (in)));
1152 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1153 subreg_in_class = find_valid_class_1 (inmode,
1154 GET_MODE (SUBREG_REG (in)),
1155 rclass);
1156
1157 /* This relies on the fact that emit_reload_insns outputs the
1158 instructions for input reloads of type RELOAD_OTHER in the same
1159 order as the reloads. Thus if the outer reload is also of type
1160 RELOAD_OTHER, we are guaranteed that this inner reload will be
1161 output before the outer reload. */
1162 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1163 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1164 dont_remove_subreg = 1;
1165 }
1166
1167 /* Similarly for paradoxical and problematical SUBREGs on the output.
1168 Note that there is no reason we need worry about the previous value
1169 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1170 entitled to clobber it all (except in the case of a word mode subreg
1171 or of a STRICT_LOW_PART, in that latter case the constraint should
1172 label it input-output.) */
1173 if (out != 0 && GET_CODE (out) == SUBREG
1174 && (subreg_lowpart_p (out) || strict_low)
1175 #ifdef CANNOT_CHANGE_MODE_CLASS
1176 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1177 #endif
1178 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1179 && (CONSTANT_P (SUBREG_REG (out))
1180 || strict_low
1181 || (((REG_P (SUBREG_REG (out))
1182 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1183 || MEM_P (SUBREG_REG (out)))
1184 && ((GET_MODE_PRECISION (outmode)
1185 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1186 #ifdef WORD_REGISTER_OPERATIONS
1187 || ((GET_MODE_PRECISION (outmode)
1188 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1189 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1190 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1191 / UNITS_PER_WORD)))
1192 #endif
1193 ))
1194 || (REG_P (SUBREG_REG (out))
1195 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1196 /* The case of a word mode subreg
1197 is handled differently in the following statement. */
1198 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1199 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1200 > UNITS_PER_WORD))
1201 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1202 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1203 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1204 SUBREG_REG (out))
1205 == NO_REGS))
1206 #ifdef CANNOT_CHANGE_MODE_CLASS
1207 || (REG_P (SUBREG_REG (out))
1208 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1209 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1210 GET_MODE (SUBREG_REG (out)),
1211 outmode))
1212 #endif
1213 ))
1214 {
1215 #ifdef LIMIT_RELOAD_CLASS
1216 out_subreg_loc = outloc;
1217 #endif
1218 outloc = &SUBREG_REG (out);
1219 out = *outloc;
1220 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1221 gcc_assert (!MEM_P (out)
1222 || GET_MODE_SIZE (GET_MODE (out))
1223 <= GET_MODE_SIZE (outmode));
1224 #endif
1225 outmode = GET_MODE (out);
1226 }
1227
1228 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1229 where either M1 is not valid for R or M2 is wider than a word but we
1230 only need one register to store an M2-sized quantity in R.
1231
1232 However, we must reload the inner reg *as well as* the subreg in
1233 that case and the inner reg is an in-out reload. */
1234
1235 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1236 {
1237 enum reg_class in_out_class
1238 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1239 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1240 GET_MODE (SUBREG_REG (out)),
1241 SUBREG_BYTE (out),
1242 GET_MODE (out)),
1243 REGNO (SUBREG_REG (out)));
1244
1245 /* This relies on the fact that emit_reload_insns outputs the
1246 instructions for output reloads of type RELOAD_OTHER in reverse
1247 order of the reloads. Thus if the outer reload is also of type
1248 RELOAD_OTHER, we are guaranteed that this inner reload will be
1249 output after the outer reload. */
1250 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1251 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1252 0, 0, opnum, RELOAD_OTHER);
1253 dont_remove_subreg = 1;
1254 }
1255
1256 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1257 if (in != 0 && out != 0 && MEM_P (out)
1258 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1259 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1260 dont_share = 1;
1261
1262 /* If IN is a SUBREG of a hard register, make a new REG. This
1263 simplifies some of the cases below. */
1264
1265 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1266 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1267 && ! dont_remove_subreg)
1268 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1269
1270 /* Similarly for OUT. */
1271 if (out != 0 && GET_CODE (out) == SUBREG
1272 && REG_P (SUBREG_REG (out))
1273 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1274 && ! dont_remove_subreg)
1275 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1276
1277 /* Narrow down the class of register wanted if that is
1278 desirable on this machine for efficiency. */
1279 {
1280 reg_class_t preferred_class = rclass;
1281
1282 if (in != 0)
1283 preferred_class = targetm.preferred_reload_class (in, rclass);
1284
1285 /* Output reloads may need analogous treatment, different in detail. */
1286 if (out != 0)
1287 preferred_class
1288 = targetm.preferred_output_reload_class (out, preferred_class);
1289
1290 /* Discard what the target said if we cannot do it. */
1291 if (preferred_class != NO_REGS
1292 || (optional && type == RELOAD_FOR_OUTPUT))
1293 rclass = (enum reg_class) preferred_class;
1294 }
1295
1296 /* Make sure we use a class that can handle the actual pseudo
1297 inside any subreg. For example, on the 386, QImode regs
1298 can appear within SImode subregs. Although GENERAL_REGS
1299 can handle SImode, QImode needs a smaller class. */
1300 #ifdef LIMIT_RELOAD_CLASS
1301 if (in_subreg_loc)
1302 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1303 else if (in != 0 && GET_CODE (in) == SUBREG)
1304 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1305
1306 if (out_subreg_loc)
1307 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1308 if (out != 0 && GET_CODE (out) == SUBREG)
1309 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1310 #endif
1311
1312 /* Verify that this class is at least possible for the mode that
1313 is specified. */
1314 if (this_insn_is_asm)
1315 {
1316 enum machine_mode mode;
1317 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1318 mode = inmode;
1319 else
1320 mode = outmode;
1321 if (mode == VOIDmode)
1322 {
1323 error_for_asm (this_insn, "cannot reload integer constant "
1324 "operand in %<asm%>");
1325 mode = word_mode;
1326 if (in != 0)
1327 inmode = word_mode;
1328 if (out != 0)
1329 outmode = word_mode;
1330 }
1331 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1332 if (HARD_REGNO_MODE_OK (i, mode)
1333 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1334 break;
1335 if (i == FIRST_PSEUDO_REGISTER)
1336 {
1337 error_for_asm (this_insn, "impossible register constraint "
1338 "in %<asm%>");
1339 /* Avoid further trouble with this insn. */
1340 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1341 /* We used to continue here setting class to ALL_REGS, but it triggers
1342 sanity check on i386 for:
1343 void foo(long double d)
1344 {
1345 asm("" :: "a" (d));
1346 }
1347 Returning zero here ought to be safe as we take care in
1348 find_reloads to not process the reloads when instruction was
1349 replaced by USE. */
1350
1351 return 0;
1352 }
1353 }
1354
1355 /* Optional output reloads are always OK even if we have no register class,
1356 since the function of these reloads is only to have spill_reg_store etc.
1357 set, so that the storing insn can be deleted later. */
1358 gcc_assert (rclass != NO_REGS
1359 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1360
1361 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1362
1363 if (i == n_reloads)
1364 {
1365 /* See if we need a secondary reload register to move between CLASS
1366 and IN or CLASS and OUT. Get the icode and push any required reloads
1367 needed for each of them if so. */
1368
1369 if (in != 0)
1370 secondary_in_reload
1371 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1372 &secondary_in_icode, NULL);
1373 if (out != 0 && GET_CODE (out) != SCRATCH)
1374 secondary_out_reload
1375 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1376 type, &secondary_out_icode, NULL);
1377
1378 /* We found no existing reload suitable for re-use.
1379 So add an additional reload. */
1380
1381 #ifdef SECONDARY_MEMORY_NEEDED
1382 if (subreg_in_class == NO_REGS
1383 && in != 0
1384 && (REG_P (in)
1385 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1386 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1387 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1388 /* If a memory location is needed for the copy, make one. */
1389 if (subreg_in_class != NO_REGS
1390 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1391 get_secondary_mem (in, inmode, opnum, type);
1392 #endif
1393
1394 i = n_reloads;
1395 rld[i].in = in;
1396 rld[i].out = out;
1397 rld[i].rclass = rclass;
1398 rld[i].inmode = inmode;
1399 rld[i].outmode = outmode;
1400 rld[i].reg_rtx = 0;
1401 rld[i].optional = optional;
1402 rld[i].inc = 0;
1403 rld[i].nocombine = 0;
1404 rld[i].in_reg = inloc ? *inloc : 0;
1405 rld[i].out_reg = outloc ? *outloc : 0;
1406 rld[i].opnum = opnum;
1407 rld[i].when_needed = type;
1408 rld[i].secondary_in_reload = secondary_in_reload;
1409 rld[i].secondary_out_reload = secondary_out_reload;
1410 rld[i].secondary_in_icode = secondary_in_icode;
1411 rld[i].secondary_out_icode = secondary_out_icode;
1412 rld[i].secondary_p = 0;
1413
1414 n_reloads++;
1415
1416 #ifdef SECONDARY_MEMORY_NEEDED
1417 if (out != 0
1418 && (REG_P (out)
1419 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1420 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1421 && SECONDARY_MEMORY_NEEDED (rclass,
1422 REGNO_REG_CLASS (reg_or_subregno (out)),
1423 outmode))
1424 get_secondary_mem (out, outmode, opnum, type);
1425 #endif
1426 }
1427 else
1428 {
1429 /* We are reusing an existing reload,
1430 but we may have additional information for it.
1431 For example, we may now have both IN and OUT
1432 while the old one may have just one of them. */
1433
1434 /* The modes can be different. If they are, we want to reload in
1435 the larger mode, so that the value is valid for both modes. */
1436 if (inmode != VOIDmode
1437 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1438 rld[i].inmode = inmode;
1439 if (outmode != VOIDmode
1440 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1441 rld[i].outmode = outmode;
1442 if (in != 0)
1443 {
1444 rtx in_reg = inloc ? *inloc : 0;
1445 /* If we merge reloads for two distinct rtl expressions that
1446 are identical in content, there might be duplicate address
1447 reloads. Remove the extra set now, so that if we later find
1448 that we can inherit this reload, we can get rid of the
1449 address reloads altogether.
1450
1451 Do not do this if both reloads are optional since the result
1452 would be an optional reload which could potentially leave
1453 unresolved address replacements.
1454
1455 It is not sufficient to call transfer_replacements since
1456 choose_reload_regs will remove the replacements for address
1457 reloads of inherited reloads which results in the same
1458 problem. */
1459 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1460 && ! (rld[i].optional && optional))
1461 {
1462 /* We must keep the address reload with the lower operand
1463 number alive. */
1464 if (opnum > rld[i].opnum)
1465 {
1466 remove_address_replacements (in);
1467 in = rld[i].in;
1468 in_reg = rld[i].in_reg;
1469 }
1470 else
1471 remove_address_replacements (rld[i].in);
1472 }
1473 /* When emitting reloads we don't only look at the in- and
1474 outmode, but also directly at the operands (in and out).
1475 So we can't simply overwrite them with whatever we have found
1476 for this (to-be-merged) reload, we have to "merge" that too.
1477 Reusing another reload already verified that we deal with the
1478 same operands, just possibly in different modes. So we
1479 overwrite the operands only when the new mode is larger.
1480 See also PR33613. */
1481 if (!rld[i].in
1482 || GET_MODE_SIZE (GET_MODE (in))
1483 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1484 rld[i].in = in;
1485 if (!rld[i].in_reg
1486 || (in_reg
1487 && GET_MODE_SIZE (GET_MODE (in_reg))
1488 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1489 rld[i].in_reg = in_reg;
1490 }
1491 if (out != 0)
1492 {
1493 if (!rld[i].out
1494 || (out
1495 && GET_MODE_SIZE (GET_MODE (out))
1496 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1497 rld[i].out = out;
1498 if (outloc
1499 && (!rld[i].out_reg
1500 || GET_MODE_SIZE (GET_MODE (*outloc))
1501 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1502 rld[i].out_reg = *outloc;
1503 }
1504 if (reg_class_subset_p (rclass, rld[i].rclass))
1505 rld[i].rclass = rclass;
1506 rld[i].optional &= optional;
1507 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1508 opnum, rld[i].opnum))
1509 rld[i].when_needed = RELOAD_OTHER;
1510 rld[i].opnum = MIN (rld[i].opnum, opnum);
1511 }
1512
1513 /* If the ostensible rtx being reloaded differs from the rtx found
1514 in the location to substitute, this reload is not safe to combine
1515 because we cannot reliably tell whether it appears in the insn. */
1516
1517 if (in != 0 && in != *inloc)
1518 rld[i].nocombine = 1;
1519
1520 #if 0
1521 /* This was replaced by changes in find_reloads_address_1 and the new
1522 function inc_for_reload, which go with a new meaning of reload_inc. */
1523
1524 /* If this is an IN/OUT reload in an insn that sets the CC,
1525 it must be for an autoincrement. It doesn't work to store
1526 the incremented value after the insn because that would clobber the CC.
1527 So we must reload the value being incremented,
1528 increment it, store it back, then decrement it again.
1529 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1530 {
1531 out = 0;
1532 rld[i].out = 0;
1533 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1534 /* If we did not find a nonzero amount-to-increment-by,
1535 that contradicts the belief that IN is being incremented
1536 in an address in this insn. */
1537 gcc_assert (rld[i].inc != 0);
1538 }
1539 #endif
1540
1541 /* If we will replace IN and OUT with the reload-reg,
1542 record where they are located so that substitution need
1543 not do a tree walk. */
1544
1545 if (replace_reloads)
1546 {
1547 if (inloc != 0)
1548 {
1549 struct replacement *r = &replacements[n_replacements++];
1550 r->what = i;
1551 r->where = inloc;
1552 r->mode = inmode;
1553 }
1554 if (outloc != 0 && outloc != inloc)
1555 {
1556 struct replacement *r = &replacements[n_replacements++];
1557 r->what = i;
1558 r->where = outloc;
1559 r->mode = outmode;
1560 }
1561 }
1562
1563 /* If this reload is just being introduced and it has both
1564 an incoming quantity and an outgoing quantity that are
1565 supposed to be made to match, see if either one of the two
1566 can serve as the place to reload into.
1567
1568 If one of them is acceptable, set rld[i].reg_rtx
1569 to that one. */
1570
1571 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1572 {
1573 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1574 inmode, outmode,
1575 rld[i].rclass, i,
1576 earlyclobber_operand_p (out));
1577
1578 /* If the outgoing register already contains the same value
1579 as the incoming one, we can dispense with loading it.
1580 The easiest way to tell the caller that is to give a phony
1581 value for the incoming operand (same as outgoing one). */
1582 if (rld[i].reg_rtx == out
1583 && (REG_P (in) || CONSTANT_P (in))
1584 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1585 static_reload_reg_p, i, inmode))
1586 rld[i].in = out;
1587 }
1588
1589 /* If this is an input reload and the operand contains a register that
1590 dies in this insn and is used nowhere else, see if it is the right class
1591 to be used for this reload. Use it if so. (This occurs most commonly
1592 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1593 this if it is also an output reload that mentions the register unless
1594 the output is a SUBREG that clobbers an entire register.
1595
1596 Note that the operand might be one of the spill regs, if it is a
1597 pseudo reg and we are in a block where spilling has not taken place.
1598 But if there is no spilling in this block, that is OK.
1599 An explicitly used hard reg cannot be a spill reg. */
1600
1601 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1602 {
1603 rtx note;
1604 int regno;
1605 enum machine_mode rel_mode = inmode;
1606
1607 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1608 rel_mode = outmode;
1609
1610 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1611 if (REG_NOTE_KIND (note) == REG_DEAD
1612 && REG_P (XEXP (note, 0))
1613 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1614 && reg_mentioned_p (XEXP (note, 0), in)
1615 /* Check that a former pseudo is valid; see find_dummy_reload. */
1616 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1617 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1618 ORIGINAL_REGNO (XEXP (note, 0)))
1619 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1620 && ! refers_to_regno_for_reload_p (regno,
1621 end_hard_regno (rel_mode,
1622 regno),
1623 PATTERN (this_insn), inloc)
1624 /* If this is also an output reload, IN cannot be used as
1625 the reload register if it is set in this insn unless IN
1626 is also OUT. */
1627 && (out == 0 || in == out
1628 || ! hard_reg_set_here_p (regno,
1629 end_hard_regno (rel_mode, regno),
1630 PATTERN (this_insn)))
1631 /* ??? Why is this code so different from the previous?
1632 Is there any simple coherent way to describe the two together?
1633 What's going on here? */
1634 && (in != out
1635 || (GET_CODE (in) == SUBREG
1636 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1637 / UNITS_PER_WORD)
1638 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1639 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1640 /* Make sure the operand fits in the reg that dies. */
1641 && (GET_MODE_SIZE (rel_mode)
1642 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1643 && HARD_REGNO_MODE_OK (regno, inmode)
1644 && HARD_REGNO_MODE_OK (regno, outmode))
1645 {
1646 unsigned int offs;
1647 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1648 hard_regno_nregs[regno][outmode]);
1649
1650 for (offs = 0; offs < nregs; offs++)
1651 if (fixed_regs[regno + offs]
1652 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1653 regno + offs))
1654 break;
1655
1656 if (offs == nregs
1657 && (! (refers_to_regno_for_reload_p
1658 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1659 || can_reload_into (in, regno, inmode)))
1660 {
1661 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1662 break;
1663 }
1664 }
1665 }
1666
1667 if (out)
1668 output_reloadnum = i;
1669
1670 return i;
1671 }
1672
1673 /* Record an additional place we must replace a value
1674 for which we have already recorded a reload.
1675 RELOADNUM is the value returned by push_reload
1676 when the reload was recorded.
1677 This is used in insn patterns that use match_dup. */
1678
1679 static void
1680 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1681 {
1682 if (replace_reloads)
1683 {
1684 struct replacement *r = &replacements[n_replacements++];
1685 r->what = reloadnum;
1686 r->where = loc;
1687 r->mode = mode;
1688 }
1689 }
1690
1691 /* Duplicate any replacement we have recorded to apply at
1692 location ORIG_LOC to also be performed at DUP_LOC.
1693 This is used in insn patterns that use match_dup. */
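/* As a hypothetical illustration: for a pattern such as
(set (match_operand:SI 0 "" "") (plus:SI (match_dup 0) (match_operand:SI 1 "" "")))
any replacement recorded for the location of operand 0 must also be
performed at the location of the (match_dup 0); dup_replacements copies
the existing replacement records so that both locations get substituted. */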
1694
1695 static void
1696 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1697 {
1698 int i, n = n_replacements;
1699
1700 for (i = 0; i < n; i++)
1701 {
1702 struct replacement *r = &replacements[i];
1703 if (r->where == orig_loc)
1704 push_replacement (dup_loc, r->what, r->mode);
1705 }
1706 }
1707 \f
1708 /* Transfer all replacements that used to be in reload FROM to be in
1709 reload TO. */
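/* For example (with hypothetical reload numbers), after two reloads have
been merged so that reload 5 now does the work formerly assigned to
reload 3, transfer_replacements (5, 3) retargets every replacement that
was recorded for reload 3 so that it will be substituted from reload 5's
reload register instead. */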
1710
1711 void
1712 transfer_replacements (int to, int from)
1713 {
1714 int i;
1715
1716 for (i = 0; i < n_replacements; i++)
1717 if (replacements[i].what == from)
1718 replacements[i].what = to;
1719 }
1720 \f
1721 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1722 or a subpart of it. If we have any replacements registered for IN_RTX,
1723 cancel the reloads that were supposed to load them.
1724 Return nonzero if we canceled any reloads. */
1725 int
1726 remove_address_replacements (rtx in_rtx)
1727 {
1728 int i, j;
1729 char reload_flags[MAX_RELOADS];
1730 int something_changed = 0;
1731
1732 memset (reload_flags, 0, sizeof reload_flags);
1733 for (i = 0, j = 0; i < n_replacements; i++)
1734 {
1735 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1736 reload_flags[replacements[i].what] |= 1;
1737 else
1738 {
1739 replacements[j++] = replacements[i];
1740 reload_flags[replacements[i].what] |= 2;
1741 }
1742 }
1743 /* Note that the following store must be done before the recursive calls. */
1744 n_replacements = j;
1745
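/* At this point reload_flags[R] is 1 if every replacement belonging to
reload R was found inside IN_RTX, 2 if none were, and 3 if some were and
some were not (a reload with no replacements at all stays 0). Only
reloads whose replacements all lie inside IN_RTX (value 1) are
cancelled below. */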
1746 for (i = n_reloads - 1; i >= 0; i--)
1747 {
1748 if (reload_flags[i] == 1)
1749 {
1750 deallocate_reload_reg (i);
1751 remove_address_replacements (rld[i].in);
1752 rld[i].in = 0;
1753 something_changed = 1;
1754 }
1755 }
1756 return something_changed;
1757 }
1758 \f
1759 /* If there is only one output reload, and it is not for an earlyclobber
1760 operand, try to combine it with a (logically unrelated) input reload
1761 to reduce the number of reload registers needed.
1762
1763 This is safe if the input reload does not appear in
1764 the value being output-reloaded, because this implies
1765 it is not needed any more once the original insn completes.
1766
1767 If that doesn't work, see if we can use any of the registers that
1768 die in this insn as a reload register. We can if it is of the right
1769 class and does not appear in the value being output-reloaded. */
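/* A hypothetical example of the situation described above: in an insn
(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))) where pseudo 101
needs an input reload, pseudo 100 needs an output reload, and 101 is not
mentioned in the output, the two reloads can share one reload register:
load 101 into it before the insn, let the insn compute into it, and store
it into 100's location afterwards. */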
1770
1771 static void
1772 combine_reloads (void)
1773 {
1774 int i, regno;
1775 int output_reload = -1;
1776 int secondary_out = -1;
1777 rtx note;
1778
1779 /* Find the output reload; return unless there is exactly one
1780 and that one is mandatory. */
1781
1782 for (i = 0; i < n_reloads; i++)
1783 if (rld[i].out != 0)
1784 {
1785 if (output_reload >= 0)
1786 return;
1787 output_reload = i;
1788 }
1789
1790 if (output_reload < 0 || rld[output_reload].optional)
1791 return;
1792
1793 /* An input-output reload isn't combinable. */
1794
1795 if (rld[output_reload].in != 0)
1796 return;
1797
1798 /* If this reload is for an earlyclobber operand, we can't do anything. */
1799 if (earlyclobber_operand_p (rld[output_reload].out))
1800 return;
1801
1802 /* If there is a reload for part of the address of this operand, we would
1803 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1804 its life to the point where doing this combine would not lower the
1805 number of spill registers needed. */
1806 for (i = 0; i < n_reloads; i++)
1807 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1808 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1809 && rld[i].opnum == rld[output_reload].opnum)
1810 return;
1811
1812 /* Check each input reload; can we combine it? */
1813
1814 for (i = 0; i < n_reloads; i++)
1815 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1816 /* Life span of this reload must not extend past main insn. */
1817 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1818 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1819 && rld[i].when_needed != RELOAD_OTHER
1820 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1821 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1822 [(int) rld[output_reload].outmode])
1823 && rld[i].inc == 0
1824 && rld[i].reg_rtx == 0
1825 #ifdef SECONDARY_MEMORY_NEEDED
1826 /* Don't combine two reloads with different secondary
1827 memory locations. */
1828 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1829 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1830 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1831 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1832 #endif
1833 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1834 ? (rld[i].rclass == rld[output_reload].rclass)
1835 : (reg_class_subset_p (rld[i].rclass,
1836 rld[output_reload].rclass)
1837 || reg_class_subset_p (rld[output_reload].rclass,
1838 rld[i].rclass)))
1839 && (MATCHES (rld[i].in, rld[output_reload].out)
1840 /* Args reversed because the first arg seems to be
1841 the one that we imagine being modified
1842 while the second is the one that might be affected. */
1843 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1844 rld[i].in)
1845 /* However, if the input is a register that appears inside
1846 the output, then we also can't share.
1847 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1848 If the same reload reg is used for both reg 69 and the
1849 result to be stored in memory, then that result
1850 will clobber the address of the memory ref. */
1851 && ! (REG_P (rld[i].in)
1852 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1853 rld[output_reload].out))))
1854 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1855 rld[i].when_needed != RELOAD_FOR_INPUT)
1856 && (reg_class_size[(int) rld[i].rclass]
1857 || targetm.small_register_classes_for_mode_p (VOIDmode))
1858 /* We will allow making things slightly worse by combining an
1859 input and an output, but no worse than that. */
1860 && (rld[i].when_needed == RELOAD_FOR_INPUT
1861 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1862 {
1863 int j;
1864
1865 /* We have found a reload to combine with! */
1866 rld[i].out = rld[output_reload].out;
1867 rld[i].out_reg = rld[output_reload].out_reg;
1868 rld[i].outmode = rld[output_reload].outmode;
1869 /* Mark the old output reload as inoperative. */
1870 rld[output_reload].out = 0;
1871 /* The combined reload is needed for the entire insn. */
1872 rld[i].when_needed = RELOAD_OTHER;
1873 /* If the output reload had a secondary reload, copy it. */
1874 if (rld[output_reload].secondary_out_reload != -1)
1875 {
1876 rld[i].secondary_out_reload
1877 = rld[output_reload].secondary_out_reload;
1878 rld[i].secondary_out_icode
1879 = rld[output_reload].secondary_out_icode;
1880 }
1881
1882 #ifdef SECONDARY_MEMORY_NEEDED
1883 /* Copy any secondary MEM. */
1884 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1885 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1886 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1887 #endif
1888 /* If required, minimize the register class. */
1889 if (reg_class_subset_p (rld[output_reload].rclass,
1890 rld[i].rclass))
1891 rld[i].rclass = rld[output_reload].rclass;
1892
1893 /* Transfer all replacements from the old reload to the combined. */
1894 for (j = 0; j < n_replacements; j++)
1895 if (replacements[j].what == output_reload)
1896 replacements[j].what = i;
1897
1898 return;
1899 }
1900
1901 /* If this insn has only one operand that is modified or written (assumed
1902 to be the first), it must be the one corresponding to this reload. It
1903 is safe to use anything that dies in this insn for that output provided
1904 that it does not occur in the output (we already know it isn't an
1905 earlyclobber). If this is an asm insn, give up. */
1906
1907 if (INSN_CODE (this_insn) == -1)
1908 return;
1909
1910 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1911 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1912 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1913 return;
1914
1915 /* See if some hard register that dies in this insn and is not used in
1916 the output is the right class. Only works if the register we pick
1917 up can fully hold our output reload. */
1918 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1919 if (REG_NOTE_KIND (note) == REG_DEAD
1920 && REG_P (XEXP (note, 0))
1921 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1922 rld[output_reload].out)
1923 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1924 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1925 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1926 regno)
1927 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1928 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1929 /* Ensure that a secondary or tertiary reload for this output
1930 won't want this register. */
1931 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1932 || (!(TEST_HARD_REG_BIT
1933 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1934 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1935 || !(TEST_HARD_REG_BIT
1936 (reg_class_contents[(int) rld[secondary_out].rclass],
1937 regno)))))
1938 && !fixed_regs[regno]
1939 /* Check that a former pseudo is valid; see find_dummy_reload. */
1940 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1941 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1942 ORIGINAL_REGNO (XEXP (note, 0)))
1943 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1944 {
1945 rld[output_reload].reg_rtx
1946 = gen_rtx_REG (rld[output_reload].outmode, regno);
1947 return;
1948 }
1949 }
1950 \f
1951 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1952 See if one of IN and OUT is a register that may be used;
1953 this is desirable since a spill-register won't be needed.
1954 If so, return the register rtx that proves acceptable.
1955
1956 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1957 RCLASS is the register class required for the reload.
1958
1959 If FOR_REAL is >= 0, it is the number of the reload,
1960 and in some cases when it can be discovered that OUT doesn't need
1961 to be computed, clear out rld[FOR_REAL].out.
1962
1963 If FOR_REAL is -1, this should not be done, because this call
1964 is just to see if a register can be found, not to find and install it.
1965
1966 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1967 puts an additional constraint on being able to use IN for OUT since
1968 IN must not appear elsewhere in the insn (it is assumed that IN itself
1969 is safe from the earlyclobber). */
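/* As an illustrative sketch (the register numbers are hypothetical):
for an in-out reload of (reg:SI 2) in an insn like
(set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 1))), where hard reg 2
belongs to RCLASS, is not fixed, and is not used elsewhere in the insn,
this function can return (reg:SI 2) itself, so no separate spill
register is needed. */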
1970
1971 static rtx
1972 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1973 enum machine_mode inmode, enum machine_mode outmode,
1974 reg_class_t rclass, int for_real, int earlyclobber)
1975 {
1976 rtx in = real_in;
1977 rtx out = real_out;
1978 int in_offset = 0;
1979 int out_offset = 0;
1980 rtx value = 0;
1981
1982 /* If operands exceed a word, we can't use either of them
1983 unless they have the same size. */
1984 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1985 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1986 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1987 return 0;
1988
1989 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1990 respectively refers to a hard register. */
1991
1992 /* Find the inside of any subregs. */
1993 while (GET_CODE (out) == SUBREG)
1994 {
1995 if (REG_P (SUBREG_REG (out))
1996 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1997 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1998 GET_MODE (SUBREG_REG (out)),
1999 SUBREG_BYTE (out),
2000 GET_MODE (out));
2001 out = SUBREG_REG (out);
2002 }
2003 while (GET_CODE (in) == SUBREG)
2004 {
2005 if (REG_P (SUBREG_REG (in))
2006 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2007 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2008 GET_MODE (SUBREG_REG (in)),
2009 SUBREG_BYTE (in),
2010 GET_MODE (in));
2011 in = SUBREG_REG (in);
2012 }
2013
2014 /* Narrow down the reg class, the same way push_reload will;
2015 otherwise we might find a dummy now, but push_reload won't. */
2016 {
2017 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2018 if (preferred_class != NO_REGS)
2019 rclass = (enum reg_class) preferred_class;
2020 }
2021
2022 /* See if OUT will do. */
2023 if (REG_P (out)
2024 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2025 {
2026 unsigned int regno = REGNO (out) + out_offset;
2027 unsigned int nwords = hard_regno_nregs[regno][outmode];
2028 rtx saved_rtx;
2029
2030 /* When we consider whether the insn uses OUT,
2031 ignore references within IN. They don't prevent us
2032 from copying IN into OUT, because those refs would
2033 move into the insn that reloads IN.
2034
2035 However, we only ignore IN in its role as this reload.
2036 If the insn uses IN elsewhere and it contains OUT,
2037 that counts. We can't be sure it's the "same" operand
2038 so it might not go through this reload.
2039
2040 We also need to avoid using OUT if it, or part of it, is a
2041 fixed register. Modifying such registers, even transiently,
2042 may have undefined effects on the machine, such as modifying
2043 the stack pointer. */
2044 saved_rtx = *inloc;
2045 *inloc = const0_rtx;
2046
2047 if (regno < FIRST_PSEUDO_REGISTER
2048 && HARD_REGNO_MODE_OK (regno, outmode)
2049 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2050 PATTERN (this_insn), outloc))
2051 {
2052 unsigned int i;
2053
2054 for (i = 0; i < nwords; i++)
2055 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2056 regno + i)
2057 || fixed_regs[regno + i])
2058 break;
2059
2060 if (i == nwords)
2061 {
2062 if (REG_P (real_out))
2063 value = real_out;
2064 else
2065 value = gen_rtx_REG (outmode, regno);
2066 }
2067 }
2068
2069 *inloc = saved_rtx;
2070 }
2071
2072 /* Consider using IN if OUT was not acceptable
2073 or if OUT dies in this insn (like the quotient in a divmod insn).
2074 We can't use IN unless it dies in this insn,
2075 which means we must know accurately which hard regs are live.
2076 Also, the result can't go in IN if IN is used within OUT,
2077 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2078 if (hard_regs_live_known
2079 && REG_P (in)
2080 && REGNO (in) < FIRST_PSEUDO_REGISTER
2081 && (value == 0
2082 || find_reg_note (this_insn, REG_UNUSED, real_out))
2083 && find_reg_note (this_insn, REG_DEAD, real_in)
2084 && !fixed_regs[REGNO (in)]
2085 && HARD_REGNO_MODE_OK (REGNO (in),
2086 /* The only case where out and real_out might
2087 have different modes is where real_out
2088 is a subreg, and in that case, out
2089 has a real mode. */
2090 (GET_MODE (out) != VOIDmode
2091 ? GET_MODE (out) : outmode))
2092 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2093 /* However only do this if we can be sure that this input
2094 operand doesn't correspond with an uninitialized pseudo.
2095 global can assign some hardreg to it that is the same as
2096 the one assigned to a different, also live pseudo (as it
2097 can ignore the conflict). We must never introduce writes
2098 to such hardregs, as they would clobber the other live
2099 pseudo. See PR 20973. */
2100 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2101 ORIGINAL_REGNO (in))
2102 /* Similarly, only do this if we can be sure that the death
2103 note is still valid. global can assign some hardreg to
2104 the pseudo referenced in the note and simultaneously a
2105 subword of this hardreg to a different, also live pseudo,
2106 because only another subword of the hardreg is actually
2107 used in the insn. This cannot happen if the pseudo has
2108 been assigned exactly one hardreg. See PR 33732. */
2109 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2110 {
2111 unsigned int regno = REGNO (in) + in_offset;
2112 unsigned int nwords = hard_regno_nregs[regno][inmode];
2113
2114 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2115 && ! hard_reg_set_here_p (regno, regno + nwords,
2116 PATTERN (this_insn))
2117 && (! earlyclobber
2118 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2119 PATTERN (this_insn), inloc)))
2120 {
2121 unsigned int i;
2122
2123 for (i = 0; i < nwords; i++)
2124 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2125 regno + i))
2126 break;
2127
2128 if (i == nwords)
2129 {
2130 /* If we were going to use OUT as the reload reg
2131 and changed our mind, it means OUT is a dummy that
2132 dies here. So don't bother copying value to it. */
2133 if (for_real >= 0 && value == real_out)
2134 rld[for_real].out = 0;
2135 if (REG_P (real_in))
2136 value = real_in;
2137 else
2138 value = gen_rtx_REG (inmode, regno);
2139 }
2140 }
2141 }
2142
2143 return value;
2144 }
2145 \f
2146 /* This page contains subroutines used mainly for determining
2147 whether the IN or an OUT of a reload can serve as the
2148 reload register. */
2149
2150 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2151
2152 int
2153 earlyclobber_operand_p (rtx x)
2154 {
2155 int i;
2156
2157 for (i = 0; i < n_earlyclobbers; i++)
2158 if (reload_earlyclobbers[i] == x)
2159 return 1;
2160
2161 return 0;
2162 }
2163
2164 /* Return 1 if expression X alters a hard reg in the range
2165 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2166 either explicitly or in the guise of a pseudo-reg allocated to a hard reg in that range.
2167 X should be the body of an instruction. */
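/* For example (assuming SImode fits in a single hard register), if X is
(set (reg:SI 2) (reg:SI 3)), then hard_reg_set_here_p (2, 3, X) returns 1
because hard reg 2 is altered, while hard_reg_set_here_p (3, 4, X)
returns 0 because reg 3 is only used, not set. */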
2168
2169 static int
2170 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2171 {
2172 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2173 {
2174 rtx op0 = SET_DEST (x);
2175
2176 while (GET_CODE (op0) == SUBREG)
2177 op0 = SUBREG_REG (op0);
2178 if (REG_P (op0))
2179 {
2180 unsigned int r = REGNO (op0);
2181
2182 /* See if this reg overlaps range under consideration. */
2183 if (r < end_regno
2184 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2185 return 1;
2186 }
2187 }
2188 else if (GET_CODE (x) == PARALLEL)
2189 {
2190 int i = XVECLEN (x, 0) - 1;
2191
2192 for (; i >= 0; i--)
2193 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2194 return 1;
2195 }
2196
2197 return 0;
2198 }
2199
2200 /* Return 1 if ADDR is a valid memory address for mode MODE
2201 in address space AS, and check that each pseudo reg has the
2202 proper kind of hard reg. */
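/* Note that the target hook below is called with its STRICT argument set
to 1: under strict checking a pseudo register appearing in ADDR is
acceptable only if it has already been assigned a hard register of the
proper kind. */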
2203
2204 int
2205 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2206 rtx addr, addr_space_t as)
2207 {
2208 #ifdef GO_IF_LEGITIMATE_ADDRESS
2209 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2210 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2211 return 0;
2212
2213 win:
2214 return 1;
2215 #else
2216 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2217 #endif
2218 }
2219 \f
2220 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2221 if they are the same hard reg, and has special hacks for
2222 autoincrement and autodecrement.
2223 This is specifically intended for find_reloads to use
2224 in determining whether two operands match.
2225 X is the operand whose number is the lower of the two.
2226
2227 The value is 2 if Y contains a pre-increment that matches
2228 a non-incrementing address in X. */
2229
2230 /* ??? To be completely correct, we should arrange to pass
2231 for X the output operand and for Y the input operand.
2232 For now, we assume that the output operand has the lower number
2233 because that is natural in (SET output (... input ...)). */
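/* Two illustrative (hypothetical) cases: distinct rtx objects (reg:SI 3)
and (reg:SI 3) match and the value is 1, while
operands_match_p ((mem:SI (reg:SI 5)), (mem:SI (pre_inc:SI (reg:SI 5))))
returns 2, because Y pre-increments an address that X uses without
incrementing it. */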
2234
2235 int
2236 operands_match_p (rtx x, rtx y)
2237 {
2238 int i;
2239 RTX_CODE code = GET_CODE (x);
2240 const char *fmt;
2241 int success_2;
2242
2243 if (x == y)
2244 return 1;
2245 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2246 && (REG_P (y) || (GET_CODE (y) == SUBREG
2247 && REG_P (SUBREG_REG (y)))))
2248 {
2249 int j;
2250
2251 if (code == SUBREG)
2252 {
2253 i = REGNO (SUBREG_REG (x));
2254 if (i >= FIRST_PSEUDO_REGISTER)
2255 goto slow;
2256 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2257 GET_MODE (SUBREG_REG (x)),
2258 SUBREG_BYTE (x),
2259 GET_MODE (x));
2260 }
2261 else
2262 i = REGNO (x);
2263
2264 if (GET_CODE (y) == SUBREG)
2265 {
2266 j = REGNO (SUBREG_REG (y));
2267 if (j >= FIRST_PSEUDO_REGISTER)
2268 goto slow;
2269 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2270 GET_MODE (SUBREG_REG (y)),
2271 SUBREG_BYTE (y),
2272 GET_MODE (y));
2273 }
2274 else
2275 j = REGNO (y);
2276
2277 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2278 multiple hard register group of scalar integer registers, so that
2279 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2280 register. */
2281 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2282 && SCALAR_INT_MODE_P (GET_MODE (x))
2283 && i < FIRST_PSEUDO_REGISTER)
2284 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2285 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2286 && SCALAR_INT_MODE_P (GET_MODE (y))
2287 && j < FIRST_PSEUDO_REGISTER)
2288 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2289
2290 return i == j;
2291 }
2292 /* If two operands must match, because they are really a single
2293 operand of an assembler insn, then two postincrements are invalid
2294 because the assembler insn would increment only once.
2295 On the other hand, a postincrement matches ordinary indexing
2296 if the postincrement is the output operand. */
2297 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2298 return operands_match_p (XEXP (x, 0), y);
2299 /* Two preincrements are invalid
2300 because the assembler insn would increment only once.
2301 On the other hand, a preincrement matches ordinary indexing
2302 if the preincrement is the input operand.
2303 In this case, return 2, since some callers need to do special
2304 things when this happens. */
2305 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2306 || GET_CODE (y) == PRE_MODIFY)
2307 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2308
2309 slow:
2310
2311 /* Now we have disposed of all the cases in which different rtx codes
2312 can match. */
2313 if (code != GET_CODE (y))
2314 return 0;
2315
2316 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2317 if (GET_MODE (x) != GET_MODE (y))
2318 return 0;
2319
2320 /* MEMs referring to different address space are not equivalent. */
2321 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2322 return 0;
2323
2324 switch (code)
2325 {
2326 CASE_CONST_UNIQUE:
2327 return 0;
2328
2329 case LABEL_REF:
2330 return XEXP (x, 0) == XEXP (y, 0);
2331 case SYMBOL_REF:
2332 return XSTR (x, 0) == XSTR (y, 0);
2333
2334 default:
2335 break;
2336 }
2337
2338 /* Compare the elements. If any pair of corresponding elements
2339 fails to match, return 0 for the whole thing.
2340
2341 success_2 = 0;
2342 fmt = GET_RTX_FORMAT (code);
2343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2344 {
2345 int val, j;
2346 switch (fmt[i])
2347 {
2348 case 'w':
2349 if (XWINT (x, i) != XWINT (y, i))
2350 return 0;
2351 break;
2352
2353 case 'i':
2354 if (XINT (x, i) != XINT (y, i))
2355 return 0;
2356 break;
2357
2358 case 'e':
2359 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2360 if (val == 0)
2361 return 0;
2362 /* If any subexpression returns 2,
2363 we should return 2 if we are successful. */
2364 if (val == 2)
2365 success_2 = 1;
2366 break;
2367
2368 case '0':
2369 break;
2370
2371 case 'E':
2372 if (XVECLEN (x, i) != XVECLEN (y, i))
2373 return 0;
2374 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2375 {
2376 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2377 if (val == 0)
2378 return 0;
2379 if (val == 2)
2380 success_2 = 1;
2381 }
2382 break;
2383
2384 /* It is believed that rtx's at this level will never
2385 contain anything but integers and other rtx's,
2386 except for within LABEL_REFs and SYMBOL_REFs. */
2387 default:
2388 gcc_unreachable ();
2389 }
2390 }
2391 return 1 + success_2;
2392 }
2393 \f
2394 /* Describe the range of registers or memory referenced by X.
2395 If X is a register, set REG_FLAG and put the first register
2396 number into START and the last plus one into END.
2397 If X is a memory reference, put a base address into BASE
2398 and a range of integer offsets into START and END.
2399 If X is pushing on the stack, we can assume it causes no trouble,
2400 so we set the SAFE field. */
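/* A worked (hypothetical) example, assuming SImode is 4 bytes wide:
decomposing (mem:SI (plus:SI (reg:SI 6) (const_int 8))) yields
base = (reg:SI 6), start = 8, end = 12, reg_flag = 0; decomposing a hard
register (reg:DI 4) that occupies two hard registers yields
reg_flag = 1, start = 4, end = 6. */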
2401
2402 static struct decomposition
2403 decompose (rtx x)
2404 {
2405 struct decomposition val;
2406 int all_const = 0;
2407
2408 memset (&val, 0, sizeof (val));
2409
2410 switch (GET_CODE (x))
2411 {
2412 case MEM:
2413 {
2414 rtx base = NULL_RTX, offset = 0;
2415 rtx addr = XEXP (x, 0);
2416
2417 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2418 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2419 {
2420 val.base = XEXP (addr, 0);
2421 val.start = -GET_MODE_SIZE (GET_MODE (x));
2422 val.end = GET_MODE_SIZE (GET_MODE (x));
2423 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2424 return val;
2425 }
2426
2427 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2428 {
2429 if (GET_CODE (XEXP (addr, 1)) == PLUS
2430 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2431 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2432 {
2433 val.base = XEXP (addr, 0);
2434 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2435 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2436 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2437 return val;
2438 }
2439 }
2440
2441 if (GET_CODE (addr) == CONST)
2442 {
2443 addr = XEXP (addr, 0);
2444 all_const = 1;
2445 }
2446 if (GET_CODE (addr) == PLUS)
2447 {
2448 if (CONSTANT_P (XEXP (addr, 0)))
2449 {
2450 base = XEXP (addr, 1);
2451 offset = XEXP (addr, 0);
2452 }
2453 else if (CONSTANT_P (XEXP (addr, 1)))
2454 {
2455 base = XEXP (addr, 0);
2456 offset = XEXP (addr, 1);
2457 }
2458 }
2459
2460 if (offset == 0)
2461 {
2462 base = addr;
2463 offset = const0_rtx;
2464 }
2465 if (GET_CODE (offset) == CONST)
2466 offset = XEXP (offset, 0);
2467 if (GET_CODE (offset) == PLUS)
2468 {
2469 if (CONST_INT_P (XEXP (offset, 0)))
2470 {
2471 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2472 offset = XEXP (offset, 0);
2473 }
2474 else if (CONST_INT_P (XEXP (offset, 1)))
2475 {
2476 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2477 offset = XEXP (offset, 1);
2478 }
2479 else
2480 {
2481 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2482 offset = const0_rtx;
2483 }
2484 }
2485 else if (!CONST_INT_P (offset))
2486 {
2487 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2488 offset = const0_rtx;
2489 }
2490
2491 if (all_const && GET_CODE (base) == PLUS)
2492 base = gen_rtx_CONST (GET_MODE (base), base);
2493
2494 gcc_assert (CONST_INT_P (offset));
2495
2496 val.start = INTVAL (offset);
2497 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2498 val.base = base;
2499 }
2500 break;
2501
2502 case REG:
2503 val.reg_flag = 1;
2504 val.start = true_regnum (x);
2505 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2506 {
2507 /* A pseudo with no hard reg. */
2508 val.start = REGNO (x);
2509 val.end = val.start + 1;
2510 }
2511 else
2512 /* A hard reg. */
2513 val.end = end_hard_regno (GET_MODE (x), val.start);
2514 break;
2515
2516 case SUBREG:
2517 if (!REG_P (SUBREG_REG (x)))
2518 /* This could be more precise, but it's good enough. */
2519 return decompose (SUBREG_REG (x));
2520 val.reg_flag = 1;
2521 val.start = true_regnum (x);
2522 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2523 return decompose (SUBREG_REG (x));
2524 else
2525 /* A hard reg. */
2526 val.end = val.start + subreg_nregs (x);
2527 break;
2528
2529 case SCRATCH:
2530 /* This hasn't been assigned yet, so it can't conflict yet. */
2531 val.safe = 1;
2532 break;
2533
2534 default:
2535 gcc_assert (CONSTANT_P (x));
2536 val.safe = 1;
2537 break;
2538 }
2539 return val;
2540 }
2541
2542 /* Return 1 if altering Y will not modify the value of X.
2543 Y is also described by YDATA, which should be decompose (Y). */
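/* A hypothetical example, assuming SImode is 4 bytes wide: two frame slots
(mem:SI (plus:SI (reg:SI 6) (const_int -4))) and
(mem:SI (plus:SI (reg:SI 6) (const_int -8))) decompose to the ranges
[-4, 0) and [-8, -4) with the same base, so neither can be altered by
writing the other and immune_p returns 1. */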
2544
2545 static int
2546 immune_p (rtx x, rtx y, struct decomposition ydata)
2547 {
2548 struct decomposition xdata;
2549
2550 if (ydata.reg_flag)
2551 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2552 if (ydata.safe)
2553 return 1;
2554
2555 gcc_assert (MEM_P (y));
2556 /* If Y is memory and X is not, Y can't affect X. */
2557 if (!MEM_P (x))
2558 return 1;
2559
2560 xdata = decompose (x);
2561
2562 if (! rtx_equal_p (xdata.base, ydata.base))
2563 {
2564 /* If bases are distinct symbolic constants, there is no overlap. */
2565 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2566 return 1;
2567 /* Constants and stack slots never overlap. */
2568 if (CONSTANT_P (xdata.base)
2569 && (ydata.base == frame_pointer_rtx
2570 || ydata.base == hard_frame_pointer_rtx
2571 || ydata.base == stack_pointer_rtx))
2572 return 1;
2573 if (CONSTANT_P (ydata.base)
2574 && (xdata.base == frame_pointer_rtx
2575 || xdata.base == hard_frame_pointer_rtx
2576 || xdata.base == stack_pointer_rtx))
2577 return 1;
2578 /* If either base is variable, we don't know anything. */
2579 return 0;
2580 }
2581
2582 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2583 }
2584
2585 /* Similar, but calls decompose. */
2586
2587 int
2588 safe_from_earlyclobber (rtx op, rtx clobber)
2589 {
2590 struct decomposition early_data;
2591
2592 early_data = decompose (clobber);
2593 return immune_p (op, clobber, early_data);
2594 }
2595 \f
2596 /* Main entry point of this file: search the body of INSN
2597 for values that need reloading and record them with push_reload.
2598 REPLACE nonzero means record also where the values occur
2599 so that subst_reloads can be used.
2600
2601 IND_LEVELS says how many levels of indirection are supported by this
2602 machine; a value of zero means that a memory reference is not a valid
2603 memory address.
2604
2605 LIVE_KNOWN says we have valid information about which hard
2606 regs are live at each point in the program; this is true when
2607 we are called from global_alloc but false when stupid register
2608 allocation has been done.
2609
2610 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2611 an entry is nonnegative if that reg has been commandeered for reloading into.
2612 It is copied into STATIC_RELOAD_REG_P and referenced from there
2613 by various subroutines.
2614
2615 Return TRUE if some operands need to be changed, because of swapping
2616 commutative operands, reg_equiv_address substitution, or whatever. */
2617
2618 int
2619 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2620 short *reload_reg_p)
2621 {
2622 int insn_code_number;
2623 int i, j;
2624 int noperands;
2625 /* These start out as the constraints for the insn
2626 and they are chewed up as we consider alternatives. */
2627 const char *constraints[MAX_RECOG_OPERANDS];
2628 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2629 a register. */
2630 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2631 char pref_or_nothing[MAX_RECOG_OPERANDS];
2632 /* Nonzero for a MEM operand whose entire address needs a reload.
2633 May be -1 to indicate the entire address may or may not need a reload. */
2634 int address_reloaded[MAX_RECOG_OPERANDS];
2635 /* Nonzero for an address operand that needs to be completely reloaded.
2636 May be -1 to indicate the entire operand may or may not need a reload. */
2637 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2638 /* Value of enum reload_type to use for operand. */
2639 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2640 /* Value of enum reload_type to use within address of operand. */
2641 enum reload_type address_type[MAX_RECOG_OPERANDS];
2642 /* Save the usage of each operand. */
2643 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2644 int no_input_reloads = 0, no_output_reloads = 0;
2645 int n_alternatives;
2646 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2647 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2648 char this_alternative_win[MAX_RECOG_OPERANDS];
2649 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2650 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2651 int this_alternative_matches[MAX_RECOG_OPERANDS];
2652 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2653 int this_alternative_number;
2654 int goal_alternative_number = 0;
2655 int operand_reloadnum[MAX_RECOG_OPERANDS];
2656 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2657 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2658 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2659 char goal_alternative_win[MAX_RECOG_OPERANDS];
2660 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2661 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2662 int goal_alternative_swapped;
2663 int best;
2664 int commutative;
2665 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2666 rtx substed_operand[MAX_RECOG_OPERANDS];
2667 rtx body = PATTERN (insn);
2668 rtx set = single_set (insn);
2669 int goal_earlyclobber = 0, this_earlyclobber;
2670 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2671 int retval = 0;
2672
2673 this_insn = insn;
2674 n_reloads = 0;
2675 n_replacements = 0;
2676 n_earlyclobbers = 0;
2677 replace_reloads = replace;
2678 hard_regs_live_known = live_known;
2679 static_reload_reg_p = reload_reg_p;
2680
2681 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2682 neither are insns that SET cc0. Insns that use CC0 are not allowed
2683 to have any input reloads. */
2684 if (JUMP_P (insn) || CALL_P (insn))
2685 no_output_reloads = 1;
2686
2687 #ifdef HAVE_cc0
2688 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2689 no_input_reloads = 1;
2690 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2691 no_output_reloads = 1;
2692 #endif
2693
2694 #ifdef SECONDARY_MEMORY_NEEDED
2695 /* The eliminated forms of any secondary memory locations are per-insn, so
2696 clear them out here. */
2697
2698 if (secondary_memlocs_elim_used)
2699 {
2700 memset (secondary_memlocs_elim, 0,
2701 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2702 secondary_memlocs_elim_used = 0;
2703 }
2704 #endif
2705
2706 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2707 is cheap to move between them. If it is not, there may not be an insn
2708 to do the copy, so we may need a reload. */
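/* A move cost of exactly 2 is the documented cost of a simple move
instruction between two registers, so anything larger suggests the copy
may need an intermediate register or memory and therefore a reload. */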
2709 if (GET_CODE (body) == SET
2710 && REG_P (SET_DEST (body))
2711 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2712 && REG_P (SET_SRC (body))
2713 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2714 && register_move_cost (GET_MODE (SET_SRC (body)),
2715 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2716 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2717 return 0;
2718
2719 extract_insn (insn);
2720
2721 noperands = reload_n_operands = recog_data.n_operands;
2722 n_alternatives = recog_data.n_alternatives;
2723
2724 /* Just return "no reloads" if insn has no operands with constraints. */
2725 if (noperands == 0 || n_alternatives == 0)
2726 return 0;
2727
2728 insn_code_number = INSN_CODE (insn);
2729 this_insn_is_asm = insn_code_number < 0;
2730
2731 memcpy (operand_mode, recog_data.operand_mode,
2732 noperands * sizeof (enum machine_mode));
2733 memcpy (constraints, recog_data.constraints,
2734 noperands * sizeof (const char *));
2735
2736 commutative = -1;
2737
2738 /* If we will need to know, later, whether some pair of operands
2739 are the same, we must compare them now and save the result.
2740 Reloading the base and index registers will clobber them
2741 and afterward they will fail to match. */
2742
2743 for (i = 0; i < noperands; i++)
2744 {
2745 const char *p;
2746 int c;
2747 char *end;
2748
2749 substed_operand[i] = recog_data.operand[i];
2750 p = constraints[i];
2751
2752 modified[i] = RELOAD_READ;
2753
2754 /* Scan this operand's constraint to see if it is an output operand,
2755 an in-out operand, is commutative, or should match another. */
2756
2757 while ((c = *p))
2758 {
2759 p += CONSTRAINT_LEN (c, p);
2760 switch (c)
2761 {
2762 case '=':
2763 modified[i] = RELOAD_WRITE;
2764 break;
2765 case '+':
2766 modified[i] = RELOAD_READ_WRITE;
2767 break;
2768 case '%':
2769 {
2770 /* The last operand should not be marked commutative. */
2771 gcc_assert (i != noperands - 1);
2772
2773 /* We currently only support one commutative pair of
2774 operands. Some existing asm code currently uses more
2775 than one pair. Previously, that would usually work,
2776 but sometimes it would crash the compiler. We
2777 continue supporting that case as well as we can by
2778 silently ignoring all but the first pair. In the
2779 future we may handle it correctly. */
2780 if (commutative < 0)
2781 commutative = i;
2782 else
2783 gcc_assert (this_insn_is_asm);
2784 }
2785 break;
2786 /* Use of ISDIGIT is tempting here, but it may get expensive because
2787 of locale support we don't want. */
2788 case '0': case '1': case '2': case '3': case '4':
2789 case '5': case '6': case '7': case '8': case '9':
2790 {
2791 c = strtoul (p - 1, &end, 10);
2792 p = end;
2793
2794 operands_match[c][i]
2795 = operands_match_p (recog_data.operand[c],
2796 recog_data.operand[i]);
2797
2798 /* An operand may not match itself. */
2799 gcc_assert (c != i);
2800
2801 /* If C can be commuted with C+1, and C might need to match I,
2802 then C+1 might also need to match I. */
2803 if (commutative >= 0)
2804 {
2805 if (c == commutative || c == commutative + 1)
2806 {
2807 int other = c + (c == commutative ? 1 : -1);
2808 operands_match[other][i]
2809 = operands_match_p (recog_data.operand[other],
2810 recog_data.operand[i]);
2811 }
2812 if (i == commutative || i == commutative + 1)
2813 {
2814 int other = i + (i == commutative ? 1 : -1);
2815 operands_match[c][other]
2816 = operands_match_p (recog_data.operand[c],
2817 recog_data.operand[other]);
2818 }
2819 /* Note that C is supposed to be less than I.
2820 No need to consider altering both C and I because in
2821 that case we would alter one into the other. */
2822 }
2823 }
2824 }
2825 }
2826 }
2827
2828 /* Examine each operand that is a memory reference or memory address
2829 and reload parts of the addresses into index registers.
2830 Also here any references to pseudo regs that didn't get hard regs
2831 but are equivalent to constants get replaced in the insn itself
2832 with those constants. Nobody will ever see them again.
2833
2834 Finally, set up the preferred classes of each operand. */
2835
2836 for (i = 0; i < noperands; i++)
2837 {
2838 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2839
2840 address_reloaded[i] = 0;
2841 address_operand_reloaded[i] = 0;
2842 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2843 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2844 : RELOAD_OTHER);
2845 address_type[i]
2846 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2847 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2848 : RELOAD_OTHER);
2849
2850 if (*constraints[i] == 0)
2851 /* Ignore things like match_operator operands. */
2852 ;
2853 else if (constraints[i][0] == 'p'
2854 || (insn_extra_address_constraint
2855 (lookup_constraint (constraints[i]))))
2856 {
2857 address_operand_reloaded[i]
2858 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2859 recog_data.operand[i],
2860 recog_data.operand_loc[i],
2861 i, operand_type[i], ind_levels, insn);
2862
2863 /* If we now have a simple operand where we used to have a
2864 PLUS or MULT, re-recognize and try again. */
2865 if ((OBJECT_P (*recog_data.operand_loc[i])
2866 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2867 && (GET_CODE (recog_data.operand[i]) == MULT
2868 || GET_CODE (recog_data.operand[i]) == PLUS))
2869 {
2870 INSN_CODE (insn) = -1;
2871 retval = find_reloads (insn, replace, ind_levels, live_known,
2872 reload_reg_p);
2873 return retval;
2874 }
2875
2876 recog_data.operand[i] = *recog_data.operand_loc[i];
2877 substed_operand[i] = recog_data.operand[i];
2878
2879 /* Address operands are reloaded in their existing mode,
2880 no matter what is specified in the machine description. */
2881 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2882
2883 /* If the address is a single CONST_INT, pick the address mode
2884 instead; otherwise we will later not know in which mode
2885 the reload should be performed. */
2886 if (operand_mode[i] == VOIDmode)
2887 operand_mode[i] = Pmode;
2888
2889 }
2890 else if (code == MEM)
2891 {
2892 address_reloaded[i]
2893 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2894 recog_data.operand_loc[i],
2895 XEXP (recog_data.operand[i], 0),
2896 &XEXP (recog_data.operand[i], 0),
2897 i, address_type[i], ind_levels, insn);
2898 recog_data.operand[i] = *recog_data.operand_loc[i];
2899 substed_operand[i] = recog_data.operand[i];
2900 }
2901 else if (code == SUBREG)
2902 {
2903 rtx reg = SUBREG_REG (recog_data.operand[i]);
2904 rtx op
2905 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2906 ind_levels,
2907 set != 0
2908 && &SET_DEST (set) == recog_data.operand_loc[i],
2909 insn,
2910 &address_reloaded[i]);
2911
2912 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2913 that didn't get a hard register, emit a USE with a REG_EQUAL
2914 note in front so that we might inherit a previous, possibly
2915 wider reload. */
2916
2917 if (replace
2918 && MEM_P (op)
2919 && REG_P (reg)
2920 && (GET_MODE_SIZE (GET_MODE (reg))
2921 >= GET_MODE_SIZE (GET_MODE (op)))
2922 && reg_equiv_constant (REGNO (reg)) == 0)
2923 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2924 insn),
2925 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2926
2927 substed_operand[i] = recog_data.operand[i] = op;
2928 }
2929 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2930 /* We can get a PLUS as an "operand" as a result of register
2931 elimination. See eliminate_regs and gen_reload. We handle
2932 a unary operator by reloading the operand. */
2933 substed_operand[i] = recog_data.operand[i]
2934 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2935 ind_levels, 0, insn,
2936 &address_reloaded[i]);
2937 else if (code == REG)
2938 {
2939 /* This is equivalent to calling find_reloads_toplev.
2940 The code is duplicated for speed.
2941 When we find a pseudo always equivalent to a constant,
2942 we replace it by the constant. We must be sure, however,
2943 that we don't try to replace it in the insn in which it
2944 is being set. */
2945 int regno = REGNO (recog_data.operand[i]);
2946 if (reg_equiv_constant (regno) != 0
2947 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2948 {
2949 /* Record the existing mode so that the check whether constants are
2950 allowed will work when operand_mode isn't specified. */
2951
2952 if (operand_mode[i] == VOIDmode)
2953 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2954
2955 substed_operand[i] = recog_data.operand[i]
2956 = reg_equiv_constant (regno);
2957 }
2958 if (reg_equiv_memory_loc (regno) != 0
2959 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2960 /* We need not give a valid is_set_dest argument since the case
2961 of a constant equivalence was checked above. */
2962 substed_operand[i] = recog_data.operand[i]
2963 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2964 ind_levels, 0, insn,
2965 &address_reloaded[i]);
2966 }
2967 /* If the operand is still a register (we didn't replace it with an
2968 equivalent), get the preferred class to reload it into. */
2969 code = GET_CODE (recog_data.operand[i]);
2970 preferred_class[i]
2971 = ((code == REG && REGNO (recog_data.operand[i])
2972 >= FIRST_PSEUDO_REGISTER)
2973 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2974 : NO_REGS);
2975 pref_or_nothing[i]
2976 = (code == REG
2977 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2978 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2979 }
2980
2981 /* If this is simply a copy from operand 1 to operand 0, merge the
2982 preferred classes for the operands. */
2983 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2984 && recog_data.operand[1] == SET_SRC (set))
2985 {
2986 preferred_class[0] = preferred_class[1]
2987 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2988 pref_or_nothing[0] |= pref_or_nothing[1];
2989 pref_or_nothing[1] |= pref_or_nothing[0];
2990 }
2991
2992 /* Now see what we need for pseudo-regs that didn't get hard regs
2993 or got the wrong kind of hard reg. For this, we must consider
2994 all the operands together against the register constraints. */
2995
2996 best = MAX_RECOG_OPERANDS * 2 + 600;
2997
2998 goal_alternative_swapped = 0;
2999
3000 /* The constraints are made of several alternatives.
3001 Each operand's constraint looks like foo,bar,... with commas
3002 separating the alternatives. The first alternatives for all
3003 operands go together, the second alternatives go together, etc.
3004
3005 First loop over alternatives. */
3006
3007 for (this_alternative_number = 0;
3008 this_alternative_number < n_alternatives;
3009 this_alternative_number++)
3010 {
3011 int swapped;
3012
3013 if (!TEST_BIT (recog_data.enabled_alternatives, this_alternative_number))
3014 {
3015 int i;
3016
3017 for (i = 0; i < recog_data.n_operands; i++)
3018 constraints[i] = skip_alternative (constraints[i]);
3019
3020 continue;
3021 }
3022
3023 /* If insn is commutative (it's safe to exchange a certain pair
3024 of operands) then we need to try each alternative twice, the
3025 second time matching those two operands as if we had
3026 exchanged them. To do this, really exchange them in
3027 operands. */
3028 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3029 {
3030 /* Loop over operands for one constraint alternative. */
3031 /* LOSERS counts those that don't fit this alternative
3032 and would require loading. */
3033 int losers = 0;
3034 /* BAD is set to 1 if some operand can't fit this alternative
3035 even after reloading. */
3036 int bad = 0;
3037 /* REJECT is a count of how undesirable this alternative says it is
3038 if any reloading is required. If the alternative matches exactly
3039 then REJECT is ignored, but otherwise it gets this much
3040 counted against it in addition to the reloading needed. Each
3041 ? counts three times here since we want the disparagement caused by
3042 a bad register class to count only 1/3 as much. */
3043 int reject = 0;
3044
3045 if (swapped)
3046 {
3047 enum reg_class tclass;
3048 int t;
3049
3050 recog_data.operand[commutative] = substed_operand[commutative + 1];
3051 recog_data.operand[commutative + 1] = substed_operand[commutative];
3052 /* Swap the duplicates too. */
3053 for (i = 0; i < recog_data.n_dups; i++)
3054 if (recog_data.dup_num[i] == commutative
3055 || recog_data.dup_num[i] == commutative + 1)
3056 *recog_data.dup_loc[i]
3057 = recog_data.operand[(int) recog_data.dup_num[i]];
3058
3059 tclass = preferred_class[commutative];
3060 preferred_class[commutative] = preferred_class[commutative + 1];
3061 preferred_class[commutative + 1] = tclass;
3062
3063 t = pref_or_nothing[commutative];
3064 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3065 pref_or_nothing[commutative + 1] = t;
3066
3067 t = address_reloaded[commutative];
3068 address_reloaded[commutative] = address_reloaded[commutative + 1];
3069 address_reloaded[commutative + 1] = t;
3070 }
3071
3072 this_earlyclobber = 0;
3073
3074 for (i = 0; i < noperands; i++)
3075 {
3076 const char *p = constraints[i];
3077 char *end;
3078 int len;
3079 int win = 0;
3080 int did_match = 0;
3081 /* 0 => this operand can be reloaded somehow for this alternative. */
3082 int badop = 1;
3083 /* 0 => this operand can be reloaded if the alternative allows regs. */
3084 int winreg = 0;
3085 int c;
3086 int m;
3087 rtx operand = recog_data.operand[i];
3088 int offset = 0;
3089 /* Nonzero means this is a MEM that must be reloaded into a reg
3090 regardless of what the constraint says. */
3091 int force_reload = 0;
3092 int offmemok = 0;
3093 /* Nonzero if a constant forced into memory would be OK for this
3094 operand. */
3095 int constmemok = 0;
3096 int earlyclobber = 0;
3097 enum constraint_num cn;
3098 enum reg_class cl;
3099
3100 /* If the predicate accepts a unary operator, it means that
3101 we need to reload the operand, but do not do this for
3102 match_operator and friends. */
3103 if (UNARY_P (operand) && *p != 0)
3104 operand = XEXP (operand, 0);
3105
3106 /* If the operand is a SUBREG, extract
3107 the REG or MEM (or maybe even a constant) within.
3108 (Constants can occur as a result of reg_equiv_constant.) */
3109
3110 while (GET_CODE (operand) == SUBREG)
3111 {
3112 /* Offset only matters when operand is a REG and
3113 it is a hard reg. This is because it is passed
3114 to reg_fits_class_p if it is a REG and all pseudos
3115 return 0 from that function. */
3116 if (REG_P (SUBREG_REG (operand))
3117 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3118 {
3119 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3120 GET_MODE (SUBREG_REG (operand)),
3121 SUBREG_BYTE (operand),
3122 GET_MODE (operand)) < 0)
3123 force_reload = 1;
3124 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3125 GET_MODE (SUBREG_REG (operand)),
3126 SUBREG_BYTE (operand),
3127 GET_MODE (operand));
3128 }
3129 operand = SUBREG_REG (operand);
3130 /* Force reload if this is a constant or PLUS or if there may
3131 be a problem accessing OPERAND in the outer mode. */
3132 if (CONSTANT_P (operand)
3133 || GET_CODE (operand) == PLUS
3134 /* We must force a reload of paradoxical SUBREGs
3135 of a MEM because the alignment of the inner value
3136 may not be enough to do the outer reference. On
3137 big-endian machines, it may also reference outside
3138 the object.
3139
3140 On machines that extend byte loads, if we have a SUBREG
3141 whose inner and outer modes are both no wider than a word,
3142 and whose inner mode is narrower, integral, and gets
3143 extended when loaded from memory, combine.c has made
3144 assumptions about how the machine accesses such a
3145 register.  If the data is, in fact, in memory, we must
3146 always load it using the size assumed to be in the
3147 register and let the insn do the different-sized
3148 accesses.
3149
3150 This is doubly true if WORD_REGISTER_OPERATIONS. In
3151 this case eliminate_regs has left non-paradoxical
3152 subregs for push_reload to see. Make sure it does
3153 by forcing the reload.
3154
3155 ??? When is it right at this stage to have a subreg
3156 of a mem that is _not_ to be handled specially? IMO
3157 those should have been reduced to just a mem. */
3158 || ((MEM_P (operand)
3159 || (REG_P (operand)
3160 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3161 #ifndef WORD_REGISTER_OPERATIONS
3162 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3163 < BIGGEST_ALIGNMENT)
3164 && (GET_MODE_SIZE (operand_mode[i])
3165 > GET_MODE_SIZE (GET_MODE (operand))))
3166 || BYTES_BIG_ENDIAN
3167 #ifdef LOAD_EXTEND_OP
3168 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3169 && (GET_MODE_SIZE (GET_MODE (operand))
3170 <= UNITS_PER_WORD)
3171 && (GET_MODE_SIZE (operand_mode[i])
3172 > GET_MODE_SIZE (GET_MODE (operand)))
3173 && INTEGRAL_MODE_P (GET_MODE (operand))
3174 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3175 #endif
3176 )
3177 #endif
3178 )
3179 )
3180 force_reload = 1;
3181 }
3182
3183 this_alternative[i] = NO_REGS;
3184 this_alternative_win[i] = 0;
3185 this_alternative_match_win[i] = 0;
3186 this_alternative_offmemok[i] = 0;
3187 this_alternative_earlyclobber[i] = 0;
3188 this_alternative_matches[i] = -1;
3189
3190 /* An empty constraint or empty alternative
3191 allows anything which matched the pattern. */
3192 if (*p == 0 || *p == ',')
3193 win = 1, badop = 0;
3194
3195 /* Scan this alternative's specs for this operand;
3196 set WIN if the operand fits any letter in this alternative.
3197 Otherwise, clear BADOP if this operand could
3198 fit some letter after reloads,
3199 or set WINREG if this operand could fit after reloads
3200 provided the constraint allows some registers. */
3201
3202 do
3203 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3204 {
3205 case '\0':
3206 len = 0;
3207 break;
3208 case ',':
3209 c = '\0';
3210 break;
3211
3212 case '=': case '+': case '*':
3213 break;
3214
3215 case '%':
3216 /* We only support one commutative marker, the first
3217 one. We already set commutative above. */
3218 break;
3219
3220 case '?':
3221 reject += 6;
3222 break;
3223
3224 case '!':
3225 reject = 600;
3226 break;
3227
3228 case '#':
3229 /* Ignore rest of this alternative as far as
3230 reloading is concerned. */
3231 do
3232 p++;
3233 while (*p && *p != ',');
3234 len = 0;
3235 break;
3236
3237 case '0': case '1': case '2': case '3': case '4':
3238 case '5': case '6': case '7': case '8': case '9':
3239 m = strtoul (p, &end, 10);
3240 p = end;
3241 len = 0;
3242
3243 this_alternative_matches[i] = m;
3244 /* We are supposed to match a previous operand.
3245 If we do, we win if that one did.
3246 If we do not, count both of the operands as losers.
3247 (This is too conservative, since most of the time
3248 only a single reload insn will be needed to make
3249 the two operands win. As a result, this alternative
3250 may be rejected when it is actually desirable.) */
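/* Hypothetical example: a two-address add may give operand 0 the
   constraint "=r" and operand 1 the constraint "0", meaning operand 1
   must match operand 0.  If the two rtxs already match, we inherit
   operand 0's win; otherwise both operands count as losers unless
   find_dummy_reload below can serve the pair with a single reload.  */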
3251 if ((swapped && (m != commutative || i != commutative + 1))
3252 /* If we are matching as if two operands were swapped,
3253 also pretend that operands_match had been computed
3254 with them swapped.
3255 But if I is the second of those and C is the first,
3256 don't exchange them, because operands_match is valid
3257 only on one side of its diagonal. */
3258 ? (operands_match
3259 [(m == commutative || m == commutative + 1)
3260 ? 2 * commutative + 1 - m : m]
3261 [(i == commutative || i == commutative + 1)
3262 ? 2 * commutative + 1 - i : i])
3263 : operands_match[m][i])
3264 {
3265 /* If we are matching a non-offsettable address where an
3266 offsettable address was expected, then we must reject
3267 this combination, because we can't reload it. */
3268 if (this_alternative_offmemok[m]
3269 && MEM_P (recog_data.operand[m])
3270 && this_alternative[m] == NO_REGS
3271 && ! this_alternative_win[m])
3272 bad = 1;
3273
3274 did_match = this_alternative_win[m];
3275 }
3276 else
3277 {
3278 /* Operands don't match. */
3279 rtx value;
3280 int loc1, loc2;
3281 /* Retroactively mark the operand we had to match
3282 as a loser, if it wasn't already. */
3283 if (this_alternative_win[m])
3284 losers++;
3285 this_alternative_win[m] = 0;
3286 if (this_alternative[m] == NO_REGS)
3287 bad = 1;
3288 /* But count the pair only once in the total badness of
3289 this alternative, if the pair can be a dummy reload.
3290 The pointers in operand_loc are not swapped; swap
3291 them by hand if necessary. */
3292 if (swapped && i == commutative)
3293 loc1 = commutative + 1;
3294 else if (swapped && i == commutative + 1)
3295 loc1 = commutative;
3296 else
3297 loc1 = i;
3298 if (swapped && m == commutative)
3299 loc2 = commutative + 1;
3300 else if (swapped && m == commutative + 1)
3301 loc2 = commutative;
3302 else
3303 loc2 = m;
3304 value
3305 = find_dummy_reload (recog_data.operand[i],
3306 recog_data.operand[m],
3307 recog_data.operand_loc[loc1],
3308 recog_data.operand_loc[loc2],
3309 operand_mode[i], operand_mode[m],
3310 this_alternative[m], -1,
3311 this_alternative_earlyclobber[m]);
3312
3313 if (value != 0)
3314 losers--;
3315 }
3316 /* This can be fixed with reloads if the operand
3317 we are supposed to match can be fixed with reloads. */
3318 badop = 0;
3319 this_alternative[i] = this_alternative[m];
3320
3321 /* If we have to reload this operand and some previous
3322 operand also had to match the same thing as this
3323 operand, we don't know how to do that. So reject this
3324 alternative. */
3325 if (! did_match || force_reload)
3326 for (j = 0; j < i; j++)
3327 if (this_alternative_matches[j]
3328 == this_alternative_matches[i])
3329 {
3330 badop = 1;
3331 break;
3332 }
3333 break;
3334
3335 case 'p':
3336 /* All necessary reloads for an address_operand
3337 were handled in find_reloads_address. */
3338 this_alternative[i]
3339 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3340 ADDRESS, SCRATCH);
3341 win = 1;
3342 badop = 0;
3343 break;
3344
3345 case TARGET_MEM_CONSTRAINT:
3346 if (force_reload)
3347 break;
3348 if (MEM_P (operand)
3349 || (REG_P (operand)
3350 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3351 && reg_renumber[REGNO (operand)] < 0))
3352 win = 1;
3353 if (CONST_POOL_OK_P (operand_mode[i], operand))
3354 badop = 0;
3355 constmemok = 1;
3356 break;
3357
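/* The '<' and '>' cases below accept MEMs whose address is an
   auto-decrement or auto-increment, respectively.  For illustration
   (target-dependent), a push onto a downward-growing stack typically
   looks like (mem (pre_dec (reg sp))) and would satisfy '<'.  */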
3358 case '<':
3359 if (MEM_P (operand)
3360 && ! address_reloaded[i]
3361 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3362 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3363 win = 1;
3364 break;
3365
3366 case '>':
3367 if (MEM_P (operand)
3368 && ! address_reloaded[i]
3369 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3370 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3371 win = 1;
3372 break;
3373
3374 /* Memory operand whose address is not offsettable. */
3375 case 'V':
3376 if (force_reload)
3377 break;
3378 if (MEM_P (operand)
3379 && ! (ind_levels ? offsettable_memref_p (operand)
3380 : offsettable_nonstrict_memref_p (operand))
3381 /* Certain mem addresses will become offsettable
3382 after they themselves are reloaded. This is important;
3383 we don't want our own handling of unoffsettables
3384 to override the handling of reg_equiv_address. */
3385 && !(REG_P (XEXP (operand, 0))
3386 && (ind_levels == 0
3387 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3388 win = 1;
3389 break;
3390
3391 /* Memory operand whose address is offsettable. */
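/* Informally, an address is offsettable if adding a small constant to it
   still yields a valid address, e.g. (mem (plus (reg) (const_int 8))) on
   most targets, whereas (mem (pre_dec (reg))) is not; the authoritative
   tests are offsettable_memref_p and offsettable_nonstrict_memref_p.  */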
3392 case 'o':
3393 if (force_reload)
3394 break;
3395 if ((MEM_P (operand)
3396 /* If IND_LEVELS, find_reloads_address won't reload a
3397 pseudo that didn't get a hard reg, so we have to
3398 reject that case. */
3399 && ((ind_levels ? offsettable_memref_p (operand)
3400 : offsettable_nonstrict_memref_p (operand))
3401 /* A reloaded address is offsettable because it is now
3402 just a simple register indirect. */
3403 || address_reloaded[i] == 1))
3404 || (REG_P (operand)
3405 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3406 && reg_renumber[REGNO (operand)] < 0
3407 /* If reg_equiv_address is nonzero, we will be
3408 loading it into a register; hence it will be
3409 offsettable, but we cannot say that reg_equiv_mem
3410 is offsettable without checking. */
3411 && ((reg_equiv_mem (REGNO (operand)) != 0
3412 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3413 || (reg_equiv_address (REGNO (operand)) != 0))))
3414 win = 1;
3415 if (CONST_POOL_OK_P (operand_mode[i], operand)
3416 || MEM_P (operand))
3417 badop = 0;
3418 constmemok = 1;
3419 offmemok = 1;
3420 break;
3421
3422 case '&':
3423 /* Output operand that is stored before the need for the
3424 input operands (and their index registers) is over. */
3425 earlyclobber = 1, this_earlyclobber = 1;
3426 break;
3427
3428 case 'E':
3429 case 'F':
3430 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3431 || (GET_CODE (operand) == CONST_VECTOR
3432 && (GET_MODE_CLASS (GET_MODE (operand))
3433 == MODE_VECTOR_FLOAT)))
3434 win = 1;
3435 break;
3436
3437 case 'G':
3438 case 'H':
3439 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3440 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3441 win = 1;
3442 break;
3443
3444 case 's':
3445 if (CONST_SCALAR_INT_P (operand))
3446 break;
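/* Not a scalar integer constant: fall through to the 'i' handling,
   which accepts any remaining constant, subject to PIC legitimacy.  */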
3447 case 'i':
3448 if (CONSTANT_P (operand)
3449 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3450 win = 1;
3451 break;
3452
3453 case 'n':
3454 if (CONST_SCALAR_INT_P (operand))
3455 win = 1;
3456 break;
3457
3458 case 'I':
3459 case 'J':
3460 case 'K':
3461 case 'L':
3462 case 'M':
3463 case 'N':
3464 case 'O':
3465 case 'P':
3466 if (CONST_INT_P (operand)
3467 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3468 win = 1;
3469 break;
3470
3471 case 'X':
3472 force_reload = 0;
3473 win = 1;
3474 break;
3475
3476 case 'g':
3477 if (! force_reload
3478 /* A PLUS is never a valid operand, but reload can make
3479 it from a register when eliminating registers. */
3480 && GET_CODE (operand) != PLUS
3481 /* A SCRATCH is not a valid operand. */
3482 && GET_CODE (operand) != SCRATCH
3483 && (! CONSTANT_P (operand)
3484 || ! flag_pic
3485 || LEGITIMATE_PIC_OPERAND_P (operand))
3486 && (GENERAL_REGS == ALL_REGS
3487 || !REG_P (operand)
3488 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3489 && reg_renumber[REGNO (operand)] < 0)))
3490 win = 1;
3491 /* Drop through into 'r' case. */
3492
3493 case 'r':
3494 cl = GENERAL_REGS;
3495 goto reg;
3496
3497 default:
3498 cn = lookup_constraint (p);
3499 switch (get_constraint_type (cn))
3500 {
3501 case CT_REGISTER:
3502 cl = reg_class_for_constraint (cn);
3503 if (cl != NO_REGS)
3504 goto reg;
3505 break;
3506
3507 case CT_CONST_INT:
3508 if (CONST_INT_P (operand)
3509 && (insn_const_int_ok_for_constraint
3510 (INTVAL (operand), cn)))
3511 win = true;
3512 break;
3513
3514 case CT_MEMORY:
3515 if (force_reload)
3516 break;
3517 if (constraint_satisfied_p (operand, cn))
3518 win = 1;
3519 /* If the address was already reloaded,
3520 we win as well. */
3521 else if (MEM_P (operand) && address_reloaded[i] == 1)
3522 win = 1;
3523 /* Likewise if the address will be reloaded because
3524 reg_equiv_address is nonzero. For reg_equiv_mem
3525 we have to check. */
3526 else if (REG_P (operand)
3527 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3528 && reg_renumber[REGNO (operand)] < 0
3529 && ((reg_equiv_mem (REGNO (operand)) != 0
3530 && (constraint_satisfied_p
3531 (reg_equiv_mem (REGNO (operand)),
3532 cn)))
3533 || (reg_equiv_address (REGNO (operand))
3534 != 0)))
3535 win = 1;
3536
3537 /* If we didn't already win, we can reload
3538 constants via force_const_mem, and other
3539 MEMs by reloading the address like for 'o'. */
3540 if (CONST_POOL_OK_P (operand_mode[i], operand)
3541 || MEM_P (operand))
3542 badop = 0;
3543 constmemok = 1;
3544 offmemok = 1;
3545 break;
3546
3547 case CT_ADDRESS:
3548 if (constraint_satisfied_p (operand, cn))
3549 win = 1;
3550
3551 /* If we didn't already win, we can reload
3552 the address into a base register. */
3553 this_alternative[i]
3554 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3555 ADDRESS, SCRATCH);
3556 badop = 0;
3557 break;
3558
3559 case CT_FIXED_FORM:
3560 if (constraint_satisfied_p (operand, cn))
3561 win = 1;
3562 break;
3563 }
3564 break;
3565
3566 reg:
3567 this_alternative[i]
3568 = reg_class_subunion[this_alternative[i]][cl];
3569 if (GET_MODE (operand) == BLKmode)
3570 break;
3571 winreg = 1;
3572 if (REG_P (operand)
3573 && reg_fits_class_p (operand, this_alternative[i],
3574 offset, GET_MODE (recog_data.operand[i])))
3575 win = 1;
3576 break;
3577 }
3578 while ((p += len), c);
3579
3580 if (swapped == (commutative >= 0 ? 1 : 0))
3581 constraints[i] = p;
3582
3583 /* If this operand could be handled with a reg,
3584 and some reg is allowed, then this operand can be handled. */
3585 if (winreg && this_alternative[i] != NO_REGS
3586 && (win || !class_only_fixed_regs[this_alternative[i]]))
3587 badop = 0;
3588
3589 /* Record which operands fit this alternative. */
3590 this_alternative_earlyclobber[i] = earlyclobber;
3591 if (win && ! force_reload)
3592 this_alternative_win[i] = 1;
3593 else if (did_match && ! force_reload)
3594 this_alternative_match_win[i] = 1;
3595 else
3596 {
3597 int const_to_mem = 0;
3598
3599 this_alternative_offmemok[i] = offmemok;
3600 losers++;
3601 if (badop)
3602 bad = 1;
3603 /* Alternative loses if it has no regs for a reg operand. */
3604 if (REG_P (operand)
3605 && this_alternative[i] == NO_REGS
3606 && this_alternative_matches[i] < 0)
3607 bad = 1;
3608
3609 /* If this is a constant that is reloaded into the desired
3610 class by copying it to memory first, count that as another
3611 reload. This is consistent with other code and is
3612 required to avoid choosing another alternative when
3613 the constant is moved into memory by this function on
3614 an early reload pass. Note that the test here is
3615 precisely the same as in the code below that calls
3616 force_const_mem. */
3617 if (CONST_POOL_OK_P (operand_mode[i], operand)
3618 && ((targetm.preferred_reload_class (operand,
3619 this_alternative[i])
3620 == NO_REGS)
3621 || no_input_reloads))
3622 {
3623 const_to_mem = 1;
3624 if (this_alternative[i] != NO_REGS)
3625 losers++;
3626 }
3627
3628 /* Alternative loses if it requires a type of reload not
3629 permitted for this insn. We can always reload SCRATCH
3630 and objects with a REG_UNUSED note. */
3631 if (GET_CODE (operand) != SCRATCH
3632 && modified[i] != RELOAD_READ && no_output_reloads
3633 && ! find_reg_note (insn, REG_UNUSED, operand))
3634 bad = 1;
3635 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3636 && ! const_to_mem)
3637 bad = 1;
3638
3639 /* If we can't reload this value at all, reject this
3640 alternative. Note that we could also lose due to
3641 LIMIT_RELOAD_CLASS, but we don't check that
3642 here. */
3643
3644 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3645 {
3646 if (targetm.preferred_reload_class (operand,
3647 this_alternative[i])
3648 == NO_REGS)
3649 reject = 600;
3650
3651 if (operand_type[i] == RELOAD_FOR_OUTPUT
3652 && (targetm.preferred_output_reload_class (operand,
3653 this_alternative[i])
3654 == NO_REGS))
3655 reject = 600;
3656 }
3657
3658 /* We prefer to reload pseudos over reloading other things,
3659 since such reloads can sometimes be eliminated later.
3660 If we are reloading a SCRATCH, we won't be generating any
3661 insns, just using a register, so it is also preferred.
3662 So bump REJECT in other cases.  Don't do this when we are
3663 forcing a constant into memory and it will then win, since
3664 we don't want a different alternative to match in that
3665 case. */
3666 if (! (REG_P (operand)
3667 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3668 && GET_CODE (operand) != SCRATCH
3669 && ! (const_to_mem && constmemok))
3670 reject += 2;
3671
3672 /* Input reloads can be inherited more often than output
3673 reloads can be removed, so penalize output reloads. */
3674 if (operand_type[i] != RELOAD_FOR_INPUT
3675 && GET_CODE (operand) != SCRATCH)
3676 reject++;
3677 }
3678
3679 /* If this operand is a pseudo register that didn't get
3680 a hard reg and this alternative accepts some
3681 register, see if the class that we want is a subset
3682 of the preferred class for this register. If not,
3683 but it intersects that class, use the preferred class
3684 instead. If it does not intersect the preferred
3685 class, show that usage of this alternative should be
3686 discouraged; it will be discouraged more still if the
3687 register is `preferred or nothing'. We do this
3688 because it increases the chance of reusing our spill
3689 register in a later insn and avoiding a pair of
3690 memory stores and loads.
3691
3692 Don't bother with this if this alternative will
3693 accept this operand.
3694
3695 Don't do this for a multiword operand, since it is
3696 only a small win and has the risk of requiring more
3697 spill registers, which could cause a large loss.
3698
3699 Don't do this if the preferred class has only one
3700 register because we might otherwise exhaust the
3701 class. */
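/* Hypothetical example of the above: if this alternative allows
   GENERAL_REGS but the pseudo prefers a smaller subclass (say, a
   target's base-register class), the reload class is narrowed to that
   subclass; if the pseudo instead prefers a disjoint class such as
   floating-point registers, REJECT is increased, and by more when the
   register is `preferred or nothing'.  */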
3702
3703 if (! win && ! did_match
3704 && this_alternative[i] != NO_REGS
3705 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3706 && reg_class_size [(int) preferred_class[i]] > 0
3707 && ! small_register_class_p (preferred_class[i]))
3708 {
3709 if (! reg_class_subset_p (this_alternative[i],
3710 preferred_class[i]))
3711 {
3712 /* Since we don't have a way of forming the intersection,
3713 we just do something special if the preferred class
3714 is a subset of the class we have; that's the most
3715 common case anyway. */
3716 if (reg_class_subset_p (preferred_class[i],
3717 this_alternative[i]))
3718 this_alternative[i] = preferred_class[i];
3719 else
3720 reject += (2 + 2 * pref_or_nothing[i]);
3721 }
3722 }
3723 }
3724
3725 /* Now see if any output operands that are marked "earlyclobber"
3726 in this alternative conflict with any input operands
3727 or any memory addresses. */
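/* Hypothetical illustration: an asm output constrained as "=&r" is
   earlyclobber.  Any input operand or memory address that could be
   altered by storing that output early, and that is not explicitly
   constrained to match it, conflicts; the code below then reloads
   either the conflicting input or the earlyclobbered output.  */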
3728
3729 for (i = 0; i < noperands; i++)
3730 if (this_alternative_earlyclobber[i]
3731 && (this_alternative_win[i] || this_alternative_match_win[i]))
3732 {
3733 struct decomposition early_data;
3734
3735 early_data = decompose (recog_data.operand[i]);
3736
3737 gcc_assert (modified[i] != RELOAD_READ);
3738
3739 if (this_alternative[i] == NO_REGS)
3740 {
3741 this_alternative_earlyclobber[i] = 0;
3742 gcc_assert (this_insn_is_asm);
3743 error_for_asm (this_insn,
3744 "%<&%> constraint used with no register class");
3745 }
3746
3747 for (j = 0; j < noperands; j++)
3748 /* Is this an input operand or a memory ref? */
3749 if ((MEM_P (recog_data.operand[j])
3750 || modified[j] != RELOAD_WRITE)
3751 && j != i
3752 /* Ignore things like match_operator operands. */
3753 && !recog_data.is_operator[j]
3754 /* Don't count an input operand that is constrained to match
3755 the early clobber operand. */
3756 && ! (this_alternative_matches[j] == i
3757 && rtx_equal_p (recog_data.operand[i],
3758 recog_data.operand[j]))
3759 /* Is it altered by storing the earlyclobber operand? */
3760 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3761 early_data))
3762 {
3763 /* If the output is in a non-empty few-regs class,
3764 it's costly to reload it, so reload the input instead. */
3765 if (small_register_class_p (this_alternative[i])
3766 && (REG_P (recog_data.operand[j])
3767 || GET_CODE (recog_data.operand[j]) == SUBREG))
3768 {
3769 losers++;
3770 this_alternative_win[j] = 0;
3771 this_alternative_match_win[j] = 0;
3772 }
3773 else
3774 break;
3775 }
3776 /* If an earlyclobber operand conflicts with something,
3777 it must be reloaded, so request this and count the cost. */
3778 if (j != noperands)
3779 {
3780 losers++;
3781 this_alternative_win[i] = 0;
3782 this_alternative_match_win[j] = 0;
3783 for (j = 0; j < noperands; j++)
3784 if (this_alternative_matches[j] == i
3785 && this_alternative_match_win[j])
3786 {
3787 this_alternative_win[j] = 0;
3788 this_alternative_match_win[j] = 0;
3789 losers++;
3790 }
3791 }
3792 }
3793
3794 /* If one alternative accepts all the operands with no reload required,
3795 choose that alternative; don't consider the remaining ones. */
3796 if (losers == 0)
3797 {
3798 /* Unswap these so that they are never swapped at `finish'. */
3799 if (swapped)
3800 {
3801 recog_data.operand[commutative] = substed_operand[commutative];
3802 recog_data.operand[commutative + 1]
3803 = substed_operand[commutative + 1];
3804 }
3805 for (i = 0; i < noperands; i++)
3806 {
3807 goal_alternative_win[i] = this_alternative_win[i];
3808 goal_alternative_match_win[i] = this_alternative_match_win[i];
3809 goal_alternative[i] = this_alternative[i];
3810 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3811 goal_alternative_matches[i] = this_alternative_matches[i];
3812 goal_alternative_earlyclobber[i]
3813 = this_alternative_earlyclobber[i];
3814 }
3815 goal_alternative_number = this_alternative_number;
3816 goal_alternative_swapped = swapped;
3817 goal_earlyclobber = this_earlyclobber;
3818 goto finish;
3819 }
3820
3821 /* REJECT, set by the ! and ? constraint characters and when a register
3822 would be reloaded into a non-preferred class, discourages the use of
3823 this alternative for a reload goal. REJECT is incremented by six
3824 for each ? and two for each non-preferred class. */
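/* Worked example of the weighting below (hypothetical): an alternative
   needing two reloads plus one ? scores 2*6 + 6 = 18, the same as an
   alternative needing three reloads and no ?; on a tie the alternative
   seen first is kept, since BEST is only replaced by a strictly lower
   score.  */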
3825 losers = losers * 6 + reject;
3826
3827 /* If this alternative can be made to work by reloading,
3828 and it needs less reloading than the others checked so far,
3829 record it as the chosen goal for reloading. */
3830 if (! bad)
3831 {
3832 if (best > losers)
3833 {
3834 for (i = 0; i < noperands; i++)
3835 {
3836 goal_alternative[i] = this_alternative[i];
3837 goal_alternative_win[i] = this_alternative_win[i];
3838 goal_alternative_match_win[i]
3839 = this_alternative_match_win[i];
3840 goal_alternative_offmemok[i]
3841 = this_alternative_offmemok[i];
3842 goal_alternative_matches[i] = this_alternative_matches[i];
3843 goal_alternative_earlyclobber[i]
3844 = this_alternative_earlyclobber[i];
3845 }
3846 goal_alternative_swapped = swapped;
3847 best = losers;
3848 goal_alternative_number = this_alternative_number;
3849 goal_earlyclobber = this_earlyclobber;
3850 }
3851 }
3852
3853 if (swapped)
3854 {
3855 enum reg_class tclass;
3856 int t;
3857
3858 /* If the commutative operands have been swapped, swap
3859 them back in order to check the next alternative. */
3860 recog_data.operand[commutative] = substed_operand[commutative];
3861 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3862 /* Unswap the duplicates too. */
3863 for (i = 0; i < recog_data.n_dups; i++)
3864 if (recog_data.dup_num[i] == commutative
3865 || recog_data.dup_num[i] == commutative + 1)
3866 *recog_data.dup_loc[i]
3867 = recog_data.operand[(int) recog_data.dup_num[i]];
3868
3869 /* Unswap the operand related information as well. */
3870 tclass = preferred_class[commutative];
3871 preferred_class[commutative] = preferred_class[commutative + 1];
3872 preferred_class[commutative + 1] = tclass;
3873
3874 t = pref_or_nothing[commutative];
3875 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3876 pref_or_nothing[commutative + 1] = t;
3877
3878 t = address_reloaded[commutative];
3879 address_reloaded[commutative] = address_reloaded[commutative + 1];
3880 address_reloaded[commutative + 1] = t;
3881 }
3882 }
3883 }
3884
3885 /* The operands don't meet the constraints.
3886 goal_alternative describes the alternative
3887 that we could reach by reloading the fewest operands.
3888 Reload so as to fit it. */
3889
3890 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3891 {
3892 /* No alternative works with reloads?? */
3893 if (insn_code_number >= 0)
3894 fatal_insn ("unable to generate reloads for:", insn);
3895 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3896 /* Avoid further trouble with this insn. */
3897 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3898 n_reloads = 0;
3899 return 0;
3900 }
3901
3902 /* Jump to `finish' from above if all operands are valid already.
3903 In that case, goal_alternative_win is all 1. */
3904 finish:
3905
3906 /* Right now, for any pair of operands I and J that are required to match,
3907 with I < J,
3908 goal_alternative_matches[J] is I.
3909 Set up goal_alternative_matched as the inverse function:
3910 goal_alternative_matched[I] = J. */
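/* For example, if operand 2 was constrained to match operand 0,
   goal_alternative_matches[2] is 0 and the loop below sets
   goal_alternative_matched[0] to 2, provided operand 2 did not
   already win.  */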
3911
3912 for (i = 0; i < noperands; i++)
3913 goal_alternative_matched[i] = -1;
3914
3915 for (i = 0; i < noperands; i++)
3916 if (! goal_alternative_win[i]
3917 && goal_alternative_matches[i] >= 0)
3918 goal_alternative_matched[goal_alternative_matches[i]] = i;
3919
3920 for (i = 0; i < noperands; i++)
3921 goal_alternative_win[i] |= goal_alternative_match_win[i];
3922
3923 /* If the best alternative is with the commutative operands swapped,
3924 consider them swapped before reporting the reloads. Update the
3925 operand numbers of any reloads already pushed. */
3926
3927 if (goal_alternative_swapped)
3928 {
3929 rtx tem;
3930
3931 tem = substed_operand[commutative];
3932 substed_operand[commutative] = substed_operand[commutative + 1];
3933 substed_operand[commutative + 1] = tem;
3934 tem = recog_data.operand[commutative];
3935 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3936 recog_data.operand[commutative + 1] = tem;
3937 tem = *recog_data.operand_loc[commutative];
3938 *recog_data.operand_loc[commutative]
3939 = *recog_data.operand_loc[commutative + 1];
3940 *recog_data.operand_loc[commutative + 1] = tem;
3941
3942 for (i = 0; i < n_reloads; i++)
3943 {
3944 if (rld[i].opnum == commutative)
3945 rld[i].opnum = commutative + 1;
3946 else if (rld[i].opnum == commutative + 1)
3947 rld[i].opnum = commutative;
3948 }
3949 }
3950
3951 for (i = 0; i < noperands; i++)
3952 {
3953 operand_reloadnum[i] = -1;
3954
3955 /* If this is an earlyclobber operand, we need to widen the scope.
3956 The reload must remain valid from the start of the insn being
3957 reloaded until after the operand is stored into its destination.
3958 We approximate this with RELOAD_OTHER even though we know that we
3959 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3960
3961 One special case that is worth checking is when we have an
3962 output that is earlyclobber but isn't used past the insn (typically
3963 a SCRATCH). In this case, we need only have the reload live
3964 through the insn itself, but not for any of our input or output
3965 reloads.
3966 But we must not accidentally narrow the scope of an existing
3967 RELOAD_OTHER reload - leave these alone.
3968
3969 In any case, anything needed to address this operand can remain
3970 however it was previously categorized. */
3971
3972 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3973 operand_type[i]
3974 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3975 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3976 }
3977
3978 /* Any constants that aren't allowed and can't be reloaded
3979 into registers are here changed into memory references. */
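/* For instance (hypothetical), a floating-point constant that the target
   refuses to reload into this alternative's register class is copied to
   the constant pool by force_const_mem and the operand becomes a MEM
   referencing it; if the alternative itself accepts constant-pool
   references (see alternative_allows_const_pool_ref), the operand then
   wins with no further reload.  */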
3980 for (i = 0; i < noperands; i++)
3981 if (! goal_alternative_win[i])
3982 {
3983 rtx op = recog_data.operand[i];
3984 rtx subreg = NULL_RTX;
3985 rtx plus = NULL_RTX;
3986 enum machine_mode mode = operand_mode[i];
3987
3988 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3989 push_reload so we have to let them pass here. */
3990 if (GET_CODE (op) == SUBREG)
3991 {
3992 subreg = op;
3993 op = SUBREG_REG (op);
3994 mode = GET_MODE (op);
3995 }
3996
3997 if (GET_CODE (op) == PLUS)
3998 {
3999 plus = op;
4000 op = XEXP (op, 1);
4001 }
4002
4003 if (CONST_POOL_OK_P (mode, op)
4004 && ((targetm.preferred_reload_class (op, goal_alternative[i])
4005 == NO_REGS)
4006 || no_input_reloads))
4007 {
4008 int this_address_reloaded;
4009 rtx tem = force_const_mem (mode, op);
4010
4011 /* If we stripped a SUBREG or a PLUS above, add it back. */
4012 if (plus != NULL_RTX)
4013 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
4014
4015 if (subreg != NULL_RTX)
4016 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
4017
4018 this_address_reloaded = 0;
4019 substed_operand[i] = recog_data.operand[i]
4020 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
4021 0, insn, &this_address_reloaded);
4022
4023 /* If the alternative accepts constant pool refs directly
4024 there will be no reload needed at all. */
4025 if (plus == NULL_RTX
4026 && subreg == NULL_RTX
4027 && alternative_allows_const_pool_ref (this_address_reloaded == 0
4028 ? substed_operand[i]
4029 : NULL,
4030 recog_data.constraints[i],
4031 goal_alternative_number))
4032 goal_alternative_win[i] = 1;
4033 }
4034 }
4035
4036 /* Record the values of the earlyclobber operands for the caller. */
4037 if (goal_earlyclobber)
4038 for (i = 0; i < noperands; i++)
4039 if (goal_alternative_earlyclobber[i])
4040 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4041
4042 /* Now record reloads for all the operands that need them. */
4043 for (i = 0; i < noperands; i++)
4044 if (! goal_alternative_win[i])
4045 {
4046 /* Operands that match previous ones have already been handled. */
4047 if (goal_alternative_matches[i] >= 0)
4048 ;
4049 /* Handle an operand with a nonoffsettable address
4050 appearing where an offsettable address will do
4051 by reloading the address into a base register.
4052
4053 ??? We can also do this when the operand is a register and
4054 reg_equiv_mem is not offsettable, but this is a bit tricky,
4055 so we don't bother with it. It may not be worth doing. */
4056 else if (goal_alternative_matched[i] == -1
4057 && goal_alternative_offmemok[i]
4058 && MEM_P (recog_data.operand[i]))
4059 {
4060 /* If the address to be reloaded is a VOIDmode constant,
4061 use the default address mode as the mode of the reload register,
4062 as would have been done by find_reloads_address. */
4063 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4064 enum machine_mode address_mode;
4065
4066 address_mode = get_address_mode (recog_data.operand[i]);
4067 operand_reloadnum[i]
4068 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4069 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4070 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4071 address_mode,
4072 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4073 rld[operand_reloadnum[i]].inc
4074 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4075
4076 /* If this operand is an output, we will have made any
4077 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4078 now we are treating part of the operand as an input, so
4079 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4080
4081 if (modified[i] == RELOAD_WRITE)
4082 {
4083 for (j = 0; j < n_reloads; j++)
4084 {
4085 if (rld[j].opnum == i)
4086 {
4087 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4088 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4089 else if (rld[j].when_needed
4090 == RELOAD_FOR_OUTADDR_ADDRESS)
4091 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4092 }
4093 }
4094 }
4095 }
4096 else if (goal_alternative_matched[i] == -1)
4097 {
4098 operand_reloadnum[i]
4099 = push_reload ((modified[i] != RELOAD_WRITE
4100 ? recog_data.operand[i] : 0),
4101 (modified[i] != RELOAD_READ
4102 ? recog_data.operand[i] : 0),
4103 (modified[i] != RELOAD_WRITE
4104 ? recog_data.operand_loc[i] : 0),
4105 (modified[i] != RELOAD_READ
4106 ? recog_data.operand_loc[i] : 0),
4107 (enum reg_class) goal_alternative[i],
4108 (modified[i] == RELOAD_WRITE
4109 ? VOIDmode : operand_mode[i]),
4110 (modified[i] == RELOAD_READ
4111 ? VOIDmode : operand_mode[i]),
4112 (insn_code_number < 0 ? 0
4113 : insn_data[insn_code_number].operand[i].strict_low),
4114 0, i, operand_type[i]);
4115 }
4116 /* In a matching pair of operands, one must be input only
4117 and the other must be output only.
4118 Pass the input operand as IN and the other as OUT. */
4119 else if (modified[i] == RELOAD_READ
4120 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4121 {
4122 operand_reloadnum[i]
4123 = push_reload (recog_data.operand[i],
4124 recog_data.operand[goal_alternative_matched[i]],
4125 recog_data.operand_loc[i],
4126 recog_data.operand_loc[goal_alternative_matched[i]],
4127 (enum reg_class) goal_alternative[i],
4128 operand_mode[i],
4129 operand_mode[goal_alternative_matched[i]],
4130 0, 0, i, RELOAD_OTHER);
4131 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4132 }
4133 else if (modified[i] == RELOAD_WRITE
4134 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4135 {
4136 operand_reloadnum[goal_alternative_matched[i]]
4137 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4138 recog_data.operand[i],
4139 recog_data.operand_loc[goal_alternative_matched[i]],
4140 recog_data.operand_loc[i],
4141 (enum reg_class) goal_alternative[i],
4142 operand_mode[goal_alternative_matched[i]],
4143 operand_mode[i],
4144 0, 0, i, RELOAD_OTHER);
4145 operand_reloadnum[i] = output_reloadnum;
4146 }
4147 else
4148 {
4149 gcc_assert (insn_code_number < 0);
4150 error_for_asm (insn, "inconsistent operand constraints "
4151 "in an %<asm%>");
4152 /* Avoid further trouble with this insn. */
4153 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4154 n_reloads = 0;
4155 return 0;
4156 }
4157 }
4158 else if (goal_alternative_matched[i] < 0
4159 && goal_alternative_matches[i] < 0
4160 && address_operand_reloaded[i] != 1
4161 && optimize)
4162 {
4163 /* For each non-matching operand that's a MEM or a pseudo-register
4164 that didn't get a hard register, make an optional reload.
4165 This may get done even if the insn needs no reloads otherwise. */
4166
4167 rtx operand = recog_data.operand[i];
4168
4169 while (GET_CODE (operand) == SUBREG)
4170 operand = SUBREG_REG (operand);
4171 if ((MEM_P (operand)
4172 || (REG_P (operand)
4173 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4174 /* If this is only for an output, the optional reload would not
4175 actually cause us to use a register now; it would just note
4176 that something is stored here. */
4177 && (goal_alternative[i] != NO_REGS
4178 || modified[i] == RELOAD_WRITE)
4179 && ! no_input_reloads
4180 /* An optional output reload might allow INSN to be deleted later.
4181 We mustn't make in-out reloads on insns that are not permitted
4182 to have output reloads.
4183 If this is an asm, we can't delete it; we must not even call
4184 push_reload for an optional output reload in this case,
4185 because we can't be sure that the constraint allows a register,
4186 and push_reload verifies the constraints for asms. */
4187 && (modified[i] == RELOAD_READ
4188 || (! no_output_reloads && ! this_insn_is_asm)))
4189 operand_reloadnum[i]
4190 = push_reload ((modified[i] != RELOAD_WRITE
4191 ? recog_data.operand[i] : 0),
4192 (modified[i] != RELOAD_READ
4193 ? recog_data.operand[i] : 0),
4194 (modified[i] != RELOAD_WRITE
4195 ? recog_data.operand_loc[i] : 0),
4196 (modified[i] != RELOAD_READ
4197 ? recog_data.operand_loc[i] : 0),
4198 (enum reg_class) goal_alternative[i],
4199 (modified[i] == RELOAD_WRITE
4200 ? VOIDmode : operand_mode[i]),
4201 (modified[i] == RELOAD_READ
4202 ? VOIDmode : operand_mode[i]),
4203 (insn_code_number < 0 ? 0
4204 : insn_data[insn_code_number].operand[i].strict_low),
4205 1, i, operand_type[i]);
4206 /* If a memory reference remains (either as a MEM or a pseudo that
4207 did not get a hard register), yet we can't make an optional
4208 reload, check if this is actually a pseudo register reference;
4209 we then need to emit a USE and/or a CLOBBER so that reload
4210 inheritance will do the right thing. */
4211 else if (replace
4212 && (MEM_P (operand)
4213 || (REG_P (operand)
4214 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4215 && reg_renumber [REGNO (operand)] < 0)))
4216 {
4217 operand = *recog_data.operand_loc[i];
4218
4219 while (GET_CODE (operand) == SUBREG)
4220 operand = SUBREG_REG (operand);
4221 if (REG_P (operand))
4222 {
4223 if (modified[i] != RELOAD_WRITE)
4224 /* We mark the USE with QImode so that we recognize
4225 it as one that can be safely deleted at the end
4226 of reload. */
4227 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4228 insn), QImode);
4229 if (modified[i] != RELOAD_READ)
4230 emit_insn_after (gen_clobber (operand), insn);
4231 }
4232 }
4233 }
4234 else if (goal_alternative_matches[i] >= 0
4235 && goal_alternative_win[goal_alternative_matches[i]]
4236 && modified[i] == RELOAD_READ
4237 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4238 && ! no_input_reloads && ! no_output_reloads
4239 && optimize)
4240 {
4241 /* Similarly, make an optional reload for a pair of matching
4242 objects that are in memory or are pseudos that didn't get hard regs. */
4243
4244 rtx operand = recog_data.operand[i];
4245
4246 while (GET_CODE (operand) == SUBREG)
4247 operand = SUBREG_REG (operand);
4248 if ((MEM_P (operand)
4249 || (REG_P (operand)
4250 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4251 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4252 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4253 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4254 recog_data.operand[i],
4255 recog_data.operand_loc[goal_alternative_matches[i]],
4256 recog_data.operand_loc[i],
4257 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4258 operand_mode[goal_alternative_matches[i]],
4259 operand_mode[i],
4260 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4261 }
4262
4263 /* Perform whatever substitutions on the operands we are supposed
4264 to make due to commutativity or replacement of registers
4265 with equivalent constants or memory slots. */
4266
4267 for (i = 0; i < noperands; i++)
4268 {
4269 /* We only do this on the last pass through reload, because it is
4270 possible for some data (like reg_equiv_address) to be changed during
4271 later passes. Moreover, we lose the opportunity to get a useful
4272 reload_{in,out}_reg when we do these replacements. */
4273
4274 if (replace)
4275 {
4276 rtx substitution = substed_operand[i];
4277
4278 *recog_data.operand_loc[i] = substitution;
4279
4280 /* If we're replacing an operand with a LABEL_REF, we need to
4281 make sure that there's a REG_LABEL_OPERAND note attached to
4282 this instruction. */
4283 if (GET_CODE (substitution) == LABEL_REF
4284 && !find_reg_note (insn, REG_LABEL_OPERAND,
4285 XEXP (substitution, 0))
4286 /* For a JUMP_P, if it was a branch target it must have
4287 already been recorded as such. */
4288 && (!JUMP_P (insn)
4289 || !label_is_jump_target_p (XEXP (substitution, 0),
4290 insn)))
4291 {
4292 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4293 if (LABEL_P (XEXP (substitution, 0)))
4294 ++LABEL_NUSES (XEXP (substitution, 0));
4295 }
4296
4297 }
4298 else
4299 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4300 }
4301
4302 /* If this insn pattern contains any MATCH_DUP's, make sure that
4303 they will be substituted if the operands they match are substituted.
4304 Also do now any substitutions we already did on the operands.
4305
4306 Don't do this if we aren't making replacements because we might be
4307 propagating things allocated by frame pointer elimination into places
4308 it doesn't expect. */
4309
4310 if (insn_code_number >= 0 && replace)
4311 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4312 {
4313 int opno = recog_data.dup_num[i];
4314 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4315 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4316 }
4317
4318 #if 0
4319 /* This loses because reloading of prior insns can invalidate the equivalence
4320 (or at least find_equiv_reg isn't smart enough to find it any more),
4321 causing this insn to need more reload regs than it needed before.
4322 It may be too late to make the reload regs available.
4323 Now this optimization is done safely in choose_reload_regs. */
4324
4325 /* For each reload of a reg into some other class of reg,
4326 search for an existing equivalent reg (same value now) in the right class.
4327 We can use it as long as we don't need to change its contents. */
4328 for (i = 0; i < n_reloads; i++)
4329 if (rld[i].reg_rtx == 0
4330 && rld[i].in != 0
4331 && REG_P (rld[i].in)
4332 && rld[i].out == 0)
4333 {
4334 rld[i].reg_rtx
4335 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4336 static_reload_reg_p, 0, rld[i].inmode);
4337 /* Prevent generation of an insn to load the value,
4338 because the register we found already has the value. */
4339 if (rld[i].reg_rtx)
4340 rld[i].in = rld[i].reg_rtx;
4341 }
4342 #endif
4343
4344 /* If we detected an error and replaced the asm instruction with a USE,
4345 forget about the reloads. */
4346 if (GET_CODE (PATTERN (insn)) == USE
4347 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4348 n_reloads = 0;
4349
4350 /* Perhaps an output reload can be combined with another
4351 to reduce needs by one. */
4352 if (!goal_earlyclobber)
4353 combine_reloads ();
4354
4355 /* If we have a pair of reloads for parts of an address, where both reload
4356 the same object, the operands themselves were not reloaded, and the two
4357 operands are supposed to match, merge the reloads and change the type
4358 of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4359
4360 for (i = 0; i < n_reloads; i++)
4361 {
4362 int k;
4363
4364 for (j = i + 1; j < n_reloads; j++)
4365 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4368 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4369 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4370 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4371 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4372 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4373 && rtx_equal_p (rld[i].in, rld[j].in)
4374 && (operand_reloadnum[rld[i].opnum] < 0
4375 || rld[operand_reloadnum[rld[i].opnum]].optional)
4376 && (operand_reloadnum[rld[j].opnum] < 0
4377 || rld[operand_reloadnum[rld[j].opnum]].optional)
4378 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4379 || (goal_alternative_matches[rld[j].opnum]
4380 == rld[i].opnum)))
4381 {
4382 for (k = 0; k < n_replacements; k++)
4383 if (replacements[k].what == j)
4384 replacements[k].what = i;
4385
4386 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4387 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4388 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4389 else
4390 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4391 rld[j].in = 0;
4392 }
4393 }
4394
4395 /* Scan all the reloads and update their type.
4396 If a reload is for the address of an operand and we didn't reload
4397 that operand, change the type. Similarly, change the operand number
4398 of a reload when two operands match. If a reload is optional, treat it
4399 as though the operand isn't reloaded.
4400
4401 ??? This latter case is somewhat odd because if we do the optional
4402 reload, it means the object is hanging around. Thus we need only
4403 do the address reload if the optional reload was NOT done.
4404
4405 Change secondary reloads to be the address type of their operand, not
4406 the normal type.
4407
4408 If an operand's reload is now RELOAD_OTHER, change any
4409 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4410 RELOAD_FOR_OTHER_ADDRESS. */
4411
4412 for (i = 0; i < n_reloads; i++)
4413 {
4414 if (rld[i].secondary_p
4415 && rld[i].when_needed == operand_type[rld[i].opnum])
4416 rld[i].when_needed = address_type[rld[i].opnum];
4417
4418 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4419 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4420 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4421 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4422 && (operand_reloadnum[rld[i].opnum] < 0
4423 || rld[operand_reloadnum[rld[i].opnum]].optional))
4424 {
4425 /* If we have a secondary reload to go along with this reload,
4426 change its type to RELOAD_FOR_OPADDR_ADDR. */
4427
4428 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4429 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4430 && rld[i].secondary_in_reload != -1)
4431 {
4432 int secondary_in_reload = rld[i].secondary_in_reload;
4433
4434 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4435
4436 /* If there's a tertiary reload we have to change it also. */
4437 if (secondary_in_reload > 0
4438 && rld[secondary_in_reload].secondary_in_reload != -1)
4439 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4440 = RELOAD_FOR_OPADDR_ADDR;
4441 }
4442
4443 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4444 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4445 && rld[i].secondary_out_reload != -1)
4446 {
4447 int secondary_out_reload = rld[i].secondary_out_reload;
4448
4449 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4450
4451 /* If there's a tertiary reload we have to change it also. */
4452 if (secondary_out_reload
4453 && rld[secondary_out_reload].secondary_out_reload != -1)
4454 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4455 = RELOAD_FOR_OPADDR_ADDR;
4456 }
4457
4458 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4459 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4460 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4461 else
4462 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4463 }
4464
4465 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4466 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4467 && operand_reloadnum[rld[i].opnum] >= 0
4468 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4469 == RELOAD_OTHER))
4470 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4471
4472 if (goal_alternative_matches[rld[i].opnum] >= 0)
4473 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4474 }
4475
4476 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4477 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4478 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4479
4480 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4481 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4482 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4483 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4484 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4485 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4486 This is complicated by the fact that a single operand can have more
4487 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4488 choose_reload_regs without affecting code quality, and cases that
4489 actually fail are extremely rare, so it turns out to be better to fix
4490 the problem here by not generating cases that choose_reload_regs will
4491 fail for. */
4492 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4493 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4494 a single operand.
4495 We can reduce the register pressure by exploiting the fact that a
4496 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4497 does not conflict with any of them, if it is only used for the first of
4498 the RELOAD_FOR_X_ADDRESS reloads. */
4499 {
4500 int first_op_addr_num = -2;
4501 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4502 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4503 int need_change = 0;
4504 /* We use first_op_addr_num and the contents of the above arrays
4505 first as flags - -2 means no instance encountered, -1 means exactly
4506 one instance encountered.
4507 If more than one instance has been encountered, we store the reload
4508 number of the first reload of the kind in question; reload numbers
4509 are known to be non-negative. */
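/* For example, first_inpaddr_num[k] starts at -2; the first
   RELOAD_FOR_INPUT_ADDRESS seen for operand k makes the pre-increment
   below yield -1 (still just a flag), and a second one makes it reach 0,
   at which point the reload number is recorded and NEED_CHANGE is set.  */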
4510 for (i = 0; i < noperands; i++)
4511 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4512 for (i = n_reloads - 1; i >= 0; i--)
4513 {
4514 switch (rld[i].when_needed)
4515 {
4516 case RELOAD_FOR_OPERAND_ADDRESS:
4517 if (++first_op_addr_num >= 0)
4518 {
4519 first_op_addr_num = i;
4520 need_change = 1;
4521 }
4522 break;
4523 case RELOAD_FOR_INPUT_ADDRESS:
4524 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4525 {
4526 first_inpaddr_num[rld[i].opnum] = i;
4527 need_change = 1;
4528 }
4529 break;
4530 case RELOAD_FOR_OUTPUT_ADDRESS:
4531 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4532 {
4533 first_outpaddr_num[rld[i].opnum] = i;
4534 need_change = 1;
4535 }
4536 break;
4537 default:
4538 break;
4539 }
4540 }
4541
4542 if (need_change)
4543 {
4544 for (i = 0; i < n_reloads; i++)
4545 {
4546 int first_num;
4547 enum reload_type type;
4548
4549 switch (rld[i].when_needed)
4550 {
4551 case RELOAD_FOR_OPADDR_ADDR:
4552 first_num = first_op_addr_num;
4553 type = RELOAD_FOR_OPERAND_ADDRESS;
4554 break;
4555 case RELOAD_FOR_INPADDR_ADDRESS:
4556 first_num = first_inpaddr_num[rld[i].opnum];
4557 type = RELOAD_FOR_INPUT_ADDRESS;
4558 break;
4559 case RELOAD_FOR_OUTADDR_ADDRESS:
4560 first_num = first_outpaddr_num[rld[i].opnum];
4561 type = RELOAD_FOR_OUTPUT_ADDRESS;
4562 break;
4563 default:
4564 continue;
4565 }
4566 if (first_num < 0)
4567 continue;
4568 else if (i > first_num)
4569 rld[i].when_needed = type;
4570 else
4571 {
4572 /* Check if the only TYPE reload that uses reload I is
4573 reload FIRST_NUM. */
4574 for (j = n_reloads - 1; j > first_num; j--)
4575 {
4576 if (rld[j].when_needed == type
4577 && (rld[i].secondary_p
4578 ? rld[j].secondary_in_reload == i
4579 : reg_mentioned_p (rld[i].in, rld[j].in)))
4580 {
4581 rld[i].when_needed = type;
4582 break;
4583 }
4584 }
4585 }
4586 }
4587 }
4588 }
4589
4590 /* See if we have any reloads that are now allowed to be merged
4591 because we've changed when the reload is needed to
4592 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4593 check for the most common cases. */
4594
4595 for (i = 0; i < n_reloads; i++)
4596 if (rld[i].in != 0 && rld[i].out == 0
4597 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4598 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4599 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4600 for (j = 0; j < n_reloads; j++)
4601 if (i != j && rld[j].in != 0 && rld[j].out == 0
4602 && rld[j].when_needed == rld[i].when_needed
4603 && MATCHES (rld[i].in, rld[j].in)
4604 && rld[i].rclass == rld[j].rclass
4605 && !rld[i].nocombine && !rld[j].nocombine
4606 && rld[i].reg_rtx == rld[j].reg_rtx)
4607 {
4608 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4609 transfer_replacements (i, j);
4610 rld[j].in = 0;
4611 }
4612
4613 #ifdef HAVE_cc0
4614 /* If we made any reloads for addresses, see if they violate a
4615 "no input reloads" requirement for this insn. But loads that we
4616 do after the insn (such as for output addresses) are fine. */
4617 if (no_input_reloads)
4618 for (i = 0; i < n_reloads; i++)
4619 gcc_assert (rld[i].in == 0
4620 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4621 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4622 #endif
4623
4624 /* Compute reload_mode and reload_nregs. */
4625 for (i = 0; i < n_reloads; i++)
4626 {
4627 rld[i].mode
4628 = (rld[i].inmode == VOIDmode
4629 || (GET_MODE_SIZE (rld[i].outmode)
4630 > GET_MODE_SIZE (rld[i].inmode)))
4631 ? rld[i].outmode : rld[i].inmode;
4632
4633 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4634 }
4635
4636 /* Special case a simple move with an input reload and a
4637 destination of a hard reg: if the hard reg is ok, use it. */
4638 for (i = 0; i < n_reloads; i++)
4639 if (rld[i].when_needed == RELOAD_FOR_INPUT
4640 && GET_CODE (PATTERN (insn)) == SET
4641 && REG_P (SET_DEST (PATTERN (insn)))
4642 && (SET_SRC (PATTERN (insn)) == rld[i].in
4643 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4644 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4645 {
4646 rtx dest = SET_DEST (PATTERN (insn));
4647 unsigned int regno = REGNO (dest);
4648
4649 if (regno < FIRST_PSEUDO_REGISTER
4650 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4651 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4652 {
4653 int nr = hard_regno_nregs[regno][rld[i].mode];
4654 int ok = 1, nri;
4655
4656 for (nri = 1; nri < nr; nri ++)
4657 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4658 {
4659 ok = 0;
4660 break;
4661 }
4662
4663 if (ok)
4664 rld[i].reg_rtx = dest;
4665 }
4666 }
4667
4668 return retval;
4669 }
4670
4671 /* Return true if alternative number ALTNUM in constraint-string
4672 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4673 MEM gives the reference if it didn't need any reloads, otherwise it
4674 is null. */
4675
4676 static bool
4677 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4678 const char *constraint, int altnum)
4679 {
4680 int c;
4681
4682 /* Skip alternatives before the one requested. */
4683 while (altnum > 0)
4684 {
4685 while (*constraint++ != ',')
4686 ;
4687 altnum--;
4688 }
4689 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4690 If one of them is present, this alternative accepts the result of
4691 passing a constant-pool reference through find_reloads_toplev.
4692
4693 The same is true of extra memory constraints if the address
4694 was reloaded into a register. However, the target may elect
4695 to disallow the original constant address, forcing it to be
4696 reloaded into a register instead. */
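/* Hypothetical example: with the constraint string "r,o" and ALTNUM 1,
   the loop below skips to the second alternative, finds 'o', and returns
   true, so a constant-pool MEM produced by find_reloads_toplev satisfies
   that alternative directly.  */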
4697 for (; (c = *constraint) && c != ',' && c != '#';
4698 constraint += CONSTRAINT_LEN (c, constraint))
4699 {
4700 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4701 return true;
4702 enum constraint_num cn = lookup_constraint (constraint);
4703 if (insn_extra_memory_constraint (cn)
4704 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4705 return true;
4706 }
4707 return false;
4708 }
4709 \f
4710 /* Scan X for memory references and scan the addresses for reloading.
4711 Also check for references to "constant" regs that we want to eliminate
4712 and replace them with the values they stand for.
4713 We may alter X destructively if it contains a reference to such a reg.
4714 If X is just a constant reg, we return the equivalent value
4715 instead of X.
4716
4717 IND_LEVELS says how many levels of indirect addressing this machine
4718 supports.
4719
4720 OPNUM and TYPE identify the purpose of the reload.
4721
4722 IS_SET_DEST is true if X is the destination of a SET, which is not
4723 appropriate to be replaced by a constant.
4724
4725 INSN, if nonzero, is the insn in which we do the reload. It is used
4726 to determine if we may generate output reloads, and where to put USEs
4727 for pseudos that we have to replace with stack slots.
4728
4729 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4730 result of find_reloads_address. */
4731
4732 static rtx
4733 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4734 int ind_levels, int is_set_dest, rtx insn,
4735 int *address_reloaded)
4736 {
4737 RTX_CODE code = GET_CODE (x);
4738
4739 const char *fmt = GET_RTX_FORMAT (code);
4740 int i;
4741 int copied;
4742
4743 if (code == REG)
4744 {
4745 /* This code is duplicated for speed in find_reloads. */
4746 int regno = REGNO (x);
4747 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4748 x = reg_equiv_constant (regno);
4749 #if 0
4750 /* This creates (subreg (mem...)) which would cause an unnecessary
4751 reload of the mem. */
4752 else if (reg_equiv_mem (regno) != 0)
4753 x = reg_equiv_mem (regno);
4754 #endif
4755 else if (reg_equiv_memory_loc (regno)
4756 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4757 {
4758 rtx mem = make_memloc (x, regno);
4759 if (reg_equiv_address (regno)
4760 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4761 {
4762 /* If this is not a toplevel operand, find_reloads doesn't see
4763 this substitution. We have to emit a USE of the pseudo so
4764 that delete_output_reload can see it. */
4765 if (replace_reloads && recog_data.operand[opnum] != x)
4766 /* We mark the USE with QImode so that we recognize it
4767 as one that can be safely deleted at the end of
4768 reload. */
4769 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4770 QImode);
4771 x = mem;
4772 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4773 opnum, type, ind_levels, insn);
4774 if (!rtx_equal_p (x, mem))
4775 push_reg_equiv_alt_mem (regno, x);
4776 if (address_reloaded)
4777 *address_reloaded = i;
4778 }
4779 }
4780 return x;
4781 }
4782 if (code == MEM)
4783 {
4784 rtx tem = x;
4785
4786 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4787 opnum, type, ind_levels, insn);
4788 if (address_reloaded)
4789 *address_reloaded = i;
4790
4791 return tem;
4792 }
4793
4794 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4795 {
4796 /* Check for SUBREG containing a REG that's equivalent to a
4797 constant. If the constant has a known value, truncate it
4798 right now. Similarly if we are extracting a single-word of a
4799 multi-word constant. If the constant is symbolic, allow it
4800 to be substituted normally. push_reload will strip the
4801 subreg later. The constant must not be VOIDmode, because we
4802 will lose the mode of the register (this should never happen
4803 because one of the cases above should handle it). */
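      /* Editorial illustration (not part of the original sources): on a
         little-endian target, (subreg:HI (reg:SI P) 0) where pseudo P is
         equivalent to (const_int 0x12345678) is simplified here to
         (const_int 0x5678), provided that constant is itself legitimate,
         so no reload of the wider constant is needed.  */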
4804
4805 int regno = REGNO (SUBREG_REG (x));
4806 rtx tem;
4807
4808 if (regno >= FIRST_PSEUDO_REGISTER
4809 && reg_renumber[regno] < 0
4810 && reg_equiv_constant (regno) != 0)
4811 {
4812 tem =
4813 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4814 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4815 gcc_assert (tem);
4816 if (CONSTANT_P (tem)
4817 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4818 {
4819 tem = force_const_mem (GET_MODE (x), tem);
4820 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4821 &XEXP (tem, 0), opnum, type,
4822 ind_levels, insn);
4823 if (address_reloaded)
4824 *address_reloaded = i;
4825 }
4826 return tem;
4827 }
4828
4829 /* If the subreg contains a reg that will be converted to a mem,
4830 attempt to convert the whole subreg to a (narrower or wider)
4831 memory reference instead. If this succeeds, we're done --
4832 otherwise fall through to check whether the inner reg still
4833 needs address reloads anyway. */
4834
4835 if (regno >= FIRST_PSEUDO_REGISTER
4836 && reg_equiv_memory_loc (regno) != 0)
4837 {
4838 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4839 insn, address_reloaded);
4840 if (tem)
4841 return tem;
4842 }
4843 }
4844
4845 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4846 {
4847 if (fmt[i] == 'e')
4848 {
4849 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4850 ind_levels, is_set_dest, insn,
4851 address_reloaded);
4852 /* If we have replaced a reg with its equivalent memory loc -
4853 that can still be handled here e.g. if it's in a paradoxical
4854 subreg - we must make the change in a copy, rather than using
4855 a destructive change. This way, find_reloads can still elect
4856 not to do the change. */
4857 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4858 {
4859 x = shallow_copy_rtx (x);
4860 copied = 1;
4861 }
4862 XEXP (x, i) = new_part;
4863 }
4864 }
4865 return x;
4866 }
4867
4868 /* Return a mem ref for the memory equivalent of reg REGNO.
4869 This mem ref is not shared with anything. */
4870
4871 static rtx
4872 make_memloc (rtx ad, int regno)
4873 {
4874 /* We must rerun eliminate_regs, in case the elimination
4875 offsets have changed. */
4876 rtx tem
4877 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4878 0);
4879
4880 /* If TEM might contain a pseudo, we must copy it to avoid
4881 modifying it when we do the substitution for the reload. */
4882 if (rtx_varies_p (tem, 0))
4883 tem = copy_rtx (tem);
4884
4885 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4886 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4887
4888 /* Copy the result if it's still the same as the equivalence, to avoid
4889 modifying it when we do the substitution for the reload. */
4890 if (tem == reg_equiv_memory_loc (regno))
4891 tem = copy_rtx (tem);
4892 return tem;
4893 }
4894
4895 /* Returns true if AD could be turned into a valid memory reference
4896 to mode MODE in address space AS by reloading the part pointed to
4897 by PART into a register. */
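/* Editorial illustration (not part of the original sources): given
   AD = (plus (reg:SI B) (const_int 100000)) and PART = &XEXP (AD, 0),
   this asks whether (plus (reg:SI N) (const_int 100000)), with N a
   brand-new register number, would be accepted as a (non-strict)
   address -- i.e. whether reloading just the base register could
   make AD valid.  */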
4898
4899 static int
4900 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4901 addr_space_t as, rtx *part)
4902 {
4903 int retv;
4904 rtx tem = *part;
4905 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4906
4907 *part = reg;
4908 retv = memory_address_addr_space_p (mode, ad, as);
4909 *part = tem;
4910
4911 return retv;
4912 }
4913
4914 /* Record all reloads needed for handling memory address AD
4915 which appears in *LOC in a memory reference to mode MODE
4916 which itself is found in location *MEMREFLOC.
4917 Note that we take shortcuts assuming that no multi-reg machine mode
4918 occurs as part of an address.
4919
4920 OPNUM and TYPE specify the purpose of this reload.
4921
4922 IND_LEVELS says how many levels of indirect addressing this machine
4923 supports.
4924
4925 INSN, if nonzero, is the insn in which we do the reload. It is used
4926 to determine if we may generate output reloads, and where to put USEs
4927 for pseudos that we have to replace with stack slots.
4928
4929 Value is one if this address is reloaded or replaced as a whole; it is
4930 zero if the top level of this address was not reloaded or replaced, and
4931 it is -1 if it may or may not have been reloaded or replaced.
4932
4933 Note that there is no verification that the address will be valid after
4934 this routine does its work. Instead, we rely on the fact that the address
4935 was valid when reload started. So we need only undo things that reload
4936 could have broken. These are wrong register types, pseudos not allocated
4937 to a hard register, and frame pointer elimination. */
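/* Editorial illustration (not part of the original sources): if AD is a
   pseudo that received no hard register and is equivalent to a stack
   slot, a reload of the address into a base register is normally pushed
   and 1 is returned, unless indirect addressing lets the equivalent MEM
   be used directly; if AD is already strictly valid, e.g.
   (plus (reg fp) (const_int 8)) on most targets, nothing is pushed and
   0 is returned.  */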
4938
4939 static int
4940 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4941 rtx *loc, int opnum, enum reload_type type,
4942 int ind_levels, rtx insn)
4943 {
4944 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4945 : ADDR_SPACE_GENERIC;
4946 int regno;
4947 int removed_and = 0;
4948 int op_index;
4949 rtx tem;
4950
4951 /* If the address is a register, see if it is a legitimate address and
4952 reload if not. We first handle the cases where we need not reload
4953 or where we must reload in a non-standard way. */
4954
4955 if (REG_P (ad))
4956 {
4957 regno = REGNO (ad);
4958
4959 if (reg_equiv_constant (regno) != 0)
4960 {
4961 find_reloads_address_part (reg_equiv_constant (regno), loc,
4962 base_reg_class (mode, as, MEM, SCRATCH),
4963 GET_MODE (ad), opnum, type, ind_levels);
4964 return 1;
4965 }
4966
4967 tem = reg_equiv_memory_loc (regno);
4968 if (tem != 0)
4969 {
4970 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4971 {
4972 tem = make_memloc (ad, regno);
4973 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4974 XEXP (tem, 0),
4975 MEM_ADDR_SPACE (tem)))
4976 {
4977 rtx orig = tem;
4978
4979 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4980 &XEXP (tem, 0), opnum,
4981 ADDR_TYPE (type), ind_levels, insn);
4982 if (!rtx_equal_p (tem, orig))
4983 push_reg_equiv_alt_mem (regno, tem);
4984 }
4985 /* We can avoid a reload if the register's equivalent memory
4986 expression is valid as an indirect memory address.
4987 But not all addresses are valid in a mem used as an indirect
4988 address: only reg or reg+constant. */
4989
4990 if (ind_levels > 0
4991 && strict_memory_address_addr_space_p (mode, tem, as)
4992 && (REG_P (XEXP (tem, 0))
4993 || (GET_CODE (XEXP (tem, 0)) == PLUS
4994 && REG_P (XEXP (XEXP (tem, 0), 0))
4995 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4996 {
4997 /* TEM is not the same as what we'll be replacing the
4998 pseudo with after reload, put a USE in front of INSN
4999 in the final reload pass. */
5000 if (replace_reloads
5001 && num_not_at_initial_offset
5002 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5003 {
5004 *loc = tem;
5005 /* We mark the USE with QImode so that we
5006 recognize it as one that can be safely
5007 deleted at the end of reload. */
5008 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
5009 insn), QImode);
5010
5011 /* This doesn't really count as replacing the address
5012 as a whole, since it is still a memory access. */
5013 }
5014 return 0;
5015 }
5016 ad = tem;
5017 }
5018 }
5019
5020 /* The only remaining case where we can avoid a reload is if this is a
5021 hard register that is valid as a base register and which is not the
5022 subject of a CLOBBER in this insn. */
5023
5024 else if (regno < FIRST_PSEUDO_REGISTER
5025 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5026 && ! regno_clobbered_p (regno, this_insn, mode, 0))
5027 return 0;
5028
5029 /* If we do not have one of the cases above, we must do the reload. */
5030 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5031 base_reg_class (mode, as, MEM, SCRATCH),
5032 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5033 return 1;
5034 }
5035
5036 if (strict_memory_address_addr_space_p (mode, ad, as))
5037 {
5038 /* The address appears valid, so reloads are not needed.
5039 But the address may contain an eliminable register.
5040 This can happen because a machine with indirect addressing
5041 may consider a pseudo register by itself a valid address even when
5042 it has failed to get a hard reg.
5043 So do a tree-walk to find and eliminate all such regs. */
5044
5045 /* But first quickly dispose of a common case. */
5046 if (GET_CODE (ad) == PLUS
5047 && CONST_INT_P (XEXP (ad, 1))
5048 && REG_P (XEXP (ad, 0))
5049 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5050 return 0;
5051
5052 subst_reg_equivs_changed = 0;
5053 *loc = subst_reg_equivs (ad, insn);
5054
5055 if (! subst_reg_equivs_changed)
5056 return 0;
5057
5058 /* Check result for validity after substitution. */
5059 if (strict_memory_address_addr_space_p (mode, ad, as))
5060 return 0;
5061 }
5062
5063 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5064 do
5065 {
5066 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5067 {
5068 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5069 ind_levels, win);
5070 }
5071 break;
5072 win:
5073 *memrefloc = copy_rtx (*memrefloc);
5074 XEXP (*memrefloc, 0) = ad;
5075 move_replacements (&ad, &XEXP (*memrefloc, 0));
5076 return -1;
5077 }
5078 while (0);
5079 #endif
5080
5081 /* The address is not valid. We have to figure out why. First see if
5082 we have an outer AND and remove it if so. Then analyze what's inside. */
5083
5084 if (GET_CODE (ad) == AND)
5085 {
5086 removed_and = 1;
5087 loc = &XEXP (ad, 0);
5088 ad = *loc;
5089 }
5090
5091 /* One possibility for why the address is invalid is that it is itself
5092 a MEM. This can happen when the frame pointer is being eliminated, a
5093 pseudo is not allocated to a hard register, and the offset between the
5094 frame and stack pointers is not its initial value. In that case the
5095 pseudo will have been replaced by a MEM referring to the
5096 stack pointer. */
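  /* Editorial illustration (not part of the original sources): a pseudo P
     equivalent to the stack slot (mem:SI (plus (reg sp) (const_int 16)))
     that received no hard register may have been replaced by that MEM, so
     AD is now itself a MEM and must be loaded into a register unless the
     machine supports indirect addressing.  */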
5097 if (MEM_P (ad))
5098 {
5099 /* First ensure that the address in this MEM is valid. Then, unless
5100 indirect addresses are valid, reload the MEM into a register. */
5101 tem = ad;
5102 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5103 opnum, ADDR_TYPE (type),
5104 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5105
5106 /* If tem was changed, then we must create a new memory reference to
5107 hold it and store it back into memrefloc. */
5108 if (tem != ad && memrefloc)
5109 {
5110 *memrefloc = copy_rtx (*memrefloc);
5111 copy_replacements (tem, XEXP (*memrefloc, 0));
5112 loc = &XEXP (*memrefloc, 0);
5113 if (removed_and)
5114 loc = &XEXP (*loc, 0);
5115 }
5116
5117 /* Check cases similar to those for indirect addresses above, except
5118 that we can allow pseudos and a MEM since they should have been
5119 taken care of above. */
5120
5121 if (ind_levels == 0
5122 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5123 || MEM_P (XEXP (tem, 0))
5124 || ! (REG_P (XEXP (tem, 0))
5125 || (GET_CODE (XEXP (tem, 0)) == PLUS
5126 && REG_P (XEXP (XEXP (tem, 0), 0))
5127 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5128 {
5129 /* Must use TEM here, not AD, since it is the one that will
5130 have any subexpressions reloaded, if needed. */
5131 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5132 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5133 VOIDmode, 0,
5134 0, opnum, type);
5135 return ! removed_and;
5136 }
5137 else
5138 return 0;
5139 }
5140
5141 /* If we have address of a stack slot but it's not valid because the
5142 displacement is too large, compute the sum in a register.
5143 Handle all base registers here, not just fp/ap/sp, because on some
5144 targets (namely SH) we can also get too large displacements from
5145 big-endian corrections. */
5146 else if (GET_CODE (ad) == PLUS
5147 && REG_P (XEXP (ad, 0))
5148 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5149 && CONST_INT_P (XEXP (ad, 1))
5150 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5151 CONST_INT)
5152 /* Similarly, if we were to reload the base register and the
5153 mem+offset address is still invalid, then we want to reload
5154 the whole address, not just the base register. */
5155 || ! maybe_memory_address_addr_space_p
5156 (mode, ad, as, &(XEXP (ad, 0)))))
5157
5158 {
5159 /* Unshare the MEM rtx so we can safely alter it. */
5160 if (memrefloc)
5161 {
5162 *memrefloc = copy_rtx (*memrefloc);
5163 loc = &XEXP (*memrefloc, 0);
5164 if (removed_and)
5165 loc = &XEXP (*loc, 0);
5166 }
5167
5168 if (double_reg_address_ok
5169 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5170 PLUS, CONST_INT))
5171 {
5172 /* Unshare the sum as well. */
5173 *loc = ad = copy_rtx (ad);
5174
5175 /* Reload the displacement into an index reg.
5176 We assume the frame pointer or arg pointer is a base reg. */
5177 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5178 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5179 type, ind_levels);
5180 return 0;
5181 }
5182 else
5183 {
5184 /* If the sum of two regs is not necessarily valid,
5185 reload the sum into a base reg.
5186 That will at least work. */
5187 find_reloads_address_part (ad, loc,
5188 base_reg_class (mode, as, MEM, SCRATCH),
5189 GET_MODE (ad), opnum, type, ind_levels);
5190 }
5191 return ! removed_and;
5192 }
5193
5194 /* If we have an indexed stack slot, there are three possible reasons why
5195 it might be invalid: The index might need to be reloaded, the address
5196 might have been made by frame pointer elimination and hence have a
5197 constant out of range, or both reasons might apply.
5198
5199 We can easily check for an index needing reload, but even if that is the
5200 case, we might also have an invalid constant. To avoid making the
5201 conservative assumption and requiring two reloads, we see if this address
5202 is valid when not interpreted strictly. If it is, the only problem is
5203 that the index needs a reload and find_reloads_address_1 will take care
5204 of it.
5205
5206 Handle all base registers here, not just fp/ap/sp, because on some
5207 targets (namely SPARC) we can also get invalid addresses from preventive
5208 subreg big-endian corrections made by find_reloads_toplev. We
5209 can also get expressions involving LO_SUM (rather than PLUS) from
5210 find_reloads_subreg_address.
5211
5212 If we decide to do something, it must be that `double_reg_address_ok'
5213 is true. We generate a reload of the base register + constant and
5214 rework the sum so that the reload register will be added to the index.
5215 This is safe because we know the address isn't shared.
5216
5217 We check for the base register as both the first and second operand of
5218 the innermost PLUS and/or LO_SUM. */
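  /* Editorial worked example (not part of the original sources): for
     AD = (plus (plus (reg fp) (reg index)) (const_int 100000)) with an
     out-of-range constant, op_index 0 finds the base register in the inner
     PLUS, forms OFFSET_REG = (plus (reg fp) (const_int 100000)), rewrites
     AD as (plus OFFSET_REG (reg index)), reloads OFFSET_REG into a base
     register and leaves (reg index) to find_reloads_address_1.  */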
5219
5220 for (op_index = 0; op_index < 2; ++op_index)
5221 {
5222 rtx operand, addend;
5223 enum rtx_code inner_code;
5224
5225 if (GET_CODE (ad) != PLUS)
5226 continue;
5227
5228 inner_code = GET_CODE (XEXP (ad, 0));
5229 if (!(GET_CODE (ad) == PLUS
5230 && CONST_INT_P (XEXP (ad, 1))
5231 && (inner_code == PLUS || inner_code == LO_SUM)))
5232 continue;
5233
5234 operand = XEXP (XEXP (ad, 0), op_index);
5235 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5236 continue;
5237
5238 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5239
5240 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5241 GET_CODE (addend))
5242 || operand == frame_pointer_rtx
5243 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5244 || operand == hard_frame_pointer_rtx
5245 #endif
5246 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5247 || operand == arg_pointer_rtx
5248 #endif
5249 || operand == stack_pointer_rtx)
5250 && ! maybe_memory_address_addr_space_p
5251 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5252 {
5253 rtx offset_reg;
5254 enum reg_class cls;
5255
5256 offset_reg = plus_constant (GET_MODE (ad), operand,
5257 INTVAL (XEXP (ad, 1)));
5258
5259 /* Form the adjusted address. */
5260 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5261 ad = gen_rtx_PLUS (GET_MODE (ad),
5262 op_index == 0 ? offset_reg : addend,
5263 op_index == 0 ? addend : offset_reg);
5264 else
5265 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5266 op_index == 0 ? offset_reg : addend,
5267 op_index == 0 ? addend : offset_reg);
5268 *loc = ad;
5269
5270 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5271 find_reloads_address_part (XEXP (ad, op_index),
5272 &XEXP (ad, op_index), cls,
5273 GET_MODE (ad), opnum, type, ind_levels);
5274 find_reloads_address_1 (mode, as,
5275 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5276 GET_CODE (XEXP (ad, op_index)),
5277 &XEXP (ad, 1 - op_index), opnum,
5278 type, 0, insn);
5279
5280 return 0;
5281 }
5282 }
5283
5284 /* See if address becomes valid when an eliminable register
5285 in a sum is replaced. */
5286
5287 tem = ad;
5288 if (GET_CODE (ad) == PLUS)
5289 tem = subst_indexed_address (ad);
5290 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5291 {
5292 /* Ok, we win that way. Replace any additional eliminable
5293 registers. */
5294
5295 subst_reg_equivs_changed = 0;
5296 tem = subst_reg_equivs (tem, insn);
5297
5298 /* Make sure that didn't make the address invalid again. */
5299
5300 if (! subst_reg_equivs_changed
5301 || strict_memory_address_addr_space_p (mode, tem, as))
5302 {
5303 *loc = tem;
5304 return 0;
5305 }
5306 }
5307
5308 /* If constants aren't valid addresses, reload the constant address
5309 into a register. */
5310 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5311 {
5312 enum machine_mode address_mode = GET_MODE (ad);
5313 if (address_mode == VOIDmode)
5314 address_mode = targetm.addr_space.address_mode (as);
5315
5316 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5317 Unshare it so we can safely alter it. */
5318 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5319 && CONSTANT_POOL_ADDRESS_P (ad))
5320 {
5321 *memrefloc = copy_rtx (*memrefloc);
5322 loc = &XEXP (*memrefloc, 0);
5323 if (removed_and)
5324 loc = &XEXP (*loc, 0);
5325 }
5326
5327 find_reloads_address_part (ad, loc,
5328 base_reg_class (mode, as, MEM, SCRATCH),
5329 address_mode, opnum, type, ind_levels);
5330 return ! removed_and;
5331 }
5332
5333 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5334 opnum, type, ind_levels, insn);
5335 }
5336 \f
5337 /* Find all pseudo regs appearing in AD
5338 that are eliminable in favor of equivalent values
5339 and do not have hard regs; replace them by their equivalents.
5340 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5341 front of it for pseudos that we have to replace with stack slots. */
5342
5343 static rtx
5344 subst_reg_equivs (rtx ad, rtx insn)
5345 {
5346 RTX_CODE code = GET_CODE (ad);
5347 int i;
5348 const char *fmt;
5349
5350 switch (code)
5351 {
5352 case HIGH:
5353 case CONST:
5354 CASE_CONST_ANY:
5355 case SYMBOL_REF:
5356 case LABEL_REF:
5357 case PC:
5358 case CC0:
5359 return ad;
5360
5361 case REG:
5362 {
5363 int regno = REGNO (ad);
5364
5365 if (reg_equiv_constant (regno) != 0)
5366 {
5367 subst_reg_equivs_changed = 1;
5368 return reg_equiv_constant (regno);
5369 }
5370 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5371 {
5372 rtx mem = make_memloc (ad, regno);
5373 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5374 {
5375 subst_reg_equivs_changed = 1;
5376 /* We mark the USE with QImode so that we recognize it
5377 as one that can be safely deleted at the end of
5378 reload. */
5379 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5380 QImode);
5381 return mem;
5382 }
5383 }
5384 }
5385 return ad;
5386
5387 case PLUS:
5388 /* Quickly dispose of a common case. */
5389 if (XEXP (ad, 0) == frame_pointer_rtx
5390 && CONST_INT_P (XEXP (ad, 1)))
5391 return ad;
5392 break;
5393
5394 default:
5395 break;
5396 }
5397
5398 fmt = GET_RTX_FORMAT (code);
5399 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5400 if (fmt[i] == 'e')
5401 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5402 return ad;
5403 }
5404 \f
5405 /* Compute the sum of X and Y, making canonicalizations assumed in an
5406 address, namely: sum constant integers, surround the sum of two
5407 constants with a CONST, put the constant as the second operand, and
5408 group the constant on the outermost sum.
5409
5410 This routine assumes both inputs are already in canonical form. */
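/* Editorial illustration (not part of the original sources), e.g. in Pmode:
   form_sum (Pmode, (plus (reg R) (const_int 4)), (const_int 8)) yields
   (plus (reg R) (const_int 12)), while the sum of (symbol_ref "x") and
   (const_int 4) is wrapped as (const (plus (symbol_ref "x") (const_int 4))).  */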
5411
5412 rtx
5413 form_sum (enum machine_mode mode, rtx x, rtx y)
5414 {
5415 rtx tem;
5416
5417 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5418 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5419
5420 if (CONST_INT_P (x))
5421 return plus_constant (mode, y, INTVAL (x));
5422 else if (CONST_INT_P (y))
5423 return plus_constant (mode, x, INTVAL (y));
5424 else if (CONSTANT_P (x))
5425 tem = x, x = y, y = tem;
5426
5427 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5428 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5429
5430 /* Note that if the operands of Y are specified in the opposite
5431 order in the recursive calls below, infinite recursion will occur. */
5432 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5433 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5434
5435 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5436 constant will have been placed second. */
5437 if (CONSTANT_P (x) && CONSTANT_P (y))
5438 {
5439 if (GET_CODE (x) == CONST)
5440 x = XEXP (x, 0);
5441 if (GET_CODE (y) == CONST)
5442 y = XEXP (y, 0);
5443
5444 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5445 }
5446
5447 return gen_rtx_PLUS (mode, x, y);
5448 }
5449 \f
5450 /* If ADDR is a sum containing a pseudo register that should be
5451 replaced with a constant (from reg_equiv_constant),
5452 return the result of doing so, and also apply the associative
5453 law so that the result is more likely to be a valid address.
5454 (But it is not guaranteed to be one.)
5455
5456 Note that at most one register is replaced, even if more are
5457 replaceable. Also, we try to put the result into a canonical form
5458 so it is more likely to be a valid address.
5459
5460 In all other cases, return ADDR. */
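/* Editorial illustration (not part of the original sources): if pseudo P
   received no hard register and is equivalent to (const_int 8), then
   ADDR = (plus (plus (reg R) (reg P)) (const_int 4)) becomes
   (plus (reg R) (const_int 12)) after the replacement and re-association
   done below.  */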
5461
5462 static rtx
5463 subst_indexed_address (rtx addr)
5464 {
5465 rtx op0 = 0, op1 = 0, op2 = 0;
5466 rtx tem;
5467 int regno;
5468
5469 if (GET_CODE (addr) == PLUS)
5470 {
5471 /* Try to find a register to replace. */
5472 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5473 if (REG_P (op0)
5474 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5475 && reg_renumber[regno] < 0
5476 && reg_equiv_constant (regno) != 0)
5477 op0 = reg_equiv_constant (regno);
5478 else if (REG_P (op1)
5479 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5480 && reg_renumber[regno] < 0
5481 && reg_equiv_constant (regno) != 0)
5482 op1 = reg_equiv_constant (regno);
5483 else if (GET_CODE (op0) == PLUS
5484 && (tem = subst_indexed_address (op0)) != op0)
5485 op0 = tem;
5486 else if (GET_CODE (op1) == PLUS
5487 && (tem = subst_indexed_address (op1)) != op1)
5488 op1 = tem;
5489 else
5490 return addr;
5491
5492 /* Pick out up to three things to add. */
5493 if (GET_CODE (op1) == PLUS)
5494 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5495 else if (GET_CODE (op0) == PLUS)
5496 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5497
5498 /* Compute the sum. */
5499 if (op2 != 0)
5500 op1 = form_sum (GET_MODE (addr), op1, op2);
5501 if (op1 != 0)
5502 op0 = form_sum (GET_MODE (addr), op0, op1);
5503
5504 return op0;
5505 }
5506 return addr;
5507 }
5508 \f
5509 /* Update the REG_INC notes for an insn. It updates all REG_INC
5510 notes for the instruction that refer to REGNO so that they refer
5511 to the reload number.
5512
5513 INSN is the insn for which any REG_INC notes need updating.
5514
5515 REGNO is the register number which has been reloaded.
5516
5517 RELOADNUM is the reload number. */
5518
5519 static void
5520 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5521 int reloadnum ATTRIBUTE_UNUSED)
5522 {
5523 #ifdef AUTO_INC_DEC
5524 rtx link;
5525
5526 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5527 if (REG_NOTE_KIND (link) == REG_INC
5528 && (int) REGNO (XEXP (link, 0)) == regno)
5529 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5530 #endif
5531 }
5532 \f
5533 /* Record the pseudo registers we must reload into hard registers in a
5534 subexpression of a would-be memory address, X referring to a value
5535 in mode MODE. (This function is not called if the address we find
5536 is strictly valid.)
5537
5538 CONTEXT = 1 means we are considering regs as index regs,
5539 = 0 means we are considering them as base regs.
5540 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5541 or an autoinc code.
5542 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5543 is the code of the index part of the address. Otherwise, pass SCRATCH
5544 for this argument.
5545 OPNUM and TYPE specify the purpose of any reloads made.
5546
5547 IND_LEVELS says how many levels of indirect addressing are
5548 supported at this point in the address.
5549
5550 INSN, if nonzero, is the insn in which we do the reload. It is used
5551 to determine if we may generate output reloads.
5552
5553 We return nonzero if X, as a whole, is reloaded or replaced. */
5554
5555 /* Note that we take shortcuts assuming that no multi-reg machine mode
5556 occurs as part of an address.
5557 Also, this is not fully machine-customizable; it works for machines
5558 such as VAXen and 68000's and 32000's, but other possible machines
5559 could have addressing modes that this does not handle right.
5560 If you add push_reload calls here, you need to make sure gen_reload
5561 handles those cases gracefully. */
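/* Editorial illustration (not part of the original sources): for an
   address (plus (reg B) (mult (reg I) (const_int 4))), the MULT side is
   processed with CONTEXT = 1 (index) and the plain register with
   CONTEXT = 0 (base), so each part is reloaded into the register class
   the target requires for that role.  */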
5562
5563 static int
5564 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5565 rtx x, int context,
5566 enum rtx_code outer_code, enum rtx_code index_code,
5567 rtx *loc, int opnum, enum reload_type type,
5568 int ind_levels, rtx insn)
5569 {
5570 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5571 ((CONTEXT) == 0 \
5572 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5573 : REGNO_OK_FOR_INDEX_P (REGNO))
5574
5575 enum reg_class context_reg_class;
5576 RTX_CODE code = GET_CODE (x);
5577 bool reloaded_inner_of_autoinc = false;
5578
5579 if (context == 1)
5580 context_reg_class = INDEX_REG_CLASS;
5581 else
5582 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5583
5584 switch (code)
5585 {
5586 case PLUS:
5587 {
5588 rtx orig_op0 = XEXP (x, 0);
5589 rtx orig_op1 = XEXP (x, 1);
5590 RTX_CODE code0 = GET_CODE (orig_op0);
5591 RTX_CODE code1 = GET_CODE (orig_op1);
5592 rtx op0 = orig_op0;
5593 rtx op1 = orig_op1;
5594
5595 if (GET_CODE (op0) == SUBREG)
5596 {
5597 op0 = SUBREG_REG (op0);
5598 code0 = GET_CODE (op0);
5599 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5600 op0 = gen_rtx_REG (word_mode,
5601 (REGNO (op0) +
5602 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5603 GET_MODE (SUBREG_REG (orig_op0)),
5604 SUBREG_BYTE (orig_op0),
5605 GET_MODE (orig_op0))));
5606 }
5607
5608 if (GET_CODE (op1) == SUBREG)
5609 {
5610 op1 = SUBREG_REG (op1);
5611 code1 = GET_CODE (op1);
5612 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5613 /* ??? Why is this given op1's mode and above for
5614 ??? op0 SUBREGs we use word_mode? */
5615 op1 = gen_rtx_REG (GET_MODE (op1),
5616 (REGNO (op1) +
5617 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5618 GET_MODE (SUBREG_REG (orig_op1)),
5619 SUBREG_BYTE (orig_op1),
5620 GET_MODE (orig_op1))));
5621 }
5622 /* A PLUS in the index-register position can only be created as a result
5623 of register rematerialization for an expression like &localvar*4. Reload it.
5624 It may be possible to combine the displacement on the outer level,
5625 but it is probably not worthwhile to do so. */
5626 if (context == 1)
5627 {
5628 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5629 opnum, ADDR_TYPE (type), ind_levels, insn);
5630 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5631 context_reg_class,
5632 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5633 return 1;
5634 }
5635
5636 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5637 || code0 == ZERO_EXTEND || code1 == MEM)
5638 {
5639 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5640 &XEXP (x, 0), opnum, type, ind_levels,
5641 insn);
5642 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5643 &XEXP (x, 1), opnum, type, ind_levels,
5644 insn);
5645 }
5646
5647 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5648 || code1 == ZERO_EXTEND || code0 == MEM)
5649 {
5650 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5651 &XEXP (x, 0), opnum, type, ind_levels,
5652 insn);
5653 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5654 &XEXP (x, 1), opnum, type, ind_levels,
5655 insn);
5656 }
5657
5658 else if (code0 == CONST_INT || code0 == CONST
5659 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5660 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5661 &XEXP (x, 1), opnum, type, ind_levels,
5662 insn);
5663
5664 else if (code1 == CONST_INT || code1 == CONST
5665 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5666 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5667 &XEXP (x, 0), opnum, type, ind_levels,
5668 insn);
5669
5670 else if (code0 == REG && code1 == REG)
5671 {
5672 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5673 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5674 return 0;
5675 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5676 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5677 return 0;
5678 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5679 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5680 &XEXP (x, 1), opnum, type, ind_levels,
5681 insn);
5682 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5683 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5684 &XEXP (x, 0), opnum, type, ind_levels,
5685 insn);
5686 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5687 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5688 &XEXP (x, 0), opnum, type, ind_levels,
5689 insn);
5690 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5691 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5692 &XEXP (x, 1), opnum, type, ind_levels,
5693 insn);
5694 else
5695 {
5696 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5697 &XEXP (x, 0), opnum, type, ind_levels,
5698 insn);
5699 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5700 &XEXP (x, 1), opnum, type, ind_levels,
5701 insn);
5702 }
5703 }
5704
5705 else if (code0 == REG)
5706 {
5707 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5708 &XEXP (x, 0), opnum, type, ind_levels,
5709 insn);
5710 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5711 &XEXP (x, 1), opnum, type, ind_levels,
5712 insn);
5713 }
5714
5715 else if (code1 == REG)
5716 {
5717 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5718 &XEXP (x, 1), opnum, type, ind_levels,
5719 insn);
5720 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5721 &XEXP (x, 0), opnum, type, ind_levels,
5722 insn);
5723 }
5724 }
5725
5726 return 0;
5727
5728 case POST_MODIFY:
5729 case PRE_MODIFY:
5730 {
5731 rtx op0 = XEXP (x, 0);
5732 rtx op1 = XEXP (x, 1);
5733 enum rtx_code index_code;
5734 int regno;
5735 int reloadnum;
5736
5737 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5738 return 0;
5739
5740 /* Currently, we only support {PRE,POST}_MODIFY constructs
5741 where a base register is {inc,dec}remented by the contents
5742 of another register or by a constant value. Thus, these
5743 operands must match. */
5744 gcc_assert (op0 == XEXP (op1, 0));
5745
5746 /* Require index register (or constant). Let's just handle the
5747 register case in the meantime... If the target allows
5748 auto-modify by a constant then we could try replacing a pseudo
5749 register with its equivalent constant where applicable.
5750
5751 We also handle the case where the register was eliminated
5752 resulting in a PLUS subexpression.
5753
5754 If we later decide to reload the whole PRE_MODIFY or
5755 POST_MODIFY, inc_for_reload might clobber the reload register
5756 before reading the index. The index register might therefore
5757 need to live longer than a TYPE reload normally would, so be
5758 conservative and class it as RELOAD_OTHER. */
5759 if ((REG_P (XEXP (op1, 1))
5760 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5761 || GET_CODE (XEXP (op1, 1)) == PLUS)
5762 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5763 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5764 ind_levels, insn);
5765
5766 gcc_assert (REG_P (XEXP (op1, 0)));
5767
5768 regno = REGNO (XEXP (op1, 0));
5769 index_code = GET_CODE (XEXP (op1, 1));
5770
5771 /* A register that is incremented cannot be constant! */
5772 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5773 || reg_equiv_constant (regno) == 0);
5774
5775 /* Handle a register that is equivalent to a memory location
5776 which cannot be addressed directly. */
5777 if (reg_equiv_memory_loc (regno) != 0
5778 && (reg_equiv_address (regno) != 0
5779 || num_not_at_initial_offset))
5780 {
5781 rtx tem = make_memloc (XEXP (x, 0), regno);
5782
5783 if (reg_equiv_address (regno)
5784 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5785 {
5786 rtx orig = tem;
5787
5788 /* First reload the memory location's address.
5789 We can't use ADDR_TYPE (type) here, because we need to
5790 write back the value after reading it, hence we actually
5791 need two registers. */
5792 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5793 &XEXP (tem, 0), opnum,
5794 RELOAD_OTHER,
5795 ind_levels, insn);
5796
5797 if (!rtx_equal_p (tem, orig))
5798 push_reg_equiv_alt_mem (regno, tem);
5799
5800 /* Then reload the memory location into a base
5801 register. */
5802 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5803 &XEXP (op1, 0),
5804 base_reg_class (mode, as,
5805 code, index_code),
5806 GET_MODE (x), GET_MODE (x), 0,
5807 0, opnum, RELOAD_OTHER);
5808
5809 update_auto_inc_notes (this_insn, regno, reloadnum);
5810 return 0;
5811 }
5812 }
5813
5814 if (reg_renumber[regno] >= 0)
5815 regno = reg_renumber[regno];
5816
5817 /* We require a base register here... */
5818 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5819 {
5820 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5821 &XEXP (op1, 0), &XEXP (x, 0),
5822 base_reg_class (mode, as,
5823 code, index_code),
5824 GET_MODE (x), GET_MODE (x), 0, 0,
5825 opnum, RELOAD_OTHER);
5826
5827 update_auto_inc_notes (this_insn, regno, reloadnum);
5828 return 0;
5829 }
5830 }
5831 return 0;
5832
5833 case POST_INC:
5834 case POST_DEC:
5835 case PRE_INC:
5836 case PRE_DEC:
5837 if (REG_P (XEXP (x, 0)))
5838 {
5839 int regno = REGNO (XEXP (x, 0));
5840 int value = 0;
5841 rtx x_orig = x;
5842
5843 /* A register that is incremented cannot be constant! */
5844 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5845 || reg_equiv_constant (regno) == 0);
5846
5847 /* Handle a register that is equivalent to a memory location
5848 which cannot be addressed directly. */
5849 if (reg_equiv_memory_loc (regno) != 0
5850 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5851 {
5852 rtx tem = make_memloc (XEXP (x, 0), regno);
5853 if (reg_equiv_address (regno)
5854 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5855 {
5856 rtx orig = tem;
5857
5858 /* First reload the memory location's address.
5859 We can't use ADDR_TYPE (type) here, because we need to
5860 write back the value after reading it, hence we actually
5861 need two registers. */
5862 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5863 &XEXP (tem, 0), opnum, type,
5864 ind_levels, insn);
5865 reloaded_inner_of_autoinc = true;
5866 if (!rtx_equal_p (tem, orig))
5867 push_reg_equiv_alt_mem (regno, tem);
5868 /* Put this inside a new increment-expression. */
5869 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5870 /* Proceed to reload that, as if it contained a register. */
5871 }
5872 }
5873
5874 /* If we have a hard register that is ok in this incdec context,
5875 don't make a reload. If the register isn't nice enough for
5876 autoincdec, we can reload it. But if a register that we have
5877 verified here as acceptable still isn't "valid" when the
5878 autoincrement is judged from outside, it must be that no
5879 autoincrement is "valid" at all. If that is true and something
5880 made an autoincrement anyway, this must be a special context
5881 where one is allowed (for example, a "push" instruction).
5882 We can't improve this address, so leave it alone. */
5883
5884 /* Otherwise, reload the autoincrement into a suitable hard reg
5885 and record how much to increment by. */
5886
5887 if (reg_renumber[regno] >= 0)
5888 regno = reg_renumber[regno];
5889 if (regno >= FIRST_PSEUDO_REGISTER
5890 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5891 index_code))
5892 {
5893 int reloadnum;
5894
5895 /* If we can output the register afterwards, do so; this
5896 saves the extra update.
5897 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5898 CALL_INSN - and it does not set CC0.
5899 But don't do this if we cannot directly address the
5900 memory location, since this will make it harder to
5901 reuse address reloads, and increases register pressure.
5902 Also don't do this if we can probably update x directly. */
5903 rtx equiv = (MEM_P (XEXP (x, 0))
5904 ? XEXP (x, 0)
5905 : reg_equiv_mem (regno));
5906 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5907 if (insn && NONJUMP_INSN_P (insn) && equiv
5908 && memory_operand (equiv, GET_MODE (equiv))
5909 #ifdef HAVE_cc0
5910 && ! sets_cc0_p (PATTERN (insn))
5911 #endif
5912 && ! (icode != CODE_FOR_nothing
5913 && insn_operand_matches (icode, 0, equiv)
5914 && insn_operand_matches (icode, 1, equiv))
5915 /* Using RELOAD_OTHER means we emit this and the reload we
5916 made earlier in the wrong order. */
5917 && !reloaded_inner_of_autoinc)
5918 {
5919 /* We use the original pseudo for loc, so that
5920 emit_reload_insns() knows which pseudo this
5921 reload refers to and updates the pseudo rtx, not
5922 its equivalent memory location, as well as the
5923 corresponding entry in reg_last_reload_reg. */
5924 loc = &XEXP (x_orig, 0);
5925 x = XEXP (x, 0);
5926 reloadnum
5927 = push_reload (x, x, loc, loc,
5928 context_reg_class,
5929 GET_MODE (x), GET_MODE (x), 0, 0,
5930 opnum, RELOAD_OTHER);
5931 }
5932 else
5933 {
5934 reloadnum
5935 = push_reload (x, x, loc, (rtx*) 0,
5936 context_reg_class,
5937 GET_MODE (x), GET_MODE (x), 0, 0,
5938 opnum, type);
5939 rld[reloadnum].inc
5940 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5941
5942 value = 1;
5943 }
5944
5945 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5946 reloadnum);
5947 }
5948 return value;
5949 }
5950 return 0;
5951
5952 case TRUNCATE:
5953 case SIGN_EXTEND:
5954 case ZERO_EXTEND:
5955 /* Look for parts to reload in the inner expression and reload them
5956 too, in addition to this operation. Reloading all inner parts in
5957 addition to this one shouldn't be necessary, but at this point,
5958 we don't know if we can possibly omit any part that *can* be
5959 reloaded. Targets that are better off reloading just either part
5960 (or perhaps even a different part of an outer expression), should
5961 define LEGITIMIZE_RELOAD_ADDRESS. */
5962 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5963 context, code, SCRATCH, &XEXP (x, 0), opnum,
5964 type, ind_levels, insn);
5965 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5966 context_reg_class,
5967 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5968 return 1;
5969
5970 case MEM:
5971 /* This is probably the result of a substitution, by eliminate_regs, of
5972 an equivalent address for a pseudo that was not allocated to a hard
5973 register. Verify that the specified address is valid and reload it
5974 into a register.
5975
5976 Since we know we are going to reload this item, don't decrement for
5977 the indirection level.
5978
5979 Note that this is actually conservative: it would be slightly more
5980 efficient to use the value of SPILL_INDIRECT_LEVELS from
5981 reload1.c here. */
5982
5983 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5984 opnum, ADDR_TYPE (type), ind_levels, insn);
5985 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5986 context_reg_class,
5987 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5988 return 1;
5989
5990 case REG:
5991 {
5992 int regno = REGNO (x);
5993
5994 if (reg_equiv_constant (regno) != 0)
5995 {
5996 find_reloads_address_part (reg_equiv_constant (regno), loc,
5997 context_reg_class,
5998 GET_MODE (x), opnum, type, ind_levels);
5999 return 1;
6000 }
6001
6002 #if 0 /* This might screw code in reload1.c to delete prior output-reload
6003 that feeds this insn. */
6004 if (reg_equiv_mem (regno) != 0)
6005 {
6006 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
6007 context_reg_class,
6008 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6009 return 1;
6010 }
6011 #endif
6012
6013 if (reg_equiv_memory_loc (regno)
6014 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
6015 {
6016 rtx tem = make_memloc (x, regno);
6017 if (reg_equiv_address (regno) != 0
6018 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6019 {
6020 x = tem;
6021 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
6022 &XEXP (x, 0), opnum, ADDR_TYPE (type),
6023 ind_levels, insn);
6024 if (!rtx_equal_p (x, tem))
6025 push_reg_equiv_alt_mem (regno, x);
6026 }
6027 }
6028
6029 if (reg_renumber[regno] >= 0)
6030 regno = reg_renumber[regno];
6031
6032 if (regno >= FIRST_PSEUDO_REGISTER
6033 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6034 index_code))
6035 {
6036 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6037 context_reg_class,
6038 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6039 return 1;
6040 }
6041
6042 /* If a register appearing in an address is the subject of a CLOBBER
6043 in this insn, reload it into some other register to be safe.
6044 The CLOBBER is supposed to make the register unavailable
6045 from before this insn to after it. */
6046 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6047 {
6048 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6049 context_reg_class,
6050 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6051 return 1;
6052 }
6053 }
6054 return 0;
6055
6056 case SUBREG:
6057 if (REG_P (SUBREG_REG (x)))
6058 {
6059 /* If this is a SUBREG of a hard register and the resulting register
6060 is of the wrong class, reload the whole SUBREG. This avoids
6061 needless copies if SUBREG_REG is multi-word. */
6062 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6063 {
6064 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6065
6066 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6067 index_code))
6068 {
6069 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6070 context_reg_class,
6071 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6072 return 1;
6073 }
6074 }
6075 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6076 is larger than the class size, then reload the whole SUBREG. */
6077 else
6078 {
6079 enum reg_class rclass = context_reg_class;
6080 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6081 > reg_class_size[(int) rclass])
6082 {
6083 /* If the inner register will be replaced by a memory
6084 reference, we can do this only if we can replace the
6085 whole subreg by a (narrower) memory reference. If
6086 this is not possible, fall through and reload just
6087 the inner register (including address reloads). */
6088 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6089 {
6090 rtx tem = find_reloads_subreg_address (x, opnum,
6091 ADDR_TYPE (type),
6092 ind_levels, insn,
6093 NULL);
6094 if (tem)
6095 {
6096 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6097 GET_MODE (tem), VOIDmode, 0, 0,
6098 opnum, type);
6099 return 1;
6100 }
6101 }
6102 else
6103 {
6104 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6105 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6106 return 1;
6107 }
6108 }
6109 }
6110 }
6111 break;
6112
6113 default:
6114 break;
6115 }
6116
6117 {
6118 const char *fmt = GET_RTX_FORMAT (code);
6119 int i;
6120
6121 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6122 {
6123 if (fmt[i] == 'e')
6124 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6125 we get here. */
6126 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6127 code, SCRATCH, &XEXP (x, i),
6128 opnum, type, ind_levels, insn);
6129 }
6130 }
6131
6132 #undef REG_OK_FOR_CONTEXT
6133 return 0;
6134 }
6135 \f
6136 /* X, which is found at *LOC, is a part of an address that needs to be
6137 reloaded into a register of class RCLASS. If X is a constant, or if
6138 X is a PLUS that contains a constant, check that the constant is a
6139 legitimate operand and that we are supposed to be able to load
6140 it into the register.
6141
6142 If not, force the constant into memory and reload the MEM instead.
6143
6144 MODE is the mode to use, in case X is an integer constant.
6145
6146 OPNUM and TYPE describe the purpose of any reloads made.
6147
6148 IND_LEVELS says how many levels of indirect addressing this machine
6149 supports. */
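/* Editorial illustration (not part of the original sources): if X is
   (symbol_ref "var") and the target does not accept it as a legitimate
   constant, it is forced into the constant pool and the resulting
   (mem (symbol_ref ...)) is reloaded into RCLASS instead of the bare
   SYMBOL_REF.  */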
6150
6151 static void
6152 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6153 enum machine_mode mode, int opnum,
6154 enum reload_type type, int ind_levels)
6155 {
6156 if (CONSTANT_P (x)
6157 && (!targetm.legitimate_constant_p (mode, x)
6158 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6159 {
6160 x = force_const_mem (mode, x);
6161 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6162 opnum, type, ind_levels, 0);
6163 }
6164
6165 else if (GET_CODE (x) == PLUS
6166 && CONSTANT_P (XEXP (x, 1))
6167 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6168 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6169 == NO_REGS))
6170 {
6171 rtx tem;
6172
6173 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6174 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6175 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6176 opnum, type, ind_levels, 0);
6177 }
6178
6179 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6180 mode, VOIDmode, 0, 0, opnum, type);
6181 }
6182 \f
6183 /* X, a subreg of a pseudo, is a part of an address that needs to be
6184 reloaded, and the pseudo is equivalent to a memory location.
6185
6186 Attempt to replace the whole subreg by a (possibly narrower or wider)
6187 memory reference. If this is possible, return this new memory
6188 reference, and push all required address reloads. Otherwise,
6189 return NULL.
6190
6191 OPNUM and TYPE identify the purpose of the reload.
6192
6193 IND_LEVELS says how many levels of indirect addressing are
6194 supported at this point in the address.
6195
6196 INSN, if nonzero, is the insn in which we do the reload. It is used
6197 to determine where to put USEs for pseudos that we have to replace with
6198 stack slots. */
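/* Editorial illustration (not part of the original sources): if pseudo P
   is equivalent to the stack slot (mem:DI (plus (reg fp) (const_int -16))),
   then (subreg:SI (reg:DI P) 4) can be rewritten here as
   (mem:SI (plus (reg fp) (const_int -12))), plus whatever address reloads
   that narrower MEM requires.  (On a little-endian target byte offset 4
   selects the high word.)  */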
6199
6200 static rtx
6201 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6202 int ind_levels, rtx insn, int *address_reloaded)
6203 {
6204 enum machine_mode outer_mode = GET_MODE (x);
6205 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6206 int regno = REGNO (SUBREG_REG (x));
6207 int reloaded = 0;
6208 rtx tem, orig;
6209 int offset;
6210
6211 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6212
6213 /* We cannot replace the subreg with a modified memory reference if:
6214
6215 - we have a paradoxical subreg that implicitly acts as a zero or
6216 sign extension operation due to LOAD_EXTEND_OP;
6217
6218 - we have a subreg that is implicitly supposed to act on the full
6219 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6220
6221 - the address of the equivalent memory location is mode-dependent; or
6222
6223 - we have a paradoxical subreg and the resulting memory is not
6224 sufficiently aligned to allow access in the wider mode.
6225
6226 In addition, we choose not to perform the replacement for *any*
6227 paradoxical subreg, even if it were possible in principle. This
6228 is to avoid generating wider memory references than necessary.
6229
6230 This corresponds to how previous versions of reload used to handle
6231 paradoxical subregs where no address reload was required. */
6232
6233 if (paradoxical_subreg_p (x))
6234 return NULL;
6235
6236 #ifdef WORD_REGISTER_OPERATIONS
6237 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6238 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6239 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6240 return NULL;
6241 #endif
6242
6243 /* Since we don't attempt to handle paradoxical subregs, we can just
6244 call into simplify_subreg, which will handle all remaining checks
6245 for us. */
6246 orig = make_memloc (SUBREG_REG (x), regno);
6247 offset = SUBREG_BYTE (x);
6248 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6249 if (!tem || !MEM_P (tem))
6250 return NULL;
6251
6252 /* Now push all required address reloads, if any. */
6253 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6254 XEXP (tem, 0), &XEXP (tem, 0),
6255 opnum, type, ind_levels, insn);
6256 /* ??? Do we need to handle nonzero offsets somehow? */
6257 if (!offset && !rtx_equal_p (tem, orig))
6258 push_reg_equiv_alt_mem (regno, tem);
6259
6260 /* For some processors an address may be valid in the original mode but
6261 not in a smaller mode. For example, ARM accepts a scaled index register
6262 in SImode but not in HImode. Note that this is only a problem if the
6263 address in reg_equiv_mem is already invalid in the new mode; other
6264 cases would be fixed by find_reloads_address as usual.
6265
6266 ??? We attempt to handle such cases here by doing an additional reload
6267 of the full address after the usual processing by find_reloads_address.
6268 Note that this may not work in the general case, but it seems to cover
6269 the cases where this situation currently occurs. A more general fix
6270 might be to reload the *value* instead of the address, but this would
6271 not be expected by the callers of this routine as-is.
6272
6273 If find_reloads_address already completely replaced the address, there
6274 is nothing further to do. */
6275 if (reloaded == 0
6276 && reg_equiv_mem (regno) != 0
6277 && !strict_memory_address_addr_space_p
6278 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6279 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6280 {
6281 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6282 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6283 MEM, SCRATCH),
6284 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6285 reloaded = 1;
6286 }
6287
6288 /* If this is not a toplevel operand, find_reloads doesn't see this
6289 substitution. We have to emit a USE of the pseudo so that
6290 delete_output_reload can see it. */
6291 if (replace_reloads && recog_data.operand[opnum] != x)
6292 /* We mark the USE with QImode so that we recognize it as one that
6293 can be safely deleted at the end of reload. */
6294 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6295 QImode);
6296
6297 if (address_reloaded)
6298 *address_reloaded = reloaded;
6299
6300 return tem;
6301 }
6302 \f
6303 /* Substitute into the current INSN the registers into which we have reloaded
6304 the things that need reloading. The array `replacements'
6305 contains the locations of all pointers that must be changed
6306 and says what to replace them with.
6307
6308 INSN is the insn in which the recorded replacements are applied. */
6309
6310 void
6311 subst_reloads (rtx insn)
6312 {
6313 int i;
6314
6315 for (i = 0; i < n_replacements; i++)
6316 {
6317 struct replacement *r = &replacements[i];
6318 rtx reloadreg = rld[r->what].reg_rtx;
6319 if (reloadreg)
6320 {
6321 #ifdef DEBUG_RELOAD
6322 /* This checking takes a very long time on some platforms
6323 causing the gcc.c-torture/compile/limits-fnargs.c test
6324 to time out during testing. See PR 31850.
6325
6326 Internal consistency test. Check that we don't modify
6327 anything in the equivalence arrays. Whenever something from
6328 those arrays needs to be reloaded, it must be unshared before
6329 being substituted into; the equivalence must not be modified.
6330 Otherwise, if the equivalence is used after that, it will
6331 have been modified, and the thing substituted (probably a
6332 register) is likely overwritten and not a usable equivalence. */
6333 int check_regno;
6334
6335 for (check_regno = 0; check_regno < max_regno; check_regno++)
6336 {
6337 #define CHECK_MODF(ARRAY) \
6338 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6339 || !loc_mentioned_in_p (r->where, \
6340 (*reg_equivs)[check_regno].ARRAY))
6341
6342 CHECK_MODF (constant);
6343 CHECK_MODF (memory_loc);
6344 CHECK_MODF (address);
6345 CHECK_MODF (mem);
6346 #undef CHECK_MODF
6347 }
6348 #endif /* DEBUG_RELOAD */
6349
6350 /* If we're replacing a LABEL_REF with a register, there must
6351 already be an indication (to e.g. flow) which label this
6352 register refers to. */
6353 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6354 || !JUMP_P (insn)
6355 || find_reg_note (insn,
6356 REG_LABEL_OPERAND,
6357 XEXP (*r->where, 0))
6358 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6359
6360 /* Encapsulate RELOADREG so its machine mode matches what
6361 used to be there. Note that gen_lowpart_common will
6362 do the wrong thing if RELOADREG is multi-word. RELOADREG
6363 will always be a REG here. */
6364 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6365 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6366
6367 *r->where = reloadreg;
6368 }
6369 /* If reload got no reg and isn't optional, something's wrong. */
6370 else
6371 gcc_assert (rld[r->what].optional);
6372 }
6373 }
6374 \f
6375 /* Make a copy of any replacements being done into X and move those
6376 copies to locations in Y, a copy of X. */
6377
6378 void
6379 copy_replacements (rtx x, rtx y)
6380 {
6381 copy_replacements_1 (&x, &y, n_replacements);
6382 }
6383
6384 static void
6385 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6386 {
6387 int i, j;
6388 rtx x, y;
6389 struct replacement *r;
6390 enum rtx_code code;
6391 const char *fmt;
6392
6393 for (j = 0; j < orig_replacements; j++)
6394 if (replacements[j].where == px)
6395 {
6396 r = &replacements[n_replacements++];
6397 r->where = py;
6398 r->what = replacements[j].what;
6399 r->mode = replacements[j].mode;
6400 }
6401
6402 x = *px;
6403 y = *py;
6404 code = GET_CODE (x);
6405 fmt = GET_RTX_FORMAT (code);
6406
6407 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6408 {
6409 if (fmt[i] == 'e')
6410 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6411 else if (fmt[i] == 'E')
6412 for (j = XVECLEN (x, i); --j >= 0; )
6413 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6414 orig_replacements);
6415 }
6416 }
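
/* Added illustration (not original commentary): copy_replacements is for
   the case where replacements have already been recorded inside some rtx X
   and the caller then duplicates X.  A hypothetical caller might do

	rtx copy = copy_rtx (x);
	copy_replacements (x, copy);

   so that subst_reloads later performs the pending substitutions in COPY
   as well as in X.  */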
6417
6418 /* Change any replacements being done to *X to be done to *Y. */
6419
6420 void
6421 move_replacements (rtx *x, rtx *y)
6422 {
6423 int i;
6424
6425 for (i = 0; i < n_replacements; i++)
6426 if (replacements[i].where == x)
6427 replacements[i].where = y;
6428 }
6429 \f
6430 /* If LOC was scheduled to be replaced by something, return the replacement.
6431 Otherwise, return *LOC. */
6432
6433 rtx
6434 find_replacement (rtx *loc)
6435 {
6436 struct replacement *r;
6437
6438 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6439 {
6440 rtx reloadreg = rld[r->what].reg_rtx;
6441
6442 if (reloadreg && r->where == loc)
6443 {
6444 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6445 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6446
6447 return reloadreg;
6448 }
6449 else if (reloadreg && GET_CODE (*loc) == SUBREG
6450 && r->where == &SUBREG_REG (*loc))
6451 {
6452 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6453 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6454
6455 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6456 GET_MODE (SUBREG_REG (*loc)),
6457 SUBREG_BYTE (*loc));
6458 }
6459 }
6460
6461 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6462 what's inside and make a new rtl if so. */
6463 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6464 || GET_CODE (*loc) == MULT)
6465 {
6466 rtx x = find_replacement (&XEXP (*loc, 0));
6467 rtx y = find_replacement (&XEXP (*loc, 1));
6468
6469 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6470 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6471 }
6472
6473 return *loc;
6474 }
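
/* A worked example, added for illustration; the register numbers are made
   up.  Suppose *LOC is (plus:SI (reg:SI 117) (const_int 4)) and the reload
   recorded for the (reg:SI 117) slot was given hard register 3.  Then

	find_replacement (&XEXP (*loc, 0)) returns (reg:SI 3), and
	find_replacement (loc) returns (plus:SI (reg:SI 3) (const_int 4)),

   the latter built by the PLUS/MINUS/MULT case above.  */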
6475 \f
6476 /* Return nonzero if a register in the range [REGNO, ENDREGNO)
6477 appears either explicitly or implicitly in X
6478 other than being stored into (except for earlyclobber operands).
6479
6480 References contained within the substructure at LOC do not count.
6481 LOC may be zero, meaning don't ignore anything.
6482
6483 This is similar to refers_to_regno_p in rtlanal.c except that we
6484 look at equivalences for pseudos that didn't get hard registers. */
6485
6486 static int
6487 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6488 rtx x, rtx *loc)
6489 {
6490 int i;
6491 unsigned int r;
6492 RTX_CODE code;
6493 const char *fmt;
6494
6495 if (x == 0)
6496 return 0;
6497
6498 repeat:
6499 code = GET_CODE (x);
6500
6501 switch (code)
6502 {
6503 case REG:
6504 r = REGNO (x);
6505
6506 /* If this is a pseudo, a hard register must not have been allocated.
6507 X must therefore either be a constant or be in memory. */
6508 if (r >= FIRST_PSEUDO_REGISTER)
6509 {
6510 if (reg_equiv_memory_loc (r))
6511 return refers_to_regno_for_reload_p (regno, endregno,
6512 reg_equiv_memory_loc (r),
6513 (rtx*) 0);
6514
6515 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6516 return 0;
6517 }
6518
6519 return (endregno > r
6520 && regno < r + (r < FIRST_PSEUDO_REGISTER
6521 ? hard_regno_nregs[r][GET_MODE (x)]
6522 : 1));
6523
6524 case SUBREG:
6525 /* If this is a SUBREG of a hard reg, we can see exactly which
6526 registers are being modified. Otherwise, handle normally. */
6527 if (REG_P (SUBREG_REG (x))
6528 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6529 {
6530 unsigned int inner_regno = subreg_regno (x);
6531 unsigned int inner_endregno
6532 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6533 ? subreg_nregs (x) : 1);
6534
6535 return endregno > inner_regno && regno < inner_endregno;
6536 }
6537 break;
6538
6539 case CLOBBER:
6540 case SET:
6541 if (&SET_DEST (x) != loc
6542 /* Note setting a SUBREG counts as referring to the REG it is in for
6543 a pseudo but not for hard registers since we can
6544 treat each word individually. */
6545 && ((GET_CODE (SET_DEST (x)) == SUBREG
6546 && loc != &SUBREG_REG (SET_DEST (x))
6547 && REG_P (SUBREG_REG (SET_DEST (x)))
6548 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6549 && refers_to_regno_for_reload_p (regno, endregno,
6550 SUBREG_REG (SET_DEST (x)),
6551 loc))
6552 /* If the output is an earlyclobber operand, this is
6553 a conflict. */
6554 || ((!REG_P (SET_DEST (x))
6555 || earlyclobber_operand_p (SET_DEST (x)))
6556 && refers_to_regno_for_reload_p (regno, endregno,
6557 SET_DEST (x), loc))))
6558 return 1;
6559
6560 if (code == CLOBBER || loc == &SET_SRC (x))
6561 return 0;
6562 x = SET_SRC (x);
6563 goto repeat;
6564
6565 default:
6566 break;
6567 }
6568
6569 /* X does not match, so try its subexpressions. */
6570
6571 fmt = GET_RTX_FORMAT (code);
6572 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6573 {
6574 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6575 {
6576 if (i == 0)
6577 {
6578 x = XEXP (x, 0);
6579 goto repeat;
6580 }
6581 else
6582 if (refers_to_regno_for_reload_p (regno, endregno,
6583 XEXP (x, i), loc))
6584 return 1;
6585 }
6586 else if (fmt[i] == 'E')
6587 {
6588 int j;
6589 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6590 if (loc != &XVECEXP (x, i, j)
6591 && refers_to_regno_for_reload_p (regno, endregno,
6592 XVECEXP (x, i, j), loc))
6593 return 1;
6594 }
6595 }
6596 return 0;
6597 }
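
/* Added illustration with hypothetical register numbers: unlike
   refers_to_regno_p, this function follows reload equivalences.  If pseudo
   200 did not get a hard register and reg_equiv_memory_loc (200) is
   (mem:SI (plus:SI (reg:SI 1) (const_int 8))), then

	refers_to_regno_for_reload_p (1, 2, regno_reg_rtx[200], NULL)

   returns 1, because hard register 1 is used in the address that will
   replace the pseudo.  */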
6598
6599 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6600 we check if any register number in X conflicts with the relevant register
6601 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6602 contains a MEM (we don't bother checking for memory addresses that can't
6603    conflict because we expect this to be a rare case).
6604
6605 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6606 that we look at equivalences for pseudos that didn't get hard registers. */
6607
6608 int
6609 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6610 {
6611 int regno, endregno;
6612
6613 /* Overly conservative. */
6614 if (GET_CODE (x) == STRICT_LOW_PART
6615 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6616 x = XEXP (x, 0);
6617
6618   /* If either argument is a constant, then modifying X cannot affect IN.  */
6619 if (CONSTANT_P (x) || CONSTANT_P (in))
6620 return 0;
6621 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6622 return refers_to_mem_for_reload_p (in);
6623 else if (GET_CODE (x) == SUBREG)
6624 {
6625 regno = REGNO (SUBREG_REG (x));
6626 if (regno < FIRST_PSEUDO_REGISTER)
6627 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6628 GET_MODE (SUBREG_REG (x)),
6629 SUBREG_BYTE (x),
6630 GET_MODE (x));
6631 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6632 ? subreg_nregs (x) : 1);
6633
6634 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6635 }
6636 else if (REG_P (x))
6637 {
6638 regno = REGNO (x);
6639
6640 /* If this is a pseudo, it must not have been assigned a hard register.
6641 Therefore, it must either be in memory or be a constant. */
6642
6643 if (regno >= FIRST_PSEUDO_REGISTER)
6644 {
6645 if (reg_equiv_memory_loc (regno))
6646 return refers_to_mem_for_reload_p (in);
6647 gcc_assert (reg_equiv_constant (regno));
6648 return 0;
6649 }
6650
6651 endregno = END_HARD_REGNO (x);
6652
6653 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6654 }
6655 else if (MEM_P (x))
6656 return refers_to_mem_for_reload_p (in);
6657 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6658 || GET_CODE (x) == CC0)
6659 return reg_mentioned_p (x, in);
6660 else
6661 {
6662 gcc_assert (GET_CODE (x) == PLUS);
6663
6664 /* We actually want to know if X is mentioned somewhere inside IN.
6665 We must not say that (plus (sp) (const_int 124)) is in
6666 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6667 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6668 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6669 while (MEM_P (in))
6670 in = XEXP (in, 0);
6671 if (REG_P (in))
6672 return 0;
6673 else if (GET_CODE (in) == PLUS)
6674 return (rtx_equal_p (x, in)
6675 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6676 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6677 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6678 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6679 }
6680
6681 gcc_unreachable ();
6682 }
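
/* Added example; the modes and register numbers are hypothetical.  With
   X = (reg:SI 2) and IN = (mem:SI (plus:SI (reg:SI 2) (const_int 4))),
   reg_overlap_mentioned_for_reload_p returns 1, since storing into hard
   register 2 would change the address IN refers to.  With X a pseudo whose
   only equivalence is a constant, it returns 0.  */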
6683
6684 /* Return nonzero if anything in X contains a MEM.  Look also at pseudo
6685    registers whose equivalence is a MEM.  */
6686
6687 static int
6688 refers_to_mem_for_reload_p (rtx x)
6689 {
6690 const char *fmt;
6691 int i;
6692
6693 if (MEM_P (x))
6694 return 1;
6695
6696 if (REG_P (x))
6697 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6698 && reg_equiv_memory_loc (REGNO (x)));
6699
6700 fmt = GET_RTX_FORMAT (GET_CODE (x));
6701 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6702 if (fmt[i] == 'e'
6703 && (MEM_P (XEXP (x, i))
6704 || refers_to_mem_for_reload_p (XEXP (x, i))))
6705 return 1;
6706
6707 return 0;
6708 }
6709 \f
6710 /* Check the insns before INSN to see if there is a suitable register
6711 containing the same value as GOAL.
6712 If OTHER is -1, look for a register in class RCLASS.
6713 Otherwise, just see if register number OTHER shares GOAL's value.
6714
6715 Return an rtx for the register found, or zero if none is found.
6716
6717 If RELOAD_REG_P is (short *)1,
6718 we reject any hard reg that appears in reload_reg_rtx
6719 because such a hard reg is also needed coming into this insn.
6720
6721 If RELOAD_REG_P is any other nonzero value,
6722 it is a vector indexed by hard reg number
6723 and we reject any hard reg whose element in the vector is nonnegative
6724 as well as any that appears in reload_reg_rtx.
6725
6726 If GOAL is zero, then GOALREG is a register number; we look
6727 for an equivalent for that register.
6728
6729 MODE is the machine mode of the value we want an equivalence for.
6730 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6731
6732 This function is used by jump.c as well as in the reload pass.
6733
6734 If GOAL is the sum of the stack pointer and a constant, we treat it
6735 as if it were a constant except that sp is required to be unchanging. */
6736
6737 rtx
6738 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6739 short *reload_reg_p, int goalreg, enum machine_mode mode)
6740 {
6741 rtx p = insn;
6742 rtx goaltry, valtry, value, where;
6743 rtx pat;
6744 int regno = -1;
6745 int valueno;
6746 int goal_mem = 0;
6747 int goal_const = 0;
6748 int goal_mem_addr_varies = 0;
6749 int need_stable_sp = 0;
6750 int nregs;
6751 int valuenregs;
6752 int num = 0;
6753
6754 if (goal == 0)
6755 regno = goalreg;
6756 else if (REG_P (goal))
6757 regno = REGNO (goal);
6758 else if (MEM_P (goal))
6759 {
6760 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6761 if (MEM_VOLATILE_P (goal))
6762 return 0;
6763 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6764 return 0;
6765 /* An address with side effects must be reexecuted. */
6766 switch (code)
6767 {
6768 case POST_INC:
6769 case PRE_INC:
6770 case POST_DEC:
6771 case PRE_DEC:
6772 case POST_MODIFY:
6773 case PRE_MODIFY:
6774 return 0;
6775 default:
6776 break;
6777 }
6778 goal_mem = 1;
6779 }
6780 else if (CONSTANT_P (goal))
6781 goal_const = 1;
6782 else if (GET_CODE (goal) == PLUS
6783 && XEXP (goal, 0) == stack_pointer_rtx
6784 && CONSTANT_P (XEXP (goal, 1)))
6785 goal_const = need_stable_sp = 1;
6786 else if (GET_CODE (goal) == PLUS
6787 && XEXP (goal, 0) == frame_pointer_rtx
6788 && CONSTANT_P (XEXP (goal, 1)))
6789 goal_const = 1;
6790 else
6791 return 0;
6792
6793 num = 0;
6794 /* Scan insns back from INSN, looking for one that copies
6795 a value into or out of GOAL.
6796 Stop and give up if we reach a label. */
6797
6798 while (1)
6799 {
6800 p = PREV_INSN (p);
6801 if (p && DEBUG_INSN_P (p))
6802 continue;
6803 num++;
6804 if (p == 0 || LABEL_P (p)
6805 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6806 return 0;
6807
6808 /* Don't reuse register contents from before a setjmp-type
6809 function call; on the second return (from the longjmp) it
6810 might have been clobbered by a later reuse. It doesn't
6811 	     seem worthwhile to go and check whether it is actually
6812 	     reused, even if that information were readily available;
6813 just don't reuse it across the setjmp call. */
6814 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6815 return 0;
6816
6817 if (NONJUMP_INSN_P (p)
6818 /* If we don't want spill regs ... */
6819 && (! (reload_reg_p != 0
6820 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6821 /* ... then ignore insns introduced by reload; they aren't
6822 useful and can cause results in reload_as_needed to be
6823 different from what they were when calculating the need for
6824 spills. If we notice an input-reload insn here, we will
6825 reject it below, but it might hide a usable equivalent.
6826 That makes bad code. It may even fail: perhaps no reg was
6827 spilled for this insn because it was assumed we would find
6828 that equivalent. */
6829 || INSN_UID (p) < reload_first_uid))
6830 {
6831 rtx tem;
6832 pat = single_set (p);
6833
6834 /* First check for something that sets some reg equal to GOAL. */
6835 if (pat != 0
6836 && ((regno >= 0
6837 && true_regnum (SET_SRC (pat)) == regno
6838 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6839 ||
6840 (regno >= 0
6841 && true_regnum (SET_DEST (pat)) == regno
6842 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6843 ||
6844 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6845 /* When looking for stack pointer + const,
6846 make sure we don't use a stack adjust. */
6847 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6848 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6849 || (goal_mem
6850 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6851 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6852 || (goal_mem
6853 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6854 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6855 /* If we are looking for a constant,
6856 and something equivalent to that constant was copied
6857 into a reg, we can use that reg. */
6858 || (goal_const && REG_NOTES (p) != 0
6859 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6860 && ((rtx_equal_p (XEXP (tem, 0), goal)
6861 && (valueno
6862 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6863 || (REG_P (SET_DEST (pat))
6864 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6865 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6866 && CONST_INT_P (goal)
6867 && 0 != (goaltry
6868 = operand_subword (XEXP (tem, 0), 0, 0,
6869 VOIDmode))
6870 && rtx_equal_p (goal, goaltry)
6871 && (valtry
6872 = operand_subword (SET_DEST (pat), 0, 0,
6873 VOIDmode))
6874 && (valueno = true_regnum (valtry)) >= 0)))
6875 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6876 NULL_RTX))
6877 && REG_P (SET_DEST (pat))
6878 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6879 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6880 && CONST_INT_P (goal)
6881 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6882 VOIDmode))
6883 && rtx_equal_p (goal, goaltry)
6884 && (valtry
6885 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6886 && (valueno = true_regnum (valtry)) >= 0)))
6887 {
6888 if (other >= 0)
6889 {
6890 if (valueno != other)
6891 continue;
6892 }
6893 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6894 continue;
6895 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6896 mode, valueno))
6897 continue;
6898 value = valtry;
6899 where = p;
6900 break;
6901 }
6902 }
6903 }
6904
6905 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6906 (or copying VALUE into GOAL, if GOAL is also a register).
6907 Now verify that VALUE is really valid. */
6908
6909 /* VALUENO is the register number of VALUE; a hard register. */
6910
6911 /* Don't try to re-use something that is killed in this insn. We want
6912 to be able to trust REG_UNUSED notes. */
6913 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6914 return 0;
6915
6916 /* If we propose to get the value from the stack pointer or if GOAL is
6917 a MEM based on the stack pointer, we need a stable SP. */
6918 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6919 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6920 goal)))
6921 need_stable_sp = 1;
6922
6923 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6924 if (GET_MODE (value) != mode)
6925 return 0;
6926
6927 /* Reject VALUE if it was loaded from GOAL
6928 and is also a register that appears in the address of GOAL. */
6929
6930 if (goal_mem && value == SET_DEST (single_set (where))
6931 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6932 goal, (rtx*) 0))
6933 return 0;
6934
6935 /* Reject registers that overlap GOAL. */
6936
6937 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6938 nregs = hard_regno_nregs[regno][mode];
6939 else
6940 nregs = 1;
6941 valuenregs = hard_regno_nregs[valueno][mode];
6942
6943 if (!goal_mem && !goal_const
6944 && regno + nregs > valueno && regno < valueno + valuenregs)
6945 return 0;
6946
6947 /* Reject VALUE if it is one of the regs reserved for reloads.
6948 Reload1 knows how to reuse them anyway, and it would get
6949 confused if we allocated one without its knowledge.
6950 (Now that insns introduced by reload are ignored above,
6951 this case shouldn't happen, but I'm not positive.) */
6952
6953 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6954 {
6955 int i;
6956 for (i = 0; i < valuenregs; ++i)
6957 if (reload_reg_p[valueno + i] >= 0)
6958 return 0;
6959 }
6960
6961 /* Reject VALUE if it is a register being used for an input reload
6962 even if it is not one of those reserved. */
6963
6964 if (reload_reg_p != 0)
6965 {
6966 int i;
6967 for (i = 0; i < n_reloads; i++)
6968 if (rld[i].reg_rtx != 0 && rld[i].in)
6969 {
6970 int regno1 = REGNO (rld[i].reg_rtx);
6971 int nregs1 = hard_regno_nregs[regno1]
6972 [GET_MODE (rld[i].reg_rtx)];
6973 if (regno1 < valueno + valuenregs
6974 && regno1 + nregs1 > valueno)
6975 return 0;
6976 }
6977 }
6978
6979 if (goal_mem)
6980     /* We must treat the frame pointer as varying here,
6981        since it can vary, e.g. in a nonlocal goto as generated by expand_goto.  */
6982 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6983
6984 /* Now verify that the values of GOAL and VALUE remain unaltered
6985 until INSN is reached. */
6986
6987 p = insn;
6988 while (1)
6989 {
6990 p = PREV_INSN (p);
6991 if (p == where)
6992 return value;
6993
6994       /* Don't trust the equivalence past a function call
6995 	 if either of the two values is in a call-clobbered register or in memory.  */
6996 if (CALL_P (p))
6997 {
6998 int i;
6999
7000 if (goal_mem || need_stable_sp)
7001 return 0;
7002
7003 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
7004 for (i = 0; i < nregs; ++i)
7005 if (call_used_regs[regno + i]
7006 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
7007 return 0;
7008
7009 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
7010 for (i = 0; i < valuenregs; ++i)
7011 if (call_used_regs[valueno + i]
7012 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
7013 return 0;
7014 }
7015
7016 if (INSN_P (p))
7017 {
7018 pat = PATTERN (p);
7019
7020 	  /* Watch out for unspec_volatile and volatile asms.  */
7021 if (volatile_insn_p (pat))
7022 return 0;
7023
7024 /* If this insn P stores in either GOAL or VALUE, return 0.
7025 If GOAL is a memory ref and this insn writes memory, return 0.
7026 If GOAL is a memory ref and its address is not constant,
7027 and this insn P changes a register used in GOAL, return 0. */
7028
7029 if (GET_CODE (pat) == COND_EXEC)
7030 pat = COND_EXEC_CODE (pat);
7031 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7032 {
7033 rtx dest = SET_DEST (pat);
7034 while (GET_CODE (dest) == SUBREG
7035 || GET_CODE (dest) == ZERO_EXTRACT
7036 || GET_CODE (dest) == STRICT_LOW_PART)
7037 dest = XEXP (dest, 0);
7038 if (REG_P (dest))
7039 {
7040 int xregno = REGNO (dest);
7041 int xnregs;
7042 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7043 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7044 else
7045 xnregs = 1;
7046 if (xregno < regno + nregs && xregno + xnregs > regno)
7047 return 0;
7048 if (xregno < valueno + valuenregs
7049 && xregno + xnregs > valueno)
7050 return 0;
7051 if (goal_mem_addr_varies
7052 && reg_overlap_mentioned_for_reload_p (dest, goal))
7053 return 0;
7054 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7055 return 0;
7056 }
7057 else if (goal_mem && MEM_P (dest)
7058 && ! push_operand (dest, GET_MODE (dest)))
7059 return 0;
7060 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7061 && reg_equiv_memory_loc (regno) != 0)
7062 return 0;
7063 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7064 return 0;
7065 }
7066 else if (GET_CODE (pat) == PARALLEL)
7067 {
7068 int i;
7069 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7070 {
7071 rtx v1 = XVECEXP (pat, 0, i);
7072 if (GET_CODE (v1) == COND_EXEC)
7073 v1 = COND_EXEC_CODE (v1);
7074 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7075 {
7076 rtx dest = SET_DEST (v1);
7077 while (GET_CODE (dest) == SUBREG
7078 || GET_CODE (dest) == ZERO_EXTRACT
7079 || GET_CODE (dest) == STRICT_LOW_PART)
7080 dest = XEXP (dest, 0);
7081 if (REG_P (dest))
7082 {
7083 int xregno = REGNO (dest);
7084 int xnregs;
7085 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7086 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7087 else
7088 xnregs = 1;
7089 if (xregno < regno + nregs
7090 && xregno + xnregs > regno)
7091 return 0;
7092 if (xregno < valueno + valuenregs
7093 && xregno + xnregs > valueno)
7094 return 0;
7095 if (goal_mem_addr_varies
7096 && reg_overlap_mentioned_for_reload_p (dest,
7097 goal))
7098 return 0;
7099 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7100 return 0;
7101 }
7102 else if (goal_mem && MEM_P (dest)
7103 && ! push_operand (dest, GET_MODE (dest)))
7104 return 0;
7105 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7106 && reg_equiv_memory_loc (regno) != 0)
7107 return 0;
7108 else if (need_stable_sp
7109 && push_operand (dest, GET_MODE (dest)))
7110 return 0;
7111 }
7112 }
7113 }
7114
7115 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7116 {
7117 rtx link;
7118
7119 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7120 link = XEXP (link, 1))
7121 {
7122 pat = XEXP (link, 0);
7123 if (GET_CODE (pat) == CLOBBER)
7124 {
7125 rtx dest = SET_DEST (pat);
7126
7127 if (REG_P (dest))
7128 {
7129 int xregno = REGNO (dest);
7130 int xnregs
7131 = hard_regno_nregs[xregno][GET_MODE (dest)];
7132
7133 if (xregno < regno + nregs
7134 && xregno + xnregs > regno)
7135 return 0;
7136 else if (xregno < valueno + valuenregs
7137 && xregno + xnregs > valueno)
7138 return 0;
7139 else if (goal_mem_addr_varies
7140 && reg_overlap_mentioned_for_reload_p (dest,
7141 goal))
7142 return 0;
7143 }
7144
7145 else if (goal_mem && MEM_P (dest)
7146 && ! push_operand (dest, GET_MODE (dest)))
7147 return 0;
7148 else if (need_stable_sp
7149 && push_operand (dest, GET_MODE (dest)))
7150 return 0;
7151 }
7152 }
7153 }
7154
7155 #ifdef AUTO_INC_DEC
7156 /* If this insn auto-increments or auto-decrements
7157 either regno or valueno, return 0 now.
7158 If GOAL is a memory ref and its address is not constant,
7159 and this insn P increments a register used in GOAL, return 0. */
7160 {
7161 rtx link;
7162
7163 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7164 if (REG_NOTE_KIND (link) == REG_INC
7165 && REG_P (XEXP (link, 0)))
7166 {
7167 int incno = REGNO (XEXP (link, 0));
7168 if (incno < regno + nregs && incno >= regno)
7169 return 0;
7170 if (incno < valueno + valuenregs && incno >= valueno)
7171 return 0;
7172 if (goal_mem_addr_varies
7173 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7174 goal))
7175 return 0;
7176 }
7177 }
7178 #endif
7179 }
7180 }
7181 }
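
/* Sketch of a typical use, added for illustration only; the surrounding
   logic is simplified and the real callers do more checking.

	rtx equiv = find_equiv_reg (rld[r].in, insn, rld[r].rclass, -1,
				    NULL, 0, rld[r].inmode);
	if (equiv != 0)
	  ... reuse EQUIV instead of emitting a fresh load ...

   A nonzero result is a hard register that already holds the wanted value
   in the insns just before INSN.  */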
7182 \f
7183 /* Find a place where INCED appears in an increment or decrement operator
7184 within X, and return the amount INCED is incremented or decremented by.
7185 The value is always positive. */
7186
7187 static int
7188 find_inc_amount (rtx x, rtx inced)
7189 {
7190 enum rtx_code code = GET_CODE (x);
7191 const char *fmt;
7192 int i;
7193
7194 if (code == MEM)
7195 {
7196 rtx addr = XEXP (x, 0);
7197 if ((GET_CODE (addr) == PRE_DEC
7198 || GET_CODE (addr) == POST_DEC
7199 || GET_CODE (addr) == PRE_INC
7200 || GET_CODE (addr) == POST_INC)
7201 && XEXP (addr, 0) == inced)
7202 return GET_MODE_SIZE (GET_MODE (x));
7203 else if ((GET_CODE (addr) == PRE_MODIFY
7204 || GET_CODE (addr) == POST_MODIFY)
7205 && GET_CODE (XEXP (addr, 1)) == PLUS
7206 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7207 && XEXP (addr, 0) == inced
7208 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7209 {
7210 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7211 return i < 0 ? -i : i;
7212 }
7213 }
7214
7215 fmt = GET_RTX_FORMAT (code);
7216 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7217 {
7218 if (fmt[i] == 'e')
7219 {
7220 int tem = find_inc_amount (XEXP (x, i), inced);
7221 if (tem != 0)
7222 return tem;
7223 }
7224 if (fmt[i] == 'E')
7225 {
7226 int j;
7227 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7228 {
7229 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7230 if (tem != 0)
7231 return tem;
7232 }
7233 }
7234 }
7235
7236 return 0;
7237 }
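
/* Worked example, added for illustration (SImode is 4 bytes): if X contains
   (mem:SI (post_inc:SI (reg:SI 2))) and INCED is that (reg:SI 2) rtx,
   find_inc_amount returns 4, the size of the memory reference.  For a
   pre-modify address such as
   (pre_modify:SI (reg:SI 2) (plus:SI (reg:SI 2) (const_int -8)))
   the result is 8, the absolute value of the adjustment.  */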
7238 \f
7239 /* Return 1 if a register in the range [REGNO, ENDREGNO) is the subject of
7240    a REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
7241
7242 #ifdef AUTO_INC_DEC
7243 static int
7244 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7245 rtx insn)
7246 {
7247 rtx link;
7248
7249 gcc_assert (insn);
7250
7251 if (! INSN_P (insn))
7252 return 0;
7253
7254 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7255 if (REG_NOTE_KIND (link) == REG_INC)
7256 {
7257 unsigned int test = (int) REGNO (XEXP (link, 0));
7258 if (test >= regno && test < endregno)
7259 return 1;
7260 }
7261 return 0;
7262 }
7263 #else
7264
7265 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7266
7267 #endif
7268
7269 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7270 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7271 REG_INC. REGNO must refer to a hard register. */
7272
7273 int
7274 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7275 int sets)
7276 {
7277 unsigned int nregs, endregno;
7278
7279   /* REGNO must be a hard register.  */
7280 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7281
7282 nregs = hard_regno_nregs[regno][mode];
7283 endregno = regno + nregs;
7284
7285 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7286 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7287 && REG_P (XEXP (PATTERN (insn), 0)))
7288 {
7289 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7290
7291 return test >= regno && test < endregno;
7292 }
7293
7294 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7295 return 1;
7296
7297 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7298 {
7299 int i = XVECLEN (PATTERN (insn), 0) - 1;
7300
7301 for (; i >= 0; i--)
7302 {
7303 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7304 if ((GET_CODE (elt) == CLOBBER
7305 || (sets == 1 && GET_CODE (elt) == SET))
7306 && REG_P (XEXP (elt, 0)))
7307 {
7308 unsigned int test = REGNO (XEXP (elt, 0));
7309
7310 if (test >= regno && test < endregno)
7311 return 1;
7312 }
7313 if (sets == 2
7314 && reg_inc_found_and_valid_p (regno, endregno, elt))
7315 return 1;
7316 }
7317 }
7318
7319 return 0;
7320 }
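
/* Added example with made-up register numbers: for an insn whose pattern is
   (parallel [(set (reg:SI 0) (reg:SI 1)) (clobber (reg:SI 3))]),
   regno_clobbered_p (3, insn, SImode, 0) returns 1 because of the CLOBBER,
   while regno_clobbered_p (1, insn, SImode, 0) returns 0; with SETS == 1,
   regno_clobbered_p (0, insn, SImode, 1) also returns 1 because of the SET.  */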
7321
7322 /* Find the low part, with mode MODE, of the hard register RELOADREG.  */
7323 rtx
7324 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7325 {
7326 int regno;
7327
7328 if (GET_MODE (reloadreg) == mode)
7329 return reloadreg;
7330
7331 regno = REGNO (reloadreg);
7332
7333 if (REG_WORDS_BIG_ENDIAN)
7334 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7335 - (int) hard_regno_nregs[regno][mode];
7336
7337 return gen_rtx_REG (mode, regno);
7338 }
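
/* Added example; the register numbers and modes are hypothetical.  On a
   target where REG_WORDS_BIG_ENDIAN holds and hard registers are word-sized,
   the low SImode part of a DImode value living in registers 4 and 5 is
   register 5, so

	reload_adjust_reg_for_mode (gen_rtx_REG (DImode, 4), SImode)

   returns (reg:SI 5); on a little-endian-words target it returns
   (reg:SI 4).  */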
7339
7340 static const char *const reload_when_needed_name[] =
7341 {
7342 "RELOAD_FOR_INPUT",
7343 "RELOAD_FOR_OUTPUT",
7344 "RELOAD_FOR_INSN",
7345 "RELOAD_FOR_INPUT_ADDRESS",
7346 "RELOAD_FOR_INPADDR_ADDRESS",
7347 "RELOAD_FOR_OUTPUT_ADDRESS",
7348 "RELOAD_FOR_OUTADDR_ADDRESS",
7349 "RELOAD_FOR_OPERAND_ADDRESS",
7350 "RELOAD_FOR_OPADDR_ADDR",
7351 "RELOAD_OTHER",
7352 "RELOAD_FOR_OTHER_ADDRESS"
7353 };
7354
7355 /* These functions are used to print the variables set by `find_reloads'.  */
7356
7357 DEBUG_FUNCTION void
7358 debug_reload_to_stream (FILE *f)
7359 {
7360 int r;
7361 const char *prefix;
7362
7363 if (! f)
7364 f = stderr;
7365 for (r = 0; r < n_reloads; r++)
7366 {
7367 fprintf (f, "Reload %d: ", r);
7368
7369 if (rld[r].in != 0)
7370 {
7371 fprintf (f, "reload_in (%s) = ",
7372 GET_MODE_NAME (rld[r].inmode));
7373 print_inline_rtx (f, rld[r].in, 24);
7374 fprintf (f, "\n\t");
7375 }
7376
7377 if (rld[r].out != 0)
7378 {
7379 fprintf (f, "reload_out (%s) = ",
7380 GET_MODE_NAME (rld[r].outmode));
7381 print_inline_rtx (f, rld[r].out, 24);
7382 fprintf (f, "\n\t");
7383 }
7384
7385 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7386
7387 fprintf (f, "%s (opnum = %d)",
7388 reload_when_needed_name[(int) rld[r].when_needed],
7389 rld[r].opnum);
7390
7391 if (rld[r].optional)
7392 fprintf (f, ", optional");
7393
7394 if (rld[r].nongroup)
7395 fprintf (f, ", nongroup");
7396
7397 if (rld[r].inc != 0)
7398 fprintf (f, ", inc by %d", rld[r].inc);
7399
7400 if (rld[r].nocombine)
7401 fprintf (f, ", can't combine");
7402
7403 if (rld[r].secondary_p)
7404 fprintf (f, ", secondary_reload_p");
7405
7406 if (rld[r].in_reg != 0)
7407 {
7408 fprintf (f, "\n\treload_in_reg: ");
7409 print_inline_rtx (f, rld[r].in_reg, 24);
7410 }
7411
7412 if (rld[r].out_reg != 0)
7413 {
7414 fprintf (f, "\n\treload_out_reg: ");
7415 print_inline_rtx (f, rld[r].out_reg, 24);
7416 }
7417
7418 if (rld[r].reg_rtx != 0)
7419 {
7420 fprintf (f, "\n\treload_reg_rtx: ");
7421 print_inline_rtx (f, rld[r].reg_rtx, 24);
7422 }
7423
7424 prefix = "\n\t";
7425 if (rld[r].secondary_in_reload != -1)
7426 {
7427 fprintf (f, "%ssecondary_in_reload = %d",
7428 prefix, rld[r].secondary_in_reload);
7429 prefix = ", ";
7430 }
7431
7432 if (rld[r].secondary_out_reload != -1)
7433 fprintf (f, "%ssecondary_out_reload = %d\n",
7434 prefix, rld[r].secondary_out_reload);
7435
7436 prefix = "\n\t";
7437 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7438 {
7439 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7440 insn_data[rld[r].secondary_in_icode].name);
7441 prefix = ", ";
7442 }
7443
7444 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7445 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7446 insn_data[rld[r].secondary_out_icode].name);
7447
7448 fprintf (f, "\n");
7449 }
7450 }
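
/* For reference (added; the exact rtx and register are hypothetical), one
   reload printed by debug_reload_to_stream looks roughly like:

	Reload 0: reload_in (SI) = (reg:SI 123)
		GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
		reload_in_reg: (reg:SI 123)
		reload_reg_rtx: (reg:SI 2)

   with one such block per element of rld[].  */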
7451
7452 DEBUG_FUNCTION void
7453 debug_reload (void)
7454 {
7455 debug_reload_to_stream (stderr);
7456 }