gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
 33    2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
85
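/* A rough sketch of the calling sequence described above, as driven from
   reload1.c (simplified; the real driver also performs register
   elimination, reload inheritance and spilling):

       init_reload ();
       for each INSN needing reloads:
         find_reloads (INSN, replace, ind_levels, live_known, reload_reg_p);
         ... choose a hard reg for each rld[j], setting rld[j].reg_rtx,
             and emit load insns before INSN and store insns after it ...
         subst_reloads (INSN);

   The loop body above is only illustrative; the actual code lives in
   reload1.c (reload_as_needed and choose_reload_regs).  */
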
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109
110 /* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112 #define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
117
118 /* True if RCLASS is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
120
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
127 }
128
129 \f
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140 int reload_n_operands;
141
142 /* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
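/* As a sketch (simplified; the real subst_reloads also checks for unset
   reload registers, mode mismatches and nested SUBREGs), the substitution
   pass later does roughly this for each recorded entry:

       struct replacement *r = &replacements[i];
       rtx reloadreg = rld[r->what].reg_rtx;
       if (reloadreg != 0)
         *r->where = reloadreg;
*/
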
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
151
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
154 {
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
158 };
159
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161
162 /* Number of replacements currently recorded. */
163 static int n_replacements;
164
165 /* Used to track what is modified by an operand. */
166 struct decomposition
167 {
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 HOST_WIDE_INT start; /* Starting offset or register number. */
172 HOST_WIDE_INT end; /* Ending offset or register number. */
173 };
174
175 #ifdef SECONDARY_MEMORY_NEEDED
176
177 /* Save MEMs needed to copy from one class of registers to another. One MEM
178 is used per mode, but normally only one or two modes are ever used.
179
180 We keep two versions, before and after register elimination. The one
181    after register elimination is recorded separately for each operand.  This
182    is done in case the address is not valid, to be sure that we reload each
183    one separately.  */
184
185 static rtx secondary_memlocs[NUM_MACHINE_MODES];
186 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
187 static int secondary_memlocs_elim_used = 0;
188 #endif
189
190 /* The instruction we are doing reloads for;
191 so we can test whether a register dies in it. */
192 static rtx_insn *this_insn;
193
194 /* Nonzero if this instruction is a user-specified asm with operands. */
195 static int this_insn_is_asm;
196
197 /* If hard_regs_live_known is nonzero,
198 we can tell which hard regs are currently live,
199 at least enough to succeed in choosing dummy reloads. */
200 static int hard_regs_live_known;
201
202 /* Indexed by hard reg number,
203 element is nonnegative if hard reg has been spilled.
204 This vector is passed to `find_reloads' as an argument
205 and is not changed here. */
206 static short *static_reload_reg_p;
207
208 /* Set to 1 in subst_reg_equivs if it changes anything. */
209 static int subst_reg_equivs_changed;
210
211 /* On return from push_reload, holds the reload-number for the OUT
212    operand, which can be different from the reload-number for the input operand.  */
213 static int output_reloadnum;
214
215 /* Compare two RTX's. */
216 #define MATCHES(x, y) \
217 (x == y || (x != 0 && (REG_P (x) \
218 ? REG_P (y) && REGNO (x) == REGNO (y) \
219 : rtx_equal_p (x, y) && ! side_effects_p (x))))
220
221 /* Indicates if two reload purposes are for similar enough things that we
222 can merge their reloads. */
223 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
224 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
225 || ((when1) == (when2) && (op1) == (op2)) \
226 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
227 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
228 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
229 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
230 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
231
232 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
233 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
234 ((when1) != (when2) \
235 || ! ((op1) == (op2) \
236 || (when1) == RELOAD_FOR_INPUT \
237 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
238 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
239
240 /* If we are going to reload an address, compute the reload type to
241 use. */
242 #define ADDR_TYPE(type) \
243 ((type) == RELOAD_FOR_INPUT_ADDRESS \
244 ? RELOAD_FOR_INPADDR_ADDRESS \
245 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
246 ? RELOAD_FOR_OUTADDR_ADDRESS \
247 : (type)))
248
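/* For example (only a sketch of how the two merging macros interact):
   MERGABLE_RELOADS (RELOAD_FOR_INPUT_ADDRESS, RELOAD_FOR_INPUT_ADDRESS, 1, 1)
   is nonzero and MERGE_TO_OTHER is zero for the same arguments, so the
   merged reload keeps the type RELOAD_FOR_INPUT_ADDRESS; merging with a
   RELOAD_OTHER reload is also allowed, but MERGE_TO_OTHER is then nonzero,
   so the result is demoted to RELOAD_OTHER and can no longer share its
   register with reloads for other parts of the insn.  */
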
249 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
250 machine_mode, enum reload_type,
251 enum insn_code *, secondary_reload_info *);
252 static enum reg_class find_valid_class (machine_mode, machine_mode,
253 int, unsigned int);
254 static void push_replacement (rtx *, int, machine_mode);
255 static void dup_replacements (rtx *, rtx *);
256 static void combine_reloads (void);
257 static int find_reusable_reload (rtx *, rtx, enum reg_class,
258 enum reload_type, int, int);
259 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
260 machine_mode, reg_class_t, int, int);
261 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
262 static struct decomposition decompose (rtx);
263 static int immune_p (rtx, rtx, struct decomposition);
264 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
265 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
266 rtx_insn *, int *);
267 static rtx make_memloc (rtx, int);
268 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
269 addr_space_t, rtx *);
270 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
271 int, enum reload_type, int, rtx_insn *);
272 static rtx subst_reg_equivs (rtx, rtx_insn *);
273 static rtx subst_indexed_address (rtx);
274 static void update_auto_inc_notes (rtx_insn *, int, int);
275 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
276 enum rtx_code, enum rtx_code, rtx *,
277                                    int, enum reload_type, int, rtx_insn *);
278 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
279 machine_mode, int,
280 enum reload_type, int);
281 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
282 int, rtx_insn *, int *);
283 static void copy_replacements_1 (rtx *, rtx *, int);
284 static int find_inc_amount (rtx, rtx);
285 static int refers_to_mem_for_reload_p (rtx);
286 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
287 rtx, rtx *);
288
289 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
290 list yet. */
291
292 static void
293 push_reg_equiv_alt_mem (int regno, rtx mem)
294 {
295 rtx it;
296
297 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
298 if (rtx_equal_p (XEXP (it, 0), mem))
299 return;
300
301 reg_equiv_alt_mem_list (regno)
302 = alloc_EXPR_LIST (REG_EQUIV, mem,
303 reg_equiv_alt_mem_list (regno));
304 }
305 \f
306 /* Determine if any secondary reloads are needed for loading (if IN_P is
307 nonzero) or storing (if IN_P is zero) X to or from a reload register of
308 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
309 are needed, push them.
310
311 Return the reload number of the secondary reload we made, or -1 if
312 we didn't need one. *PICODE is set to the insn_code to use if we do
313 need a secondary reload. */
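/* As an illustration (target-dependent, so only a sketch): if
   targetm.secondary_reload (true, x, reload_class, reload_mode, &sri)
   returns, say, GENERAL_REGS because X is a MEM that cannot be loaded
   directly into a register of RELOAD_CLASS on that target, this function
   records an extra reload of X into a GENERAL_REGS register, and the
   original reload is then satisfied by a register-to-register copy.  */
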
314
315 static int
316 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
317 enum reg_class reload_class,
318 machine_mode reload_mode, enum reload_type type,
319 enum insn_code *picode, secondary_reload_info *prev_sri)
320 {
321 enum reg_class rclass = NO_REGS;
322 enum reg_class scratch_class;
323 machine_mode mode = reload_mode;
324 enum insn_code icode = CODE_FOR_nothing;
325 enum insn_code t_icode = CODE_FOR_nothing;
326 enum reload_type secondary_type;
327 int s_reload, t_reload = -1;
328 const char *scratch_constraint;
329 secondary_reload_info sri;
330
331 if (type == RELOAD_FOR_INPUT_ADDRESS
332 || type == RELOAD_FOR_OUTPUT_ADDRESS
333 || type == RELOAD_FOR_INPADDR_ADDRESS
334 || type == RELOAD_FOR_OUTADDR_ADDRESS)
335 secondary_type = type;
336 else
337 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
338
339 *picode = CODE_FOR_nothing;
340
341 /* If X is a paradoxical SUBREG, use the inner value to determine both the
342 mode and object being reloaded. */
343 if (paradoxical_subreg_p (x))
344 {
345 x = SUBREG_REG (x);
346 reload_mode = GET_MODE (x);
347 }
348
349 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
350 is still a pseudo-register by now, it *must* have an equivalent MEM
351 but we don't want to assume that), use that equivalent when seeing if
352 a secondary reload is needed since whether or not a reload is needed
353 might be sensitive to the form of the MEM. */
354
355 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
356 && reg_equiv_mem (REGNO (x)))
357 x = reg_equiv_mem (REGNO (x));
358
359 sri.icode = CODE_FOR_nothing;
360 sri.prev_sri = prev_sri;
361 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
362 reload_mode, &sri);
363 icode = (enum insn_code) sri.icode;
364
365 /* If we don't need any secondary registers, done. */
366 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
367 return -1;
368
369 if (rclass != NO_REGS)
370 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
371 reload_mode, type, &t_icode, &sri);
372
373 /* If we will be using an insn, the secondary reload is for a
374 scratch register. */
375
376 if (icode != CODE_FOR_nothing)
377 {
378 /* If IN_P is nonzero, the reload register will be the output in
379 operand 0. If IN_P is zero, the reload register will be the input
380 in operand 1. Outputs should have an initial "=", which we must
381 skip. */
382
383 /* ??? It would be useful to be able to handle only two, or more than
384 three, operands, but for now we can only handle the case of having
385 exactly three: output, input and one temp/scratch. */
386 gcc_assert (insn_data[(int) icode].n_operands == 3);
387
388 /* ??? We currently have no way to represent a reload that needs
389 an icode to reload from an intermediate tertiary reload register.
390 We should probably have a new field in struct reload to tag a
391 chain of scratch operand reloads onto. */
392 gcc_assert (rclass == NO_REGS);
393
394 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
395 gcc_assert (*scratch_constraint == '=');
396 scratch_constraint++;
397 if (*scratch_constraint == '&')
398 scratch_constraint++;
399 scratch_class = (reg_class_for_constraint
400 (lookup_constraint (scratch_constraint)));
401
402 rclass = scratch_class;
403 mode = insn_data[(int) icode].operand[2].mode;
404 }
405
406 /* This case isn't valid, so fail. Reload is allowed to use the same
407 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
408 in the case of a secondary register, we actually need two different
409 registers for correct code. We fail here to prevent the possibility of
410 silently generating incorrect code later.
411
412 The convention is that secondary input reloads are valid only if the
413 secondary_class is different from class. If you have such a case, you
414    cannot use secondary reloads; you must work around the problem some
415 other way.
416
417 Allow this when a reload_in/out pattern is being used. I.e. assume
418 that the generated code handles this case. */
419
420 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
421 || t_icode != CODE_FOR_nothing);
422
423 /* See if we can reuse an existing secondary reload. */
424 for (s_reload = 0; s_reload < n_reloads; s_reload++)
425 if (rld[s_reload].secondary_p
426 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
427 || reg_class_subset_p (rld[s_reload].rclass, rclass))
428 && ((in_p && rld[s_reload].inmode == mode)
429 || (! in_p && rld[s_reload].outmode == mode))
430 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
431 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
432 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
433 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
434 && (small_register_class_p (rclass)
435 || targetm.small_register_classes_for_mode_p (VOIDmode))
436 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
437 opnum, rld[s_reload].opnum))
438 {
439 if (in_p)
440 rld[s_reload].inmode = mode;
441 if (! in_p)
442 rld[s_reload].outmode = mode;
443
444 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
445 rld[s_reload].rclass = rclass;
446
447 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
448 rld[s_reload].optional &= optional;
449 rld[s_reload].secondary_p = 1;
450 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
451 opnum, rld[s_reload].opnum))
452 rld[s_reload].when_needed = RELOAD_OTHER;
453
454 break;
455 }
456
457 if (s_reload == n_reloads)
458 {
459 #ifdef SECONDARY_MEMORY_NEEDED
460 /* If we need a memory location to copy between the two reload regs,
461 set it up now. Note that we do the input case before making
462 the reload and the output case after. This is due to the
463 way reloads are output. */
464
465 if (in_p && icode == CODE_FOR_nothing
466 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
467 {
468 get_secondary_mem (x, reload_mode, opnum, type);
469
470 /* We may have just added new reloads. Make sure we add
471 the new reload at the end. */
472 s_reload = n_reloads;
473 }
474 #endif
475
476 /* We need to make a new secondary reload for this register class. */
477 rld[s_reload].in = rld[s_reload].out = 0;
478 rld[s_reload].rclass = rclass;
479
480 rld[s_reload].inmode = in_p ? mode : VOIDmode;
481 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
482 rld[s_reload].reg_rtx = 0;
483 rld[s_reload].optional = optional;
484 rld[s_reload].inc = 0;
485 /* Maybe we could combine these, but it seems too tricky. */
486 rld[s_reload].nocombine = 1;
487 rld[s_reload].in_reg = 0;
488 rld[s_reload].out_reg = 0;
489 rld[s_reload].opnum = opnum;
490 rld[s_reload].when_needed = secondary_type;
491 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
492 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
493 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
494 rld[s_reload].secondary_out_icode
495 = ! in_p ? t_icode : CODE_FOR_nothing;
496 rld[s_reload].secondary_p = 1;
497
498 n_reloads++;
499
500 #ifdef SECONDARY_MEMORY_NEEDED
501 if (! in_p && icode == CODE_FOR_nothing
502 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
503 get_secondary_mem (x, mode, opnum, type);
504 #endif
505 }
506
507 *picode = icode;
508 return s_reload;
509 }
510
511 /* If a secondary reload is needed, return its class. If both an intermediate
512    register and a scratch register are needed, we return the class of the
513 intermediate register. */
514 reg_class_t
515 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
516 rtx x)
517 {
518 enum insn_code icode;
519 secondary_reload_info sri;
520
521 sri.icode = CODE_FOR_nothing;
522 sri.prev_sri = NULL;
523 rclass
524 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
525 icode = (enum insn_code) sri.icode;
526
527 /* If there are no secondary reloads at all, we return NO_REGS.
528 If an intermediate register is needed, we return its class. */
529 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
530 return rclass;
531
532 /* No intermediate register is needed, but we have a special reload
533 pattern, which we assume for now needs a scratch register. */
534 return scratch_reload_class (icode);
535 }
536
537 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
538 three operands, verify that operand 2 is an output operand, and return
539 its register class.
540 ??? We'd like to be able to handle any pattern with at least 2 operands,
541 for zero or more scratch registers, but that needs more infrastructure. */
542 enum reg_class
543 scratch_reload_class (enum insn_code icode)
544 {
545 const char *scratch_constraint;
546 enum reg_class rclass;
547
548 gcc_assert (insn_data[(int) icode].n_operands == 3);
549 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
550 gcc_assert (*scratch_constraint == '=');
551 scratch_constraint++;
552 if (*scratch_constraint == '&')
553 scratch_constraint++;
554 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
555 gcc_assert (rclass != NO_REGS);
556 return rclass;
557 }
558 \f
559 #ifdef SECONDARY_MEMORY_NEEDED
560
561 /* Return a memory location that will be used to copy X in mode MODE.
562 If we haven't already made a location for this mode in this insn,
563 call find_reloads_address on the location being returned. */
564
565 rtx
566 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
567 int opnum, enum reload_type type)
568 {
569 rtx loc;
570 int mem_valid;
571
572 /* By default, if MODE is narrower than a word, widen it to a word.
573 This is required because most machines that require these memory
574    locations do not support short loads and stores from all registers
575 (e.g., FP registers). */
576
577 #ifdef SECONDARY_MEMORY_NEEDED_MODE
578 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
579 #else
580 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
581 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
582 #endif
583
584 /* If we already have made a MEM for this operand in MODE, return it. */
585 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
586 return secondary_memlocs_elim[(int) mode][opnum];
587
588 /* If this is the first time we've tried to get a MEM for this mode,
589 allocate a new one. `something_changed' in reload will get set
590 by noticing that the frame size has changed. */
591
592 if (secondary_memlocs[(int) mode] == 0)
593 {
594 #ifdef SECONDARY_MEMORY_NEEDED_RTX
595 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
596 #else
597 secondary_memlocs[(int) mode]
598 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
599 #endif
600 }
601
602 /* Get a version of the address doing any eliminations needed. If that
603 didn't give us a new MEM, make a new one if it isn't valid. */
604
605 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
606 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
607 MEM_ADDR_SPACE (loc));
608
609 if (! mem_valid && loc == secondary_memlocs[(int) mode])
610 loc = copy_rtx (loc);
611
612 /* The only time the call below will do anything is if the stack
613 offset is too large. In that case IND_LEVELS doesn't matter, so we
614 can just pass a zero. Adjust the type to be the address of the
615 corresponding object. If the address was valid, save the eliminated
616 address. If it wasn't valid, we need to make a reload each time, so
617 don't save it. */
618
619 if (! mem_valid)
620 {
621 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
622 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
623 : RELOAD_OTHER);
624
625 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
626 opnum, type, 0, 0);
627 }
628
629 secondary_memlocs_elim[(int) mode][opnum] = loc;
630 if (secondary_memlocs_elim_used <= (int)mode)
631 secondary_memlocs_elim_used = (int)mode + 1;
632 return loc;
633 }
634
635 /* Clear any secondary memory locations we've made. */
636
637 void
638 clear_secondary_mem (void)
639 {
640 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
641 }
642 #endif /* SECONDARY_MEMORY_NEEDED */
643 \f
644
645 /* Find the largest class which has at least one register valid in
646    mode INNER, and for which, for every such register, that register number
647 plus N is also valid in OUTER (if in range) and is cheap to move
648 into REGNO. Such a class must exist. */
649
650 static enum reg_class
651 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
652 machine_mode inner ATTRIBUTE_UNUSED, int n,
653 unsigned int dest_regno ATTRIBUTE_UNUSED)
654 {
655 int best_cost = -1;
656 int rclass;
657 int regno;
658 enum reg_class best_class = NO_REGS;
659 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
660 unsigned int best_size = 0;
661 int cost;
662
663 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
664 {
665 int bad = 0;
666 int good = 0;
667 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
668 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
669 {
670 if (HARD_REGNO_MODE_OK (regno, inner))
671 {
672 good = 1;
673 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
674 && ! HARD_REGNO_MODE_OK (regno + n, outer))
675 bad = 1;
676 }
677 }
678
679 if (bad || !good)
680 continue;
681 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
682
683 if ((reg_class_size[rclass] > best_size
684 && (best_cost < 0 || best_cost >= cost))
685 || best_cost > cost)
686 {
687 best_class = (enum reg_class) rclass;
688 best_size = reg_class_size[rclass];
689 best_cost = register_move_cost (outer, (enum reg_class) rclass,
690 dest_class);
691 }
692 }
693
694 gcc_assert (best_size != 0);
695
696 return best_class;
697 }
698
699 /* We are trying to reload a subreg of something that is not a register.
700 Find the largest class which contains only registers valid in
701 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
702 which we would eventually like to obtain the object. */
703
704 static enum reg_class
705 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
706 machine_mode mode ATTRIBUTE_UNUSED,
707 enum reg_class dest_class ATTRIBUTE_UNUSED)
708 {
709 int best_cost = -1;
710 int rclass;
711 int regno;
712 enum reg_class best_class = NO_REGS;
713 unsigned int best_size = 0;
714 int cost;
715
716 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
717 {
718 unsigned int computed_rclass_size = 0;
719
720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
721 {
722 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
723 && (HARD_REGNO_MODE_OK (regno, mode)))
724 computed_rclass_size++;
725 }
726
727 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
728
729 if ((computed_rclass_size > best_size
730 && (best_cost < 0 || best_cost >= cost))
731 || best_cost > cost)
732 {
733 best_class = (enum reg_class) rclass;
734 best_size = computed_rclass_size;
735 best_cost = register_move_cost (outer, (enum reg_class) rclass,
736 dest_class);
737 }
738 }
739
740 gcc_assert (best_size != 0);
741
742 #ifdef LIMIT_RELOAD_CLASS
743 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
744 #endif
745 return best_class;
746 }
747 \f
748 /* Return the number of a previously made reload that can be combined with
749 a new one, or n_reloads if none of the existing reloads can be used.
750 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
751 push_reload, they determine the kind of the new reload that we try to
752 combine. P_IN points to the corresponding value of IN, which can be
753 modified by this function.
754 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
755
756 static int
757 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
758 enum reload_type type, int opnum, int dont_share)
759 {
760 rtx in = *p_in;
761 int i;
762 /* We can't merge two reloads if the output of either one is
763 earlyclobbered. */
764
765 if (earlyclobber_operand_p (out))
766 return n_reloads;
767
768 /* We can use an existing reload if the class is right
769 and at least one of IN and OUT is a match
770 and the other is at worst neutral.
771 (A zero compared against anything is neutral.)
772
773 For targets with small register classes, don't use existing reloads
774 unless they are for the same thing since that can cause us to need
775 more reload registers than we otherwise would. */
776
777 for (i = 0; i < n_reloads; i++)
778 if ((reg_class_subset_p (rclass, rld[i].rclass)
779 || reg_class_subset_p (rld[i].rclass, rclass))
780 /* If the existing reload has a register, it must fit our class. */
781 && (rld[i].reg_rtx == 0
782 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
783 true_regnum (rld[i].reg_rtx)))
784 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
785 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
786 || (out != 0 && MATCHES (rld[i].out, out)
787 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
788 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
789 && (small_register_class_p (rclass)
790 || targetm.small_register_classes_for_mode_p (VOIDmode))
791 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
792 return i;
793
794 /* Reloading a plain reg for input can match a reload to postincrement
795 that reg, since the postincrement's value is the right value.
796 Likewise, it can match a preincrement reload, since we regard
797 the preincrementation as happening before any ref in this insn
798 to that register. */
799 for (i = 0; i < n_reloads; i++)
800 if ((reg_class_subset_p (rclass, rld[i].rclass)
801 || reg_class_subset_p (rld[i].rclass, rclass))
802 /* If the existing reload has a register, it must fit our
803 class. */
804 && (rld[i].reg_rtx == 0
805 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
806 true_regnum (rld[i].reg_rtx)))
807 && out == 0 && rld[i].out == 0 && rld[i].in != 0
808 && ((REG_P (in)
809 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
810 && MATCHES (XEXP (rld[i].in, 0), in))
811 || (REG_P (rld[i].in)
812 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
813 && MATCHES (XEXP (in, 0), rld[i].in)))
814 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
815 && (small_register_class_p (rclass)
816 || targetm.small_register_classes_for_mode_p (VOIDmode))
817 && MERGABLE_RELOADS (type, rld[i].when_needed,
818 opnum, rld[i].opnum))
819 {
820 /* Make sure reload_in ultimately has the increment,
821 not the plain register. */
822 if (REG_P (in))
823 *p_in = rld[i].in;
824 return i;
825 }
826 return n_reloads;
827 }
828
829 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
830 expression. MODE is the mode that X will be used in. OUTPUT is true if
831 the function is invoked for the output part of an enclosing reload. */
832
833 static bool
834 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
835 {
836 rtx inner;
837
838 /* Only SUBREGs are problematical. */
839 if (GET_CODE (x) != SUBREG)
840 return false;
841
842 inner = SUBREG_REG (x);
843
844 /* If INNER is a constant or PLUS, then INNER will need reloading. */
845 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
846 return true;
847
848 /* If INNER is not a hard register, then INNER will not need reloading. */
849 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
850 return false;
851
852 /* If INNER is not ok for MODE, then INNER will need reloading. */
853 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
854 return true;
855
856 /* If this is for an output, and the outer part is a word or smaller,
857 INNER is larger than a word and the number of registers in INNER is
858 not the same as the number of words in INNER, then INNER will need
859 reloading (with an in-out reload). */
860 return (output
861 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
862 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
863 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
864 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
865 }
866
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877    Such PLUS reloads are generated by find_reloads_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
883 {
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
888
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
896
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
903
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
909 if (recog_memoized (test_insn) >= 0)
910 {
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
913 }
914 recog_data = save_recog_data;
915 return r;
916 }
917
918 /* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
926
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
930
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
933
934    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
935
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
938
939 OPNUM and TYPE say what the purpose of this reload is.
940
941 The return value is the reload-number for this reload.
942
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
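/* For example, when find_reloads decides that operand I must be loaded
   into a register of class RCLASS before the insn and nothing is stored
   back, it makes a call of roughly this shape (simplified):

       push_reload (recog_data.operand[i], NULL_RTX,
                    recog_data.operand_loc[i], (rtx *) 0,
                    rclass, inmode, VOIDmode, 0, optional,
                    i, RELOAD_FOR_INPUT);

   The returned reload number indexes rld[].  */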
950
951 int
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
956 {
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
968
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
976
977   /* If find_reloads and friends have so far failed to replace a pseudo
978      with a constant from reg_equiv_constant, something went wrong
979 beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
985 {
986 int regno = REGNO (in);
987
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
991 }
992
993 /* reg_equiv_constant only contains constants which are obviously
994      not appropriate as a destination.  So if we would need to replace
995      the destination pseudo with a constant, we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
998 {
999 int regno = REGNO (out);
1000
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1004 }
1005
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1010 {
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1014
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1018
1019 default:
1020 break;
1021 }
1022
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035      the subreg, so do nothing here, and let the following statement handle it.)
1036
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1039
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1042
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1045
1046 Finally, reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there. */
1052
1053 if (in != 0 && GET_CODE (in) == SUBREG
1054 && (subreg_lowpart_p (in) || strict_low)
1055 #ifdef CANNOT_CHANGE_MODE_CLASS
1056 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1057 #endif
1058 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1059 && (CONSTANT_P (SUBREG_REG (in))
1060 || GET_CODE (SUBREG_REG (in)) == PLUS
1061 || strict_low
1062 || (((REG_P (SUBREG_REG (in))
1063 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1064 || MEM_P (SUBREG_REG (in)))
1065 && ((GET_MODE_PRECISION (inmode)
1066 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1067 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1068 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1069 <= UNITS_PER_WORD)
1070 && (GET_MODE_PRECISION (inmode)
1071 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1072 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1073 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1074 || (WORD_REGISTER_OPERATIONS
1075 && (GET_MODE_PRECISION (inmode)
1076 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1077 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1078 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1079 / UNITS_PER_WORD)))))
1080 || (REG_P (SUBREG_REG (in))
1081 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1082 /* The case where out is nonzero
1083 is handled differently in the following statement. */
1084 && (out == 0 || subreg_lowpart_p (in))
1085 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1086 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1087 > UNITS_PER_WORD)
1088 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1089 / UNITS_PER_WORD)
1090 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1091 [GET_MODE (SUBREG_REG (in))]))
1092 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1093 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1094 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1095 SUBREG_REG (in))
1096 == NO_REGS))
1097 #ifdef CANNOT_CHANGE_MODE_CLASS
1098 || (REG_P (SUBREG_REG (in))
1099 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1100 && REG_CANNOT_CHANGE_MODE_P
1101 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1102 #endif
1103 ))
1104 {
1105 #ifdef LIMIT_RELOAD_CLASS
1106 in_subreg_loc = inloc;
1107 #endif
1108 inloc = &SUBREG_REG (in);
1109 in = *inloc;
1110
1111 if (!WORD_REGISTER_OPERATIONS
1112 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1113 && MEM_P (in))
1114 /* This is supposed to happen only for paradoxical subregs made by
1115 	   combine.c.  (SUBREG (MEM)) isn't supposed to occur in other ways.  */
1116 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1117
1118 inmode = GET_MODE (in);
1119 }
1120
1121 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1122 where M1 is not valid for R if it was not handled by the code above.
1123
1124 Similar issue for (SUBREG constant ...) if it was not handled by the
1125 code above. This can happen if SUBREG_BYTE != 0.
1126
1127 However, we must reload the inner reg *as well as* the subreg in
1128 that case. */
1129
1130 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1131 {
1132 if (REG_P (SUBREG_REG (in)))
1133 subreg_in_class
1134 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1135 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1136 GET_MODE (SUBREG_REG (in)),
1137 SUBREG_BYTE (in),
1138 GET_MODE (in)),
1139 REGNO (SUBREG_REG (in)));
1140 else if (CONSTANT_P (SUBREG_REG (in))
1141 || GET_CODE (SUBREG_REG (in)) == PLUS)
1142 subreg_in_class = find_valid_class_1 (inmode,
1143 GET_MODE (SUBREG_REG (in)),
1144 rclass);
1145
1146 /* This relies on the fact that emit_reload_insns outputs the
1147 instructions for input reloads of type RELOAD_OTHER in the same
1148 order as the reloads. Thus if the outer reload is also of type
1149 RELOAD_OTHER, we are guaranteed that this inner reload will be
1150 output before the outer reload. */
1151 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1152 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1153 dont_remove_subreg = 1;
1154 }
1155
1156 /* Similarly for paradoxical and problematical SUBREGs on the output.
1157      Note that there is no reason we need to worry about the previous value
1158 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1159 entitled to clobber it all (except in the case of a word mode subreg
1160      or of a STRICT_LOW_PART; in the latter case the constraint should
1161 label it input-output.) */
1162 if (out != 0 && GET_CODE (out) == SUBREG
1163 && (subreg_lowpart_p (out) || strict_low)
1164 #ifdef CANNOT_CHANGE_MODE_CLASS
1165 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1166 #endif
1167 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1168 && (CONSTANT_P (SUBREG_REG (out))
1169 || strict_low
1170 || (((REG_P (SUBREG_REG (out))
1171 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1172 || MEM_P (SUBREG_REG (out)))
1173 && ((GET_MODE_PRECISION (outmode)
1174 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1175 || (WORD_REGISTER_OPERATIONS
1176 && (GET_MODE_PRECISION (outmode)
1177 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1178 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1179 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1180 / UNITS_PER_WORD)))))
1181 || (REG_P (SUBREG_REG (out))
1182 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1183 /* The case of a word mode subreg
1184 is handled differently in the following statement. */
1185 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1186 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1187 > UNITS_PER_WORD))
1188 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1189 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1190 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1191 SUBREG_REG (out))
1192 == NO_REGS))
1193 #ifdef CANNOT_CHANGE_MODE_CLASS
1194 || (REG_P (SUBREG_REG (out))
1195 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1196 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1197 GET_MODE (SUBREG_REG (out)),
1198 outmode))
1199 #endif
1200 ))
1201 {
1202 #ifdef LIMIT_RELOAD_CLASS
1203 out_subreg_loc = outloc;
1204 #endif
1205 outloc = &SUBREG_REG (out);
1206 out = *outloc;
1207 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1208 || GET_MODE_SIZE (GET_MODE (out))
1209 <= GET_MODE_SIZE (outmode));
1210 outmode = GET_MODE (out);
1211 }
1212
1213 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1214 where either M1 is not valid for R or M2 is wider than a word but we
1215 only need one register to store an M2-sized quantity in R.
1216
1217 However, we must reload the inner reg *as well as* the subreg in
1218 that case and the inner reg is an in-out reload. */
1219
1220 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1221 {
1222 enum reg_class in_out_class
1223 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1224 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1225 GET_MODE (SUBREG_REG (out)),
1226 SUBREG_BYTE (out),
1227 GET_MODE (out)),
1228 REGNO (SUBREG_REG (out)));
1229
1230 /* This relies on the fact that emit_reload_insns outputs the
1231 instructions for output reloads of type RELOAD_OTHER in reverse
1232 order of the reloads. Thus if the outer reload is also of type
1233 RELOAD_OTHER, we are guaranteed that this inner reload will be
1234 output after the outer reload. */
1235 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1236 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1237 0, 0, opnum, RELOAD_OTHER);
1238 dont_remove_subreg = 1;
1239 }
1240
1241 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1242 if (in != 0 && out != 0 && MEM_P (out)
1243 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1244 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1245 dont_share = 1;
1246
1247 /* If IN is a SUBREG of a hard register, make a new REG. This
1248 simplifies some of the cases below. */
1249
1250 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1251 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1252 && ! dont_remove_subreg)
1253 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1254
1255 /* Similarly for OUT. */
1256 if (out != 0 && GET_CODE (out) == SUBREG
1257 && REG_P (SUBREG_REG (out))
1258 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1259 && ! dont_remove_subreg)
1260 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1261
1262 /* Narrow down the class of register wanted if that is
1263 desirable on this machine for efficiency. */
1264 {
1265 reg_class_t preferred_class = rclass;
1266
1267 if (in != 0)
1268 preferred_class = targetm.preferred_reload_class (in, rclass);
1269
1270 /* Output reloads may need analogous treatment, different in detail. */
1271 if (out != 0)
1272 preferred_class
1273 = targetm.preferred_output_reload_class (out, preferred_class);
1274
1275 /* Discard what the target said if we cannot do it. */
1276 if (preferred_class != NO_REGS
1277 || (optional && type == RELOAD_FOR_OUTPUT))
1278 rclass = (enum reg_class) preferred_class;
1279 }
1280
1281 /* Make sure we use a class that can handle the actual pseudo
1282 inside any subreg. For example, on the 386, QImode regs
1283 can appear within SImode subregs. Although GENERAL_REGS
1284 can handle SImode, QImode needs a smaller class. */
1285 #ifdef LIMIT_RELOAD_CLASS
1286 if (in_subreg_loc)
1287 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1288 else if (in != 0 && GET_CODE (in) == SUBREG)
1289 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1290
1291 if (out_subreg_loc)
1292 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1293 if (out != 0 && GET_CODE (out) == SUBREG)
1294 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1295 #endif
1296
1297 /* Verify that this class is at least possible for the mode that
1298 is specified. */
1299 if (this_insn_is_asm)
1300 {
1301 machine_mode mode;
1302 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1303 mode = inmode;
1304 else
1305 mode = outmode;
1306 if (mode == VOIDmode)
1307 {
1308 error_for_asm (this_insn, "cannot reload integer constant "
1309 "operand in %<asm%>");
1310 mode = word_mode;
1311 if (in != 0)
1312 inmode = word_mode;
1313 if (out != 0)
1314 outmode = word_mode;
1315 }
1316 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1317 if (HARD_REGNO_MODE_OK (i, mode)
1318 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1319 break;
1320 if (i == FIRST_PSEUDO_REGISTER)
1321 {
1322 error_for_asm (this_insn, "impossible register constraint "
1323 "in %<asm%>");
1324 /* Avoid further trouble with this insn. */
1325 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1326 /* We used to continue here setting class to ALL_REGS, but it triggers
1327 	     a sanity check on i386 for:
1328 void foo(long double d)
1329 {
1330 asm("" :: "a" (d));
1331 }
1332 Returning zero here ought to be safe as we take care in
1333 	     find_reloads to not process the reloads when the instruction was
1334 	     replaced by a USE.  */
1335
1336 return 0;
1337 }
1338 }
1339
1340 /* Optional output reloads are always OK even if we have no register class,
1341 since the function of these reloads is only to have spill_reg_store etc.
1342 set, so that the storing insn can be deleted later. */
1343 gcc_assert (rclass != NO_REGS
1344 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1345
1346 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1347
1348 if (i == n_reloads)
1349 {
1350 /* See if we need a secondary reload register to move between CLASS
1351 and IN or CLASS and OUT. Get the icode and push any required reloads
1352 needed for each of them if so. */
1353
1354 if (in != 0)
1355 secondary_in_reload
1356 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1357 &secondary_in_icode, NULL);
1358 if (out != 0 && GET_CODE (out) != SCRATCH)
1359 secondary_out_reload
1360 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1361 type, &secondary_out_icode, NULL);
1362
1363 /* We found no existing reload suitable for re-use.
1364 So add an additional reload. */
1365
1366 #ifdef SECONDARY_MEMORY_NEEDED
1367 if (subreg_in_class == NO_REGS
1368 && in != 0
1369 && (REG_P (in)
1370 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1371 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1372 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1373 /* If a memory location is needed for the copy, make one. */
1374 if (subreg_in_class != NO_REGS
1375 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1376 get_secondary_mem (in, inmode, opnum, type);
1377 #endif
1378
1379 i = n_reloads;
1380 rld[i].in = in;
1381 rld[i].out = out;
1382 rld[i].rclass = rclass;
1383 rld[i].inmode = inmode;
1384 rld[i].outmode = outmode;
1385 rld[i].reg_rtx = 0;
1386 rld[i].optional = optional;
1387 rld[i].inc = 0;
1388 rld[i].nocombine = 0;
1389 rld[i].in_reg = inloc ? *inloc : 0;
1390 rld[i].out_reg = outloc ? *outloc : 0;
1391 rld[i].opnum = opnum;
1392 rld[i].when_needed = type;
1393 rld[i].secondary_in_reload = secondary_in_reload;
1394 rld[i].secondary_out_reload = secondary_out_reload;
1395 rld[i].secondary_in_icode = secondary_in_icode;
1396 rld[i].secondary_out_icode = secondary_out_icode;
1397 rld[i].secondary_p = 0;
1398
1399 n_reloads++;
1400
1401 #ifdef SECONDARY_MEMORY_NEEDED
1402 if (out != 0
1403 && (REG_P (out)
1404 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1405 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1406 && SECONDARY_MEMORY_NEEDED (rclass,
1407 REGNO_REG_CLASS (reg_or_subregno (out)),
1408 outmode))
1409 get_secondary_mem (out, outmode, opnum, type);
1410 #endif
1411 }
1412 else
1413 {
1414 /* We are reusing an existing reload,
1415 but we may have additional information for it.
1416 For example, we may now have both IN and OUT
1417 while the old one may have just one of them. */
1418
1419 /* The modes can be different. If they are, we want to reload in
1420 the larger mode, so that the value is valid for both modes. */
1421 if (inmode != VOIDmode
1422 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1423 rld[i].inmode = inmode;
1424 if (outmode != VOIDmode
1425 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1426 rld[i].outmode = outmode;
1427 if (in != 0)
1428 {
1429 rtx in_reg = inloc ? *inloc : 0;
1430 /* If we merge reloads for two distinct rtl expressions that
1431 are identical in content, there might be duplicate address
1432 reloads. Remove the extra set now, so that if we later find
1433 that we can inherit this reload, we can get rid of the
1434 address reloads altogether.
1435
1436 Do not do this if both reloads are optional since the result
1437 would be an optional reload which could potentially leave
1438 unresolved address replacements.
1439
1440 It is not sufficient to call transfer_replacements since
1441 choose_reload_regs will remove the replacements for address
1442 reloads of inherited reloads which results in the same
1443 problem. */
1444 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1445 && ! (rld[i].optional && optional))
1446 {
1447 /* We must keep the address reload with the lower operand
1448 number alive. */
1449 if (opnum > rld[i].opnum)
1450 {
1451 remove_address_replacements (in);
1452 in = rld[i].in;
1453 in_reg = rld[i].in_reg;
1454 }
1455 else
1456 remove_address_replacements (rld[i].in);
1457 }
1458 	  /* When emitting reloads we don't just look at the inmode
1459 	     and outmode, but also directly at the operands (in and out).
1460 So we can't simply overwrite them with whatever we have found
1461 for this (to-be-merged) reload, we have to "merge" that too.
1462 Reusing another reload already verified that we deal with the
1463 same operands, just possibly in different modes. So we
1464 overwrite the operands only when the new mode is larger.
1465 See also PR33613. */
1466 if (!rld[i].in
1467 || GET_MODE_SIZE (GET_MODE (in))
1468 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1469 rld[i].in = in;
1470 if (!rld[i].in_reg
1471 || (in_reg
1472 && GET_MODE_SIZE (GET_MODE (in_reg))
1473 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1474 rld[i].in_reg = in_reg;
1475 }
1476 if (out != 0)
1477 {
1478 if (!rld[i].out
1479 || (out
1480 && GET_MODE_SIZE (GET_MODE (out))
1481 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1482 rld[i].out = out;
1483 if (outloc
1484 && (!rld[i].out_reg
1485 || GET_MODE_SIZE (GET_MODE (*outloc))
1486 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1487 rld[i].out_reg = *outloc;
1488 }
1489 if (reg_class_subset_p (rclass, rld[i].rclass))
1490 rld[i].rclass = rclass;
1491 rld[i].optional &= optional;
1492 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1493 opnum, rld[i].opnum))
1494 rld[i].when_needed = RELOAD_OTHER;
1495 rld[i].opnum = MIN (rld[i].opnum, opnum);
1496 }
1497
1498 /* If the ostensible rtx being reloaded differs from the rtx found
1499 in the location to substitute, this reload is not safe to combine
1500 because we cannot reliably tell whether it appears in the insn. */
1501
1502 if (in != 0 && in != *inloc)
1503 rld[i].nocombine = 1;
1504
1505 #if 0
1506 /* This was replaced by changes in find_reloads_address_1 and the new
1507 function inc_for_reload, which go with a new meaning of reload_inc. */
1508
1509 /* If this is an IN/OUT reload in an insn that sets the CC,
1510 it must be for an autoincrement. It doesn't work to store
1511 the incremented value after the insn because that would clobber the CC.
1512 So we must perform the increment on the location reloaded from:
1513 load it, increment it, store it back, then decrement it again.
1514 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1515 {
1516 out = 0;
1517 rld[i].out = 0;
1518 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1519 /* If we did not find a nonzero amount-to-increment-by,
1520 that contradicts the belief that IN is being incremented
1521 in an address in this insn. */
1522 gcc_assert (rld[i].inc != 0);
1523 }
1524 #endif
1525
1526 /* If we will replace IN and OUT with the reload-reg,
1527 record where they are located so that substitution need
1528 not do a tree walk. */
1529
1530 if (replace_reloads)
1531 {
1532 if (inloc != 0)
1533 {
1534 struct replacement *r = &replacements[n_replacements++];
1535 r->what = i;
1536 r->where = inloc;
1537 r->mode = inmode;
1538 }
1539 if (outloc != 0 && outloc != inloc)
1540 {
1541 struct replacement *r = &replacements[n_replacements++];
1542 r->what = i;
1543 r->where = outloc;
1544 r->mode = outmode;
1545 }
1546 }
1547
1548 /* If this reload is just being introduced and it has both
1549 an incoming quantity and an outgoing quantity that are
1550 supposed to be made to match, see if either one of the two
1551 can serve as the place to reload into.
1552
1553 If one of them is acceptable, set rld[i].reg_rtx
1554 to that one. */
1555
1556 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1557 {
1558 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1559 inmode, outmode,
1560 rld[i].rclass, i,
1561 earlyclobber_operand_p (out));
1562
1563 /* If the outgoing register already contains the same value
1564 as the incoming one, we can dispense with loading it.
1565 The easiest way to tell the caller that is to give a phony
1566 value for the incoming operand (same as outgoing one). */
1567 if (rld[i].reg_rtx == out
1568 && (REG_P (in) || CONSTANT_P (in))
1569 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1570 static_reload_reg_p, i, inmode))
1571 rld[i].in = out;
1572 }
1573
1574 /* If this is an input reload and the operand contains a register that
1575 dies in this insn and is used nowhere else, see if it is the right class
1576 to be used for this reload. Use it if so. (This occurs most commonly
1577 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1578 this if it is also an output reload that mentions the register unless
1579 the output is a SUBREG that clobbers an entire register.
1580
1581 Note that the operand might be one of the spill regs, if it is a
1582 pseudo reg and we are in a block where spilling has not taken place.
1583 But if there is no spilling in this block, that is OK.
1584 An explicitly used hard reg cannot be a spill reg. */
1585
1586 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1587 {
1588 rtx note;
1589 int regno;
1590 machine_mode rel_mode = inmode;
1591
1592 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1593 rel_mode = outmode;
1594
1595 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1596 if (REG_NOTE_KIND (note) == REG_DEAD
1597 && REG_P (XEXP (note, 0))
1598 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1599 && reg_mentioned_p (XEXP (note, 0), in)
1600 /* Check that a former pseudo is valid; see find_dummy_reload. */
1601 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1602 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1603 ORIGINAL_REGNO (XEXP (note, 0)))
1604 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1605 && ! refers_to_regno_for_reload_p (regno,
1606 end_hard_regno (rel_mode,
1607 regno),
1608 PATTERN (this_insn), inloc)
1609 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1610 /* If this is also an output reload, IN cannot be used as
1611 the reload register if it is set in this insn unless IN
1612 is also OUT. */
1613 && (out == 0 || in == out
1614 || ! hard_reg_set_here_p (regno,
1615 end_hard_regno (rel_mode, regno),
1616 PATTERN (this_insn)))
1617 /* ??? Why is this code so different from the previous?
1618 Is there any simple coherent way to describe the two together?
1619 What's going on here? */
1620 && (in != out
1621 || (GET_CODE (in) == SUBREG
1622 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1623 / UNITS_PER_WORD)
1624 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1625 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1626 /* Make sure the operand fits in the reg that dies. */
1627 && (GET_MODE_SIZE (rel_mode)
1628 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1629 && HARD_REGNO_MODE_OK (regno, inmode)
1630 && HARD_REGNO_MODE_OK (regno, outmode))
1631 {
1632 unsigned int offs;
1633 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1634 hard_regno_nregs[regno][outmode]);
1635
1636 for (offs = 0; offs < nregs; offs++)
1637 if (fixed_regs[regno + offs]
1638 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1639 regno + offs))
1640 break;
1641
1642 if (offs == nregs
1643 && (! (refers_to_regno_for_reload_p
1644 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1645 || can_reload_into (in, regno, inmode)))
1646 {
1647 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1648 break;
1649 }
1650 }
1651 }
1652
1653 if (out)
1654 output_reloadnum = i;
1655
1656 return i;
1657 }
1658
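/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   a minimal model of the merge rules used above when push_reload reuses
   an existing reload.  Modes are widened so the reloaded value stays
   valid for every use of the reload.  The struct below is a simplified
   stand-in for a real rld[] entry, with mode sizes instead of
   machine_modes.  */
#if 0
struct toy_reload
{
  unsigned int inmode_size;   /* stand-in for GET_MODE_SIZE (rld[i].inmode) */
  unsigned int outmode_size;  /* stand-in for GET_MODE_SIZE (rld[i].outmode) */
};

static void
toy_merge_modes (struct toy_reload *r,
                 unsigned int new_inmode_size, unsigned int new_outmode_size)
{
  /* Reload in the larger mode, so the value is valid both for the old
     uses and for the new one.  */
  if (new_inmode_size > r->inmode_size)
    r->inmode_size = new_inmode_size;
  if (new_outmode_size > r->outmode_size)
    r->outmode_size = new_outmode_size;
}
#endif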
1659 /* Record an additional place we must replace a value
1660 for which we have already recorded a reload.
1661 RELOADNUM is the value returned by push_reload
1662 when the reload was recorded.
1663 This is used in insn patterns that use match_dup. */
1664
1665 static void
1666 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1667 {
1668 if (replace_reloads)
1669 {
1670 struct replacement *r = &replacements[n_replacements++];
1671 r->what = reloadnum;
1672 r->where = loc;
1673 r->mode = mode;
1674 }
1675 }
1676
1677 /* Duplicate any replacement we have recorded to apply at
1678 location ORIG_LOC to also be performed at DUP_LOC.
1679 This is used in insn patterns that use match_dup. */
1680
1681 static void
1682 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1683 {
1684 int i, n = n_replacements;
1685
1686 for (i = 0; i < n; i++)
1687 {
1688 struct replacement *r = &replacements[i];
1689 if (r->where == orig_loc)
1690 push_replacement (dup_loc, r->what, r->mode);
1691 }
1692 }
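/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   how the records built by push_replacement and dup_replacements are
   meant to be consumed.  Because each record remembers the exact rtx
   location, later substitution is a flat loop over the records rather
   than a walk of the insn.  The types below are simplified stand-ins
   for struct replacement and the rld[] array.  */
#if 0
struct toy_replacement
{
  int what;      /* index of the reload whose register to use */
  void **where;  /* recorded location in the insn to overwrite */
};

static void
toy_subst_replacements (struct toy_replacement *repl, int n_repl,
                        void *reload_reg[])
{
  int k;

  for (k = 0; k < n_repl; k++)
    *repl[k].where = reload_reg[repl[k].what];
}
#endif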
1693 \f
1694 /* Transfer all replacements that used to be in reload FROM to be in
1695 reload TO. */
1696
1697 void
1698 transfer_replacements (int to, int from)
1699 {
1700 int i;
1701
1702 for (i = 0; i < n_replacements; i++)
1703 if (replacements[i].what == from)
1704 replacements[i].what = to;
1705 }
1706 \f
1707 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1708 or a subpart of it. If we have any replacements registered for IN_RTX,
1709 cancel the reloads that were supposed to load them.
1710 Return nonzero if we canceled any reloads. */
1711 int
1712 remove_address_replacements (rtx in_rtx)
1713 {
1714 int i, j;
1715 char reload_flags[MAX_RELOADS];
1716 int something_changed = 0;
1717
1718 memset (reload_flags, 0, sizeof reload_flags);
1719 for (i = 0, j = 0; i < n_replacements; i++)
1720 {
1721 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1722 reload_flags[replacements[i].what] |= 1;
1723 else
1724 {
1725 replacements[j++] = replacements[i];
1726 reload_flags[replacements[i].what] |= 2;
1727 }
1728 }
1729 /* Note that the following store must be done before the recursive calls. */
1730 n_replacements = j;
1731
1732 for (i = n_reloads - 1; i >= 0; i--)
1733 {
1734 if (reload_flags[i] == 1)
1735 {
1736 deallocate_reload_reg (i);
1737 remove_address_replacements (rld[i].in);
1738 rld[i].in = 0;
1739 something_changed = 1;
1740 }
1741 }
1742 return something_changed;
1743 }
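/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the flag encoding used by remove_address_replacements above.  Bit 0
   marks a reload with at least one replacement inside IN_RTX, bit 1 a
   reload with a replacement elsewhere; only reloads whose flags equal 1
   (inside IN_RTX and nowhere else) are cancelled.  */
#if 0
enum { TOY_INSIDE_IN_RTX = 1, TOY_ELSEWHERE = 2 };

static int
toy_cancel_reload_p (unsigned char flags)
{
  /* Flags of 2 or 3 mean the reload is still needed somewhere else.  */
  return flags == TOY_INSIDE_IN_RTX;
}
#endif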
1744 \f
1745 /* If there is only one output reload, and it is not for an earlyclobber
1746 operand, try to combine it with a (logically unrelated) input reload
1747 to reduce the number of reload registers needed.
1748
1749 This is safe if the input reload does not appear in
1750 the value being output-reloaded, because this implies
1751 it is not needed any more once the original insn completes.
1752
1753 If that doesn't work, see if we can use any of the registers that die
1754 die in this insn as a reload register. We can if it is of the right
1755 class and does not appear in the value being output-reloaded. */
1756
1757 static void
1758 combine_reloads (void)
1759 {
1760 int i, regno;
1761 int output_reload = -1;
1762 int secondary_out = -1;
1763 rtx note;
1764
1765 /* Find the output reload; return unless there is exactly one
1766 and that one is mandatory. */
1767
1768 for (i = 0; i < n_reloads; i++)
1769 if (rld[i].out != 0)
1770 {
1771 if (output_reload >= 0)
1772 return;
1773 output_reload = i;
1774 }
1775
1776 if (output_reload < 0 || rld[output_reload].optional)
1777 return;
1778
1779 /* An input-output reload isn't combinable. */
1780
1781 if (rld[output_reload].in != 0)
1782 return;
1783
1784 /* If this reload is for an earlyclobber operand, we can't do anything. */
1785 if (earlyclobber_operand_p (rld[output_reload].out))
1786 return;
1787
1788 /* If there is a reload for part of the address of this operand, we would
1789 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1790 its life to the point where doing this combine would not lower the
1791 number of spill registers needed. */
1792 for (i = 0; i < n_reloads; i++)
1793 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1794 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1795 && rld[i].opnum == rld[output_reload].opnum)
1796 return;
1797
1798 /* Check each input reload; can we combine it? */
1799
1800 for (i = 0; i < n_reloads; i++)
1801 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1802 /* Life span of this reload must not extend past main insn. */
1803 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1804 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1805 && rld[i].when_needed != RELOAD_OTHER
1806 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1807 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1808 [(int) rld[output_reload].outmode])
1809 && rld[i].inc == 0
1810 && rld[i].reg_rtx == 0
1811 #ifdef SECONDARY_MEMORY_NEEDED
1812 /* Don't combine two reloads with different secondary
1813 memory locations. */
1814 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1815 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1816 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1817 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1818 #endif
1819 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1820 ? (rld[i].rclass == rld[output_reload].rclass)
1821 : (reg_class_subset_p (rld[i].rclass,
1822 rld[output_reload].rclass)
1823 || reg_class_subset_p (rld[output_reload].rclass,
1824 rld[i].rclass)))
1825 && (MATCHES (rld[i].in, rld[output_reload].out)
1826 /* Args reversed because the first arg seems to be
1827 the one that we imagine being modified
1828 while the second is the one that might be affected. */
1829 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1830 rld[i].in)
1831 /* However, if the input is a register that appears inside
1832 the output, then we also can't share.
1833 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1834 If the same reload reg is used for both reg 69 and the
1835 result to be stored in memory, then that result
1836 will clobber the address of the memory ref. */
1837 && ! (REG_P (rld[i].in)
1838 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1839 rld[output_reload].out))))
1840 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1841 rld[i].when_needed != RELOAD_FOR_INPUT)
1842 && (reg_class_size[(int) rld[i].rclass]
1843 || targetm.small_register_classes_for_mode_p (VOIDmode))
1844 /* We will allow making things slightly worse by combining an
1845 input and an output, but no worse than that. */
1846 && (rld[i].when_needed == RELOAD_FOR_INPUT
1847 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1848 {
1849 int j;
1850
1851 /* We have found a reload to combine with! */
1852 rld[i].out = rld[output_reload].out;
1853 rld[i].out_reg = rld[output_reload].out_reg;
1854 rld[i].outmode = rld[output_reload].outmode;
1855 /* Mark the old output reload as inoperative. */
1856 rld[output_reload].out = 0;
1857 /* The combined reload is needed for the entire insn. */
1858 rld[i].when_needed = RELOAD_OTHER;
1859 /* If the output reload had a secondary reload, copy it. */
1860 if (rld[output_reload].secondary_out_reload != -1)
1861 {
1862 rld[i].secondary_out_reload
1863 = rld[output_reload].secondary_out_reload;
1864 rld[i].secondary_out_icode
1865 = rld[output_reload].secondary_out_icode;
1866 }
1867
1868 #ifdef SECONDARY_MEMORY_NEEDED
1869 /* Copy any secondary MEM. */
1870 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1871 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1872 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1873 #endif
1874 /* If required, minimize the register class. */
1875 if (reg_class_subset_p (rld[output_reload].rclass,
1876 rld[i].rclass))
1877 rld[i].rclass = rld[output_reload].rclass;
1878
1879 /* Transfer all replacements from the old reload to the combined. */
1880 for (j = 0; j < n_replacements; j++)
1881 if (replacements[j].what == output_reload)
1882 replacements[j].what = i;
1883
1884 return;
1885 }
1886
1887 /* If this insn has only one operand that is modified or written (assumed
1888 to be the first), it must be the one corresponding to this reload. It
1889 is safe to use anything that dies in this insn for that output provided
1890 that it does not occur in the output (we already know it isn't an
1891 earlyclobber). If this is an asm insn, give up. */
1892
1893 if (INSN_CODE (this_insn) == -1)
1894 return;
1895
1896 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1897 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1898 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1899 return;
1900
1901 /* See if some hard register that dies in this insn and is not used in
1902 the output is the right class. Only works if the register we pick
1903 up can fully hold our output reload. */
1904 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1905 if (REG_NOTE_KIND (note) == REG_DEAD
1906 && REG_P (XEXP (note, 0))
1907 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1908 rld[output_reload].out)
1909 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1910 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1911 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1912 regno)
1913 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1914 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1915 /* Ensure that a secondary or tertiary reload for this output
1916 won't want this register. */
1917 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1918 || (!(TEST_HARD_REG_BIT
1919 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1920 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1921 || !(TEST_HARD_REG_BIT
1922 (reg_class_contents[(int) rld[secondary_out].rclass],
1923 regno)))))
1924 && !fixed_regs[regno]
1925 /* Check that a former pseudo is valid; see find_dummy_reload. */
1926 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1927 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1928 ORIGINAL_REGNO (XEXP (note, 0)))
1929 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1930 {
1931 rld[output_reload].reg_rtx
1932 = gen_rtx_REG (rld[output_reload].outmode, regno);
1933 return;
1934 }
1935 }
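/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the overall shape of the combinability test in combine_reloads above,
   over simplified stand-in flags.  A mandatory input reload may take
   over the single output reload when its life ends at the insn and the
   value it loads does not overlap the value being stored.  The real
   test also checks classes, modes, increments and secondary memory,
   which are omitted here.  */
#if 0
struct toy_rl
{
  int has_in, has_out, optional, nocombine, lives_past_insn, overlaps_output;
};

static int
toy_combinable_p (const struct toy_rl *input, const struct toy_rl *output)
{
  return (output->has_out && !output->has_in && !output->optional
          && input->has_in && !input->optional && !input->nocombine
          && !input->lives_past_insn
          && !input->overlaps_output);
}
#endif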
1936 \f
1937 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1938 See if one of IN and OUT is a register that may be used;
1939 this is desirable since a spill-register won't be needed.
1940 If so, return the register rtx that proves acceptable.
1941
1942 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1943 RCLASS is the register class required for the reload.
1944
1945 If FOR_REAL is >= 0, it is the number of the reload,
1946 and in some cases when it can be discovered that OUT doesn't need
1947 to be computed, clear out rld[FOR_REAL].out.
1948
1949 If FOR_REAL is -1, this should not be done, because this call
1950 is just to see if a register can be found, not to find and install it.
1951
1952 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1953 puts an additional constraint on being able to use IN for OUT since
1954 IN must not appear elsewhere in the insn (it is assumed that IN itself
1955 is safe from the earlyclobber). */
1956
1957 static rtx
1958 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1959 machine_mode inmode, machine_mode outmode,
1960 reg_class_t rclass, int for_real, int earlyclobber)
1961 {
1962 rtx in = real_in;
1963 rtx out = real_out;
1964 int in_offset = 0;
1965 int out_offset = 0;
1966 rtx value = 0;
1967
1968 /* If operands exceed a word, we can't use either of them
1969 unless they have the same size. */
1970 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1971 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1972 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1973 return 0;
1974
1975 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1976 respectively refers to a hard register. */
1977
1978 /* Find the inside of any subregs. */
1979 while (GET_CODE (out) == SUBREG)
1980 {
1981 if (REG_P (SUBREG_REG (out))
1982 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1983 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1984 GET_MODE (SUBREG_REG (out)),
1985 SUBREG_BYTE (out),
1986 GET_MODE (out));
1987 out = SUBREG_REG (out);
1988 }
1989 while (GET_CODE (in) == SUBREG)
1990 {
1991 if (REG_P (SUBREG_REG (in))
1992 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1993 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1994 GET_MODE (SUBREG_REG (in)),
1995 SUBREG_BYTE (in),
1996 GET_MODE (in));
1997 in = SUBREG_REG (in);
1998 }
1999
2000 /* Narrow down the reg class, the same way push_reload will;
2001 otherwise we might find a dummy now, but push_reload won't. */
2002 {
2003 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2004 if (preferred_class != NO_REGS)
2005 rclass = (enum reg_class) preferred_class;
2006 }
2007
2008 /* See if OUT will do. */
2009 if (REG_P (out)
2010 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2011 {
2012 unsigned int regno = REGNO (out) + out_offset;
2013 unsigned int nwords = hard_regno_nregs[regno][outmode];
2014 rtx saved_rtx;
2015
2016 /* When we consider whether the insn uses OUT,
2017 ignore references within IN. They don't prevent us
2018 from copying IN into OUT, because those refs would
2019 move into the insn that reloads IN.
2020
2021 However, we only ignore IN in its role as this reload.
2022 If the insn uses IN elsewhere and it contains OUT,
2023 that counts. We can't be sure it's the "same" operand
2024 so it might not go through this reload.
2025
2026 We also need to avoid using OUT if it, or part of it, is a
2027 fixed register. Modifying such registers, even transiently,
2028 may have undefined effects on the machine, such as modifying
2029 the stack pointer. */
2030 saved_rtx = *inloc;
2031 *inloc = const0_rtx;
2032
2033 if (regno < FIRST_PSEUDO_REGISTER
2034 && HARD_REGNO_MODE_OK (regno, outmode)
2035 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2036 PATTERN (this_insn), outloc))
2037 {
2038 unsigned int i;
2039
2040 for (i = 0; i < nwords; i++)
2041 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2042 regno + i)
2043 || fixed_regs[regno + i])
2044 break;
2045
2046 if (i == nwords)
2047 {
2048 if (REG_P (real_out))
2049 value = real_out;
2050 else
2051 value = gen_rtx_REG (outmode, regno);
2052 }
2053 }
2054
2055 *inloc = saved_rtx;
2056 }
2057
2058 /* Consider using IN if OUT was not acceptable
2059 or if OUT dies in this insn (like the quotient in a divmod insn).
2060 We can't use IN unless it dies in this insn,
2061 which means we must know accurately which hard regs are live.
2062 Also, the result can't go in IN if IN is used within OUT,
2063 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2064 if (hard_regs_live_known
2065 && REG_P (in)
2066 && REGNO (in) < FIRST_PSEUDO_REGISTER
2067 && (value == 0
2068 || find_reg_note (this_insn, REG_UNUSED, real_out))
2069 && find_reg_note (this_insn, REG_DEAD, real_in)
2070 && !fixed_regs[REGNO (in)]
2071 && HARD_REGNO_MODE_OK (REGNO (in),
2072 /* The only case where out and real_out might
2073 have different modes is where real_out
2074 is a subreg, and in that case, out
2075 has a real mode. */
2076 (GET_MODE (out) != VOIDmode
2077 ? GET_MODE (out) : outmode))
2078 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2079 /* However only do this if we can be sure that this input
2080 operand doesn't correspond with an uninitialized pseudo.
2081 global can assign some hardreg to it that is the same as
2082 the one assigned to a different, also live pseudo (as it
2083 can ignore the conflict). We must never introduce writes
2084 to such hardregs, as they would clobber the other live
2085 pseudo. See PR 20973. */
2086 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2087 ORIGINAL_REGNO (in))
2088 /* Similarly, only do this if we can be sure that the death
2089 note is still valid. global can assign some hardreg to
2090 the pseudo referenced in the note and simultaneously a
2091 subword of this hardreg to a different, also live pseudo,
2092 because only another subword of the hardreg is actually
2093 used in the insn. This cannot happen if the pseudo has
2094 been assigned exactly one hardreg. See PR 33732. */
2095 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2096 {
2097 unsigned int regno = REGNO (in) + in_offset;
2098 unsigned int nwords = hard_regno_nregs[regno][inmode];
2099
2100 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2101 && ! hard_reg_set_here_p (regno, regno + nwords,
2102 PATTERN (this_insn))
2103 && (! earlyclobber
2104 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2105 PATTERN (this_insn), inloc)))
2106 {
2107 unsigned int i;
2108
2109 for (i = 0; i < nwords; i++)
2110 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2111 regno + i))
2112 break;
2113
2114 if (i == nwords)
2115 {
2116 /* If we were going to use OUT as the reload reg
2117 and changed our mind, it means OUT is a dummy that
2118 dies here. So don't bother copying value to it. */
2119 if (for_real >= 0 && value == real_out)
2120 rld[for_real].out = 0;
2121 if (REG_P (real_in))
2122 value = real_in;
2123 else
2124 value = gen_rtx_REG (inmode, regno);
2125 }
2126 }
2127 }
2128
2129 return value;
2130 }
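/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the decision order of find_dummy_reload above, over simplified
   stand-in flags.  OUT is preferred when no other part of the insn
   refers to it; IN is the fallback when it dies in the insn and the
   insn does not write it (or when OUT is acceptable but unused, as for
   the quotient of a divmod whose result is dead, per the comment
   above).  */
#if 0
struct toy_cand
{
  int hard_reg_p, class_ok, referenced_elsewhere, dies_here, set_by_insn;
};

static int
toy_pick_dummy (const struct toy_cand *out, const struct toy_cand *in)
{
  if (out->hard_reg_p && out->class_ok && !out->referenced_elsewhere)
    return 0;                   /* reuse OUT as the reload register */
  if (in->hard_reg_p && in->class_ok && in->dies_here && !in->set_by_insn)
    return 1;                   /* reuse IN as the reload register */
  return -1;                    /* neither; a spill register is needed */
}
#endif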
2131 \f
2132 /* This page contains subroutines used mainly for determining
2133 whether the IN or an OUT of a reload can serve as the
2134 reload register. */
2135
2136 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2137
2138 int
2139 earlyclobber_operand_p (rtx x)
2140 {
2141 int i;
2142
2143 for (i = 0; i < n_earlyclobbers; i++)
2144 if (reload_earlyclobbers[i] == x)
2145 return 1;
2146
2147 return 0;
2148 }
2149
2150 /* Return 1 if expression X alters a hard reg in the range
2151 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2152 either explicitly or in the guise of a pseudo-reg allocated to one of those hard regs.
2153 X should be the body of an instruction. */
2154
2155 static int
2156 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2157 {
2158 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2159 {
2160 rtx op0 = SET_DEST (x);
2161
2162 while (GET_CODE (op0) == SUBREG)
2163 op0 = SUBREG_REG (op0);
2164 if (REG_P (op0))
2165 {
2166 unsigned int r = REGNO (op0);
2167
2168 /* See if this reg overlaps range under consideration. */
2169 if (r < end_regno
2170 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2171 return 1;
2172 }
2173 }
2174 else if (GET_CODE (x) == PARALLEL)
2175 {
2176 int i = XVECLEN (x, 0) - 1;
2177
2178 for (; i >= 0; i--)
2179 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2180 return 1;
2181 }
2182
2183 return 0;
2184 }
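/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the interval test hard_reg_set_here_p applies to each SET or CLOBBER
   destination above.  A destination covering hard regs [r, r_end)
   clobbers the range [beg, end) exactly when the two half-open
   intervals intersect.  */
#if 0
static int
toy_hard_reg_ranges_overlap_p (unsigned int r, unsigned int r_end,
                               unsigned int beg, unsigned int end)
{
  return r < end && r_end > beg;
}
#endif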
2185
2186 /* Return 1 if ADDR is a valid memory address for mode MODE
2187 in address space AS, and check that each pseudo reg has the
2188 proper kind of hard reg. */
2189
2190 int
2191 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2192 rtx addr, addr_space_t as)
2193 {
2194 #ifdef GO_IF_LEGITIMATE_ADDRESS
2195 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2196 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2197 return 0;
2198
2199 win:
2200 return 1;
2201 #else
2202 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2203 #endif
2204 }
2205 \f
2206 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2207 if they are the same hard reg, and has special hacks for
2208 autoincrement and autodecrement.
2209 This is specifically intended for find_reloads to use
2210 in determining whether two operands match.
2211 X is the operand whose number is the lower of the two.
2212
2213 The value is 2 if Y contains a pre-increment that matches
2214 a non-incrementing address in X. */
2215
2216 /* ??? To be completely correct, we should arrange to pass
2217 for X the output operand and for Y the input operand.
2218 For now, we assume that the output operand has the lower number
2219 because that is natural in (SET output (... input ...)). */
2220
2221 int
2222 operands_match_p (rtx x, rtx y)
2223 {
2224 int i;
2225 RTX_CODE code = GET_CODE (x);
2226 const char *fmt;
2227 int success_2;
2228
2229 if (x == y)
2230 return 1;
2231 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2232 && (REG_P (y) || (GET_CODE (y) == SUBREG
2233 && REG_P (SUBREG_REG (y)))))
2234 {
2235 int j;
2236
2237 if (code == SUBREG)
2238 {
2239 i = REGNO (SUBREG_REG (x));
2240 if (i >= FIRST_PSEUDO_REGISTER)
2241 goto slow;
2242 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2243 GET_MODE (SUBREG_REG (x)),
2244 SUBREG_BYTE (x),
2245 GET_MODE (x));
2246 }
2247 else
2248 i = REGNO (x);
2249
2250 if (GET_CODE (y) == SUBREG)
2251 {
2252 j = REGNO (SUBREG_REG (y));
2253 if (j >= FIRST_PSEUDO_REGISTER)
2254 goto slow;
2255 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2256 GET_MODE (SUBREG_REG (y)),
2257 SUBREG_BYTE (y),
2258 GET_MODE (y));
2259 }
2260 else
2261 j = REGNO (y);
2262
2263 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2264 multiple hard register group of scalar integer registers, so that
2265 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2266 register. */
2267 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2268 && SCALAR_INT_MODE_P (GET_MODE (x))
2269 && i < FIRST_PSEUDO_REGISTER)
2270 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2271 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2272 && SCALAR_INT_MODE_P (GET_MODE (y))
2273 && j < FIRST_PSEUDO_REGISTER)
2274 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2275
2276 return i == j;
2277 }
2278 /* If two operands must match, because they are really a single
2279 operand of an assembler insn, then two postincrements are invalid
2280 because the assembler insn would increment only once.
2281 On the other hand, a postincrement matches ordinary indexing
2282 if the postincrement is the output operand. */
2283 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2284 return operands_match_p (XEXP (x, 0), y);
2285 /* Two preincrements are invalid
2286 because the assembler insn would increment only once.
2287 On the other hand, a preincrement matches ordinary indexing
2288 if the preincrement is the input operand.
2289 In this case, return 2, since some callers need to do special
2290 things when this happens. */
2291 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2292 || GET_CODE (y) == PRE_MODIFY)
2293 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2294
2295 slow:
2296
2297 /* Now we have disposed of all the cases in which different rtx codes
2298 can match. */
2299 if (code != GET_CODE (y))
2300 return 0;
2301
2302 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2303 if (GET_MODE (x) != GET_MODE (y))
2304 return 0;
2305
2306 /* MEMs referring to different address space are not equivalent. */
2307 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2308 return 0;
2309
2310 switch (code)
2311 {
2312 CASE_CONST_UNIQUE:
2313 return 0;
2314
2315 case LABEL_REF:
2316 return label_ref_label (x) == label_ref_label (y);
2317 case SYMBOL_REF:
2318 return XSTR (x, 0) == XSTR (y, 0);
2319
2320 default:
2321 break;
2322 }
2323
2324 /* Compare the elements. If any pair of corresponding elements
2325 fail to match, return 0 for the whole thing. */
2326
2327 success_2 = 0;
2328 fmt = GET_RTX_FORMAT (code);
2329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2330 {
2331 int val, j;
2332 switch (fmt[i])
2333 {
2334 case 'w':
2335 if (XWINT (x, i) != XWINT (y, i))
2336 return 0;
2337 break;
2338
2339 case 'i':
2340 if (XINT (x, i) != XINT (y, i))
2341 return 0;
2342 break;
2343
2344 case 'e':
2345 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2346 if (val == 0)
2347 return 0;
2348 /* If any subexpression returns 2,
2349 we should return 2 if we are successful. */
2350 if (val == 2)
2351 success_2 = 1;
2352 break;
2353
2354 case '0':
2355 break;
2356
2357 case 'E':
2358 if (XVECLEN (x, i) != XVECLEN (y, i))
2359 return 0;
2360 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2361 {
2362 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2363 if (val == 0)
2364 return 0;
2365 if (val == 2)
2366 success_2 = 1;
2367 }
2368 break;
2369
2370 /* It is believed that rtx's at this level will never
2371 contain anything but integers and other rtx's,
2372 except for within LABEL_REFs and SYMBOL_REFs. */
2373 default:
2374 gcc_unreachable ();
2375 }
2376 }
2377 return 1 + success_2;
2378 }
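/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the element-wise recursion of operands_match_p above, reduced to the
   value protocol it uses: 0 means no match, 1 a plain match, and 2 a
   match that involved a pre-increment, which is propagated so callers
   can treat that case specially.  The comparison callback stands in for
   the recursive call on subexpressions.  */
#if 0
static int
toy_match_elements (int (*match) (int, int), const int *x, const int *y, int n)
{
  int k, val, success_2 = 0;

  for (k = 0; k < n; k++)
    {
      val = match (x[k], y[k]);
      if (val == 0)
        return 0;
      if (val == 2)
        success_2 = 1;
    }
  return 1 + success_2;
}
#endif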
2379 \f
2380 /* Describe the range of registers or memory referenced by X.
2381 If X is a register, set REG_FLAG and put the first register
2382 number into START and the last plus one into END.
2383 If X is a memory reference, put a base address into BASE
2384 and a range of integer offsets into START and END.
2385 If X is pushing on the stack, we can assume it causes no trouble,
2386 so we set the SAFE field. */
2387
2388 static struct decomposition
2389 decompose (rtx x)
2390 {
2391 struct decomposition val;
2392 int all_const = 0;
2393
2394 memset (&val, 0, sizeof (val));
2395
2396 switch (GET_CODE (x))
2397 {
2398 case MEM:
2399 {
2400 rtx base = NULL_RTX, offset = 0;
2401 rtx addr = XEXP (x, 0);
2402
2403 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2404 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2405 {
2406 val.base = XEXP (addr, 0);
2407 val.start = -GET_MODE_SIZE (GET_MODE (x));
2408 val.end = GET_MODE_SIZE (GET_MODE (x));
2409 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2410 return val;
2411 }
2412
2413 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2414 {
2415 if (GET_CODE (XEXP (addr, 1)) == PLUS
2416 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2417 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2418 {
2419 val.base = XEXP (addr, 0);
2420 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2421 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2422 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2423 return val;
2424 }
2425 }
2426
2427 if (GET_CODE (addr) == CONST)
2428 {
2429 addr = XEXP (addr, 0);
2430 all_const = 1;
2431 }
2432 if (GET_CODE (addr) == PLUS)
2433 {
2434 if (CONSTANT_P (XEXP (addr, 0)))
2435 {
2436 base = XEXP (addr, 1);
2437 offset = XEXP (addr, 0);
2438 }
2439 else if (CONSTANT_P (XEXP (addr, 1)))
2440 {
2441 base = XEXP (addr, 0);
2442 offset = XEXP (addr, 1);
2443 }
2444 }
2445
2446 if (offset == 0)
2447 {
2448 base = addr;
2449 offset = const0_rtx;
2450 }
2451 if (GET_CODE (offset) == CONST)
2452 offset = XEXP (offset, 0);
2453 if (GET_CODE (offset) == PLUS)
2454 {
2455 if (CONST_INT_P (XEXP (offset, 0)))
2456 {
2457 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2458 offset = XEXP (offset, 0);
2459 }
2460 else if (CONST_INT_P (XEXP (offset, 1)))
2461 {
2462 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2463 offset = XEXP (offset, 1);
2464 }
2465 else
2466 {
2467 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2468 offset = const0_rtx;
2469 }
2470 }
2471 else if (!CONST_INT_P (offset))
2472 {
2473 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2474 offset = const0_rtx;
2475 }
2476
2477 if (all_const && GET_CODE (base) == PLUS)
2478 base = gen_rtx_CONST (GET_MODE (base), base);
2479
2480 gcc_assert (CONST_INT_P (offset));
2481
2482 val.start = INTVAL (offset);
2483 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2484 val.base = base;
2485 }
2486 break;
2487
2488 case REG:
2489 val.reg_flag = 1;
2490 val.start = true_regnum (x);
2491 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2492 {
2493 /* A pseudo with no hard reg. */
2494 val.start = REGNO (x);
2495 val.end = val.start + 1;
2496 }
2497 else
2498 /* A hard reg. */
2499 val.end = end_hard_regno (GET_MODE (x), val.start);
2500 break;
2501
2502 case SUBREG:
2503 if (!REG_P (SUBREG_REG (x)))
2504 /* This could be more precise, but it's good enough. */
2505 return decompose (SUBREG_REG (x));
2506 val.reg_flag = 1;
2507 val.start = true_regnum (x);
2508 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2509 return decompose (SUBREG_REG (x));
2510 else
2511 /* A hard reg. */
2512 val.end = val.start + subreg_nregs (x);
2513 break;
2514
2515 case SCRATCH:
2516 /* This hasn't been assigned yet, so it can't conflict yet. */
2517 val.safe = 1;
2518 break;
2519
2520 default:
2521 gcc_assert (CONSTANT_P (x));
2522 val.safe = 1;
2523 break;
2524 }
2525 return val;
2526 }
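/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   what decompose computes for the common (mem:M (plus BASE (const_int D)))
   case, with simplified stand-in types.  The reference covers the
   half-open byte range [D, D + size of M) relative to BASE, which is
   what immune_p compares below.  */
#if 0
struct toy_decomposition
{
  int reg_flag, safe;
  long start, end;
  const void *base;
};

static struct toy_decomposition
toy_decompose_mem (const void *base, long displacement, long mode_size)
{
  struct toy_decomposition val = { 0, 0, 0, 0, 0 };

  val.base = base;
  val.start = displacement;
  val.end = displacement + mode_size;
  return val;
}
#endif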
2527
2528 /* Return 1 if altering Y will not modify the value of X.
2529 Y is also described by YDATA, which should be decompose (Y). */
2530
2531 static int
2532 immune_p (rtx x, rtx y, struct decomposition ydata)
2533 {
2534 struct decomposition xdata;
2535
2536 if (ydata.reg_flag)
2537 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2538 if (ydata.safe)
2539 return 1;
2540
2541 gcc_assert (MEM_P (y));
2542 /* If Y is memory and X is not, Y can't affect X. */
2543 if (!MEM_P (x))
2544 return 1;
2545
2546 xdata = decompose (x);
2547
2548 if (! rtx_equal_p (xdata.base, ydata.base))
2549 {
2550 /* If bases are distinct symbolic constants, there is no overlap. */
2551 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2552 return 1;
2553 /* Constants and stack slots never overlap. */
2554 if (CONSTANT_P (xdata.base)
2555 && (ydata.base == frame_pointer_rtx
2556 || ydata.base == hard_frame_pointer_rtx
2557 || ydata.base == stack_pointer_rtx))
2558 return 1;
2559 if (CONSTANT_P (ydata.base)
2560 && (xdata.base == frame_pointer_rtx
2561 || xdata.base == hard_frame_pointer_rtx
2562 || xdata.base == stack_pointer_rtx))
2563 return 1;
2564 /* If either base is variable, we don't know anything. */
2565 return 0;
2566 }
2567
2568 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2569 }
2570
2571 /* Similar, but calls decompose. */
2572
2573 int
2574 safe_from_earlyclobber (rtx op, rtx clobber)
2575 {
2576 struct decomposition early_data;
2577
2578 early_data = decompose (clobber);
2579 return immune_p (op, clobber, early_data);
2580 }
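/* Illustrative sketch (editorial, compiled out; not part of reload.c):
   the final test of immune_p above.  Once the two memory references are
   known to share a base, altering one cannot change the other exactly
   when their byte ranges are disjoint.  */
#if 0
static int
toy_mem_ranges_disjoint_p (long x_start, long x_end, long y_start, long y_end)
{
  return x_start >= y_end || y_start >= x_end;
}
#endif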
2581 \f
2582 /* Main entry point of this file: search the body of INSN
2583 for values that need reloading and record them with push_reload.
2584 REPLACE nonzero means record also where the values occur
2585 so that subst_reloads can be used.
2586
2587 IND_LEVELS says how many levels of indirection are supported by this
2588 machine; a value of zero means that a memory reference is not a valid
2589 memory address.
2590
2591 LIVE_KNOWN says we have valid information about which hard
2592 regs are live at each point in the program; this is true when
2593 we are called from global_alloc but false when stupid register
2594 allocation has been done.
2595
2596 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number
2597 whose entry is nonnegative if the reg has been commandeered for reloading into.
2598 It is copied into STATIC_RELOAD_REG_P and referenced from there
2599 by various subroutines.
2600
2601 Return TRUE if some operands need to be changed, because of swapping
2602 commutative operands, reg_equiv_address substitution, or whatever. */
2603
2604 int
2605 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2606 short *reload_reg_p)
2607 {
2608 int insn_code_number;
2609 int i, j;
2610 int noperands;
2611 /* These start out as the constraints for the insn
2612 and they are chewed up as we consider alternatives. */
2613 const char *constraints[MAX_RECOG_OPERANDS];
2614 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2615 a register. */
2616 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2617 char pref_or_nothing[MAX_RECOG_OPERANDS];
2618 /* Nonzero for a MEM operand whose entire address needs a reload.
2619 May be -1 to indicate the entire address may or may not need a reload. */
2620 int address_reloaded[MAX_RECOG_OPERANDS];
2621 /* Nonzero for an address operand that needs to be completely reloaded.
2622 May be -1 to indicate the entire operand may or may not need a reload. */
2623 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2624 /* Value of enum reload_type to use for operand. */
2625 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2626 /* Value of enum reload_type to use within address of operand. */
2627 enum reload_type address_type[MAX_RECOG_OPERANDS];
2628 /* Save the usage of each operand. */
2629 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2630 int no_input_reloads = 0, no_output_reloads = 0;
2631 int n_alternatives;
2632 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2633 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2634 char this_alternative_win[MAX_RECOG_OPERANDS];
2635 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2636 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2637 int this_alternative_matches[MAX_RECOG_OPERANDS];
2638 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2639 int this_alternative_number;
2640 int goal_alternative_number = 0;
2641 int operand_reloadnum[MAX_RECOG_OPERANDS];
2642 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2643 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2644 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2645 char goal_alternative_win[MAX_RECOG_OPERANDS];
2646 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2647 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2648 int goal_alternative_swapped;
2649 int best;
2650 int commutative;
2651 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2652 rtx substed_operand[MAX_RECOG_OPERANDS];
2653 rtx body = PATTERN (insn);
2654 rtx set = single_set (insn);
2655 int goal_earlyclobber = 0, this_earlyclobber;
2656 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2657 int retval = 0;
2658
2659 this_insn = insn;
2660 n_reloads = 0;
2661 n_replacements = 0;
2662 n_earlyclobbers = 0;
2663 replace_reloads = replace;
2664 hard_regs_live_known = live_known;
2665 static_reload_reg_p = reload_reg_p;
2666
2667 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2668 neither are insns that SET cc0. Insns that use CC0 are not allowed
2669 to have any input reloads. */
2670 if (JUMP_P (insn) || CALL_P (insn))
2671 no_output_reloads = 1;
2672
2673 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2674 no_input_reloads = 1;
2675 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2676 no_output_reloads = 1;
2677
2678 #ifdef SECONDARY_MEMORY_NEEDED
2679 /* The eliminated forms of any secondary memory locations are per-insn, so
2680 clear them out here. */
2681
2682 if (secondary_memlocs_elim_used)
2683 {
2684 memset (secondary_memlocs_elim, 0,
2685 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2686 secondary_memlocs_elim_used = 0;
2687 }
2688 #endif
2689
2690 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2691 is cheap to move between them. If it is not, there may not be an insn
2692 to do the copy, so we may need a reload. */
2693 if (GET_CODE (body) == SET
2694 && REG_P (SET_DEST (body))
2695 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2696 && REG_P (SET_SRC (body))
2697 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2698 && register_move_cost (GET_MODE (SET_SRC (body)),
2699 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2700 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2701 return 0;
2702
2703 extract_insn (insn);
2704
2705 noperands = reload_n_operands = recog_data.n_operands;
2706 n_alternatives = recog_data.n_alternatives;
2707
2708 /* Just return "no reloads" if insn has no operands with constraints. */
2709 if (noperands == 0 || n_alternatives == 0)
2710 return 0;
2711
2712 insn_code_number = INSN_CODE (insn);
2713 this_insn_is_asm = insn_code_number < 0;
2714
2715 memcpy (operand_mode, recog_data.operand_mode,
2716 noperands * sizeof (machine_mode));
2717 memcpy (constraints, recog_data.constraints,
2718 noperands * sizeof (const char *));
2719
2720 commutative = -1;
2721
2722 /* If we will need to know, later, whether some pair of operands
2723 are the same, we must compare them now and save the result.
2724 Reloading the base and index registers will clobber them
2725 and afterward they will fail to match. */
2726
2727 for (i = 0; i < noperands; i++)
2728 {
2729 const char *p;
2730 int c;
2731 char *end;
2732
2733 substed_operand[i] = recog_data.operand[i];
2734 p = constraints[i];
2735
2736 modified[i] = RELOAD_READ;
2737
2738 /* Scan this operand's constraint to see if it is an output operand,
2739 an in-out operand, is commutative, or should match another. */
2740
2741 while ((c = *p))
2742 {
2743 p += CONSTRAINT_LEN (c, p);
2744 switch (c)
2745 {
2746 case '=':
2747 modified[i] = RELOAD_WRITE;
2748 break;
2749 case '+':
2750 modified[i] = RELOAD_READ_WRITE;
2751 break;
2752 case '%':
2753 {
2754 /* The last operand should not be marked commutative. */
2755 gcc_assert (i != noperands - 1);
2756
2757 /* We currently only support one commutative pair of
2758 operands. Some existing asm code currently uses more
2759 than one pair. Previously, that would usually work,
2760 but sometimes it would crash the compiler. We
2761 continue supporting that case as well as we can by
2762 silently ignoring all but the first pair. In the
2763 future we may handle it correctly. */
2764 if (commutative < 0)
2765 commutative = i;
2766 else
2767 gcc_assert (this_insn_is_asm);
2768 }
2769 break;
2770 /* Use of ISDIGIT is tempting here, but it may get expensive because
2771 of locale support we don't want. */
2772 case '0': case '1': case '2': case '3': case '4':
2773 case '5': case '6': case '7': case '8': case '9':
2774 {
2775 c = strtoul (p - 1, &end, 10);
2776 p = end;
2777
2778 operands_match[c][i]
2779 = operands_match_p (recog_data.operand[c],
2780 recog_data.operand[i]);
2781
2782 /* An operand may not match itself. */
2783 gcc_assert (c != i);
2784
2785 /* If C can be commuted with C+1, and C might need to match I,
2786 then C+1 might also need to match I. */
2787 if (commutative >= 0)
2788 {
2789 if (c == commutative || c == commutative + 1)
2790 {
2791 int other = c + (c == commutative ? 1 : -1);
2792 operands_match[other][i]
2793 = operands_match_p (recog_data.operand[other],
2794 recog_data.operand[i]);
2795 }
2796 if (i == commutative || i == commutative + 1)
2797 {
2798 int other = i + (i == commutative ? 1 : -1);
2799 operands_match[c][other]
2800 = operands_match_p (recog_data.operand[c],
2801 recog_data.operand[other]);
2802 }
2803 /* Note that C is supposed to be less than I.
2804 No need to consider altering both C and I because in
2805 that case we would alter one into the other. */
2806 }
2807 }
2808 }
2809 }
2810 }
2811
2812 /* Examine each operand that is a memory reference or memory address
2813 and reload parts of the addresses into index registers.
2814 Also here any references to pseudo regs that didn't get hard regs
2815 but are equivalent to constants get replaced in the insn itself
2816 with those constants. Nobody will ever see them again.
2817
2818 Finally, set up the preferred classes of each operand. */
2819
2820 for (i = 0; i < noperands; i++)
2821 {
2822 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2823
2824 address_reloaded[i] = 0;
2825 address_operand_reloaded[i] = 0;
2826 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2827 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2828 : RELOAD_OTHER);
2829 address_type[i]
2830 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2831 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2832 : RELOAD_OTHER);
2833
2834 if (*constraints[i] == 0)
2835 /* Ignore things like match_operator operands. */
2836 ;
2837 else if (insn_extra_address_constraint
2838 (lookup_constraint (constraints[i])))
2839 {
2840 address_operand_reloaded[i]
2841 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2842 recog_data.operand[i],
2843 recog_data.operand_loc[i],
2844 i, operand_type[i], ind_levels, insn);
2845
2846 /* If we now have a simple operand where we used to have a
2847 PLUS or MULT, re-recognize and try again. */
2848 if ((OBJECT_P (*recog_data.operand_loc[i])
2849 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2850 && (GET_CODE (recog_data.operand[i]) == MULT
2851 || GET_CODE (recog_data.operand[i]) == PLUS))
2852 {
2853 INSN_CODE (insn) = -1;
2854 retval = find_reloads (insn, replace, ind_levels, live_known,
2855 reload_reg_p);
2856 return retval;
2857 }
2858
2859 recog_data.operand[i] = *recog_data.operand_loc[i];
2860 substed_operand[i] = recog_data.operand[i];
2861
2862 /* Address operands are reloaded in their existing mode,
2863 no matter what is specified in the machine description. */
2864 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2865
2866 /* If the address is a single CONST_INT, pick the address mode
2867 instead; otherwise we will later not know in which mode
2868 the reload should be performed. */
2869 if (operand_mode[i] == VOIDmode)
2870 operand_mode[i] = Pmode;
2871
2872 }
2873 else if (code == MEM)
2874 {
2875 address_reloaded[i]
2876 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2877 recog_data.operand_loc[i],
2878 XEXP (recog_data.operand[i], 0),
2879 &XEXP (recog_data.operand[i], 0),
2880 i, address_type[i], ind_levels, insn);
2881 recog_data.operand[i] = *recog_data.operand_loc[i];
2882 substed_operand[i] = recog_data.operand[i];
2883 }
2884 else if (code == SUBREG)
2885 {
2886 rtx reg = SUBREG_REG (recog_data.operand[i]);
2887 rtx op
2888 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2889 ind_levels,
2890 set != 0
2891 && &SET_DEST (set) == recog_data.operand_loc[i],
2892 insn,
2893 &address_reloaded[i]);
2894
2895 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2896 that didn't get a hard register, emit a USE with a REG_EQUAL
2897 note in front so that we might inherit a previous, possibly
2898 wider reload. */
2899
2900 if (replace
2901 && MEM_P (op)
2902 && REG_P (reg)
2903 && (GET_MODE_SIZE (GET_MODE (reg))
2904 >= GET_MODE_SIZE (GET_MODE (op)))
2905 && reg_equiv_constant (REGNO (reg)) == 0)
2906 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2907 insn),
2908 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2909
2910 substed_operand[i] = recog_data.operand[i] = op;
2911 }
2912 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2913 /* We can get a PLUS as an "operand" as a result of register
2914 elimination. See eliminate_regs and gen_reload. We handle
2915 a unary operator by reloading the operand. */
2916 substed_operand[i] = recog_data.operand[i]
2917 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2918 ind_levels, 0, insn,
2919 &address_reloaded[i]);
2920 else if (code == REG)
2921 {
2922 /* This is equivalent to calling find_reloads_toplev.
2923 The code is duplicated for speed.
2924 When we find a pseudo always equivalent to a constant,
2925 we replace it by the constant. We must be sure, however,
2926 that we don't try to replace it in the insn in which it
2927 is being set. */
2928 int regno = REGNO (recog_data.operand[i]);
2929 if (reg_equiv_constant (regno) != 0
2930 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2931 {
2932 /* Record the existing mode so that the check if constants are
2933 allowed will work when operand_mode isn't specified. */
2934
2935 if (operand_mode[i] == VOIDmode)
2936 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2937
2938 substed_operand[i] = recog_data.operand[i]
2939 = reg_equiv_constant (regno);
2940 }
2941 if (reg_equiv_memory_loc (regno) != 0
2942 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2943 /* We need not give a valid is_set_dest argument since the case
2944 of a constant equivalence was checked above. */
2945 substed_operand[i] = recog_data.operand[i]
2946 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2947 ind_levels, 0, insn,
2948 &address_reloaded[i]);
2949 }
2950 /* If the operand is still a register (we didn't replace it with an
2951 equivalent), get the preferred class to reload it into. */
2952 code = GET_CODE (recog_data.operand[i]);
2953 preferred_class[i]
2954 = ((code == REG && REGNO (recog_data.operand[i])
2955 >= FIRST_PSEUDO_REGISTER)
2956 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2957 : NO_REGS);
2958 pref_or_nothing[i]
2959 = (code == REG
2960 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2961 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2962 }
2963
2964 /* If this is simply a copy from operand 1 to operand 0, merge the
2965 preferred classes for the operands. */
2966 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2967 && recog_data.operand[1] == SET_SRC (set))
2968 {
2969 preferred_class[0] = preferred_class[1]
2970 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2971 pref_or_nothing[0] |= pref_or_nothing[1];
2972 pref_or_nothing[1] |= pref_or_nothing[0];
2973 }
2974
2975 /* Now see what we need for pseudo-regs that didn't get hard regs
2976 or got the wrong kind of hard reg. For this, we must consider
2977 all the operands together against the register constraints. */
2978
2979 best = MAX_RECOG_OPERANDS * 2 + 600;
2980
2981 goal_alternative_swapped = 0;
2982
2983 /* The constraints are made of several alternatives.
2984 Each operand's constraint looks like foo,bar,... with commas
2985 separating the alternatives. The first alternatives for all
2986 operands go together, the second alternatives go together, etc.
2987
2988 First loop over alternatives. */
2989
2990 alternative_mask enabled = get_enabled_alternatives (insn);
2991 for (this_alternative_number = 0;
2992 this_alternative_number < n_alternatives;
2993 this_alternative_number++)
2994 {
2995 int swapped;
2996
2997 if (!TEST_BIT (enabled, this_alternative_number))
2998 {
2999 int i;
3000
3001 for (i = 0; i < recog_data.n_operands; i++)
3002 constraints[i] = skip_alternative (constraints[i]);
3003
3004 continue;
3005 }
3006
3007 /* If insn is commutative (it's safe to exchange a certain pair
3008 of operands) then we need to try each alternative twice, the
3009 second time matching those two operands as if we had
3010 exchanged them. To do this, really exchange them in
3011 operands. */
3012 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3013 {
3014 /* Loop over operands for one constraint alternative. */
3015 /* LOSERS counts those that don't fit this alternative
3016 and would require loading. */
3017 int losers = 0;
3018 /* BAD is set to 1 if some operand can't fit this alternative
3019 even after reloading. */
3020 int bad = 0;
3021 /* REJECT is a count of how undesirable this alternative says it is
3022 if any reloading is required. If the alternative matches exactly
3023 then REJECT is ignored, but otherwise it gets this much
3024 counted against it in addition to the reloading needed. Each
3025 ? counts three times here since we want the disparaging caused by
3026 a bad register class to only count 1/3 as much. */
3027 int reject = 0;
3028
3029 if (swapped)
3030 {
3031 recog_data.operand[commutative] = substed_operand[commutative + 1];
3032 recog_data.operand[commutative + 1] = substed_operand[commutative];
3033 /* Swap the duplicates too. */
3034 for (i = 0; i < recog_data.n_dups; i++)
3035 if (recog_data.dup_num[i] == commutative
3036 || recog_data.dup_num[i] == commutative + 1)
3037 *recog_data.dup_loc[i]
3038 = recog_data.operand[(int) recog_data.dup_num[i]];
3039
3040 std::swap (preferred_class[commutative],
3041 preferred_class[commutative + 1]);
3042 std::swap (pref_or_nothing[commutative],
3043 pref_or_nothing[commutative + 1]);
3044 std::swap (address_reloaded[commutative],
3045 address_reloaded[commutative + 1]);
3046 }
3047
3048 this_earlyclobber = 0;
3049
3050 for (i = 0; i < noperands; i++)
3051 {
3052 const char *p = constraints[i];
3053 char *end;
3054 int len;
3055 int win = 0;
3056 int did_match = 0;
3057 /* 0 => this operand can be reloaded somehow for this alternative. */
3058 int badop = 1;
3059 /* 0 => this operand can be reloaded if the alternative allows regs. */
3060 int winreg = 0;
3061 int c;
3062 int m;
3063 rtx operand = recog_data.operand[i];
3064 int offset = 0;
3065 /* Nonzero means this is a MEM that must be reloaded into a reg
3066 regardless of what the constraint says. */
3067 int force_reload = 0;
3068 int offmemok = 0;
3069 /* Nonzero if a constant forced into memory would be OK for this
3070 operand. */
3071 int constmemok = 0;
3072 int earlyclobber = 0;
3073 enum constraint_num cn;
3074 enum reg_class cl;
3075
3076 /* If the predicate accepts a unary operator, it means that
3077 we need to reload the operand, but do not do this for
3078 match_operator and friends. */
3079 if (UNARY_P (operand) && *p != 0)
3080 operand = XEXP (operand, 0);
3081
3082 /* If the operand is a SUBREG, extract
3083 the REG or MEM (or maybe even a constant) within.
3084 (Constants can occur as a result of reg_equiv_constant.) */
3085
3086 while (GET_CODE (operand) == SUBREG)
3087 {
3088 /* Offset only matters when operand is a REG and
3089 it is a hard reg. This is because it is passed
3090 to reg_fits_class_p if it is a REG, and
3091 reg_fits_class_p returns 0 for all pseudos. */
3092 if (REG_P (SUBREG_REG (operand))
3093 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3094 {
3095 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3096 GET_MODE (SUBREG_REG (operand)),
3097 SUBREG_BYTE (operand),
3098 GET_MODE (operand)) < 0)
3099 force_reload = 1;
3100 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3101 GET_MODE (SUBREG_REG (operand)),
3102 SUBREG_BYTE (operand),
3103 GET_MODE (operand));
3104 }
3105 operand = SUBREG_REG (operand);
3106 /* Force reload if this is a constant or PLUS or if there may
3107 be a problem accessing OPERAND in the outer mode. */
3108 if (CONSTANT_P (operand)
3109 || GET_CODE (operand) == PLUS
3110 /* We must force a reload of paradoxical SUBREGs
3111 of a MEM because the alignment of the inner value
3112 may not be enough to do the outer reference. On
3113 big-endian machines, it may also reference outside
3114 the object.
3115
3116 On machines that extend byte operations, when we have a
3117 SUBREG where both the inner and outer modes are no wider
3118 than a word and the inner mode is narrower, is integral,
3119 and gets extended when loaded from memory, combine.c has
3120 made assumptions about the behavior of the machine in such
3121 register accesses. If the data is, in fact, in memory we
3122 must always load using the size assumed to be in the
3123 register and let the insn do the different-sized
3124 accesses.
3125
3126 This is doubly true if WORD_REGISTER_OPERATIONS. In
3127 this case eliminate_regs has left non-paradoxical
3128 subregs for push_reload to see. Make sure it does
3129 by forcing the reload.
3130
3131 ??? When is it right at this stage to have a subreg
3132 of a mem that is _not_ to be handled specially? IMO
3133 those should have been reduced to just a mem. */
3134 || ((MEM_P (operand)
3135 || (REG_P (operand)
3136 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3137 && (WORD_REGISTER_OPERATIONS
3138 || ((GET_MODE_BITSIZE (GET_MODE (operand))
3139 < BIGGEST_ALIGNMENT)
3140 && (GET_MODE_SIZE (operand_mode[i])
3141 > GET_MODE_SIZE (GET_MODE (operand))))
3142 || BYTES_BIG_ENDIAN
3143 || ((GET_MODE_SIZE (operand_mode[i])
3144 <= UNITS_PER_WORD)
3145 && (GET_MODE_SIZE (GET_MODE (operand))
3146 <= UNITS_PER_WORD)
3147 && (GET_MODE_SIZE (operand_mode[i])
3148 > GET_MODE_SIZE (GET_MODE (operand)))
3149 && INTEGRAL_MODE_P (GET_MODE (operand))
3150 && LOAD_EXTEND_OP (GET_MODE (operand))
3151 != UNKNOWN)))
3152 )
3153 force_reload = 1;
3154 }
3155
3156 this_alternative[i] = NO_REGS;
3157 this_alternative_win[i] = 0;
3158 this_alternative_match_win[i] = 0;
3159 this_alternative_offmemok[i] = 0;
3160 this_alternative_earlyclobber[i] = 0;
3161 this_alternative_matches[i] = -1;
3162
3163 /* An empty constraint or empty alternative
3164 allows anything which matched the pattern. */
3165 if (*p == 0 || *p == ',')
3166 win = 1, badop = 0;
3167
3168 /* Scan this alternative's specs for this operand;
3169 set WIN if the operand fits any letter in this alternative.
3170 Otherwise, clear BADOP if this operand could
3171 fit some letter after reloads,
3172 or set WINREG if this operand could fit after reloads
3173 provided the constraint allows some registers. */
3174
3175 do
3176 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3177 {
3178 case '\0':
3179 len = 0;
3180 break;
3181 case ',':
3182 c = '\0';
3183 break;
3184
3185 case '?':
3186 reject += 6;
3187 break;
3188
3189 case '!':
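/* A '!' severely disparages this alternative: REJECT jumps straight to
600, so once any reloading is needed the LOSERS weighting further down
leaves this alternative as little more than a last resort; it can still
be chosen if it matches exactly, since REJECT is then ignored. */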
3190 reject = 600;
3191 break;
3192
3193 case '#':
3194 /* Ignore rest of this alternative as far as
3195 reloading is concerned. */
3196 do
3197 p++;
3198 while (*p && *p != ',');
3199 len = 0;
3200 break;
3201
3202 case '0': case '1': case '2': case '3': case '4':
3203 case '5': case '6': case '7': case '8': case '9':
3204 m = strtoul (p, &end, 10);
3205 p = end;
3206 len = 0;
3207
3208 this_alternative_matches[i] = m;
3209 /* We are supposed to match a previous operand.
3210 If we do, we win if that one did.
3211 If we do not, count both of the operands as losers.
3212 (This is too conservative, since most of the time
3213 only a single reload insn will be needed to make
3214 the two operands win. As a result, this alternative
3215 may be rejected when it is actually desirable.) */
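/* To illustrate the index arithmetic below (a sketch assuming
COMMUTATIVE == 1): while matching with the operands swapped, an index
equal to 1 maps to 2 * 1 + 1 - 1 == 2 and an index equal to 2 maps to
2 * 1 + 1 - 2 == 1, so operands_match is consulted as if operands 1
and 2 had traded places, without recomputing the table. */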
3216 if ((swapped && (m != commutative || i != commutative + 1))
3217 /* If we are matching as if two operands were swapped,
3218 also pretend that operands_match had been computed
3219 with swapped.
3220 But if I is the second of those and C is the first,
3221 don't exchange them, because operands_match is valid
3222 only on one side of its diagonal. */
3223 ? (operands_match
3224 [(m == commutative || m == commutative + 1)
3225 ? 2 * commutative + 1 - m : m]
3226 [(i == commutative || i == commutative + 1)
3227 ? 2 * commutative + 1 - i : i])
3228 : operands_match[m][i])
3229 {
3230 /* If we are matching a non-offsettable address where an
3231 offsettable address was expected, then we must reject
3232 this combination, because we can't reload it. */
3233 if (this_alternative_offmemok[m]
3234 && MEM_P (recog_data.operand[m])
3235 && this_alternative[m] == NO_REGS
3236 && ! this_alternative_win[m])
3237 bad = 1;
3238
3239 did_match = this_alternative_win[m];
3240 }
3241 else
3242 {
3243 /* Operands don't match. */
3244 rtx value;
3245 int loc1, loc2;
3246 /* Retroactively mark the operand we had to match
3247 as a loser, if it wasn't already. */
3248 if (this_alternative_win[m])
3249 losers++;
3250 this_alternative_win[m] = 0;
3251 if (this_alternative[m] == NO_REGS)
3252 bad = 1;
3253 /* But count the pair only once in the total badness of
3254 this alternative, if the pair can be a dummy reload.
3255 The pointers in operand_loc are not swapped; swap
3256 them by hand if necessary. */
3257 if (swapped && i == commutative)
3258 loc1 = commutative + 1;
3259 else if (swapped && i == commutative + 1)
3260 loc1 = commutative;
3261 else
3262 loc1 = i;
3263 if (swapped && m == commutative)
3264 loc2 = commutative + 1;
3265 else if (swapped && m == commutative + 1)
3266 loc2 = commutative;
3267 else
3268 loc2 = m;
3269 value
3270 = find_dummy_reload (recog_data.operand[i],
3271 recog_data.operand[m],
3272 recog_data.operand_loc[loc1],
3273 recog_data.operand_loc[loc2],
3274 operand_mode[i], operand_mode[m],
3275 this_alternative[m], -1,
3276 this_alternative_earlyclobber[m]);
3277
3278 if (value != 0)
3279 losers--;
3280 }
3281 /* This can be fixed with reloads if the operand
3282 we are supposed to match can be fixed with reloads. */
3283 badop = 0;
3284 this_alternative[i] = this_alternative[m];
3285
3286 /* If we have to reload this operand and some previous
3287 operand also had to match the same thing as this
3288 operand, we don't know how to do that. So reject this
3289 alternative. */
3290 if (! did_match || force_reload)
3291 for (j = 0; j < i; j++)
3292 if (this_alternative_matches[j]
3293 == this_alternative_matches[i])
3294 {
3295 badop = 1;
3296 break;
3297 }
3298 break;
3299
3300 case 'p':
3301 /* All necessary reloads for an address_operand
3302 were handled in find_reloads_address. */
3303 this_alternative[i]
3304 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3305 ADDRESS, SCRATCH);
3306 win = 1;
3307 badop = 0;
3308 break;
3309
3310 case TARGET_MEM_CONSTRAINT:
3311 if (force_reload)
3312 break;
3313 if (MEM_P (operand)
3314 || (REG_P (operand)
3315 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3316 && reg_renumber[REGNO (operand)] < 0))
3317 win = 1;
3318 if (CONST_POOL_OK_P (operand_mode[i], operand))
3319 badop = 0;
3320 constmemok = 1;
3321 break;
3322
3323 case '<':
3324 if (MEM_P (operand)
3325 && ! address_reloaded[i]
3326 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3327 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3328 win = 1;
3329 break;
3330
3331 case '>':
3332 if (MEM_P (operand)
3333 && ! address_reloaded[i]
3334 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3335 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3336 win = 1;
3337 break;
3338
3339 /* Memory operand whose address is not offsettable. */
3340 case 'V':
3341 if (force_reload)
3342 break;
3343 if (MEM_P (operand)
3344 && ! (ind_levels ? offsettable_memref_p (operand)
3345 : offsettable_nonstrict_memref_p (operand))
3346 /* Certain mem addresses will become offsettable
3347 after they themselves are reloaded. This is important;
3348 we don't want our own handling of unoffsettables
3349 to override the handling of reg_equiv_address. */
3350 && !(REG_P (XEXP (operand, 0))
3351 && (ind_levels == 0
3352 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3353 win = 1;
3354 break;
3355
3356 /* Memory operand whose address is offsettable. */
3357 case 'o':
3358 if (force_reload)
3359 break;
3360 if ((MEM_P (operand)
3361 /* If IND_LEVELS, find_reloads_address won't reload a
3362 pseudo that didn't get a hard reg, so we have to
3363 reject that case. */
3364 && ((ind_levels ? offsettable_memref_p (operand)
3365 : offsettable_nonstrict_memref_p (operand))
3366 /* A reloaded address is offsettable because it is now
3367 just a simple register indirect. */
3368 || address_reloaded[i] == 1))
3369 || (REG_P (operand)
3370 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3371 && reg_renumber[REGNO (operand)] < 0
3372 /* If reg_equiv_address is nonzero, we will be
3373 loading it into a register; hence it will be
3374 offsettable, but we cannot say that reg_equiv_mem
3375 is offsettable without checking. */
3376 && ((reg_equiv_mem (REGNO (operand)) != 0
3377 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3378 || (reg_equiv_address (REGNO (operand)) != 0))))
3379 win = 1;
3380 if (CONST_POOL_OK_P (operand_mode[i], operand)
3381 || MEM_P (operand))
3382 badop = 0;
3383 constmemok = 1;
3384 offmemok = 1;
3385 break;
3386
3387 case '&':
3388 /* Output operand that is stored before the need for the
3389 input operands (and their index registers) is over. */
3390 earlyclobber = 1, this_earlyclobber = 1;
3391 break;
3392
3393 case 'X':
3394 force_reload = 0;
3395 win = 1;
3396 break;
3397
3398 case 'g':
3399 if (! force_reload
3400 /* A PLUS is never a valid operand, but reload can make
3401 it from a register when eliminating registers. */
3402 && GET_CODE (operand) != PLUS
3403 /* A SCRATCH is not a valid operand. */
3404 && GET_CODE (operand) != SCRATCH
3405 && (! CONSTANT_P (operand)
3406 || ! flag_pic
3407 || LEGITIMATE_PIC_OPERAND_P (operand))
3408 && (GENERAL_REGS == ALL_REGS
3409 || !REG_P (operand)
3410 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3411 && reg_renumber[REGNO (operand)] < 0)))
3412 win = 1;
3413 cl = GENERAL_REGS;
3414 goto reg;
3415
3416 default:
3417 cn = lookup_constraint (p);
3418 switch (get_constraint_type (cn))
3419 {
3420 case CT_REGISTER:
3421 cl = reg_class_for_constraint (cn);
3422 if (cl != NO_REGS)
3423 goto reg;
3424 break;
3425
3426 case CT_CONST_INT:
3427 if (CONST_INT_P (operand)
3428 && (insn_const_int_ok_for_constraint
3429 (INTVAL (operand), cn)))
3430 win = true;
3431 break;
3432
3433 case CT_MEMORY:
3434 if (force_reload)
3435 break;
3436 if (constraint_satisfied_p (operand, cn))
3437 win = 1;
3438 /* If the address was already reloaded,
3439 we win as well. */
3440 else if (MEM_P (operand) && address_reloaded[i] == 1)
3441 win = 1;
3442 /* Likewise if the address will be reloaded because
3443 reg_equiv_address is nonzero. For reg_equiv_mem
3444 we have to check. */
3445 else if (REG_P (operand)
3446 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3447 && reg_renumber[REGNO (operand)] < 0
3448 && ((reg_equiv_mem (REGNO (operand)) != 0
3449 && (constraint_satisfied_p
3450 (reg_equiv_mem (REGNO (operand)),
3451 cn)))
3452 || (reg_equiv_address (REGNO (operand))
3453 != 0)))
3454 win = 1;
3455
3456 /* If we didn't already win, we can reload
3457 constants via force_const_mem, and other
3458 MEMs by reloading the address like for 'o'. */
3459 if (CONST_POOL_OK_P (operand_mode[i], operand)
3460 || MEM_P (operand))
3461 badop = 0;
3462 constmemok = 1;
3463 offmemok = 1;
3464 break;
3465
3466 case CT_SPECIAL_MEMORY:
3467 if (force_reload)
3468 break;
3469 if (constraint_satisfied_p (operand, cn))
3470 win = 1;
3471 /* Likewise if the address will be reloaded because
3472 reg_equiv_address is nonzero. For reg_equiv_mem
3473 we have to check. */
3474 else if (REG_P (operand)
3475 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3476 && reg_renumber[REGNO (operand)] < 0
3477 && reg_equiv_mem (REGNO (operand)) != 0
3478 && (constraint_satisfied_p
3479 (reg_equiv_mem (REGNO (operand)), cn)))
3480 win = 1;
3481 break;
3482
3483 case CT_ADDRESS:
3484 if (constraint_satisfied_p (operand, cn))
3485 win = 1;
3486
3487 /* If we didn't already win, we can reload
3488 the address into a base register. */
3489 this_alternative[i]
3490 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3491 ADDRESS, SCRATCH);
3492 badop = 0;
3493 break;
3494
3495 case CT_FIXED_FORM:
3496 if (constraint_satisfied_p (operand, cn))
3497 win = 1;
3498 break;
3499 }
3500 break;
3501
3502 reg:
3503 this_alternative[i]
3504 = reg_class_subunion[this_alternative[i]][cl];
3505 if (GET_MODE (operand) == BLKmode)
3506 break;
3507 winreg = 1;
3508 if (REG_P (operand)
3509 && reg_fits_class_p (operand, this_alternative[i],
3510 offset, GET_MODE (recog_data.operand[i])))
3511 win = 1;
3512 break;
3513 }
3514 while ((p += len), c);
3515
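/* Record the advanced constraint pointer only on the final pass of the
SWAPPED loop (the second pass for a commutative insn, the only pass
otherwise), so the next alternative is scanned from the right
position. */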
3516 if (swapped == (commutative >= 0 ? 1 : 0))
3517 constraints[i] = p;
3518
3519 /* If this operand could be handled with a reg,
3520 and some reg is allowed, then this operand can be handled. */
3521 if (winreg && this_alternative[i] != NO_REGS
3522 && (win || !class_only_fixed_regs[this_alternative[i]]))
3523 badop = 0;
3524
3525 /* Record which operands fit this alternative. */
3526 this_alternative_earlyclobber[i] = earlyclobber;
3527 if (win && ! force_reload)
3528 this_alternative_win[i] = 1;
3529 else if (did_match && ! force_reload)
3530 this_alternative_match_win[i] = 1;
3531 else
3532 {
3533 int const_to_mem = 0;
3534
3535 this_alternative_offmemok[i] = offmemok;
3536 losers++;
3537 if (badop)
3538 bad = 1;
3539 /* Alternative loses if it has no regs for a reg operand. */
3540 if (REG_P (operand)
3541 && this_alternative[i] == NO_REGS
3542 && this_alternative_matches[i] < 0)
3543 bad = 1;
3544
3545 /* If this is a constant that is reloaded into the desired
3546 class by copying it to memory first, count that as another
3547 reload. This is consistent with other code and is
3548 required to avoid choosing another alternative when
3549 the constant is moved into memory by this function on
3550 an early reload pass. Note that the test here is
3551 precisely the same as in the code below that calls
3552 force_const_mem. */
3553 if (CONST_POOL_OK_P (operand_mode[i], operand)
3554 && ((targetm.preferred_reload_class (operand,
3555 this_alternative[i])
3556 == NO_REGS)
3557 || no_input_reloads))
3558 {
3559 const_to_mem = 1;
3560 if (this_alternative[i] != NO_REGS)
3561 losers++;
3562 }
3563
3564 /* Alternative loses if it requires a type of reload not
3565 permitted for this insn. We can always reload SCRATCH
3566 and objects with a REG_UNUSED note. */
3567 if (GET_CODE (operand) != SCRATCH
3568 && modified[i] != RELOAD_READ && no_output_reloads
3569 && ! find_reg_note (insn, REG_UNUSED, operand))
3570 bad = 1;
3571 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3572 && ! const_to_mem)
3573 bad = 1;
3574
3575 /* If we can't reload this value at all, reject this
3576 alternative. Note that we could also lose due to
3577 LIMIT_RELOAD_CLASS, but we don't check that
3578 here. */
3579
3580 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3581 {
3582 if (targetm.preferred_reload_class (operand,
3583 this_alternative[i])
3584 == NO_REGS)
3585 reject = 600;
3586
3587 if (operand_type[i] == RELOAD_FOR_OUTPUT
3588 && (targetm.preferred_output_reload_class (operand,
3589 this_alternative[i])
3590 == NO_REGS))
3591 reject = 600;
3592 }
3593
3594 /* We prefer to reload pseudos over reloading other things,
3595 since such reloads may be eliminated later.
3596 If we are reloading a SCRATCH, we won't be generating any
3597 insns, just using a register, so it is also preferred.
3598 So bump REJECT in other cases. Don't do this when we are
3599 forcing a constant into memory and it will then win, since
3600 we don't want a different alternative to match in that
3601 case. */
3602 if (! (REG_P (operand)
3603 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3604 && GET_CODE (operand) != SCRATCH
3605 && ! (const_to_mem && constmemok))
3606 reject += 2;
3607
3608 /* Input reloads can be inherited more often than output
3609 reloads can be removed, so penalize output reloads. */
3610 if (operand_type[i] != RELOAD_FOR_INPUT
3611 && GET_CODE (operand) != SCRATCH)
3612 reject++;
3613 }
3614
3615 /* If this operand is a pseudo register that didn't get
3616 a hard reg and this alternative accepts some
3617 register, see if the class that we want is a subset
3618 of the preferred class for this register. If not,
3619 but it intersects that class, use the preferred class
3620 instead. If it does not intersect the preferred
3621 class, show that usage of this alternative should be
3622 discouraged; it will be discouraged more still if the
3623 register is `preferred or nothing'. We do this
3624 because it increases the chance of reusing our spill
3625 register in a later insn and avoiding a pair of
3626 memory stores and loads.
3627
3628 Don't bother with this if this alternative will
3629 accept this operand.
3630
3631 Don't do this for a multiword operand, since it is
3632 only a small win and has the risk of requiring more
3633 spill registers, which could cause a large loss.
3634
3635 Don't do this if the preferred class has only one
3636 register because we might otherwise exhaust the
3637 class. */
3638
3639 if (! win && ! did_match
3640 && this_alternative[i] != NO_REGS
3641 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3642 && reg_class_size [(int) preferred_class[i]] > 0
3643 && ! small_register_class_p (preferred_class[i]))
3644 {
3645 if (! reg_class_subset_p (this_alternative[i],
3646 preferred_class[i]))
3647 {
3648 /* Since we don't have a way of forming the intersection,
3649 we just do something special if the preferred class
3650 is a subset of the class we have; that's the most
3651 common case anyway. */
3652 if (reg_class_subset_p (preferred_class[i],
3653 this_alternative[i]))
3654 this_alternative[i] = preferred_class[i];
3655 else
3656 reject += (2 + 2 * pref_or_nothing[i]);
3657 }
3658 }
3659 }
3660
3661 /* Now see if any output operands that are marked "earlyclobber"
3662 in this alternative conflict with any input operands
3663 or any memory addresses. */
3664
3665 for (i = 0; i < noperands; i++)
3666 if (this_alternative_earlyclobber[i]
3667 && (this_alternative_win[i] || this_alternative_match_win[i]))
3668 {
3669 struct decomposition early_data;
3670
3671 early_data = decompose (recog_data.operand[i]);
3672
3673 gcc_assert (modified[i] != RELOAD_READ);
3674
3675 if (this_alternative[i] == NO_REGS)
3676 {
3677 this_alternative_earlyclobber[i] = 0;
3678 gcc_assert (this_insn_is_asm);
3679 error_for_asm (this_insn,
3680 "%<&%> constraint used with no register class");
3681 }
3682
3683 for (j = 0; j < noperands; j++)
3684 /* Is this an input operand or a memory ref? */
3685 if ((MEM_P (recog_data.operand[j])
3686 || modified[j] != RELOAD_WRITE)
3687 && j != i
3688 /* Ignore things like match_operator operands. */
3689 && !recog_data.is_operator[j]
3690 /* Don't count an input operand that is constrained to match
3691 the early clobber operand. */
3692 && ! (this_alternative_matches[j] == i
3693 && rtx_equal_p (recog_data.operand[i],
3694 recog_data.operand[j]))
3695 /* Is it altered by storing the earlyclobber operand? */
3696 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3697 early_data))
3698 {
3699 /* If the output is in a non-empty few-regs class,
3700 it's costly to reload it, so reload the input instead. */
3701 if (small_register_class_p (this_alternative[i])
3702 && (REG_P (recog_data.operand[j])
3703 || GET_CODE (recog_data.operand[j]) == SUBREG))
3704 {
3705 losers++;
3706 this_alternative_win[j] = 0;
3707 this_alternative_match_win[j] = 0;
3708 }
3709 else
3710 break;
3711 }
3712 /* If an earlyclobber operand conflicts with something,
3713 it must be reloaded, so request this and count the cost. */
3714 if (j != noperands)
3715 {
3716 losers++;
3717 this_alternative_win[i] = 0;
3718 this_alternative_match_win[j] = 0;
3719 for (j = 0; j < noperands; j++)
3720 if (this_alternative_matches[j] == i
3721 && this_alternative_match_win[j])
3722 {
3723 this_alternative_win[j] = 0;
3724 this_alternative_match_win[j] = 0;
3725 losers++;
3726 }
3727 }
3728 }
3729
3730 /* If one alternative accepts all the operands, no reload required,
3731 choose that alternative; don't consider the remaining ones. */
3732 if (losers == 0)
3733 {
3734 /* Unswap these so that they are never swapped at `finish'. */
3735 if (swapped)
3736 {
3737 recog_data.operand[commutative] = substed_operand[commutative];
3738 recog_data.operand[commutative + 1]
3739 = substed_operand[commutative + 1];
3740 }
3741 for (i = 0; i < noperands; i++)
3742 {
3743 goal_alternative_win[i] = this_alternative_win[i];
3744 goal_alternative_match_win[i] = this_alternative_match_win[i];
3745 goal_alternative[i] = this_alternative[i];
3746 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3747 goal_alternative_matches[i] = this_alternative_matches[i];
3748 goal_alternative_earlyclobber[i]
3749 = this_alternative_earlyclobber[i];
3750 }
3751 goal_alternative_number = this_alternative_number;
3752 goal_alternative_swapped = swapped;
3753 goal_earlyclobber = this_earlyclobber;
3754 goto finish;
3755 }
3756
3757 /* REJECT, set by the ! and ? constraint characters and when a register
3758 would be reloaded into a non-preferred class, discourages the use of
3759 this alternative for a reload goal. REJECT is incremented by six
3760 for each ? and two for each non-preferred class. */
3761 losers = losers * 6 + reject;
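/* A small worked example of this weighting: an alternative needing two
reloads plus a single '?' scores 2 * 6 + 6 == 18, the same as one
needing three reloads with no '?'; each '?' is thus as costly as one
extra reload. */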
3762
3763 /* If this alternative can be made to work by reloading,
3764 and it needs less reloading than the others checked so far,
3765 record it as the chosen goal for reloading. */
3766 if (! bad)
3767 {
3768 if (best > losers)
3769 {
3770 for (i = 0; i < noperands; i++)
3771 {
3772 goal_alternative[i] = this_alternative[i];
3773 goal_alternative_win[i] = this_alternative_win[i];
3774 goal_alternative_match_win[i]
3775 = this_alternative_match_win[i];
3776 goal_alternative_offmemok[i]
3777 = this_alternative_offmemok[i];
3778 goal_alternative_matches[i] = this_alternative_matches[i];
3779 goal_alternative_earlyclobber[i]
3780 = this_alternative_earlyclobber[i];
3781 }
3782 goal_alternative_swapped = swapped;
3783 best = losers;
3784 goal_alternative_number = this_alternative_number;
3785 goal_earlyclobber = this_earlyclobber;
3786 }
3787 }
3788
3789 if (swapped)
3790 {
3791 /* If the commutative operands have been swapped, swap
3792 them back in order to check the next alternative. */
3793 recog_data.operand[commutative] = substed_operand[commutative];
3794 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3795 /* Unswap the duplicates too. */
3796 for (i = 0; i < recog_data.n_dups; i++)
3797 if (recog_data.dup_num[i] == commutative
3798 || recog_data.dup_num[i] == commutative + 1)
3799 *recog_data.dup_loc[i]
3800 = recog_data.operand[(int) recog_data.dup_num[i]];
3801
3802 /* Unswap the operand related information as well. */
3803 std::swap (preferred_class[commutative],
3804 preferred_class[commutative + 1]);
3805 std::swap (pref_or_nothing[commutative],
3806 pref_or_nothing[commutative + 1]);
3807 std::swap (address_reloaded[commutative],
3808 address_reloaded[commutative + 1]);
3809 }
3810 }
3811 }
3812
3813 /* The operands don't meet the constraints.
3814 goal_alternative describes the alternative
3815 that we could reach by reloading the fewest operands.
3816 Reload so as to fit it. */
3817
3818 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3819 {
3820 /* No alternative works with reloads?? */
3821 if (insn_code_number >= 0)
3822 fatal_insn ("unable to generate reloads for:", insn);
3823 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3824 /* Avoid further trouble with this insn. */
3825 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3826 n_reloads = 0;
3827 return 0;
3828 }
3829
3830 /* Jump to `finish' from above if all operands are valid already.
3831 In that case, goal_alternative_win is all 1. */
3832 finish:
3833
3834 /* Right now, for any pair of operands I and J that are required to match,
3835 with I < J,
3836 goal_alternative_matches[J] is I.
3837 Set up goal_alternative_matched as the inverse function:
3838 goal_alternative_matched[I] = J. */
3839
3840 for (i = 0; i < noperands; i++)
3841 goal_alternative_matched[i] = -1;
3842
3843 for (i = 0; i < noperands; i++)
3844 if (! goal_alternative_win[i]
3845 && goal_alternative_matches[i] >= 0)
3846 goal_alternative_matched[goal_alternative_matches[i]] = i;
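/* For example, if operand 2 had to match operand 0 and still needs
reloading, goal_alternative_matches[2] is 0 and the loop above
records goal_alternative_matched[0] = 2. */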
3847
3848 for (i = 0; i < noperands; i++)
3849 goal_alternative_win[i] |= goal_alternative_match_win[i];
3850
3851 /* If the best alternative is with operands 1 and 2 swapped,
3852 consider them swapped before reporting the reloads. Update the
3853 operand numbers of any reloads already pushed. */
3854
3855 if (goal_alternative_swapped)
3856 {
3857 std::swap (substed_operand[commutative],
3858 substed_operand[commutative + 1]);
3859 std::swap (recog_data.operand[commutative],
3860 recog_data.operand[commutative + 1]);
3861 std::swap (*recog_data.operand_loc[commutative],
3862 *recog_data.operand_loc[commutative + 1]);
3863
3864 for (i = 0; i < recog_data.n_dups; i++)
3865 if (recog_data.dup_num[i] == commutative
3866 || recog_data.dup_num[i] == commutative + 1)
3867 *recog_data.dup_loc[i]
3868 = recog_data.operand[(int) recog_data.dup_num[i]];
3869
3870 for (i = 0; i < n_reloads; i++)
3871 {
3872 if (rld[i].opnum == commutative)
3873 rld[i].opnum = commutative + 1;
3874 else if (rld[i].opnum == commutative + 1)
3875 rld[i].opnum = commutative;
3876 }
3877 }
3878
3879 for (i = 0; i < noperands; i++)
3880 {
3881 operand_reloadnum[i] = -1;
3882
3883 /* If this is an earlyclobber operand, we need to widen the scope.
3884 The reload must remain valid from the start of the insn being
3885 reloaded until after the operand is stored into its destination.
3886 We approximate this with RELOAD_OTHER even though we know that we
3887 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3888
3889 One special case that is worth checking is when we have an
3890 output that is earlyclobber but isn't used past the insn (typically
3891 a SCRATCH). In this case, we need only have the reload live
3892 through the insn itself, but not for any of our input or output
3893 reloads.
3894 But we must not accidentally narrow the scope of an existing
3895 RELOAD_OTHER reload - leave these alone.
3896
3897 In any case, anything needed to address this operand can remain
3898 however it was previously categorized. */
3899
3900 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3901 operand_type[i]
3902 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3903 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3904 }
3905
3906 /* Any constants that aren't allowed and can't be reloaded
3907 into registers are here changed into memory references. */
3908 for (i = 0; i < noperands; i++)
3909 if (! goal_alternative_win[i])
3910 {
3911 rtx op = recog_data.operand[i];
3912 rtx subreg = NULL_RTX;
3913 rtx plus = NULL_RTX;
3914 machine_mode mode = operand_mode[i];
3915
3916 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3917 push_reload so we have to let them pass here. */
3918 if (GET_CODE (op) == SUBREG)
3919 {
3920 subreg = op;
3921 op = SUBREG_REG (op);
3922 mode = GET_MODE (op);
3923 }
3924
3925 if (GET_CODE (op) == PLUS)
3926 {
3927 plus = op;
3928 op = XEXP (op, 1);
3929 }
3930
3931 if (CONST_POOL_OK_P (mode, op)
3932 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3933 == NO_REGS)
3934 || no_input_reloads))
3935 {
3936 int this_address_reloaded;
3937 rtx tem = force_const_mem (mode, op);
3938
3939 /* If we stripped a SUBREG or a PLUS above add it back. */
3940 if (plus != NULL_RTX)
3941 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3942
3943 if (subreg != NULL_RTX)
3944 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3945
3946 this_address_reloaded = 0;
3947 substed_operand[i] = recog_data.operand[i]
3948 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3949 0, insn, &this_address_reloaded);
3950
3951 /* If the alternative accepts constant pool refs directly
3952 there will be no reload needed at all. */
3953 if (plus == NULL_RTX
3954 && subreg == NULL_RTX
3955 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3956 ? substed_operand[i]
3957 : NULL,
3958 recog_data.constraints[i],
3959 goal_alternative_number))
3960 goal_alternative_win[i] = 1;
3961 }
3962 }
3963
3964 /* Record the values of the earlyclobber operands for the caller. */
3965 if (goal_earlyclobber)
3966 for (i = 0; i < noperands; i++)
3967 if (goal_alternative_earlyclobber[i])
3968 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3969
3970 /* Now record reloads for all the operands that need them. */
3971 for (i = 0; i < noperands; i++)
3972 if (! goal_alternative_win[i])
3973 {
3974 /* Operands that match previous ones have already been handled. */
3975 if (goal_alternative_matches[i] >= 0)
3976 ;
3977 /* Handle an operand with a nonoffsettable address
3978 appearing where an offsettable address will do
3979 by reloading the address into a base register.
3980
3981 ??? We can also do this when the operand is a register and
3982 reg_equiv_mem is not offsettable, but this is a bit tricky,
3983 so we don't bother with it. It may not be worth doing. */
3984 else if (goal_alternative_matched[i] == -1
3985 && goal_alternative_offmemok[i]
3986 && MEM_P (recog_data.operand[i]))
3987 {
3988 /* If the address to be reloaded is a VOIDmode constant,
3989 use the default address mode as mode of the reload register,
3990 as would have been done by find_reloads_address. */
3991 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3992 machine_mode address_mode;
3993
3994 address_mode = get_address_mode (recog_data.operand[i]);
3995 operand_reloadnum[i]
3996 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3997 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3998 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3999 address_mode,
4000 VOIDmode, 0, 0, i, RELOAD_OTHER);
4001 rld[operand_reloadnum[i]].inc
4002 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4003
4004 /* If this operand is an output, we will have made any
4005 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4006 now we are treating part of the operand as an input, so
4007 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4008
4009 if (modified[i] == RELOAD_WRITE)
4010 {
4011 for (j = 0; j < n_reloads; j++)
4012 {
4013 if (rld[j].opnum == i)
4014 {
4015 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4016 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4017 else if (rld[j].when_needed
4018 == RELOAD_FOR_OUTADDR_ADDRESS)
4019 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4020 }
4021 }
4022 }
4023 }
4024 else if (goal_alternative_matched[i] == -1)
4025 {
4026 operand_reloadnum[i]
4027 = push_reload ((modified[i] != RELOAD_WRITE
4028 ? recog_data.operand[i] : 0),
4029 (modified[i] != RELOAD_READ
4030 ? recog_data.operand[i] : 0),
4031 (modified[i] != RELOAD_WRITE
4032 ? recog_data.operand_loc[i] : 0),
4033 (modified[i] != RELOAD_READ
4034 ? recog_data.operand_loc[i] : 0),
4035 (enum reg_class) goal_alternative[i],
4036 (modified[i] == RELOAD_WRITE
4037 ? VOIDmode : operand_mode[i]),
4038 (modified[i] == RELOAD_READ
4039 ? VOIDmode : operand_mode[i]),
4040 (insn_code_number < 0 ? 0
4041 : insn_data[insn_code_number].operand[i].strict_low),
4042 0, i, operand_type[i]);
4043 }
4044 /* In a matching pair of operands, one must be input only
4045 and the other must be output only.
4046 Pass the input operand as IN and the other as OUT. */
4047 else if (modified[i] == RELOAD_READ
4048 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4049 {
4050 operand_reloadnum[i]
4051 = push_reload (recog_data.operand[i],
4052 recog_data.operand[goal_alternative_matched[i]],
4053 recog_data.operand_loc[i],
4054 recog_data.operand_loc[goal_alternative_matched[i]],
4055 (enum reg_class) goal_alternative[i],
4056 operand_mode[i],
4057 operand_mode[goal_alternative_matched[i]],
4058 0, 0, i, RELOAD_OTHER);
4059 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4060 }
4061 else if (modified[i] == RELOAD_WRITE
4062 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4063 {
4064 operand_reloadnum[goal_alternative_matched[i]]
4065 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4066 recog_data.operand[i],
4067 recog_data.operand_loc[goal_alternative_matched[i]],
4068 recog_data.operand_loc[i],
4069 (enum reg_class) goal_alternative[i],
4070 operand_mode[goal_alternative_matched[i]],
4071 operand_mode[i],
4072 0, 0, i, RELOAD_OTHER);
4073 operand_reloadnum[i] = output_reloadnum;
4074 }
4075 else
4076 {
4077 gcc_assert (insn_code_number < 0);
4078 error_for_asm (insn, "inconsistent operand constraints "
4079 "in an %<asm%>");
4080 /* Avoid further trouble with this insn. */
4081 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4082 n_reloads = 0;
4083 return 0;
4084 }
4085 }
4086 else if (goal_alternative_matched[i] < 0
4087 && goal_alternative_matches[i] < 0
4088 && address_operand_reloaded[i] != 1
4089 && optimize)
4090 {
4091 /* For each non-matching operand that's a MEM or a pseudo-register
4092 that didn't get a hard register, make an optional reload.
4093 This may get done even if the insn needs no reloads otherwise. */
4094
4095 rtx operand = recog_data.operand[i];
4096
4097 while (GET_CODE (operand) == SUBREG)
4098 operand = SUBREG_REG (operand);
4099 if ((MEM_P (operand)
4100 || (REG_P (operand)
4101 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4102 /* If this is only for an output, the optional reload would not
4103 actually cause us to use a register now, just note that
4104 something is stored here. */
4105 && (goal_alternative[i] != NO_REGS
4106 || modified[i] == RELOAD_WRITE)
4107 && ! no_input_reloads
4108 /* An optional output reload might allow us to delete INSN later.
4109 We mustn't make in-out reloads on insns that are not permitted
4110 to have output reloads.
4111 If this is an asm, we can't delete it; we must not even call
4112 push_reload for an optional output reload in this case,
4113 because we can't be sure that the constraint allows a register,
4114 and push_reload verifies the constraints for asms. */
4115 && (modified[i] == RELOAD_READ
4116 || (! no_output_reloads && ! this_insn_is_asm)))
4117 operand_reloadnum[i]
4118 = push_reload ((modified[i] != RELOAD_WRITE
4119 ? recog_data.operand[i] : 0),
4120 (modified[i] != RELOAD_READ
4121 ? recog_data.operand[i] : 0),
4122 (modified[i] != RELOAD_WRITE
4123 ? recog_data.operand_loc[i] : 0),
4124 (modified[i] != RELOAD_READ
4125 ? recog_data.operand_loc[i] : 0),
4126 (enum reg_class) goal_alternative[i],
4127 (modified[i] == RELOAD_WRITE
4128 ? VOIDmode : operand_mode[i]),
4129 (modified[i] == RELOAD_READ
4130 ? VOIDmode : operand_mode[i]),
4131 (insn_code_number < 0 ? 0
4132 : insn_data[insn_code_number].operand[i].strict_low),
4133 1, i, operand_type[i]);
4134 /* If a memory reference remains (either as a MEM or a pseudo that
4135 did not get a hard register), yet we can't make an optional
4136 reload, check if this is actually a pseudo register reference;
4137 we then need to emit a USE and/or a CLOBBER so that reload
4138 inheritance will do the right thing. */
4139 else if (replace
4140 && (MEM_P (operand)
4141 || (REG_P (operand)
4142 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4143 && reg_renumber [REGNO (operand)] < 0)))
4144 {
4145 operand = *recog_data.operand_loc[i];
4146
4147 while (GET_CODE (operand) == SUBREG)
4148 operand = SUBREG_REG (operand);
4149 if (REG_P (operand))
4150 {
4151 if (modified[i] != RELOAD_WRITE)
4152 /* We mark the USE with QImode so that we recognize
4153 it as one that can be safely deleted at the end
4154 of reload. */
4155 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4156 insn), QImode);
4157 if (modified[i] != RELOAD_READ)
4158 emit_insn_after (gen_clobber (operand), insn);
4159 }
4160 }
4161 }
4162 else if (goal_alternative_matches[i] >= 0
4163 && goal_alternative_win[goal_alternative_matches[i]]
4164 && modified[i] == RELOAD_READ
4165 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4166 && ! no_input_reloads && ! no_output_reloads
4167 && optimize)
4168 {
4169 /* Similarly, make an optional reload for a pair of matching
4170 objects that are in MEM or a pseudo that didn't get a hard reg. */
4171
4172 rtx operand = recog_data.operand[i];
4173
4174 while (GET_CODE (operand) == SUBREG)
4175 operand = SUBREG_REG (operand);
4176 if ((MEM_P (operand)
4177 || (REG_P (operand)
4178 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4179 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4180 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4181 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4182 recog_data.operand[i],
4183 recog_data.operand_loc[goal_alternative_matches[i]],
4184 recog_data.operand_loc[i],
4185 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4186 operand_mode[goal_alternative_matches[i]],
4187 operand_mode[i],
4188 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4189 }
4190
4191 /* Perform whatever substitutions on the operands we are supposed
4192 to make due to commutativity or replacement of registers
4193 with equivalent constants or memory slots. */
4194
4195 for (i = 0; i < noperands; i++)
4196 {
4197 /* We only do this on the last pass through reload, because it is
4198 possible for some data (like reg_equiv_address) to be changed during
4199 later passes. Moreover, we lose the opportunity to get a useful
4200 reload_{in,out}_reg when we do these replacements. */
4201
4202 if (replace)
4203 {
4204 rtx substitution = substed_operand[i];
4205
4206 *recog_data.operand_loc[i] = substitution;
4207
4208 /* If we're replacing an operand with a LABEL_REF, we need to
4209 make sure that there's a REG_LABEL_OPERAND note attached to
4210 this instruction. */
4211 if (GET_CODE (substitution) == LABEL_REF
4212 && !find_reg_note (insn, REG_LABEL_OPERAND,
4213 label_ref_label (substitution))
4214 /* For a JUMP_P, if it was a branch target it must have
4215 already been recorded as such. */
4216 && (!JUMP_P (insn)
4217 || !label_is_jump_target_p (label_ref_label (substitution),
4218 insn)))
4219 {
4220 add_reg_note (insn, REG_LABEL_OPERAND,
4221 label_ref_label (substitution));
4222 if (LABEL_P (label_ref_label (substitution)))
4223 ++LABEL_NUSES (label_ref_label (substitution));
4224 }
4225
4226 }
4227 else
4228 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4229 }
4230
4231 /* If this insn pattern contains any MATCH_DUP's, make sure that
4232 they will be substituted if the operands they match are substituted.
4233 Also do now any substitutions we already did on the operands.
4234
4235 Don't do this if we aren't making replacements because we might be
4236 propagating things allocated by frame pointer elimination into places
4237 it doesn't expect. */
4238
4239 if (insn_code_number >= 0 && replace)
4240 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4241 {
4242 int opno = recog_data.dup_num[i];
4243 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4244 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4245 }
4246
4247 #if 0
4248 /* This loses because reloading of prior insns can invalidate the equivalence
4249 (or at least find_equiv_reg isn't smart enough to find it any more),
4250 causing this insn to need more reload regs than it needed before.
4251 It may be too late to make the reload regs available.
4252 Now this optimization is done safely in choose_reload_regs. */
4253
4254 /* For each reload of a reg into some other class of reg,
4255 search for an existing equivalent reg (same value now) in the right class.
4256 We can use it as long as we don't need to change its contents. */
4257 for (i = 0; i < n_reloads; i++)
4258 if (rld[i].reg_rtx == 0
4259 && rld[i].in != 0
4260 && REG_P (rld[i].in)
4261 && rld[i].out == 0)
4262 {
4263 rld[i].reg_rtx
4264 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4265 static_reload_reg_p, 0, rld[i].inmode);
4266 /* Prevent generation of insn to load the value
4267 because the one we found already has the value. */
4268 if (rld[i].reg_rtx)
4269 rld[i].in = rld[i].reg_rtx;
4270 }
4271 #endif
4272
4273 /* If we detected an error and replaced the asm instruction by a USE,
4274 forget about the reloads. */
4275 if (GET_CODE (PATTERN (insn)) == USE
4276 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4277 n_reloads = 0;
4278
4279 /* Perhaps an output reload can be combined with another
4280 to reduce needs by one. */
4281 if (!goal_earlyclobber)
4282 combine_reloads ();
4283
4284 /* If we have a pair of reloads for parts of an address, they are reloading
4285 the same object, the operands themselves were not reloaded, and they
4286 are for two operands that are supposed to match, merge the reloads and
4287 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4288
4289 for (i = 0; i < n_reloads; i++)
4290 {
4291 int k;
4292
4293 for (j = i + 1; j < n_reloads; j++)
4294 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4295 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4296 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4297 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4298 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4299 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4300 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4301 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4302 && rtx_equal_p (rld[i].in, rld[j].in)
4303 && (operand_reloadnum[rld[i].opnum] < 0
4304 || rld[operand_reloadnum[rld[i].opnum]].optional)
4305 && (operand_reloadnum[rld[j].opnum] < 0
4306 || rld[operand_reloadnum[rld[j].opnum]].optional)
4307 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4308 || (goal_alternative_matches[rld[j].opnum]
4309 == rld[i].opnum)))
4310 {
4311 for (k = 0; k < n_replacements; k++)
4312 if (replacements[k].what == j)
4313 replacements[k].what = i;
4314
4315 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4316 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4317 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4318 else
4319 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4320 rld[j].in = 0;
4321 }
4322 }
4323
4324 /* Scan all the reloads and update their type.
4325 If a reload is for the address of an operand and we didn't reload
4326 that operand, change the type. Similarly, change the operand number
4327 of a reload when two operands match. If a reload is optional, treat it
4328 as though the operand isn't reloaded.
4329
4330 ??? This latter case is somewhat odd because if we do the optional
4331 reload, it means the object is hanging around. Thus we need only
4332 do the address reload if the optional reload was NOT done.
4333
4334 Change secondary reloads to be the address type of their operand, not
4335 the normal type.
4336
4337 If an operand's reload is now RELOAD_OTHER, change any
4338 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4339 RELOAD_FOR_OTHER_ADDRESS. */
4340
4341 for (i = 0; i < n_reloads; i++)
4342 {
4343 if (rld[i].secondary_p
4344 && rld[i].when_needed == operand_type[rld[i].opnum])
4345 rld[i].when_needed = address_type[rld[i].opnum];
4346
4347 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4348 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4351 && (operand_reloadnum[rld[i].opnum] < 0
4352 || rld[operand_reloadnum[rld[i].opnum]].optional))
4353 {
4354 /* If we have a secondary reload to go along with this reload,
4355 change its type to RELOAD_FOR_OPADDR_ADDR. */
4356
4357 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4358 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4359 && rld[i].secondary_in_reload != -1)
4360 {
4361 int secondary_in_reload = rld[i].secondary_in_reload;
4362
4363 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4364
4365 /* If there's a tertiary reload we have to change it also. */
4366 if (secondary_in_reload > 0
4367 && rld[secondary_in_reload].secondary_in_reload != -1)
4368 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4369 = RELOAD_FOR_OPADDR_ADDR;
4370 }
4371
4372 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4373 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4374 && rld[i].secondary_out_reload != -1)
4375 {
4376 int secondary_out_reload = rld[i].secondary_out_reload;
4377
4378 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4379
4380 /* If there's a tertiary reload we have to change it also. */
4381 if (secondary_out_reload
4382 && rld[secondary_out_reload].secondary_out_reload != -1)
4383 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4384 = RELOAD_FOR_OPADDR_ADDR;
4385 }
4386
4387 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4388 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4389 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4390 else
4391 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4392 }
4393
4394 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4395 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4396 && operand_reloadnum[rld[i].opnum] >= 0
4397 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4398 == RELOAD_OTHER))
4399 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4400
4401 if (goal_alternative_matches[rld[i].opnum] >= 0)
4402 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4403 }
4404
4405 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4406 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4407 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4408
4409 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4410 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4411 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4412 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4413 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4414 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4415 This is complicated by the fact that a single operand can have more
4416 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4417 choose_reload_regs without affecting code quality, and cases that
4418 actually fail are extremely rare, so it turns out to be better to fix
4419 the problem here by not generating cases that choose_reload_regs will
4420 fail for. */
4421 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4422 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4423 a single operand.
4424 We can reduce the register pressure by exploiting that a
4425 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4426 does not conflict with any of them, if it is only used for the first of
4427 the RELOAD_FOR_X_ADDRESS reloads. */
4428 {
4429 int first_op_addr_num = -2;
4430 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4431 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4432 int need_change = 0;
4433 /* We use first_op_addr_num and the contents of the above arrays
4434 first as flags: -2 means no instance encountered, -1 means exactly
4435 one instance encountered.
4436 If more than one instance has been encountered, we store the reload
4437 number of the first reload of the kind in question; reload numbers
4438 are known to be non-negative. */
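/* Concretely: each counter starts at -2; the first instance found by
the scan below bumps it to -1 (still negative, nothing recorded), and
only a second instance pushes it to 0 or more, at which point the
counter is overwritten with that reload's number and NEED_CHANGE is
set. */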
4439 for (i = 0; i < noperands; i++)
4440 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4441 for (i = n_reloads - 1; i >= 0; i--)
4442 {
4443 switch (rld[i].when_needed)
4444 {
4445 case RELOAD_FOR_OPERAND_ADDRESS:
4446 if (++first_op_addr_num >= 0)
4447 {
4448 first_op_addr_num = i;
4449 need_change = 1;
4450 }
4451 break;
4452 case RELOAD_FOR_INPUT_ADDRESS:
4453 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4454 {
4455 first_inpaddr_num[rld[i].opnum] = i;
4456 need_change = 1;
4457 }
4458 break;
4459 case RELOAD_FOR_OUTPUT_ADDRESS:
4460 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4461 {
4462 first_outpaddr_num[rld[i].opnum] = i;
4463 need_change = 1;
4464 }
4465 break;
4466 default:
4467 break;
4468 }
4469 }
4470
4471 if (need_change)
4472 {
4473 for (i = 0; i < n_reloads; i++)
4474 {
4475 int first_num;
4476 enum reload_type type;
4477
4478 switch (rld[i].when_needed)
4479 {
4480 case RELOAD_FOR_OPADDR_ADDR:
4481 first_num = first_op_addr_num;
4482 type = RELOAD_FOR_OPERAND_ADDRESS;
4483 break;
4484 case RELOAD_FOR_INPADDR_ADDRESS:
4485 first_num = first_inpaddr_num[rld[i].opnum];
4486 type = RELOAD_FOR_INPUT_ADDRESS;
4487 break;
4488 case RELOAD_FOR_OUTADDR_ADDRESS:
4489 first_num = first_outpaddr_num[rld[i].opnum];
4490 type = RELOAD_FOR_OUTPUT_ADDRESS;
4491 break;
4492 default:
4493 continue;
4494 }
4495 if (first_num < 0)
4496 continue;
4497 else if (i > first_num)
4498 rld[i].when_needed = type;
4499 else
4500 {
4501 /* Check if the only TYPE reload that uses reload I is
4502 reload FIRST_NUM. */
4503 for (j = n_reloads - 1; j > first_num; j--)
4504 {
4505 if (rld[j].when_needed == type
4506 && (rld[i].secondary_p
4507 ? rld[j].secondary_in_reload == i
4508 : reg_mentioned_p (rld[i].in, rld[j].in)))
4509 {
4510 rld[i].when_needed = type;
4511 break;
4512 }
4513 }
4514 }
4515 }
4516 }
4517 }
4518
4519 /* See if we have any reloads that are now allowed to be merged
4520 because we've changed when the reload is needed to
4521 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4522 check for the most common cases. */
4523
4524 for (i = 0; i < n_reloads; i++)
4525 if (rld[i].in != 0 && rld[i].out == 0
4526 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4527 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4528 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4529 for (j = 0; j < n_reloads; j++)
4530 if (i != j && rld[j].in != 0 && rld[j].out == 0
4531 && rld[j].when_needed == rld[i].when_needed
4532 && MATCHES (rld[i].in, rld[j].in)
4533 && rld[i].rclass == rld[j].rclass
4534 && !rld[i].nocombine && !rld[j].nocombine
4535 && rld[i].reg_rtx == rld[j].reg_rtx)
4536 {
4537 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4538 transfer_replacements (i, j);
4539 rld[j].in = 0;
4540 }
4541
4542 /* If we made any reloads for addresses, see if they violate a
4543 "no input reloads" requirement for this insn. But loads that we
4544 do after the insn (such as for output addresses) are fine. */
4545 if (HAVE_cc0 && no_input_reloads)
4546 for (i = 0; i < n_reloads; i++)
4547 gcc_assert (rld[i].in == 0
4548 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4549 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4550
4551 /* Compute reload_mode and reload_nregs. */
4552 for (i = 0; i < n_reloads; i++)
4553 {
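/* The reload register must be wide enough for both the value read and
the value written, so take the wider of the two modes; a VOIDmode
inmode (no input) simply defers to the output mode. */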
4554 rld[i].mode
4555 = (rld[i].inmode == VOIDmode
4556 || (GET_MODE_SIZE (rld[i].outmode)
4557 > GET_MODE_SIZE (rld[i].inmode)))
4558 ? rld[i].outmode : rld[i].inmode;
4559
4560 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4561 }
4562
4563 /* Special-case a simple move with an input reload and a
4564 destination that is a hard reg: if the hard reg is OK, use it. */
4565 for (i = 0; i < n_reloads; i++)
4566 if (rld[i].when_needed == RELOAD_FOR_INPUT
4567 && GET_CODE (PATTERN (insn)) == SET
4568 && REG_P (SET_DEST (PATTERN (insn)))
4569 && (SET_SRC (PATTERN (insn)) == rld[i].in
4570 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4571 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4572 {
4573 rtx dest = SET_DEST (PATTERN (insn));
4574 unsigned int regno = REGNO (dest);
4575
4576 if (regno < FIRST_PSEUDO_REGISTER
4577 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4578 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4579 {
4580 int nr = hard_regno_nregs[regno][rld[i].mode];
4581 int ok = 1, nri;
4582
4583 for (nri = 1; nri < nr; nri ++)
4584 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4585 {
4586 ok = 0;
4587 break;
4588 }
4589
4590 if (ok)
4591 rld[i].reg_rtx = dest;
4592 }
4593 }
4594
4595 return retval;
4596 }
4597
4598 /* Return true if alternative number ALTNUM in constraint-string
4599 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4600 MEM gives the reference if its address hasn't been fully reloaded,
4601 otherwise it is NULL. */
4602
4603 static bool
4604 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4605 const char *constraint, int altnum)
4606 {
4607 int c;
4608
4609 /* Skip alternatives before the one requested. */
4610 while (altnum > 0)
4611 {
4612 while (*constraint++ != ',')
4613 ;
4614 altnum--;
4615 }
4616 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4617 If one of them is present, this alternative accepts the result of
4618 passing a constant-pool reference through find_reloads_toplev.
4619
4620 The same is true of extra memory constraints if the address
4621 was reloaded into a register. However, the target may elect
4622 to disallow the original constant address, forcing it to be
4623 reloaded into a register instead. */
4624 for (; (c = *constraint) && c != ',' && c != '#';
4625 constraint += CONSTRAINT_LEN (c, constraint))
4626 {
4627 enum constraint_num cn = lookup_constraint (constraint);
4628 if (insn_extra_memory_constraint (cn)
4629 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4630 return true;
4631 }
4632 return false;
4633 }
4634 \f
4635 /* Scan X for memory references and scan the addresses for reloading.
4636 Also checks for references to "constant" regs that we want to eliminate
4637 and replaces them with the values they stand for.
4638 We may alter X destructively if it contains a reference to such.
4639 If X is just a constant reg, we return the equivalent value
4640 instead of X.
4641
4642 IND_LEVELS says how many levels of indirect addressing this machine
4643 supports.
4644
4645 OPNUM and TYPE identify the purpose of the reload.
4646
4647 IS_SET_DEST is true if X is the destination of a SET, which is not
4648 appropriate to be replaced by a constant.
4649
4650 INSN, if nonzero, is the insn in which we do the reload. It is used
4651 to determine if we may generate output reloads, and where to put USEs
4652 for pseudos that we have to replace with stack slots.
4653
4654 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4655 result of find_reloads_address. */
4656
4657 static rtx
4658 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4659 int ind_levels, int is_set_dest, rtx_insn *insn,
4660 int *address_reloaded)
4661 {
4662 RTX_CODE code = GET_CODE (x);
4663
4664 const char *fmt = GET_RTX_FORMAT (code);
4665 int i;
4666 int copied;
4667
4668 if (code == REG)
4669 {
4670 /* This code is duplicated for speed in find_reloads. */
4671 int regno = REGNO (x);
4672 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4673 x = reg_equiv_constant (regno);
4674 #if 0
4675 /* This creates (subreg (mem...)) which would cause an unnecessary
4676 reload of the mem. */
4677 else if (reg_equiv_mem (regno) != 0)
4678 x = reg_equiv_mem (regno);
4679 #endif
4680 else if (reg_equiv_memory_loc (regno)
4681 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4682 {
4683 rtx mem = make_memloc (x, regno);
4684 if (reg_equiv_address (regno)
4685 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4686 {
4687 /* If this is not a toplevel operand, find_reloads doesn't see
4688 this substitution. We have to emit a USE of the pseudo so
4689 that delete_output_reload can see it. */
4690 if (replace_reloads && recog_data.operand[opnum] != x)
4691 /* We mark the USE with QImode so that we recognize it
4692 as one that can be safely deleted at the end of
4693 reload. */
4694 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4695 QImode);
4696 x = mem;
4697 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4698 opnum, type, ind_levels, insn);
4699 if (!rtx_equal_p (x, mem))
4700 push_reg_equiv_alt_mem (regno, x);
4701 if (address_reloaded)
4702 *address_reloaded = i;
4703 }
4704 }
4705 return x;
4706 }
4707 if (code == MEM)
4708 {
4709 rtx tem = x;
4710
4711 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4712 opnum, type, ind_levels, insn);
4713 if (address_reloaded)
4714 *address_reloaded = i;
4715
4716 return tem;
4717 }
4718
4719 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4720 {
4721 /* Check for SUBREG containing a REG that's equivalent to a
4722 constant. If the constant has a known value, truncate it
4723 right now. Similarly if we are extracting a single word of a
4724 multi-word constant. If the constant is symbolic, allow it
4725 to be substituted normally. push_reload will strip the
4726 subreg later. The constant must not be VOIDmode, because we
4727 will lose the mode of the register (this should never happen
4728 because one of the cases above should handle it). */
4729
4730 int regno = REGNO (SUBREG_REG (x));
4731 rtx tem;
4732
4733 if (regno >= FIRST_PSEUDO_REGISTER
4734 && reg_renumber[regno] < 0
4735 && reg_equiv_constant (regno) != 0)
4736 {
4737 tem =
4738 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4739 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4740 gcc_assert (tem);
4741 if (CONSTANT_P (tem)
4742 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4743 {
4744 tem = force_const_mem (GET_MODE (x), tem);
4745 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4746 &XEXP (tem, 0), opnum, type,
4747 ind_levels, insn);
4748 if (address_reloaded)
4749 *address_reloaded = i;
4750 }
4751 return tem;
4752 }
4753
4754 /* If the subreg contains a reg that will be converted to a mem,
4755 attempt to convert the whole subreg to a (narrower or wider)
4756 memory reference instead. If this succeeds, we're done --
4757 otherwise fall through to check whether the inner reg still
4758 needs address reloads anyway. */
4759
4760 if (regno >= FIRST_PSEUDO_REGISTER
4761 && reg_equiv_memory_loc (regno) != 0)
4762 {
4763 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4764 insn, address_reloaded);
4765 if (tem)
4766 return tem;
4767 }
4768 }
4769
4770 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4771 {
4772 if (fmt[i] == 'e')
4773 {
4774 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4775 ind_levels, is_set_dest, insn,
4776 address_reloaded);
4777 /* If we have replaced a reg with its equivalent memory loc -
4778 that can still be handled here e.g. if it's in a paradoxical
4779 subreg - we must make the change in a copy, rather than using
4780 a destructive change. This way, find_reloads can still elect
4781 not to do the change. */
4782 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4783 {
4784 x = shallow_copy_rtx (x);
4785 copied = 1;
4786 }
4787 XEXP (x, i) = new_part;
4788 }
4789 }
4790 return x;
4791 }
4792
4793 /* Return a mem ref for the memory equivalent of reg REGNO.
4794 This mem ref is not shared with anything. */
4795
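/* Illustrative sketch, not part of the original source, with hypothetical
   offsets: if reg_equiv_memory_loc (REGNO) is
   (mem:SI (plus (reg fp) (const_int -8))) and the frame pointer has been
   eliminated to the stack pointer at a current offset of 32, make_memloc
   returns a fresh (mem:SI (plus (reg sp) (const_int 24))) in the mode of
   AD, unshared from the equivalence so later substitutions cannot
   corrupt it.  */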
4796 static rtx
4797 make_memloc (rtx ad, int regno)
4798 {
4799 /* We must rerun eliminate_regs, in case the elimination
4800 offsets have changed. */
4801 rtx tem
4802 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4803 0);
4804
4805 /* If TEM might contain a pseudo, we must copy it to avoid
4806 modifying it when we do the substitution for the reload. */
4807 if (rtx_varies_p (tem, 0))
4808 tem = copy_rtx (tem);
4809
4810 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4811 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4812
4813 /* Copy the result if it's still the same as the equivalence, to avoid
4814 modifying it when we do the substitution for the reload. */
4815 if (tem == reg_equiv_memory_loc (regno))
4816 tem = copy_rtx (tem);
4817 return tem;
4818 }
4819
4820 /* Returns true if AD could be turned into a valid memory reference
4821 to mode MODE in address space AS by reloading the part pointed to
4822 by PART into a register. */
4823
4824 static int
4825 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4826 addr_space_t as, rtx *part)
4827 {
4828 int retv;
4829 rtx tem = *part;
4830 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4831
4832 *part = reg;
4833 retv = memory_address_addr_space_p (mode, ad, as);
4834 *part = tem;
4835
4836 return retv;
4837 }
4838
4839 /* Record all reloads needed for handling memory address AD
4840 which appears in *LOC in a memory reference to mode MODE
4841 which itself is found in location *MEMREFLOC.
4842 Note that we take shortcuts assuming that no multi-reg machine mode
4843 occurs as part of an address.
4844
4845 OPNUM and TYPE specify the purpose of this reload.
4846
4847 IND_LEVELS says how many levels of indirect addressing this machine
4848 supports.
4849
4850 INSN, if nonzero, is the insn in which we do the reload. It is used
4851 to determine if we may generate output reloads, and where to put USEs
4852 for pseudos that we have to replace with stack slots.
4853
4854 Value is one if this address is reloaded or replaced as a whole; it is
4855 zero if the top level of this address was not reloaded or replaced, and
4856 it is -1 if it may or may not have been reloaded or replaced.
4857
4858 Note that there is no verification that the address will be valid after
4859 this routine does its work. Instead, we rely on the fact that the address
4860 was valid when reload started. So we need only undo things that reload
4861 could have broken. These are wrong register types, pseudos not allocated
4862 to a hard register, and frame pointer elimination. */
4863
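/* Illustrative example, not part of the original source: for a stack
   slot address such as (plus (reg fp) (const_int 40000)) whose
   displacement is out of range for the target, the code below either
   reloads the constant into an index register (when double_reg_address_ok
   permits reg+reg addressing), leaving (plus (reg fp) (reg new)), or else
   reloads the whole sum into a base register; when MEMREFLOC is given,
   the enclosing MEM is unshared first so it can be altered safely.  */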
4864 static int
4865 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4866 rtx *loc, int opnum, enum reload_type type,
4867 int ind_levels, rtx_insn *insn)
4868 {
4869 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4870 : ADDR_SPACE_GENERIC;
4871 int regno;
4872 int removed_and = 0;
4873 int op_index;
4874 rtx tem;
4875
4876 /* If the address is a register, see if it is a legitimate address and
4877 reload if not. We first handle the cases where we need not reload
4878 or where we must reload in a non-standard way. */
4879
4880 if (REG_P (ad))
4881 {
4882 regno = REGNO (ad);
4883
4884 if (reg_equiv_constant (regno) != 0)
4885 {
4886 find_reloads_address_part (reg_equiv_constant (regno), loc,
4887 base_reg_class (mode, as, MEM, SCRATCH),
4888 GET_MODE (ad), opnum, type, ind_levels);
4889 return 1;
4890 }
4891
4892 tem = reg_equiv_memory_loc (regno);
4893 if (tem != 0)
4894 {
4895 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4896 {
4897 tem = make_memloc (ad, regno);
4898 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4899 XEXP (tem, 0),
4900 MEM_ADDR_SPACE (tem)))
4901 {
4902 rtx orig = tem;
4903
4904 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4905 &XEXP (tem, 0), opnum,
4906 ADDR_TYPE (type), ind_levels, insn);
4907 if (!rtx_equal_p (tem, orig))
4908 push_reg_equiv_alt_mem (regno, tem);
4909 }
4910 /* We can avoid a reload if the register's equivalent memory
4911 expression is valid as an indirect memory address.
4912 But not all addresses are valid in a mem used as an indirect
4913 address: only reg or reg+constant. */
4914
4915 if (ind_levels > 0
4916 && strict_memory_address_addr_space_p (mode, tem, as)
4917 && (REG_P (XEXP (tem, 0))
4918 || (GET_CODE (XEXP (tem, 0)) == PLUS
4919 && REG_P (XEXP (XEXP (tem, 0), 0))
4920 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4921 {
4922 /* If TEM is not the same as what we'll be replacing the
4923 pseudo with after reload, put a USE in front of INSN
4924 in the final reload pass. */
4925 if (replace_reloads
4926 && num_not_at_initial_offset
4927 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4928 {
4929 *loc = tem;
4930 /* We mark the USE with QImode so that we
4931 recognize it as one that can be safely
4932 deleted at the end of reload. */
4933 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4934 insn), QImode);
4935
4936 /* This doesn't really count as replacing the address
4937 as a whole, since it is still a memory access. */
4938 }
4939 return 0;
4940 }
4941 ad = tem;
4942 }
4943 }
4944
4945 /* The only remaining case where we can avoid a reload is if this is a
4946 hard register that is valid as a base register and which is not the
4947 subject of a CLOBBER in this insn. */
4948
4949 else if (regno < FIRST_PSEUDO_REGISTER
4950 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4951 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4952 return 0;
4953
4954 /* If we do not have one of the cases above, we must do the reload. */
4955 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4956 base_reg_class (mode, as, MEM, SCRATCH),
4957 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4958 return 1;
4959 }
4960
4961 if (strict_memory_address_addr_space_p (mode, ad, as))
4962 {
4963 /* The address appears valid, so reloads are not needed.
4964 But the address may contain an eliminable register.
4965 This can happen because a machine with indirect addressing
4966 may consider a pseudo register by itself a valid address even when
4967 it has failed to get a hard reg.
4968 So do a tree-walk to find and eliminate all such regs. */
4969
4970 /* But first quickly dispose of a common case. */
4971 if (GET_CODE (ad) == PLUS
4972 && CONST_INT_P (XEXP (ad, 1))
4973 && REG_P (XEXP (ad, 0))
4974 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4975 return 0;
4976
4977 subst_reg_equivs_changed = 0;
4978 *loc = subst_reg_equivs (ad, insn);
4979
4980 if (! subst_reg_equivs_changed)
4981 return 0;
4982
4983 /* Check result for validity after substitution. */
4984 if (strict_memory_address_addr_space_p (mode, ad, as))
4985 return 0;
4986 }
4987
4988 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4989 do
4990 {
4991 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4992 {
4993 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4994 ind_levels, win);
4995 }
4996 break;
4997 win:
4998 *memrefloc = copy_rtx (*memrefloc);
4999 XEXP (*memrefloc, 0) = ad;
5000 move_replacements (&ad, &XEXP (*memrefloc, 0));
5001 return -1;
5002 }
5003 while (0);
5004 #endif
5005
5006 /* The address is not valid. We have to figure out why. First see if
5007 we have an outer AND and remove it if so. Then analyze what's inside. */
5008
5009 if (GET_CODE (ad) == AND)
5010 {
5011 removed_and = 1;
5012 loc = &XEXP (ad, 0);
5013 ad = *loc;
5014 }
5015
5016 /* One possibility for why the address is invalid is that it is itself
5017 a MEM. This can happen when the frame pointer is being eliminated, a
5018 pseudo is not allocated to a hard register, and the offset between the
5019 frame and stack pointers is not its initial value. In that case the
5020 pseudo will have been replaced by a MEM referring to the
5021 stack pointer. */
5022 if (MEM_P (ad))
5023 {
5024 /* First ensure that the address in this MEM is valid. Then, unless
5025 indirect addresses are valid, reload the MEM into a register. */
5026 tem = ad;
5027 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5028 opnum, ADDR_TYPE (type),
5029 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5030
5031 /* If tem was changed, then we must create a new memory reference to
5032 hold it and store it back into memrefloc. */
5033 if (tem != ad && memrefloc)
5034 {
5035 *memrefloc = copy_rtx (*memrefloc);
5036 copy_replacements (tem, XEXP (*memrefloc, 0));
5037 loc = &XEXP (*memrefloc, 0);
5038 if (removed_and)
5039 loc = &XEXP (*loc, 0);
5040 }
5041
5042 /* Check similar cases as for indirect addresses as above except
5043 that we can allow pseudos and a MEM since they should have been
5044 taken care of above. */
5045
5046 if (ind_levels == 0
5047 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5048 || MEM_P (XEXP (tem, 0))
5049 || ! (REG_P (XEXP (tem, 0))
5050 || (GET_CODE (XEXP (tem, 0)) == PLUS
5051 && REG_P (XEXP (XEXP (tem, 0), 0))
5052 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5053 {
5054 /* Must use TEM here, not AD, since it is the one that will
5055 have any subexpressions reloaded, if needed. */
5056 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5057 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5058 VOIDmode, 0,
5059 0, opnum, type);
5060 return ! removed_and;
5061 }
5062 else
5063 return 0;
5064 }
5065
5066 /* If we have address of a stack slot but it's not valid because the
5067 displacement is too large, compute the sum in a register.
5068 Handle all base registers here, not just fp/ap/sp, because on some
5069 targets (namely SH) we can also get too large displacements from
5070 big-endian corrections. */
5071 else if (GET_CODE (ad) == PLUS
5072 && REG_P (XEXP (ad, 0))
5073 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5074 && CONST_INT_P (XEXP (ad, 1))
5075 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5076 CONST_INT)
5077 /* Similarly, if we were to reload the base register and the
5078 mem+offset address is still invalid, then we want to reload
5079 the whole address, not just the base register. */
5080 || ! maybe_memory_address_addr_space_p
5081 (mode, ad, as, &(XEXP (ad, 0)))))
5082
5083 {
5084 /* Unshare the MEM rtx so we can safely alter it. */
5085 if (memrefloc)
5086 {
5087 *memrefloc = copy_rtx (*memrefloc);
5088 loc = &XEXP (*memrefloc, 0);
5089 if (removed_and)
5090 loc = &XEXP (*loc, 0);
5091 }
5092
5093 if (double_reg_address_ok[mode]
5094 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5095 PLUS, CONST_INT))
5096 {
5097 /* Unshare the sum as well. */
5098 *loc = ad = copy_rtx (ad);
5099
5100 /* Reload the displacement into an index reg.
5101 We assume the frame pointer or arg pointer is a base reg. */
5102 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5103 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5104 type, ind_levels);
5105 return 0;
5106 }
5107 else
5108 {
5109 /* If the sum of two regs is not necessarily valid,
5110 reload the sum into a base reg.
5111 That will at least work. */
5112 find_reloads_address_part (ad, loc,
5113 base_reg_class (mode, as, MEM, SCRATCH),
5114 GET_MODE (ad), opnum, type, ind_levels);
5115 }
5116 return ! removed_and;
5117 }
5118
5119 /* If we have an indexed stack slot, there are three possible reasons why
5120 it might be invalid: The index might need to be reloaded, the address
5121 might have been made by frame pointer elimination and hence have a
5122 constant out of range, or both reasons might apply.
5123
5124 We can easily check for an index needing reload, but even if that is the
5125 case, we might also have an invalid constant. To avoid making the
5126 conservative assumption and requiring two reloads, we see if this address
5127 is valid when not interpreted strictly. If it is, the only problem is
5128 that the index needs a reload and find_reloads_address_1 will take care
5129 of it.
5130
5131 Handle all base registers here, not just fp/ap/sp, because on some
5132 targets (namely SPARC) we can also get invalid addresses from preventive
5133 subreg big-endian corrections made by find_reloads_toplev. We
5134 can also get expressions involving LO_SUM (rather than PLUS) from
5135 find_reloads_subreg_address.
5136
5137 If we decide to do something, it must be that `double_reg_address_ok'
5138 is true. We generate a reload of the base register + constant and
5139 rework the sum so that the reload register will be added to the index.
5140 This is safe because we know the address isn't shared.
5141
5142 We check for the base register as both the first and second operand of
5143 the innermost PLUS and/or LO_SUM. */
5144
5145 for (op_index = 0; op_index < 2; ++op_index)
5146 {
5147 rtx operand, addend;
5148 enum rtx_code inner_code;
5149
5150 if (GET_CODE (ad) != PLUS)
5151 continue;
5152
5153 inner_code = GET_CODE (XEXP (ad, 0));
5154 if (!(GET_CODE (ad) == PLUS
5155 && CONST_INT_P (XEXP (ad, 1))
5156 && (inner_code == PLUS || inner_code == LO_SUM)))
5157 continue;
5158
5159 operand = XEXP (XEXP (ad, 0), op_index);
5160 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5161 continue;
5162
5163 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5164
5165 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5166 GET_CODE (addend))
5167 || operand == frame_pointer_rtx
5168 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5169 && operand == hard_frame_pointer_rtx)
5170 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5171 && operand == arg_pointer_rtx)
5172 || operand == stack_pointer_rtx)
5173 && ! maybe_memory_address_addr_space_p
5174 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5175 {
5176 rtx offset_reg;
5177 enum reg_class cls;
5178
5179 offset_reg = plus_constant (GET_MODE (ad), operand,
5180 INTVAL (XEXP (ad, 1)));
5181
5182 /* Form the adjusted address. */
5183 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5184 ad = gen_rtx_PLUS (GET_MODE (ad),
5185 op_index == 0 ? offset_reg : addend,
5186 op_index == 0 ? addend : offset_reg);
5187 else
5188 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5189 op_index == 0 ? offset_reg : addend,
5190 op_index == 0 ? addend : offset_reg);
5191 *loc = ad;
5192
5193 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5194 find_reloads_address_part (XEXP (ad, op_index),
5195 &XEXP (ad, op_index), cls,
5196 GET_MODE (ad), opnum, type, ind_levels);
5197 find_reloads_address_1 (mode, as,
5198 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5199 GET_CODE (XEXP (ad, op_index)),
5200 &XEXP (ad, 1 - op_index), opnum,
5201 type, 0, insn);
5202
5203 return 0;
5204 }
5205 }
5206
5207 /* See if address becomes valid when an eliminable register
5208 in a sum is replaced. */
5209
5210 tem = ad;
5211 if (GET_CODE (ad) == PLUS)
5212 tem = subst_indexed_address (ad);
5213 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5214 {
5215 /* Ok, we win that way. Replace any additional eliminable
5216 registers. */
5217
5218 subst_reg_equivs_changed = 0;
5219 tem = subst_reg_equivs (tem, insn);
5220
5221 /* Make sure that didn't make the address invalid again. */
5222
5223 if (! subst_reg_equivs_changed
5224 || strict_memory_address_addr_space_p (mode, tem, as))
5225 {
5226 *loc = tem;
5227 return 0;
5228 }
5229 }
5230
5231 /* If constants aren't valid addresses, reload the constant address
5232 into a register. */
5233 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5234 {
5235 machine_mode address_mode = GET_MODE (ad);
5236 if (address_mode == VOIDmode)
5237 address_mode = targetm.addr_space.address_mode (as);
5238
5239 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5240 Unshare it so we can safely alter it. */
5241 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5242 && CONSTANT_POOL_ADDRESS_P (ad))
5243 {
5244 *memrefloc = copy_rtx (*memrefloc);
5245 loc = &XEXP (*memrefloc, 0);
5246 if (removed_and)
5247 loc = &XEXP (*loc, 0);
5248 }
5249
5250 find_reloads_address_part (ad, loc,
5251 base_reg_class (mode, as, MEM, SCRATCH),
5252 address_mode, opnum, type, ind_levels);
5253 return ! removed_and;
5254 }
5255
5256 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5257 opnum, type, ind_levels, insn);
5258 }
5259 \f
5260 /* Find all pseudo regs appearing in AD
5261 that are eliminable in favor of equivalent values
5262 and do not have hard regs; replace them by their equivalents.
5263 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5264 front of it for pseudos that we have to replace with stack slots. */
5265
5266 static rtx
5267 subst_reg_equivs (rtx ad, rtx_insn *insn)
5268 {
5269 RTX_CODE code = GET_CODE (ad);
5270 int i;
5271 const char *fmt;
5272
5273 switch (code)
5274 {
5275 case HIGH:
5276 case CONST:
5277 CASE_CONST_ANY:
5278 case SYMBOL_REF:
5279 case LABEL_REF:
5280 case PC:
5281 case CC0:
5282 return ad;
5283
5284 case REG:
5285 {
5286 int regno = REGNO (ad);
5287
5288 if (reg_equiv_constant (regno) != 0)
5289 {
5290 subst_reg_equivs_changed = 1;
5291 return reg_equiv_constant (regno);
5292 }
5293 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5294 {
5295 rtx mem = make_memloc (ad, regno);
5296 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5297 {
5298 subst_reg_equivs_changed = 1;
5299 /* We mark the USE with QImode so that we recognize it
5300 as one that can be safely deleted at the end of
5301 reload. */
5302 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5303 QImode);
5304 return mem;
5305 }
5306 }
5307 }
5308 return ad;
5309
5310 case PLUS:
5311 /* Quickly dispose of a common case. */
5312 if (XEXP (ad, 0) == frame_pointer_rtx
5313 && CONST_INT_P (XEXP (ad, 1)))
5314 return ad;
5315 break;
5316
5317 default:
5318 break;
5319 }
5320
5321 fmt = GET_RTX_FORMAT (code);
5322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5323 if (fmt[i] == 'e')
5324 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5325 return ad;
5326 }
5327 \f
5328 /* Compute the sum of X and Y, making canonicalizations assumed in an
5329 address, namely: sum constant integers, surround the sum of two
5330 constants with a CONST, put the constant as the second operand, and
5331 group the constant on the outermost sum.
5332
5333 This routine assumes both inputs are already in canonical form. */
5334
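/* Illustrative examples, not part of the original source, assuming both
   inputs are already canonical as required above:

     form_sum (SImode, (plus (reg A) (const_int 4)), (const_int 8))
       ==> (plus (reg A) (const_int 12))

     form_sum (SImode, (symbol_ref "x"), (const_int 8))
       ==> (const (plus (symbol_ref "x") (const_int 8)))  */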
5335 rtx
5336 form_sum (machine_mode mode, rtx x, rtx y)
5337 {
5338 rtx tem;
5339
5340 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5341 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5342
5343 if (CONST_INT_P (x))
5344 return plus_constant (mode, y, INTVAL (x));
5345 else if (CONST_INT_P (y))
5346 return plus_constant (mode, x, INTVAL (y));
5347 else if (CONSTANT_P (x))
5348 tem = x, x = y, y = tem;
5349
5350 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5351 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5352
5353 /* Note that if the operands of Y are specified in the opposite
5354 order in the recursive calls below, infinite recursion will occur. */
5355 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5356 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5357
5358 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5359 constant will have been placed second. */
5360 if (CONSTANT_P (x) && CONSTANT_P (y))
5361 {
5362 if (GET_CODE (x) == CONST)
5363 x = XEXP (x, 0);
5364 if (GET_CODE (y) == CONST)
5365 y = XEXP (y, 0);
5366
5367 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5368 }
5369
5370 return gen_rtx_PLUS (mode, x, y);
5371 }
5372 \f
5373 /* If ADDR is a sum containing a pseudo register that should be
5374 replaced with a constant (from reg_equiv_constant),
5375 return the result of doing so, and also apply the associative
5376 law so that the result is more likely to be a valid address.
5377 (But it is not guaranteed to be one.)
5378
5379 Note that at most one register is replaced, even if more are
5380 replaceable. Also, we try to put the result into a canonical form
5381 so it is more likely to be a valid address.
5382
5383 In all other cases, return ADDR. */
5384
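/* Illustrative example, not part of the original source: if pseudo 100
   did not get a hard register and reg_equiv_constant (100) is
   (symbol_ref "x"), then

     subst_indexed_address ((plus (reg 100) (reg R1)))
       ==> (plus (reg R1) (symbol_ref "x"))

   i.e. the pseudo is replaced by its constant equivalent and the sum is
   re-formed so the constant term comes second, which is more likely to
   be a valid address.  */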
5385 static rtx
5386 subst_indexed_address (rtx addr)
5387 {
5388 rtx op0 = 0, op1 = 0, op2 = 0;
5389 rtx tem;
5390 int regno;
5391
5392 if (GET_CODE (addr) == PLUS)
5393 {
5394 /* Try to find a register to replace. */
5395 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5396 if (REG_P (op0)
5397 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5398 && reg_renumber[regno] < 0
5399 && reg_equiv_constant (regno) != 0)
5400 op0 = reg_equiv_constant (regno);
5401 else if (REG_P (op1)
5402 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5403 && reg_renumber[regno] < 0
5404 && reg_equiv_constant (regno) != 0)
5405 op1 = reg_equiv_constant (regno);
5406 else if (GET_CODE (op0) == PLUS
5407 && (tem = subst_indexed_address (op0)) != op0)
5408 op0 = tem;
5409 else if (GET_CODE (op1) == PLUS
5410 && (tem = subst_indexed_address (op1)) != op1)
5411 op1 = tem;
5412 else
5413 return addr;
5414
5415 /* Pick out up to three things to add. */
5416 if (GET_CODE (op1) == PLUS)
5417 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5418 else if (GET_CODE (op0) == PLUS)
5419 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5420
5421 /* Compute the sum. */
5422 if (op2 != 0)
5423 op1 = form_sum (GET_MODE (addr), op1, op2);
5424 if (op1 != 0)
5425 op0 = form_sum (GET_MODE (addr), op0, op1);
5426
5427 return op0;
5428 }
5429 return addr;
5430 }
5431 \f
5432 /* Update the REG_INC notes for an insn. It updates all REG_INC
5433 notes for the instruction which refer to REGNO so that they refer
5434 to the reload number.
5435
5436 INSN is the insn for which any REG_INC notes need updating.
5437
5438 REGNO is the register number which has been reloaded.
5439
5440 RELOADNUM is the reload number. */
5441
5442 static void
5443 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5444 int reloadnum ATTRIBUTE_UNUSED)
5445 {
5446 if (!AUTO_INC_DEC)
5447 return;
5448
5449 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5450 if (REG_NOTE_KIND (link) == REG_INC
5451 && (int) REGNO (XEXP (link, 0)) == regno)
5452 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5453 }
5454 \f
5455 /* Record the pseudo registers we must reload into hard registers in a
5456 subexpression of a would-be memory address, X referring to a value
5457 in mode MODE. (This function is not called if the address we find
5458 is strictly valid.)
5459
5460 CONTEXT = 1 means we are considering regs as index regs,
5461 = 0 means we are considering them as base regs.
5462 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5463 or an autoinc code.
5464 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5465 is the code of the index part of the address. Otherwise, pass SCRATCH
5466 for this argument.
5467 OPNUM and TYPE specify the purpose of any reloads made.
5468
5469 IND_LEVELS says how many levels of indirect addressing are
5470 supported at this point in the address.
5471
5472 INSN, if nonzero, is the insn in which we do the reload. It is used
5473 to determine if we may generate output reloads.
5474
5475 We return nonzero if X, as a whole, is reloaded or replaced. */
5476
5477 /* Note that we take shortcuts assuming that no multi-reg machine mode
5478 occurs as part of an address.
5479 Also, this is not fully machine-customizable; it works for machines
5480 such as VAXen and 68000's and 32000's, but other possible machines
5481 could have addressing modes that this does not handle right.
5482 If you add push_reload calls here, you need to make sure gen_reload
5483 handles those cases gracefully. */
5484
5485 static int
5486 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5487 rtx x, int context,
5488 enum rtx_code outer_code, enum rtx_code index_code,
5489 rtx *loc, int opnum, enum reload_type type,
5490 int ind_levels, rtx_insn *insn)
5491 {
5492 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5493 ((CONTEXT) == 0 \
5494 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5495 : REGNO_OK_FOR_INDEX_P (REGNO))
5496
5497 enum reg_class context_reg_class;
5498 RTX_CODE code = GET_CODE (x);
5499 bool reloaded_inner_of_autoinc = false;
5500
5501 if (context == 1)
5502 context_reg_class = INDEX_REG_CLASS;
5503 else
5504 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5505
5506 switch (code)
5507 {
5508 case PLUS:
5509 {
5510 rtx orig_op0 = XEXP (x, 0);
5511 rtx orig_op1 = XEXP (x, 1);
5512 RTX_CODE code0 = GET_CODE (orig_op0);
5513 RTX_CODE code1 = GET_CODE (orig_op1);
5514 rtx op0 = orig_op0;
5515 rtx op1 = orig_op1;
5516
5517 if (GET_CODE (op0) == SUBREG)
5518 {
5519 op0 = SUBREG_REG (op0);
5520 code0 = GET_CODE (op0);
5521 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5522 op0 = gen_rtx_REG (word_mode,
5523 (REGNO (op0) +
5524 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5525 GET_MODE (SUBREG_REG (orig_op0)),
5526 SUBREG_BYTE (orig_op0),
5527 GET_MODE (orig_op0))));
5528 }
5529
5530 if (GET_CODE (op1) == SUBREG)
5531 {
5532 op1 = SUBREG_REG (op1);
5533 code1 = GET_CODE (op1);
5534 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5535 /* ??? Why is this given op1's mode, when for op0
5536 ??? SUBREGs above we use word_mode? */
5537 op1 = gen_rtx_REG (GET_MODE (op1),
5538 (REGNO (op1) +
5539 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5540 GET_MODE (SUBREG_REG (orig_op1)),
5541 SUBREG_BYTE (orig_op1),
5542 GET_MODE (orig_op1))));
5543 }
5544 /* A PLUS in the index register can be created only as a result of
5545 register rematerialization for an expression like &localvar*4. Reload it.
5546 It may be possible to combine the displacement on the outer level,
5547 but it is probably not worthwhile to do so. */
5548 if (context == 1)
5549 {
5550 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5551 opnum, ADDR_TYPE (type), ind_levels, insn);
5552 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5553 context_reg_class,
5554 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5555 return 1;
5556 }
5557
5558 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5559 || code0 == ZERO_EXTEND || code1 == MEM)
5560 {
5561 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5562 &XEXP (x, 0), opnum, type, ind_levels,
5563 insn);
5564 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5565 &XEXP (x, 1), opnum, type, ind_levels,
5566 insn);
5567 }
5568
5569 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5570 || code1 == ZERO_EXTEND || code0 == MEM)
5571 {
5572 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5573 &XEXP (x, 0), opnum, type, ind_levels,
5574 insn);
5575 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5576 &XEXP (x, 1), opnum, type, ind_levels,
5577 insn);
5578 }
5579
5580 else if (code0 == CONST_INT || code0 == CONST
5581 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5582 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5583 &XEXP (x, 1), opnum, type, ind_levels,
5584 insn);
5585
5586 else if (code1 == CONST_INT || code1 == CONST
5587 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5588 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5589 &XEXP (x, 0), opnum, type, ind_levels,
5590 insn);
5591
5592 else if (code0 == REG && code1 == REG)
5593 {
5594 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5595 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5596 return 0;
5597 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5598 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5599 return 0;
5600 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5601 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5602 &XEXP (x, 1), opnum, type, ind_levels,
5603 insn);
5604 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5605 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5606 &XEXP (x, 0), opnum, type, ind_levels,
5607 insn);
5608 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5609 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5613 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5614 &XEXP (x, 1), opnum, type, ind_levels,
5615 insn);
5616 else
5617 {
5618 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5622 &XEXP (x, 1), opnum, type, ind_levels,
5623 insn);
5624 }
5625 }
5626
5627 else if (code0 == REG)
5628 {
5629 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5630 &XEXP (x, 0), opnum, type, ind_levels,
5631 insn);
5632 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5633 &XEXP (x, 1), opnum, type, ind_levels,
5634 insn);
5635 }
5636
5637 else if (code1 == REG)
5638 {
5639 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5640 &XEXP (x, 1), opnum, type, ind_levels,
5641 insn);
5642 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5643 &XEXP (x, 0), opnum, type, ind_levels,
5644 insn);
5645 }
5646 }
5647
5648 return 0;
5649
5650 case POST_MODIFY:
5651 case PRE_MODIFY:
5652 {
5653 rtx op0 = XEXP (x, 0);
5654 rtx op1 = XEXP (x, 1);
5655 enum rtx_code index_code;
5656 int regno;
5657 int reloadnum;
5658
5659 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5660 return 0;
5661
5662 /* Currently, we only support {PRE,POST}_MODIFY constructs
5663 where a base register is {inc,dec}remented by the contents
5664 of another register or by a constant value. Thus, these
5665 operands must match. */
5666 gcc_assert (op0 == XEXP (op1, 0));
5667
5668 /* Require index register (or constant). Let's just handle the
5669 register case in the meantime... If the target allows
5670 auto-modify by a constant then we could try replacing a pseudo
5671 register with its equivalent constant where applicable.
5672
5673 We also handle the case where the register was eliminated
5674 resulting in a PLUS subexpression.
5675
5676 If we later decide to reload the whole PRE_MODIFY or
5677 POST_MODIFY, inc_for_reload might clobber the reload register
5678 before reading the index. The index register might therefore
5679 need to live longer than a TYPE reload normally would, so be
5680 conservative and class it as RELOAD_OTHER. */
5681 if ((REG_P (XEXP (op1, 1))
5682 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5683 || GET_CODE (XEXP (op1, 1)) == PLUS)
5684 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5685 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5686 ind_levels, insn);
5687
5688 gcc_assert (REG_P (XEXP (op1, 0)));
5689
5690 regno = REGNO (XEXP (op1, 0));
5691 index_code = GET_CODE (XEXP (op1, 1));
5692
5693 /* A register that is incremented cannot be constant! */
5694 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5695 || reg_equiv_constant (regno) == 0);
5696
5697 /* Handle a register that is equivalent to a memory location
5698 which cannot be addressed directly. */
5699 if (reg_equiv_memory_loc (regno) != 0
5700 && (reg_equiv_address (regno) != 0
5701 || num_not_at_initial_offset))
5702 {
5703 rtx tem = make_memloc (XEXP (x, 0), regno);
5704
5705 if (reg_equiv_address (regno)
5706 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5707 {
5708 rtx orig = tem;
5709
5710 /* First reload the memory location's address.
5711 We can't use ADDR_TYPE (type) here, because we need to
5712 write back the value after reading it, hence we actually
5713 need two registers. */
5714 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5715 &XEXP (tem, 0), opnum,
5716 RELOAD_OTHER,
5717 ind_levels, insn);
5718
5719 if (!rtx_equal_p (tem, orig))
5720 push_reg_equiv_alt_mem (regno, tem);
5721
5722 /* Then reload the memory location into a base
5723 register. */
5724 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5725 &XEXP (op1, 0),
5726 base_reg_class (mode, as,
5727 code, index_code),
5728 GET_MODE (x), GET_MODE (x), 0,
5729 0, opnum, RELOAD_OTHER);
5730
5731 update_auto_inc_notes (this_insn, regno, reloadnum);
5732 return 0;
5733 }
5734 }
5735
5736 if (reg_renumber[regno] >= 0)
5737 regno = reg_renumber[regno];
5738
5739 /* We require a base register here... */
5740 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5741 {
5742 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5743 &XEXP (op1, 0), &XEXP (x, 0),
5744 base_reg_class (mode, as,
5745 code, index_code),
5746 GET_MODE (x), GET_MODE (x), 0, 0,
5747 opnum, RELOAD_OTHER);
5748
5749 update_auto_inc_notes (this_insn, regno, reloadnum);
5750 return 0;
5751 }
5752 }
5753 return 0;
5754
5755 case POST_INC:
5756 case POST_DEC:
5757 case PRE_INC:
5758 case PRE_DEC:
5759 if (REG_P (XEXP (x, 0)))
5760 {
5761 int regno = REGNO (XEXP (x, 0));
5762 int value = 0;
5763 rtx x_orig = x;
5764
5765 /* A register that is incremented cannot be constant! */
5766 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5767 || reg_equiv_constant (regno) == 0);
5768
5769 /* Handle a register that is equivalent to a memory location
5770 which cannot be addressed directly. */
5771 if (reg_equiv_memory_loc (regno) != 0
5772 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5773 {
5774 rtx tem = make_memloc (XEXP (x, 0), regno);
5775 if (reg_equiv_address (regno)
5776 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5777 {
5778 rtx orig = tem;
5779
5780 /* First reload the memory location's address.
5781 We can't use ADDR_TYPE (type) here, because we need to
5782 write back the value after reading it, hence we actually
5783 need two registers. */
5784 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5785 &XEXP (tem, 0), opnum, type,
5786 ind_levels, insn);
5787 reloaded_inner_of_autoinc = true;
5788 if (!rtx_equal_p (tem, orig))
5789 push_reg_equiv_alt_mem (regno, tem);
5790 /* Put this inside a new increment-expression. */
5791 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5792 /* Proceed to reload that, as if it contained a register. */
5793 }
5794 }
5795
5796 /* If we have a hard register that is ok in this incdec context,
5797 don't make a reload. If the register isn't nice enough for
5798 autoincdec, we can reload it. But if an autoincrement of a
5799 register that we have verified here as acceptable is still not
5800 "valid" in the surrounding context, it must be that no
5801 autoincrement is "valid". If that is true and something made
5802 an autoincrement anyway, this must be a special context where
5803 one is allowed. (For example, a "push" instruction.)
5804 We can't improve this address, so leave it alone. */
5805
5806 /* Otherwise, reload the autoincrement into a suitable hard reg
5807 and record how much to increment by. */
5808
5809 if (reg_renumber[regno] >= 0)
5810 regno = reg_renumber[regno];
5811 if (regno >= FIRST_PSEUDO_REGISTER
5812 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5813 index_code))
5814 {
5815 int reloadnum;
5816
5817 /* If we can output the register afterwards, do so; this
5818 saves the extra update.
5819 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5820 CALL_INSN - and it does not set CC0.
5821 But don't do this if we cannot directly address the
5822 memory location, since this will make it harder to
5823 reuse address reloads, and increases register pressure.
5824 Also don't do this if we can probably update x directly. */
5825 rtx equiv = (MEM_P (XEXP (x, 0))
5826 ? XEXP (x, 0)
5827 : reg_equiv_mem (regno));
5828 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5829 if (insn && NONJUMP_INSN_P (insn)
5830 #if HAVE_cc0
5831 && ! sets_cc0_p (PATTERN (insn))
5832 #endif
5833 && (regno < FIRST_PSEUDO_REGISTER
5834 || (equiv
5835 && memory_operand (equiv, GET_MODE (equiv))
5836 && ! (icode != CODE_FOR_nothing
5837 && insn_operand_matches (icode, 0, equiv)
5838 && insn_operand_matches (icode, 1, equiv))))
5839 /* Using RELOAD_OTHER means we emit this and the reload we
5840 made earlier in the wrong order. */
5841 && !reloaded_inner_of_autoinc)
5842 {
5843 /* We use the original pseudo for loc, so that
5844 emit_reload_insns() knows which pseudo this
5845 reload refers to and updates the pseudo rtx, not
5846 its equivalent memory location, as well as the
5847 corresponding entry in reg_last_reload_reg. */
5848 loc = &XEXP (x_orig, 0);
5849 x = XEXP (x, 0);
5850 reloadnum
5851 = push_reload (x, x, loc, loc,
5852 context_reg_class,
5853 GET_MODE (x), GET_MODE (x), 0, 0,
5854 opnum, RELOAD_OTHER);
5855 }
5856 else
5857 {
5858 reloadnum
5859 = push_reload (x, x, loc, (rtx*) 0,
5860 context_reg_class,
5861 GET_MODE (x), GET_MODE (x), 0, 0,
5862 opnum, type);
5863 rld[reloadnum].inc
5864 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5865
5866 value = 1;
5867 }
5868
5869 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5870 reloadnum);
5871 }
5872 return value;
5873 }
5874 return 0;
5875
5876 case TRUNCATE:
5877 case SIGN_EXTEND:
5878 case ZERO_EXTEND:
5879 /* Look for parts to reload in the inner expression and reload them
5880 too, in addition to this operation. Reloading all inner parts in
5881 addition to this one shouldn't be necessary, but at this point,
5882 we don't know if we can possibly omit any part that *can* be
5883 reloaded. Targets that are better off reloading just either part
5884 (or perhaps even a different part of an outer expression), should
5885 define LEGITIMIZE_RELOAD_ADDRESS. */
5886 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5887 context, code, SCRATCH, &XEXP (x, 0), opnum,
5888 type, ind_levels, insn);
5889 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5890 context_reg_class,
5891 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5892 return 1;
5893
5894 case MEM:
5895 /* This is probably the result of a substitution, by eliminate_regs, of
5896 an equivalent address for a pseudo that was not allocated to a hard
5897 register. Verify that the specified address is valid and reload it
5898 into a register.
5899
5900 Since we know we are going to reload this item, don't decrement for
5901 the indirection level.
5902
5903 Note that this is actually conservative: it would be slightly more
5904 efficient to use the value of SPILL_INDIRECT_LEVELS from
5905 reload1.c here. */
5906
5907 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5908 opnum, ADDR_TYPE (type), ind_levels, insn);
5909 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5910 context_reg_class,
5911 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5912 return 1;
5913
5914 case REG:
5915 {
5916 int regno = REGNO (x);
5917
5918 if (reg_equiv_constant (regno) != 0)
5919 {
5920 find_reloads_address_part (reg_equiv_constant (regno), loc,
5921 context_reg_class,
5922 GET_MODE (x), opnum, type, ind_levels);
5923 return 1;
5924 }
5925
5926 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5927 that feeds this insn. */
5928 if (reg_equiv_mem (regno) != 0)
5929 {
5930 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5931 context_reg_class,
5932 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5933 return 1;
5934 }
5935 #endif
5936
5937 if (reg_equiv_memory_loc (regno)
5938 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5939 {
5940 rtx tem = make_memloc (x, regno);
5941 if (reg_equiv_address (regno) != 0
5942 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5943 {
5944 x = tem;
5945 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5946 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5947 ind_levels, insn);
5948 if (!rtx_equal_p (x, tem))
5949 push_reg_equiv_alt_mem (regno, x);
5950 }
5951 }
5952
5953 if (reg_renumber[regno] >= 0)
5954 regno = reg_renumber[regno];
5955
5956 if (regno >= FIRST_PSEUDO_REGISTER
5957 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5958 index_code))
5959 {
5960 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5961 context_reg_class,
5962 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5963 return 1;
5964 }
5965
5966 /* If a register appearing in an address is the subject of a CLOBBER
5967 in this insn, reload it into some other register to be safe.
5968 The CLOBBER is supposed to make the register unavailable
5969 from before this insn to after it. */
5970 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5971 {
5972 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5973 context_reg_class,
5974 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5975 return 1;
5976 }
5977 }
5978 return 0;
5979
5980 case SUBREG:
5981 if (REG_P (SUBREG_REG (x)))
5982 {
5983 /* If this is a SUBREG of a hard register and the resulting register
5984 is of the wrong class, reload the whole SUBREG. This avoids
5985 needless copies if SUBREG_REG is multi-word. */
5986 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5987 {
5988 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5989
5990 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5991 index_code))
5992 {
5993 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5994 context_reg_class,
5995 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5996 return 1;
5997 }
5998 }
5999 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6000 is larger than the class size, then reload the whole SUBREG. */
6001 else
6002 {
6003 enum reg_class rclass = context_reg_class;
6004 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6005 > reg_class_size[(int) rclass])
6006 {
6007 /* If the inner register will be replaced by a memory
6008 reference, we can do this only if we can replace the
6009 whole subreg by a (narrower) memory reference. If
6010 this is not possible, fall through and reload just
6011 the inner register (including address reloads). */
6012 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6013 {
6014 rtx tem = find_reloads_subreg_address (x, opnum,
6015 ADDR_TYPE (type),
6016 ind_levels, insn,
6017 NULL);
6018 if (tem)
6019 {
6020 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6021 GET_MODE (tem), VOIDmode, 0, 0,
6022 opnum, type);
6023 return 1;
6024 }
6025 }
6026 else
6027 {
6028 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6029 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6030 return 1;
6031 }
6032 }
6033 }
6034 }
6035 break;
6036
6037 default:
6038 break;
6039 }
6040
6041 {
6042 const char *fmt = GET_RTX_FORMAT (code);
6043 int i;
6044
6045 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6046 {
6047 if (fmt[i] == 'e')
6048 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6049 we get here. */
6050 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6051 code, SCRATCH, &XEXP (x, i),
6052 opnum, type, ind_levels, insn);
6053 }
6054 }
6055
6056 #undef REG_OK_FOR_CONTEXT
6057 return 0;
6058 }
6059 \f
6060 /* X, which is found at *LOC, is a part of an address that needs to be
6061 reloaded into a register of class RCLASS. If X is a constant, or if
6062 X is a PLUS that contains a constant, check that the constant is a
6063 legitimate operand and that we are supposed to be able to load
6064 it into the register.
6065
6066 If not, force the constant into memory and reload the MEM instead.
6067
6068 MODE is the mode to use, in case X is an integer constant.
6069
6070 OPNUM and TYPE describe the purpose of any reloads made.
6071
6072 IND_LEVELS says how many levels of indirect addressing this machine
6073 supports. */
6074
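/* Illustrative example, not part of the original source: on a target
   where a SYMBOL_REF is not a legitimate immediate for MODE, passing
   X = (symbol_ref "x") forces the constant into the constant pool; the
   address of the resulting MEM is processed by find_reloads_address and
   the MEM, rather than the bare constant, is then reloaded into
   RCLASS.  */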
6075 static void
6076 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6077 machine_mode mode, int opnum,
6078 enum reload_type type, int ind_levels)
6079 {
6080 if (CONSTANT_P (x)
6081 && (!targetm.legitimate_constant_p (mode, x)
6082 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6083 {
6084 x = force_const_mem (mode, x);
6085 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6086 opnum, type, ind_levels, 0);
6087 }
6088
6089 else if (GET_CODE (x) == PLUS
6090 && CONSTANT_P (XEXP (x, 1))
6091 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6092 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6093 == NO_REGS))
6094 {
6095 rtx tem;
6096
6097 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6098 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6099 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6100 opnum, type, ind_levels, 0);
6101 }
6102
6103 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6104 mode, VOIDmode, 0, 0, opnum, type);
6105 }
6106 \f
6107 /* X, a subreg of a pseudo, is a part of an address that needs to be
6108 reloaded, and the pseudo is equivalent to a memory location.
6109
6110 Attempt to replace the whole subreg by a (possibly narrower or wider)
6111 memory reference. If this is possible, return this new memory
6112 reference, and push all required address reloads. Otherwise,
6113 return NULL.
6114
6115 OPNUM and TYPE identify the purpose of the reload.
6116
6117 IND_LEVELS says how many levels of indirect addressing are
6118 supported at this point in the address.
6119
6120 INSN, if nonzero, is the insn in which we do the reload. It is used
6121 to determine where to put USEs for pseudos that we have to replace with
6122 stack slots. */
6123
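/* Illustrative example, not part of the original source, assuming no
   elimination adjustment applies: given X = (subreg:HI (reg:SI 100) 2)
   where reg 100 is equivalent to the stack slot
   (mem:SI (plus (reg fp) (const_int -16))), the code below forms
   (mem:HI (plus (reg fp) (const_int -14))) via simplify_subreg and
   pushes whatever address reloads that narrower reference requires;
   paradoxical subregs are never replaced this way.  */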
6124 static rtx
6125 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6126 int ind_levels, rtx_insn *insn,
6127 int *address_reloaded)
6128 {
6129 machine_mode outer_mode = GET_MODE (x);
6130 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6131 int regno = REGNO (SUBREG_REG (x));
6132 int reloaded = 0;
6133 rtx tem, orig;
6134 int offset;
6135
6136 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6137
6138 /* We cannot replace the subreg with a modified memory reference if:
6139
6140 - we have a paradoxical subreg that implicitly acts as a zero or
6141 sign extension operation due to LOAD_EXTEND_OP;
6142
6143 - we have a subreg that is implicitly supposed to act on the full
6144 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6145
6146 - the address of the equivalent memory location is mode-dependent; or
6147
6148 - we have a paradoxical subreg and the resulting memory is not
6149 sufficiently aligned to allow access in the wider mode.
6150
6151 In addition, we choose not to perform the replacement for *any*
6152 paradoxical subreg, even if it were possible in principle. This
6153 is to avoid generating wider memory references than necessary.
6154
6155 This corresponds to how previous versions of reload used to handle
6156 paradoxical subregs where no address reload was required. */
6157
6158 if (paradoxical_subreg_p (x))
6159 return NULL;
6160
6161 if (WORD_REGISTER_OPERATIONS
6162 && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6163 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6164 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6165 return NULL;
6166
6167 /* Since we don't attempt to handle paradoxical subregs, we can just
6168 call into simplify_subreg, which will handle all remaining checks
6169 for us. */
6170 orig = make_memloc (SUBREG_REG (x), regno);
6171 offset = SUBREG_BYTE (x);
6172 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6173 if (!tem || !MEM_P (tem))
6174 return NULL;
6175
6176 /* Now push all required address reloads, if any. */
6177 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6178 XEXP (tem, 0), &XEXP (tem, 0),
6179 opnum, type, ind_levels, insn);
6180 /* ??? Do we need to handle nonzero offsets somehow? */
6181 if (!offset && !rtx_equal_p (tem, orig))
6182 push_reg_equiv_alt_mem (regno, tem);
6183
6184 /* For some processors an address may be valid in the original mode but
6185 not in a smaller mode. For example, ARM accepts a scaled index register
6186 in SImode but not in HImode. Note that this is only a problem if the
6187 address in reg_equiv_mem is already invalid in the new mode; other
6188 cases would be fixed by find_reloads_address as usual.
6189
6190 ??? We attempt to handle such cases here by doing an additional reload
6191 of the full address after the usual processing by find_reloads_address.
6192 Note that this may not work in the general case, but it seems to cover
6193 the cases where this situation currently occurs. A more general fix
6194 might be to reload the *value* instead of the address, but this would
6195 not be expected by the callers of this routine as-is.
6196
6197 If find_reloads_address already completely replaced the address, there
6198 is nothing further to do. */
6199 if (reloaded == 0
6200 && reg_equiv_mem (regno) != 0
6201 && !strict_memory_address_addr_space_p
6202 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6203 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6204 {
6205 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6206 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6207 MEM, SCRATCH),
6208 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6209 reloaded = 1;
6210 }
6211
6212 /* If this is not a toplevel operand, find_reloads doesn't see this
6213 substitution. We have to emit a USE of the pseudo so that
6214 delete_output_reload can see it. */
6215 if (replace_reloads && recog_data.operand[opnum] != x)
6216 /* We mark the USE with QImode so that we recognize it as one that
6217 can be safely deleted at the end of reload. */
6218 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6219 QImode);
6220
6221 if (address_reloaded)
6222 *address_reloaded = reloaded;
6223
6224 return tem;
6225 }
6226 \f
6227 /* Substitute into the current INSN the registers into which we have reloaded
6228 the things that need reloading. The array `replacements'
6229 contains the locations of all pointers that must be changed
6230 and says what to replace them with.
6231
6232 The replacements are made in place; there is no return value. */
6233
6234 void
6235 subst_reloads (rtx_insn *insn)
6236 {
6237 int i;
6238
6239 for (i = 0; i < n_replacements; i++)
6240 {
6241 struct replacement *r = &replacements[i];
6242 rtx reloadreg = rld[r->what].reg_rtx;
6243 if (reloadreg)
6244 {
6245 #ifdef DEBUG_RELOAD
6246 /* This checking takes a very long time on some platforms
6247 causing the gcc.c-torture/compile/limits-fnargs.c test
6248 to time out during testing. See PR 31850.
6249
6250 Internal consistency test. Check that we don't modify
6251 anything in the equivalence arrays. Whenever something from
6252 those arrays needs to be reloaded, it must be unshared before
6253 being substituted into; the equivalence must not be modified.
6254 Otherwise, if the equivalence is used after that, it will
6255 have been modified, and the thing substituted (probably a
6256 register) is likely overwritten and not a usable equivalence. */
6257 int check_regno;
6258
6259 for (check_regno = 0; check_regno < max_regno; check_regno++)
6260 {
6261 #define CHECK_MODF(ARRAY) \
6262 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6263 || !loc_mentioned_in_p (r->where, \
6264 (*reg_equivs)[check_regno].ARRAY))
6265
6266 CHECK_MODF (constant);
6267 CHECK_MODF (memory_loc);
6268 CHECK_MODF (address);
6269 CHECK_MODF (mem);
6270 #undef CHECK_MODF
6271 }
6272 #endif /* DEBUG_RELOAD */
6273
6274 /* If we're replacing a LABEL_REF with a register, there must
6275 already be an indication (to e.g. flow) of which label this
6276 register refers to. */
6277 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6278 || !JUMP_P (insn)
6279 || find_reg_note (insn,
6280 REG_LABEL_OPERAND,
6281 XEXP (*r->where, 0))
6282 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6283
6284 /* Encapsulate RELOADREG so its machine mode matches what
6285 used to be there. Note that gen_lowpart_common will
6286 do the wrong thing if RELOADREG is multi-word. RELOADREG
6287 will always be a REG here. */
6288 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6289 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6290
6291 *r->where = reloadreg;
6292 }
6293 /* If reload got no reg and isn't optional, something's wrong. */
6294 else
6295 gcc_assert (rld[r->what].optional);
6296 }
6297 }
6298 \f
6299 /* Make a copy of any replacements being done into X and move those
6300 copies to locations in Y, a copy of X. */
6301
6302 void
6303 copy_replacements (rtx x, rtx y)
6304 {
6305 copy_replacements_1 (&x, &y, n_replacements);
6306 }
6307
6308 static void
6309 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6310 {
6311 int i, j;
6312 rtx x, y;
6313 struct replacement *r;
6314 enum rtx_code code;
6315 const char *fmt;
6316
6317 for (j = 0; j < orig_replacements; j++)
6318 if (replacements[j].where == px)
6319 {
6320 r = &replacements[n_replacements++];
6321 r->where = py;
6322 r->what = replacements[j].what;
6323 r->mode = replacements[j].mode;
6324 }
6325
6326 x = *px;
6327 y = *py;
6328 code = GET_CODE (x);
6329 fmt = GET_RTX_FORMAT (code);
6330
6331 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6332 {
6333 if (fmt[i] == 'e')
6334 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6335 else if (fmt[i] == 'E')
6336 for (j = XVECLEN (x, i); --j >= 0; )
6337 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6338 orig_replacements);
6339 }
6340 }
6341
6342 /* Change any replacements being done to *X to be done to *Y. */
6343
6344 void
6345 move_replacements (rtx *x, rtx *y)
6346 {
6347 int i;
6348
6349 for (i = 0; i < n_replacements; i++)
6350 if (replacements[i].where == x)
6351 replacements[i].where = y;
6352 }
6353 \f
6354 /* If LOC was scheduled to be replaced by something, return the replacement.
6355 Otherwise, return *LOC. */
6356
6357 rtx
6358 find_replacement (rtx *loc)
6359 {
6360 struct replacement *r;
6361
6362 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6363 {
6364 rtx reloadreg = rld[r->what].reg_rtx;
6365
6366 if (reloadreg && r->where == loc)
6367 {
6368 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6369 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6370
6371 return reloadreg;
6372 }
6373 else if (reloadreg && GET_CODE (*loc) == SUBREG
6374 && r->where == &SUBREG_REG (*loc))
6375 {
6376 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6377 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6378
6379 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6380 GET_MODE (SUBREG_REG (*loc)),
6381 SUBREG_BYTE (*loc));
6382 }
6383 }
6384
6385 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6386 what's inside and make a new rtl if so. */
6387 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6388 || GET_CODE (*loc) == MULT)
6389 {
6390 rtx x = find_replacement (&XEXP (*loc, 0));
6391 rtx y = find_replacement (&XEXP (*loc, 1));
6392
6393 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6394 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6395 }
6396
6397 return *loc;
6398 }
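
/* Illustrative sketch (MEM here is a hypothetical memory operand):
   code about to reuse the current contents of a location can first ask
   whether a replacement is already scheduled for it:

     rtx addr = find_replacement (&XEXP (mem, 0));

   ADDR is the reload register, adjusted to the recorded mode, if the
   address was scheduled for replacement, and the existing
   XEXP (mem, 0) otherwise.  */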
6399 \f
6400 /* Return nonzero if a register in the range [REGNO, ENDREGNO)
6401 appears either explicitly or implicitly in X
6402 other than being stored into (except for earlyclobber operands).
6403
6404 References contained within the substructure at LOC do not count.
6405 LOC may be zero, meaning don't ignore anything.
6406
6407 This is similar to refers_to_regno_p in rtlanal.c except that we
6408 look at equivalences for pseudos that didn't get hard registers. */
6409
6410 static int
6411 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6412 rtx x, rtx *loc)
6413 {
6414 int i;
6415 unsigned int r;
6416 RTX_CODE code;
6417 const char *fmt;
6418
6419 if (x == 0)
6420 return 0;
6421
6422 repeat:
6423 code = GET_CODE (x);
6424
6425 switch (code)
6426 {
6427 case REG:
6428 r = REGNO (x);
6429
6430 /* If this is a pseudo, a hard register must not have been allocated.
6431 X must therefore either be a constant or be in memory. */
6432 if (r >= FIRST_PSEUDO_REGISTER)
6433 {
6434 if (reg_equiv_memory_loc (r))
6435 return refers_to_regno_for_reload_p (regno, endregno,
6436 reg_equiv_memory_loc (r),
6437 (rtx*) 0);
6438
6439 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6440 return 0;
6441 }
6442
6443 return (endregno > r
6444 && regno < r + (r < FIRST_PSEUDO_REGISTER
6445 ? hard_regno_nregs[r][GET_MODE (x)]
6446 : 1));
6447
6448 case SUBREG:
6449 /* If this is a SUBREG of a hard reg, we can see exactly which
6450 registers are being modified. Otherwise, handle normally. */
6451 if (REG_P (SUBREG_REG (x))
6452 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6453 {
6454 unsigned int inner_regno = subreg_regno (x);
6455 unsigned int inner_endregno
6456 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6457 ? subreg_nregs (x) : 1);
6458
6459 return endregno > inner_regno && regno < inner_endregno;
6460 }
6461 break;
6462
6463 case CLOBBER:
6464 case SET:
6465 if (&SET_DEST (x) != loc
6466 /* Note that setting a SUBREG counts as referring to the REG it is in
6467 for a pseudo, but not for hard registers, since we can
6468 treat each word individually. */
6469 && ((GET_CODE (SET_DEST (x)) == SUBREG
6470 && loc != &SUBREG_REG (SET_DEST (x))
6471 && REG_P (SUBREG_REG (SET_DEST (x)))
6472 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6473 && refers_to_regno_for_reload_p (regno, endregno,
6474 SUBREG_REG (SET_DEST (x)),
6475 loc))
6476 /* If the output is an earlyclobber operand, this is
6477 a conflict. */
6478 || ((!REG_P (SET_DEST (x))
6479 || earlyclobber_operand_p (SET_DEST (x)))
6480 && refers_to_regno_for_reload_p (regno, endregno,
6481 SET_DEST (x), loc))))
6482 return 1;
6483
6484 if (code == CLOBBER || loc == &SET_SRC (x))
6485 return 0;
6486 x = SET_SRC (x);
6487 goto repeat;
6488
6489 default:
6490 break;
6491 }
6492
6493 /* X does not match, so try its subexpressions. */
6494
6495 fmt = GET_RTX_FORMAT (code);
6496 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6497 {
6498 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6499 {
6500 if (i == 0)
6501 {
6502 x = XEXP (x, 0);
6503 goto repeat;
6504 }
6505 else
6506 if (refers_to_regno_for_reload_p (regno, endregno,
6507 XEXP (x, i), loc))
6508 return 1;
6509 }
6510 else if (fmt[i] == 'E')
6511 {
6512 int j;
6513 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6514 if (loc != &XVECEXP (x, i, j)
6515 && refers_to_regno_for_reload_p (regno, endregno,
6516 XVECEXP (x, i, j), loc))
6517 return 1;
6518 }
6519 }
6520 return 0;
6521 }
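
/* A hypothetical example of the "implicit" reference mentioned above:
   if pseudo 200 got no hard register but reg_equiv_memory_loc (200) is
   (mem:SI (plus:SI (reg:SI 6) (const_int 8))), then

     refers_to_regno_for_reload_p (6, 7, gen_rtx_REG (SImode, 200),
                                   (rtx *) 0)

   returns nonzero, because the equivalent memory address mentions hard
   register 6.  */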
6522
6523 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6524 we check if any register number in X conflicts with the relevant register
6525 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6526 contains a MEM (we don't bother checking for memory addresses that can't
6527 conflict because we expect this to be a rare case).
6528
6529 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6530 that we look at equivalences for pseudos that didn't get hard registers. */
6531
6532 int
6533 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6534 {
6535 int regno, endregno;
6536
6537 /* Overly conservative. */
6538 if (GET_CODE (x) == STRICT_LOW_PART
6539 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6540 x = XEXP (x, 0);
6541
6542 /* If either argument is a constant, then modifying X cannot affect IN. */
6543 if (CONSTANT_P (x) || CONSTANT_P (in))
6544 return 0;
6545 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6546 return refers_to_mem_for_reload_p (in);
6547 else if (GET_CODE (x) == SUBREG)
6548 {
6549 regno = REGNO (SUBREG_REG (x));
6550 if (regno < FIRST_PSEUDO_REGISTER)
6551 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6552 GET_MODE (SUBREG_REG (x)),
6553 SUBREG_BYTE (x),
6554 GET_MODE (x));
6555 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6556 ? subreg_nregs (x) : 1);
6557
6558 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6559 }
6560 else if (REG_P (x))
6561 {
6562 regno = REGNO (x);
6563
6564 /* If this is a pseudo, it must not have been assigned a hard register.
6565 Therefore, it must either be in memory or be a constant. */
6566
6567 if (regno >= FIRST_PSEUDO_REGISTER)
6568 {
6569 if (reg_equiv_memory_loc (regno))
6570 return refers_to_mem_for_reload_p (in);
6571 gcc_assert (reg_equiv_constant (regno));
6572 return 0;
6573 }
6574
6575 endregno = END_REGNO (x);
6576
6577 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6578 }
6579 else if (MEM_P (x))
6580 return refers_to_mem_for_reload_p (in);
6581 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6582 || GET_CODE (x) == CC0)
6583 return reg_mentioned_p (x, in);
6584 else
6585 {
6586 gcc_assert (GET_CODE (x) == PLUS);
6587
6588 /* We actually want to know if X is mentioned somewhere inside IN.
6589 We must not say that (plus (sp) (const_int 124)) is in
6590 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6591 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6592 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6593 while (MEM_P (in))
6594 in = XEXP (in, 0);
6595 if (REG_P (in))
6596 return 0;
6597 else if (GET_CODE (in) == PLUS)
6598 return (rtx_equal_p (x, in)
6599 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6600 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6601 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6602 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6603 }
6604
6605 gcc_unreachable ();
6606 }
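
/* For example (illustrative rtl only): with X = (reg:SI 3) and
   IN = (plus:SI (reg:SI 3) (const_int 4)) this returns 1, whereas
   IN = (plus:SI (reg:SI 7) (const_int 4)) gives 0, assuming hard
   registers 3 and 7 each need one word in SImode.  If X is a spilled
   pseudo whose equivalence is a MEM, any IN containing a MEM is
   conservatively reported as overlapping.  */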
6607
6608 /* Return nonzero if anything in X contains a MEM. Pseudo registers
6609 that are equivalent to a memory location also count. */
6610
6611 static int
6612 refers_to_mem_for_reload_p (rtx x)
6613 {
6614 const char *fmt;
6615 int i;
6616
6617 if (MEM_P (x))
6618 return 1;
6619
6620 if (REG_P (x))
6621 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6622 && reg_equiv_memory_loc (REGNO (x)));
6623
6624 fmt = GET_RTX_FORMAT (GET_CODE (x));
6625 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6626 if (fmt[i] == 'e'
6627 && (MEM_P (XEXP (x, i))
6628 || refers_to_mem_for_reload_p (XEXP (x, i))))
6629 return 1;
6630
6631 return 0;
6632 }
6633 \f
6634 /* Check the insns before INSN to see if there is a suitable register
6635 containing the same value as GOAL.
6636 If OTHER is -1, look for a register in class RCLASS.
6637 Otherwise, just see if register number OTHER shares GOAL's value.
6638
6639 Return an rtx for the register found, or zero if none is found.
6640
6641 If RELOAD_REG_P is (short *)1,
6642 we reject any hard reg that appears in reload_reg_rtx
6643 because such a hard reg is also needed coming into this insn.
6644
6645 If RELOAD_REG_P is any other nonzero value,
6646 it is a vector indexed by hard reg number
6647 and we reject any hard reg whose element in the vector is nonnegative
6648 as well as any that appears in reload_reg_rtx.
6649
6650 If GOAL is zero, then GOALREG is a register number; we look
6651 for an equivalent for that register.
6652
6653 MODE is the machine mode of the value we want an equivalence for.
6654 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6655
6656 This function is used by jump.c as well as in the reload pass.
6657
6658 If GOAL is the sum of the stack pointer and a constant, we treat it
6659 as if it were a constant except that sp is required to be unchanging. */
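
/* A minimal caller sketch (hypothetical; the real callers are in
   jump.c and the reload pass): before loading the constant GOAL into a
   fresh register, one could ask whether a register of class RCLASS
   already holds an equivalent value and is not reserved for reloads:

     rtx equiv = find_equiv_reg (goal, insn, rclass, -1, (short *) 1,
                                 0, mode);
     if (equiv != 0)
       ... reuse EQUIV instead of reloading GOAL ...

   GOAL, INSN, RCLASS and MODE stand for the caller's values as
   described above.  */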
6660
6661 rtx
6662 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6663 short *reload_reg_p, int goalreg, machine_mode mode)
6664 {
6665 rtx_insn *p = insn;
6666 rtx goaltry, valtry, value;
6667 rtx_insn *where;
6668 rtx pat;
6669 int regno = -1;
6670 int valueno;
6671 int goal_mem = 0;
6672 int goal_const = 0;
6673 int goal_mem_addr_varies = 0;
6674 int need_stable_sp = 0;
6675 int nregs;
6676 int valuenregs;
6677 int num = 0;
6678
6679 if (goal == 0)
6680 regno = goalreg;
6681 else if (REG_P (goal))
6682 regno = REGNO (goal);
6683 else if (MEM_P (goal))
6684 {
6685 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6686 if (MEM_VOLATILE_P (goal))
6687 return 0;
6688 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6689 return 0;
6690 /* An address with side effects must be reexecuted. */
6691 switch (code)
6692 {
6693 case POST_INC:
6694 case PRE_INC:
6695 case POST_DEC:
6696 case PRE_DEC:
6697 case POST_MODIFY:
6698 case PRE_MODIFY:
6699 return 0;
6700 default:
6701 break;
6702 }
6703 goal_mem = 1;
6704 }
6705 else if (CONSTANT_P (goal))
6706 goal_const = 1;
6707 else if (GET_CODE (goal) == PLUS
6708 && XEXP (goal, 0) == stack_pointer_rtx
6709 && CONSTANT_P (XEXP (goal, 1)))
6710 goal_const = need_stable_sp = 1;
6711 else if (GET_CODE (goal) == PLUS
6712 && XEXP (goal, 0) == frame_pointer_rtx
6713 && CONSTANT_P (XEXP (goal, 1)))
6714 goal_const = 1;
6715 else
6716 return 0;
6717
6718 num = 0;
6719 /* Scan insns back from INSN, looking for one that copies
6720 a value into or out of GOAL.
6721 Stop and give up if we reach a label. */
6722
6723 while (1)
6724 {
6725 p = PREV_INSN (p);
6726 if (p && DEBUG_INSN_P (p))
6727 continue;
6728 num++;
6729 if (p == 0 || LABEL_P (p)
6730 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6731 return 0;
6732
6733 /* Don't reuse register contents from before a setjmp-type
6734 function call; on the second return (from the longjmp) it
6735 might have been clobbered by a later reuse. It doesn't
6736 seem worthwhile to go and check whether it is actually
6737 reused, even if that information were readily available;
6738 just don't reuse it across the setjmp call. */
6739 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6740 return 0;
6741
6742 if (NONJUMP_INSN_P (p)
6743 /* If we don't want spill regs ... */
6744 && (! (reload_reg_p != 0
6745 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6746 /* ... then ignore insns introduced by reload; they aren't
6747 useful and can cause results in reload_as_needed to be
6748 different from what they were when calculating the need for
6749 spills. If we notice an input-reload insn here, we will
6750 reject it below, but it might hide a usable equivalent.
6751 That makes bad code. It may even fail: perhaps no reg was
6752 spilled for this insn because it was assumed we would find
6753 that equivalent. */
6754 || INSN_UID (p) < reload_first_uid))
6755 {
6756 rtx tem;
6757 pat = single_set (p);
6758
6759 /* First check for something that sets some reg equal to GOAL. */
6760 if (pat != 0
6761 && ((regno >= 0
6762 && true_regnum (SET_SRC (pat)) == regno
6763 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6764 ||
6765 (regno >= 0
6766 && true_regnum (SET_DEST (pat)) == regno
6767 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6768 ||
6769 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6770 /* When looking for stack pointer + const,
6771 make sure we don't use a stack adjust. */
6772 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6773 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6774 || (goal_mem
6775 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6776 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6777 || (goal_mem
6778 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6779 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6780 /* If we are looking for a constant,
6781 and something equivalent to that constant was copied
6782 into a reg, we can use that reg. */
6783 || (goal_const && REG_NOTES (p) != 0
6784 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6785 && ((rtx_equal_p (XEXP (tem, 0), goal)
6786 && (valueno
6787 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6788 || (REG_P (SET_DEST (pat))
6789 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6790 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6791 && CONST_INT_P (goal)
6792 && 0 != (goaltry
6793 = operand_subword (XEXP (tem, 0), 0, 0,
6794 VOIDmode))
6795 && rtx_equal_p (goal, goaltry)
6796 && (valtry
6797 = operand_subword (SET_DEST (pat), 0, 0,
6798 VOIDmode))
6799 && (valueno = true_regnum (valtry)) >= 0)))
6800 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6801 NULL_RTX))
6802 && REG_P (SET_DEST (pat))
6803 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6804 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6805 && CONST_INT_P (goal)
6806 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6807 VOIDmode))
6808 && rtx_equal_p (goal, goaltry)
6809 && (valtry
6810 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6811 && (valueno = true_regnum (valtry)) >= 0)))
6812 {
6813 if (other >= 0)
6814 {
6815 if (valueno != other)
6816 continue;
6817 }
6818 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6819 continue;
6820 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6821 mode, valueno))
6822 continue;
6823 value = valtry;
6824 where = p;
6825 break;
6826 }
6827 }
6828 }
6829
6830 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6831 (or copying VALUE into GOAL, if GOAL is also a register).
6832 Now verify that VALUE is really valid. */
6833
6834 /* VALUENO is the register number of VALUE; a hard register. */
6835
6836 /* Don't try to re-use something that is killed in this insn. We want
6837 to be able to trust REG_UNUSED notes. */
6838 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6839 return 0;
6840
6841 /* If we propose to get the value from the stack pointer or if GOAL is
6842 a MEM based on the stack pointer, we need a stable SP. */
6843 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6844 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6845 goal)))
6846 need_stable_sp = 1;
6847
6848 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6849 if (GET_MODE (value) != mode)
6850 return 0;
6851
6852 /* Reject VALUE if it was loaded from GOAL
6853 and is also a register that appears in the address of GOAL. */
6854
6855 if (goal_mem && value == SET_DEST (single_set (where))
6856 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6857 goal, (rtx*) 0))
6858 return 0;
6859
6860 /* Reject registers that overlap GOAL. */
6861
6862 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6863 nregs = hard_regno_nregs[regno][mode];
6864 else
6865 nregs = 1;
6866 valuenregs = hard_regno_nregs[valueno][mode];
6867
6868 if (!goal_mem && !goal_const
6869 && regno + nregs > valueno && regno < valueno + valuenregs)
6870 return 0;
6871
6872 /* Reject VALUE if it is one of the regs reserved for reloads.
6873 Reload1 knows how to reuse them anyway, and it would get
6874 confused if we allocated one without its knowledge.
6875 (Now that insns introduced by reload are ignored above,
6876 this case shouldn't happen, but I'm not positive.) */
6877
6878 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6879 {
6880 int i;
6881 for (i = 0; i < valuenregs; ++i)
6882 if (reload_reg_p[valueno + i] >= 0)
6883 return 0;
6884 }
6885
6886 /* Reject VALUE if it is a register being used for an input reload
6887 even if it is not one of those reserved. */
6888
6889 if (reload_reg_p != 0)
6890 {
6891 int i;
6892 for (i = 0; i < n_reloads; i++)
6893 if (rld[i].reg_rtx != 0 && rld[i].in)
6894 {
6895 int regno1 = REGNO (rld[i].reg_rtx);
6896 int nregs1 = hard_regno_nregs[regno1]
6897 [GET_MODE (rld[i].reg_rtx)];
6898 if (regno1 < valueno + valuenregs
6899 && regno1 + nregs1 > valueno)
6900 return 0;
6901 }
6902 }
6903
6904 if (goal_mem)
6905 /* We must treat the frame pointer as varying here,
6906 since it can vary--in a nonlocal goto as generated by expand_goto. */
6907 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6908
6909 /* Now verify that the values of GOAL and VALUE remain unaltered
6910 until INSN is reached. */
6911
6912 p = insn;
6913 while (1)
6914 {
6915 p = PREV_INSN (p);
6916 if (p == where)
6917 return value;
6918
6919 /* Don't trust the equivalence past a function call
6920 if GOAL or VALUE is in a call-clobbered register, or in memory. */
6921 if (CALL_P (p))
6922 {
6923 int i;
6924
6925 if (goal_mem || need_stable_sp)
6926 return 0;
6927
6928 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6929 for (i = 0; i < nregs; ++i)
6930 if (call_used_regs[regno + i]
6931 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6932 return 0;
6933
6934 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6935 for (i = 0; i < valuenregs; ++i)
6936 if (call_used_regs[valueno + i]
6937 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6938 return 0;
6939 }
6940
6941 if (INSN_P (p))
6942 {
6943 pat = PATTERN (p);
6944
6945 /* Watch out for unspec_volatile and volatile asms. */
6946 if (volatile_insn_p (pat))
6947 return 0;
6948
6949 /* If this insn P stores in either GOAL or VALUE, return 0.
6950 If GOAL is a memory ref and this insn writes memory, return 0.
6951 If GOAL is a memory ref and its address is not constant,
6952 and this insn P changes a register used in GOAL, return 0. */
6953
6954 if (GET_CODE (pat) == COND_EXEC)
6955 pat = COND_EXEC_CODE (pat);
6956 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6957 {
6958 rtx dest = SET_DEST (pat);
6959 while (GET_CODE (dest) == SUBREG
6960 || GET_CODE (dest) == ZERO_EXTRACT
6961 || GET_CODE (dest) == STRICT_LOW_PART)
6962 dest = XEXP (dest, 0);
6963 if (REG_P (dest))
6964 {
6965 int xregno = REGNO (dest);
6966 int xnregs;
6967 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6968 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6969 else
6970 xnregs = 1;
6971 if (xregno < regno + nregs && xregno + xnregs > regno)
6972 return 0;
6973 if (xregno < valueno + valuenregs
6974 && xregno + xnregs > valueno)
6975 return 0;
6976 if (goal_mem_addr_varies
6977 && reg_overlap_mentioned_for_reload_p (dest, goal))
6978 return 0;
6979 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6980 return 0;
6981 }
6982 else if (goal_mem && MEM_P (dest)
6983 && ! push_operand (dest, GET_MODE (dest)))
6984 return 0;
6985 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6986 && reg_equiv_memory_loc (regno) != 0)
6987 return 0;
6988 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6989 return 0;
6990 }
6991 else if (GET_CODE (pat) == PARALLEL)
6992 {
6993 int i;
6994 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6995 {
6996 rtx v1 = XVECEXP (pat, 0, i);
6997 if (GET_CODE (v1) == COND_EXEC)
6998 v1 = COND_EXEC_CODE (v1);
6999 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7000 {
7001 rtx dest = SET_DEST (v1);
7002 while (GET_CODE (dest) == SUBREG
7003 || GET_CODE (dest) == ZERO_EXTRACT
7004 || GET_CODE (dest) == STRICT_LOW_PART)
7005 dest = XEXP (dest, 0);
7006 if (REG_P (dest))
7007 {
7008 int xregno = REGNO (dest);
7009 int xnregs;
7010 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7011 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7012 else
7013 xnregs = 1;
7014 if (xregno < regno + nregs
7015 && xregno + xnregs > regno)
7016 return 0;
7017 if (xregno < valueno + valuenregs
7018 && xregno + xnregs > valueno)
7019 return 0;
7020 if (goal_mem_addr_varies
7021 && reg_overlap_mentioned_for_reload_p (dest,
7022 goal))
7023 return 0;
7024 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7025 return 0;
7026 }
7027 else if (goal_mem && MEM_P (dest)
7028 && ! push_operand (dest, GET_MODE (dest)))
7029 return 0;
7030 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7031 && reg_equiv_memory_loc (regno) != 0)
7032 return 0;
7033 else if (need_stable_sp
7034 && push_operand (dest, GET_MODE (dest)))
7035 return 0;
7036 }
7037 }
7038 }
7039
7040 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7041 {
7042 rtx link;
7043
7044 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7045 link = XEXP (link, 1))
7046 {
7047 pat = XEXP (link, 0);
7048 if (GET_CODE (pat) == CLOBBER)
7049 {
7050 rtx dest = SET_DEST (pat);
7051
7052 if (REG_P (dest))
7053 {
7054 int xregno = REGNO (dest);
7055 int xnregs
7056 = hard_regno_nregs[xregno][GET_MODE (dest)];
7057
7058 if (xregno < regno + nregs
7059 && xregno + xnregs > regno)
7060 return 0;
7061 else if (xregno < valueno + valuenregs
7062 && xregno + xnregs > valueno)
7063 return 0;
7064 else if (goal_mem_addr_varies
7065 && reg_overlap_mentioned_for_reload_p (dest,
7066 goal))
7067 return 0;
7068 }
7069
7070 else if (goal_mem && MEM_P (dest)
7071 && ! push_operand (dest, GET_MODE (dest)))
7072 return 0;
7073 else if (need_stable_sp
7074 && push_operand (dest, GET_MODE (dest)))
7075 return 0;
7076 }
7077 }
7078 }
7079
7080 #if AUTO_INC_DEC
7081 /* If this insn auto-increments or auto-decrements
7082 either regno or valueno, return 0 now.
7083 If GOAL is a memory ref and its address is not constant,
7084 and this insn P increments a register used in GOAL, return 0. */
7085 {
7086 rtx link;
7087
7088 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7089 if (REG_NOTE_KIND (link) == REG_INC
7090 && REG_P (XEXP (link, 0)))
7091 {
7092 int incno = REGNO (XEXP (link, 0));
7093 if (incno < regno + nregs && incno >= regno)
7094 return 0;
7095 if (incno < valueno + valuenregs && incno >= valueno)
7096 return 0;
7097 if (goal_mem_addr_varies
7098 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7099 goal))
7100 return 0;
7101 }
7102 }
7103 #endif
7104 }
7105 }
7106 }
7107 \f
7108 /* Find a place where INCED appears in an increment or decrement operator
7109 within X, and return the amount INCED is incremented or decremented by.
7110 The value is always positive. */
7111
7112 static int
7113 find_inc_amount (rtx x, rtx inced)
7114 {
7115 enum rtx_code code = GET_CODE (x);
7116 const char *fmt;
7117 int i;
7118
7119 if (code == MEM)
7120 {
7121 rtx addr = XEXP (x, 0);
7122 if ((GET_CODE (addr) == PRE_DEC
7123 || GET_CODE (addr) == POST_DEC
7124 || GET_CODE (addr) == PRE_INC
7125 || GET_CODE (addr) == POST_INC)
7126 && XEXP (addr, 0) == inced)
7127 return GET_MODE_SIZE (GET_MODE (x));
7128 else if ((GET_CODE (addr) == PRE_MODIFY
7129 || GET_CODE (addr) == POST_MODIFY)
7130 && GET_CODE (XEXP (addr, 1)) == PLUS
7131 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7132 && XEXP (addr, 0) == inced
7133 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7134 {
7135 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7136 return i < 0 ? -i : i;
7137 }
7138 }
7139
7140 fmt = GET_RTX_FORMAT (code);
7141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7142 {
7143 if (fmt[i] == 'e')
7144 {
7145 int tem = find_inc_amount (XEXP (x, i), inced);
7146 if (tem != 0)
7147 return tem;
7148 }
7149 if (fmt[i] == 'E')
7150 {
7151 int j;
7152 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7153 {
7154 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7155 if (tem != 0)
7156 return tem;
7157 }
7158 }
7159 }
7160
7161 return 0;
7162 }
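
/* Example (illustrative): for X = (mem:SI (post_inc:SI (reg:SI 5)))
   with INCED being that same (reg:SI 5) rtx, the result is
   GET_MODE_SIZE (SImode), typically 4; for a (pre_modify ...) address
   whose PLUS adds (const_int -12) to INCED, the result is 12.  */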
7163 \f
7164 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7165 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7166
7167 static int
7168 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7169 rtx insn)
7170 {
7171 rtx link;
7172
7173 if (!AUTO_INC_DEC)
7174 return 0;
7175
7176 gcc_assert (insn);
7177
7178 if (! INSN_P (insn))
7179 return 0;
7180
7181 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7182 if (REG_NOTE_KIND (link) == REG_INC)
7183 {
7184 unsigned int test = REGNO (XEXP (link, 0));
7185 if (test >= regno && test < endregno)
7186 return 1;
7187 }
7188 return 0;
7189 }
7190
7191 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7192 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7193 REG_INC. REGNO must refer to a hard register. */
7194
7195 int
7196 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7197 int sets)
7198 {
7199 unsigned int nregs, endregno;
7200
7201 /* REGNO must refer to a hard register. */
7202 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7203
7204 nregs = hard_regno_nregs[regno][mode];
7205 endregno = regno + nregs;
7206
7207 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7208 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7209 && REG_P (XEXP (PATTERN (insn), 0)))
7210 {
7211 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7212
7213 return test >= regno && test < endregno;
7214 }
7215
7216 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7217 return 1;
7218
7219 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7220 {
7221 int i = XVECLEN (PATTERN (insn), 0) - 1;
7222
7223 for (; i >= 0; i--)
7224 {
7225 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7226 if ((GET_CODE (elt) == CLOBBER
7227 || (sets == 1 && GET_CODE (elt) == SET))
7228 && REG_P (XEXP (elt, 0)))
7229 {
7230 unsigned int test = REGNO (XEXP (elt, 0));
7231
7232 if (test >= regno && test < endregno)
7233 return 1;
7234 }
7235 if (sets == 2
7236 && reg_inc_found_and_valid_p (regno, endregno, elt))
7237 return 1;
7238 }
7239 }
7240
7241 return 0;
7242 }
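
/* Usage sketch (hypothetical): to ask whether hard register 1, as used
   in DImode, is clobbered by INSN, also counting auto-increments:

     if (regno_clobbered_p (1, insn, DImode, 2))
       ...

   SETS == 0 considers only CLOBBERs, SETS == 1 also plain SETs, and
   SETS == 2 CLOBBERs together with REG_INC notes.  */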
7243
7244 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7245 rtx
7246 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7247 {
7248 int regno;
7249
7250 if (GET_MODE (reloadreg) == mode)
7251 return reloadreg;
7252
7253 regno = REGNO (reloadreg);
7254
7255 if (REG_WORDS_BIG_ENDIAN)
7256 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7257 - (int) hard_regno_nregs[regno][mode];
7258
7259 return gen_rtx_REG (mode, regno);
7260 }
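
/* For example (a sketch assuming DImode needs two hard registers and
   SImode one): if RELOADREG is (reg:DI 10), occupying hard registers
   10 and 11, reload_adjust_reg_for_mode (reloadreg, SImode) returns
   (reg:SI 10) when !REG_WORDS_BIG_ENDIAN and (reg:SI 11) when
   REG_WORDS_BIG_ENDIAN, i.e. the word holding the low part.  */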
7261
7262 static const char *const reload_when_needed_name[] =
7263 {
7264 "RELOAD_FOR_INPUT",
7265 "RELOAD_FOR_OUTPUT",
7266 "RELOAD_FOR_INSN",
7267 "RELOAD_FOR_INPUT_ADDRESS",
7268 "RELOAD_FOR_INPADDR_ADDRESS",
7269 "RELOAD_FOR_OUTPUT_ADDRESS",
7270 "RELOAD_FOR_OUTADDR_ADDRESS",
7271 "RELOAD_FOR_OPERAND_ADDRESS",
7272 "RELOAD_FOR_OPADDR_ADDR",
7273 "RELOAD_OTHER",
7274 "RELOAD_FOR_OTHER_ADDRESS"
7275 };
7276
7277 /* These functions are used to print the variables set by 'find_reloads'. */
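/* They are DEBUG_FUNCTIONs, normally invoked by hand from a debugger,
   e.g. "call debug_reload ()" under gdb once find_reloads has filled
   in rld[] (a usage note, not a call site in GCC).  */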
7278
7279 DEBUG_FUNCTION void
7280 debug_reload_to_stream (FILE *f)
7281 {
7282 int r;
7283 const char *prefix;
7284
7285 if (! f)
7286 f = stderr;
7287 for (r = 0; r < n_reloads; r++)
7288 {
7289 fprintf (f, "Reload %d: ", r);
7290
7291 if (rld[r].in != 0)
7292 {
7293 fprintf (f, "reload_in (%s) = ",
7294 GET_MODE_NAME (rld[r].inmode));
7295 print_inline_rtx (f, rld[r].in, 24);
7296 fprintf (f, "\n\t");
7297 }
7298
7299 if (rld[r].out != 0)
7300 {
7301 fprintf (f, "reload_out (%s) = ",
7302 GET_MODE_NAME (rld[r].outmode));
7303 print_inline_rtx (f, rld[r].out, 24);
7304 fprintf (f, "\n\t");
7305 }
7306
7307 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7308
7309 fprintf (f, "%s (opnum = %d)",
7310 reload_when_needed_name[(int) rld[r].when_needed],
7311 rld[r].opnum);
7312
7313 if (rld[r].optional)
7314 fprintf (f, ", optional");
7315
7316 if (rld[r].nongroup)
7317 fprintf (f, ", nongroup");
7318
7319 if (rld[r].inc != 0)
7320 fprintf (f, ", inc by %d", rld[r].inc);
7321
7322 if (rld[r].nocombine)
7323 fprintf (f, ", can't combine");
7324
7325 if (rld[r].secondary_p)
7326 fprintf (f, ", secondary_reload_p");
7327
7328 if (rld[r].in_reg != 0)
7329 {
7330 fprintf (f, "\n\treload_in_reg: ");
7331 print_inline_rtx (f, rld[r].in_reg, 24);
7332 }
7333
7334 if (rld[r].out_reg != 0)
7335 {
7336 fprintf (f, "\n\treload_out_reg: ");
7337 print_inline_rtx (f, rld[r].out_reg, 24);
7338 }
7339
7340 if (rld[r].reg_rtx != 0)
7341 {
7342 fprintf (f, "\n\treload_reg_rtx: ");
7343 print_inline_rtx (f, rld[r].reg_rtx, 24);
7344 }
7345
7346 prefix = "\n\t";
7347 if (rld[r].secondary_in_reload != -1)
7348 {
7349 fprintf (f, "%ssecondary_in_reload = %d",
7350 prefix, rld[r].secondary_in_reload);
7351 prefix = ", ";
7352 }
7353
7354 if (rld[r].secondary_out_reload != -1)
7355 fprintf (f, "%ssecondary_out_reload = %d\n",
7356 prefix, rld[r].secondary_out_reload);
7357
7358 prefix = "\n\t";
7359 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7360 {
7361 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7362 insn_data[rld[r].secondary_in_icode].name);
7363 prefix = ", ";
7364 }
7365
7366 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7367 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7368 insn_data[rld[r].secondary_out_icode].name);
7369
7370 fprintf (f, "\n");
7371 }
7372 }
7373
7374 DEBUG_FUNCTION void
7375 debug_reload (void)
7376 {
7377 debug_reload_to_stream (stderr);
7378 }