1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
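
   As a purely illustrative example: with one level of indirection, if
   pseudo (reg 65) was spilled to a stack slot, a use of (mem (reg 65))
   may be left as (mem (mem (plus (reg fp) (const_int -16)))) instead of
   first reloading the spilled address into a register.  (The register
   number and offset here are invented for the example.)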
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
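   As a rough sketch of the expected calling sequence (reload1.c's real
   driver does far more bookkeeping; the first three arguments follow the
   descriptions above, the last two are sketched from the descriptions of
   hard_regs_live_known and static_reload_reg_p further down this file):

	init_reload ();
	for each insn to be processed:
	  find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	  ... choose hard regs, set rld[j].reg_rtx, emit the load/store insns ...
	  subst_reloads (insn);
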
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
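
/* A small illustration of the sharing rule above (a sketch, not taken from
   any particular target): in an insn such as
     (set (mem (reg 65)) (mem (reg 66)))
   where both pseudos lost their hard registers, the address reload for the
   input MEM and the one for the output MEM get different reload_when_needed
   values, so they may share a single reload register, while an ordinary
   operand reload would be RELOAD_OTHER and could not share.  */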
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "symtab.h"
99 #include "hashtab.h"
100 #include "hash-set.h"
101 #include "vec.h"
102 #include "machmode.h"
103 #include "hard-reg-set.h"
104 #include "input.h"
105 #include "function.h"
106 #include "rtl.h"
107 #include "flags.h"
108 #include "statistics.h"
109 #include "double-int.h"
110 #include "real.h"
111 #include "fixed-value.h"
112 #include "alias.h"
113 #include "wide-int.h"
114 #include "inchash.h"
115 #include "tree.h"
116 #include "expmed.h"
117 #include "dojump.h"
118 #include "explow.h"
119 #include "calls.h"
120 #include "emit-rtl.h"
121 #include "varasm.h"
122 #include "stmt.h"
123 #include "expr.h"
124 #include "insn-codes.h"
125 #include "optabs.h"
126 #include "recog.h"
127 #include "dominance.h"
128 #include "cfg.h"
129 #include "predict.h"
130 #include "basic-block.h"
131 #include "df.h"
132 #include "reload.h"
133 #include "regs.h"
134 #include "addresses.h"
135 #include "params.h"
136 #include "target.h"
137 #include "ira.h"
138
139 /* True if X is a constant that can be forced into the constant pool.
140 MODE is the mode of the operand, or VOIDmode if not known. */
141 #define CONST_POOL_OK_P(MODE, X) \
142 ((MODE) != VOIDmode \
143 && CONSTANT_P (X) \
144 && GET_CODE (X) != HIGH \
145 && !targetm.cannot_force_const_mem (MODE, X))
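
/* E.g. (illustrative only) a CONST_DOUBLE used in DFmode would normally
   satisfy this, subject to the target hook, while a HIGH expression never
   does, per the GET_CODE test above.  */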
146
147 /* True if RCLASS is a non-empty register class that has too few registers
148 to be safely used as a reload target class. */
149
150 static inline bool
151 small_register_class_p (reg_class_t rclass)
152 {
153 return (reg_class_size [(int) rclass] == 1
154 || (reg_class_size [(int) rclass] >= 1
155 && targetm.class_likely_spilled_p (rclass)));
156 }
157
158 \f
159 /* All reloads of the current insn are recorded here. See reload.h for
160 comments. */
161 int n_reloads;
162 struct reload rld[MAX_RELOADS];
163
164 /* All the "earlyclobber" operands of the current insn
165 are recorded here. */
166 int n_earlyclobbers;
167 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
168
169 int reload_n_operands;
170
171 /* Replacing reloads.
172
173 If `replace_reloads' is nonzero, then as each reload is recorded
174 an entry is made for it in the table `replacements'.
175 Then later `subst_reloads' can look through that table and
176 perform all the replacements needed. */
177
178 /* Nonzero means record the places to replace. */
179 static int replace_reloads;
180
181 /* Each replacement is recorded with a structure like this. */
182 struct replacement
183 {
184 rtx *where; /* Location to store in */
185 int what; /* which reload this is for */
186 machine_mode mode; /* mode it must have */
187 };
188
189 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
190
191 /* Number of replacements currently recorded. */
192 static int n_replacements;
193
194 /* Used to track what is modified by an operand. */
195 struct decomposition
196 {
197 int reg_flag; /* Nonzero if referencing a register. */
198 int safe; /* Nonzero if this can't conflict with anything. */
199 rtx base; /* Base address for MEM. */
200 HOST_WIDE_INT start; /* Starting offset or register number. */
201 HOST_WIDE_INT end; /* Ending offset or register number. */
202 };
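
/* For instance (informal): for a MEM operand, the decomposition records the
   base address and the [start, end) byte range it may touch; for a REG,
   start and end delimit the range of register numbers used.  */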
203
204 #ifdef SECONDARY_MEMORY_NEEDED
205
206 /* Save MEMs needed to copy from one class of registers to another. One MEM
207 is used per mode, but normally only one or two modes are ever used.
208
209 We keep two versions, before and after register elimination. The one
210 after register elimination is recorded separately for each operand. This
211 is done in case the address is not valid, to be sure that we reload
212 each one separately. */
213
214 static rtx secondary_memlocs[NUM_MACHINE_MODES];
215 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
216 static int secondary_memlocs_elim_used = 0;
217 #endif
218
219 /* The instruction we are doing reloads for;
220 so we can test whether a register dies in it. */
221 static rtx_insn *this_insn;
222
223 /* Nonzero if this instruction is a user-specified asm with operands. */
224 static int this_insn_is_asm;
225
226 /* If hard_regs_live_known is nonzero,
227 we can tell which hard regs are currently live,
228 at least enough to succeed in choosing dummy reloads. */
229 static int hard_regs_live_known;
230
231 /* Indexed by hard reg number,
232 element is nonnegative if hard reg has been spilled.
233 This vector is passed to `find_reloads' as an argument
234 and is not changed here. */
235 static short *static_reload_reg_p;
236
237 /* Set to 1 in subst_reg_equivs if it changes anything. */
238 static int subst_reg_equivs_changed;
239
240 /* On return from push_reload, holds the reload-number for the OUT
241 operand, which can be different from that for the input operand. */
242 static int output_reloadnum;
243
244 /* Compare two RTX's. */
245 #define MATCHES(x, y) \
246 (x == y || (x != 0 && (REG_P (x) \
247 ? REG_P (y) && REGNO (x) == REGNO (y) \
248 : rtx_equal_p (x, y) && ! side_effects_p (x))))
249
250 /* Indicates if the purposes of two reloads are for similar enough things
251 that we can merge their reloads. */
252 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
253 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
254 || ((when1) == (when2) && (op1) == (op2)) \
255 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
256 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
257 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
258 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
259 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
260
261 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
262 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
263 ((when1) != (when2) \
264 || ! ((op1) == (op2) \
265 || (when1) == RELOAD_FOR_INPUT \
266 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
267 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
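
/* For instance (illustrative only): two RELOAD_FOR_INPUT reloads, even for
   different operands, satisfy MERGABLE_RELOADS and the merged reload keeps
   RELOAD_FOR_INPUT, whereas merging a RELOAD_FOR_INPUT reload with a
   RELOAD_OTHER one satisfies MERGE_TO_OTHER, so the merged reload becomes
   RELOAD_OTHER.  */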
268
269 /* If we are going to reload an address, compute the reload type to
270 use. */
271 #define ADDR_TYPE(type) \
272 ((type) == RELOAD_FOR_INPUT_ADDRESS \
273 ? RELOAD_FOR_INPADDR_ADDRESS \
274 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
275 ? RELOAD_FOR_OUTADDR_ADDRESS \
276 : (type)))
277
278 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
279 machine_mode, enum reload_type,
280 enum insn_code *, secondary_reload_info *);
281 static enum reg_class find_valid_class (machine_mode, machine_mode,
282 int, unsigned int);
283 static void push_replacement (rtx *, int, machine_mode);
284 static void dup_replacements (rtx *, rtx *);
285 static void combine_reloads (void);
286 static int find_reusable_reload (rtx *, rtx, enum reg_class,
287 enum reload_type, int, int);
288 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
289 machine_mode, reg_class_t, int, int);
290 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
291 static struct decomposition decompose (rtx);
292 static int immune_p (rtx, rtx, struct decomposition);
293 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
294 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
295 rtx_insn *, int *);
296 static rtx make_memloc (rtx, int);
297 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
298 addr_space_t, rtx *);
299 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
300 int, enum reload_type, int, rtx_insn *);
301 static rtx subst_reg_equivs (rtx, rtx_insn *);
302 static rtx subst_indexed_address (rtx);
303 static void update_auto_inc_notes (rtx_insn *, int, int);
304 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
305 enum rtx_code, enum rtx_code, rtx *,
306 int, enum reload_type,int, rtx_insn *);
307 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
308 machine_mode, int,
309 enum reload_type, int);
310 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
311 int, rtx_insn *, int *);
312 static void copy_replacements_1 (rtx *, rtx *, int);
313 static int find_inc_amount (rtx, rtx);
314 static int refers_to_mem_for_reload_p (rtx);
315 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
316 rtx, rtx *);
317
318 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
319 list yet. */
320
321 static void
322 push_reg_equiv_alt_mem (int regno, rtx mem)
323 {
324 rtx it;
325
326 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
327 if (rtx_equal_p (XEXP (it, 0), mem))
328 return;
329
330 reg_equiv_alt_mem_list (regno)
331 = alloc_EXPR_LIST (REG_EQUIV, mem,
332 reg_equiv_alt_mem_list (regno));
333 }
334 \f
335 /* Determine if any secondary reloads are needed for loading (if IN_P is
336 nonzero) or storing (if IN_P is zero) X to or from a reload register of
337 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
338 are needed, push them.
339
340 Return the reload number of the secondary reload we made, or -1 if
341 we didn't need one. *PICODE is set to the insn_code to use if we do
342 need a secondary reload. */
343
344 static int
345 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
346 enum reg_class reload_class,
347 machine_mode reload_mode, enum reload_type type,
348 enum insn_code *picode, secondary_reload_info *prev_sri)
349 {
350 enum reg_class rclass = NO_REGS;
351 enum reg_class scratch_class;
352 machine_mode mode = reload_mode;
353 enum insn_code icode = CODE_FOR_nothing;
354 enum insn_code t_icode = CODE_FOR_nothing;
355 enum reload_type secondary_type;
356 int s_reload, t_reload = -1;
357 const char *scratch_constraint;
358 secondary_reload_info sri;
359
360 if (type == RELOAD_FOR_INPUT_ADDRESS
361 || type == RELOAD_FOR_OUTPUT_ADDRESS
362 || type == RELOAD_FOR_INPADDR_ADDRESS
363 || type == RELOAD_FOR_OUTADDR_ADDRESS)
364 secondary_type = type;
365 else
366 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
367
368 *picode = CODE_FOR_nothing;
369
370 /* If X is a paradoxical SUBREG, use the inner value to determine both the
371 mode and object being reloaded. */
372 if (paradoxical_subreg_p (x))
373 {
374 x = SUBREG_REG (x);
375 reload_mode = GET_MODE (x);
376 }
377
378 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
379 is still a pseudo-register by now, it *must* have an equivalent MEM
380 but we don't want to assume that), use that equivalent when seeing if
381 a secondary reload is needed since whether or not a reload is needed
382 might be sensitive to the form of the MEM. */
383
384 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
385 && reg_equiv_mem (REGNO (x)))
386 x = reg_equiv_mem (REGNO (x));
387
388 sri.icode = CODE_FOR_nothing;
389 sri.prev_sri = prev_sri;
390 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
391 reload_mode, &sri);
392 icode = (enum insn_code) sri.icode;
393
394 /* If we don't need any secondary registers, done. */
395 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
396 return -1;
397
398 if (rclass != NO_REGS)
399 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
400 reload_mode, type, &t_icode, &sri);
401
402 /* If we will be using an insn, the secondary reload is for a
403 scratch register. */
404
405 if (icode != CODE_FOR_nothing)
406 {
407 /* If IN_P is nonzero, the reload register will be the output in
408 operand 0. If IN_P is zero, the reload register will be the input
409 in operand 1. Outputs should have an initial "=", which we must
410 skip. */
411
412 /* ??? It would be useful to be able to handle only two, or more than
413 three, operands, but for now we can only handle the case of having
414 exactly three: output, input and one temp/scratch. */
415 gcc_assert (insn_data[(int) icode].n_operands == 3);
416
417 /* ??? We currently have no way to represent a reload that needs
418 an icode to reload from an intermediate tertiary reload register.
419 We should probably have a new field in struct reload to tag a
420 chain of scratch operand reloads onto. */
421 gcc_assert (rclass == NO_REGS);
422
423 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
424 gcc_assert (*scratch_constraint == '=');
425 scratch_constraint++;
426 if (*scratch_constraint == '&')
427 scratch_constraint++;
428 scratch_class = (reg_class_for_constraint
429 (lookup_constraint (scratch_constraint)));
430
431 rclass = scratch_class;
432 mode = insn_data[(int) icode].operand[2].mode;
433 }
434
435 /* This case isn't valid, so fail. Reload is allowed to use the same
436 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
437 in the case of a secondary register, we actually need two different
438 registers for correct code. We fail here to prevent the possibility of
439 silently generating incorrect code later.
440
441 The convention is that secondary input reloads are valid only if the
442 secondary_class is different from class. If you have such a case, you
443 cannot use secondary reloads; you must work around the problem some
444 other way.
445
446 Allow this when a reload_in/out pattern is being used. I.e. assume
447 that the generated code handles this case. */
448
449 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
450 || t_icode != CODE_FOR_nothing);
451
452 /* See if we can reuse an existing secondary reload. */
453 for (s_reload = 0; s_reload < n_reloads; s_reload++)
454 if (rld[s_reload].secondary_p
455 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
456 || reg_class_subset_p (rld[s_reload].rclass, rclass))
457 && ((in_p && rld[s_reload].inmode == mode)
458 || (! in_p && rld[s_reload].outmode == mode))
459 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
460 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
461 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
462 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
463 && (small_register_class_p (rclass)
464 || targetm.small_register_classes_for_mode_p (VOIDmode))
465 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
466 opnum, rld[s_reload].opnum))
467 {
468 if (in_p)
469 rld[s_reload].inmode = mode;
470 if (! in_p)
471 rld[s_reload].outmode = mode;
472
473 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
474 rld[s_reload].rclass = rclass;
475
476 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
477 rld[s_reload].optional &= optional;
478 rld[s_reload].secondary_p = 1;
479 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
480 opnum, rld[s_reload].opnum))
481 rld[s_reload].when_needed = RELOAD_OTHER;
482
483 break;
484 }
485
486 if (s_reload == n_reloads)
487 {
488 #ifdef SECONDARY_MEMORY_NEEDED
489 /* If we need a memory location to copy between the two reload regs,
490 set it up now. Note that we do the input case before making
491 the reload and the output case after. This is due to the
492 way reloads are output. */
493
494 if (in_p && icode == CODE_FOR_nothing
495 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
496 {
497 get_secondary_mem (x, reload_mode, opnum, type);
498
499 /* We may have just added new reloads. Make sure we add
500 the new reload at the end. */
501 s_reload = n_reloads;
502 }
503 #endif
504
505 /* We need to make a new secondary reload for this register class. */
506 rld[s_reload].in = rld[s_reload].out = 0;
507 rld[s_reload].rclass = rclass;
508
509 rld[s_reload].inmode = in_p ? mode : VOIDmode;
510 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
511 rld[s_reload].reg_rtx = 0;
512 rld[s_reload].optional = optional;
513 rld[s_reload].inc = 0;
514 /* Maybe we could combine these, but it seems too tricky. */
515 rld[s_reload].nocombine = 1;
516 rld[s_reload].in_reg = 0;
517 rld[s_reload].out_reg = 0;
518 rld[s_reload].opnum = opnum;
519 rld[s_reload].when_needed = secondary_type;
520 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
521 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
522 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
523 rld[s_reload].secondary_out_icode
524 = ! in_p ? t_icode : CODE_FOR_nothing;
525 rld[s_reload].secondary_p = 1;
526
527 n_reloads++;
528
529 #ifdef SECONDARY_MEMORY_NEEDED
530 if (! in_p && icode == CODE_FOR_nothing
531 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
532 get_secondary_mem (x, mode, opnum, type);
533 #endif
534 }
535
536 *picode = icode;
537 return s_reload;
538 }
539
540 /* If a secondary reload is needed, return its class. If both an intermediate
541 register and a scratch register are needed, we return the class of the
542 intermediate register. */
543 reg_class_t
544 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
545 rtx x)
546 {
547 enum insn_code icode;
548 secondary_reload_info sri;
549
550 sri.icode = CODE_FOR_nothing;
551 sri.prev_sri = NULL;
552 rclass
553 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
554 icode = (enum insn_code) sri.icode;
555
556 /* If there are no secondary reloads at all, we return NO_REGS.
557 If an intermediate register is needed, we return its class. */
558 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
559 return rclass;
560
561 /* No intermediate register is needed, but we have a special reload
562 pattern, which we assume for now needs a scratch register. */
563 return scratch_reload_class (icode);
564 }
565
566 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
567 three operands, verify that operand 2 is an output operand, and return
568 its register class.
569 ??? We'd like to be able to handle any pattern with at least 2 operands,
570 for zero or more scratch registers, but that needs more infrastructure. */
571 enum reg_class
572 scratch_reload_class (enum insn_code icode)
573 {
574 const char *scratch_constraint;
575 enum reg_class rclass;
576
577 gcc_assert (insn_data[(int) icode].n_operands == 3);
578 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
579 gcc_assert (*scratch_constraint == '=');
580 scratch_constraint++;
581 if (*scratch_constraint == '&')
582 scratch_constraint++;
583 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
584 gcc_assert (rclass != NO_REGS);
585 return rclass;
586 }
587 \f
588 #ifdef SECONDARY_MEMORY_NEEDED
589
590 /* Return a memory location that will be used to copy X in mode MODE.
591 If we haven't already made a location for this mode in this insn,
592 call find_reloads_address on the location being returned. */
593
594 rtx
595 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
596 int opnum, enum reload_type type)
597 {
598 rtx loc;
599 int mem_valid;
600
601 /* By default, if MODE is narrower than a word, widen it to a word.
602 This is required because most machines that require these memory
603 locations do not support short loads and stores from all registers
604 (e.g., FP registers). */
605
606 #ifdef SECONDARY_MEMORY_NEEDED_MODE
607 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
608 #else
609 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
610 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
611 #endif
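
  /* For example (illustrative only): with BITS_PER_WORD == 32 and no
     SECONDARY_MEMORY_NEEDED_MODE, a QImode or HImode copy is given an
     SImode slot by the code above.  */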
612
613 /* If we already have made a MEM for this operand in MODE, return it. */
614 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
615 return secondary_memlocs_elim[(int) mode][opnum];
616
617 /* If this is the first time we've tried to get a MEM for this mode,
618 allocate a new one. `something_changed' in reload will get set
619 by noticing that the frame size has changed. */
620
621 if (secondary_memlocs[(int) mode] == 0)
622 {
623 #ifdef SECONDARY_MEMORY_NEEDED_RTX
624 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
625 #else
626 secondary_memlocs[(int) mode]
627 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
628 #endif
629 }
630
631 /* Get a version of the address doing any eliminations needed. If that
632 didn't give us a new MEM, make a new one if it isn't valid. */
633
634 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
635 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
636 MEM_ADDR_SPACE (loc));
637
638 if (! mem_valid && loc == secondary_memlocs[(int) mode])
639 loc = copy_rtx (loc);
640
641 /* The only time the call below will do anything is if the stack
642 offset is too large. In that case IND_LEVELS doesn't matter, so we
643 can just pass a zero. Adjust the type to be the address of the
644 corresponding object. If the address was valid, save the eliminated
645 address. If it wasn't valid, we need to make a reload each time, so
646 don't save it. */
647
648 if (! mem_valid)
649 {
650 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
651 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
652 : RELOAD_OTHER);
653
654 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
655 opnum, type, 0, 0);
656 }
657
658 secondary_memlocs_elim[(int) mode][opnum] = loc;
659 if (secondary_memlocs_elim_used <= (int)mode)
660 secondary_memlocs_elim_used = (int)mode + 1;
661 return loc;
662 }
663
664 /* Clear any secondary memory locations we've made. */
665
666 void
667 clear_secondary_mem (void)
668 {
669 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
670 }
671 #endif /* SECONDARY_MEMORY_NEEDED */
672 \f
673
674 /* Find the largest class which has at least one register valid in
675 mode INNER, and which for every such register, that register number
676 plus N is also valid in OUTER (if in range) and is cheap to move
677 into REGNO. Such a class must exist. */
678
679 static enum reg_class
680 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
681 machine_mode inner ATTRIBUTE_UNUSED, int n,
682 unsigned int dest_regno ATTRIBUTE_UNUSED)
683 {
684 int best_cost = -1;
685 int rclass;
686 int regno;
687 enum reg_class best_class = NO_REGS;
688 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
689 unsigned int best_size = 0;
690 int cost;
691
692 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
693 {
694 int bad = 0;
695 int good = 0;
696 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
697 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
698 {
699 if (HARD_REGNO_MODE_OK (regno, inner))
700 {
701 good = 1;
702 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
703 && ! HARD_REGNO_MODE_OK (regno + n, outer))
704 bad = 1;
705 }
706 }
707
708 if (bad || !good)
709 continue;
710 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
711
712 if ((reg_class_size[rclass] > best_size
713 && (best_cost < 0 || best_cost >= cost))
714 || best_cost > cost)
715 {
716 best_class = (enum reg_class) rclass;
717 best_size = reg_class_size[rclass];
718 best_cost = register_move_cost (outer, (enum reg_class) rclass,
719 dest_class);
720 }
721 }
722
723 gcc_assert (best_size != 0);
724
725 return best_class;
726 }
727
728 /* We are trying to reload a subreg of something that is not a register.
729 Find the largest class which contains only registers valid in
730 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
731 which we would eventually like to obtain the object. */
732
733 static enum reg_class
734 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
735 machine_mode mode ATTRIBUTE_UNUSED,
736 enum reg_class dest_class ATTRIBUTE_UNUSED)
737 {
738 int best_cost = -1;
739 int rclass;
740 int regno;
741 enum reg_class best_class = NO_REGS;
742 unsigned int best_size = 0;
743 int cost;
744
745 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
746 {
747 int bad = 0;
748 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
749 {
750 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
751 && !HARD_REGNO_MODE_OK (regno, mode))
752 bad = 1;
753 }
754
755 if (bad)
756 continue;
757
758 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
759
760 if ((reg_class_size[rclass] > best_size
761 && (best_cost < 0 || best_cost >= cost))
762 || best_cost > cost)
763 {
764 best_class = (enum reg_class) rclass;
765 best_size = reg_class_size[rclass];
766 best_cost = register_move_cost (outer, (enum reg_class) rclass,
767 dest_class);
768 }
769 }
770
771 gcc_assert (best_size != 0);
772
773 #ifdef LIMIT_RELOAD_CLASS
774 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
775 #endif
776 return best_class;
777 }
778 \f
779 /* Return the number of a previously made reload that can be combined with
780 a new one, or n_reloads if none of the existing reloads can be used.
781 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
782 push_reload, they determine the kind of the new reload that we try to
783 combine. P_IN points to the corresponding value of IN, which can be
784 modified by this function.
785 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
786
787 static int
788 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
789 enum reload_type type, int opnum, int dont_share)
790 {
791 rtx in = *p_in;
792 int i;
793 /* We can't merge two reloads if the output of either one is
794 earlyclobbered. */
795
796 if (earlyclobber_operand_p (out))
797 return n_reloads;
798
799 /* We can use an existing reload if the class is right
800 and at least one of IN and OUT is a match
801 and the other is at worst neutral.
802 (A zero compared against anything is neutral.)
803
804 For targets with small register classes, don't use existing reloads
805 unless they are for the same thing since that can cause us to need
806 more reload registers than we otherwise would. */
807
808 for (i = 0; i < n_reloads; i++)
809 if ((reg_class_subset_p (rclass, rld[i].rclass)
810 || reg_class_subset_p (rld[i].rclass, rclass))
811 /* If the existing reload has a register, it must fit our class. */
812 && (rld[i].reg_rtx == 0
813 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
814 true_regnum (rld[i].reg_rtx)))
815 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
816 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
817 || (out != 0 && MATCHES (rld[i].out, out)
818 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
819 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
820 && (small_register_class_p (rclass)
821 || targetm.small_register_classes_for_mode_p (VOIDmode))
822 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
823 return i;
824
825 /* Reloading a plain reg for input can match a reload to postincrement
826 that reg, since the postincrement's value is the right value.
827 Likewise, it can match a preincrement reload, since we regard
828 the preincrementation as happening before any ref in this insn
829 to that register. */
830 for (i = 0; i < n_reloads; i++)
831 if ((reg_class_subset_p (rclass, rld[i].rclass)
832 || reg_class_subset_p (rld[i].rclass, rclass))
833 /* If the existing reload has a register, it must fit our
834 class. */
835 && (rld[i].reg_rtx == 0
836 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
837 true_regnum (rld[i].reg_rtx)))
838 && out == 0 && rld[i].out == 0 && rld[i].in != 0
839 && ((REG_P (in)
840 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
841 && MATCHES (XEXP (rld[i].in, 0), in))
842 || (REG_P (rld[i].in)
843 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
844 && MATCHES (XEXP (in, 0), rld[i].in)))
845 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
846 && (small_register_class_p (rclass)
847 || targetm.small_register_classes_for_mode_p (VOIDmode))
848 && MERGABLE_RELOADS (type, rld[i].when_needed,
849 opnum, rld[i].opnum))
850 {
851 /* Make sure reload_in ultimately has the increment,
852 not the plain register. */
853 if (REG_P (in))
854 *p_in = rld[i].in;
855 return i;
856 }
857 return n_reloads;
858 }
859
860 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
861 expression. MODE is the mode that X will be used in. OUTPUT is true if
862 the function is invoked for the output part of an enclosing reload. */
863
864 static bool
865 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
866 {
867 rtx inner;
868
869 /* Only SUBREGs are problematical. */
870 if (GET_CODE (x) != SUBREG)
871 return false;
872
873 inner = SUBREG_REG (x);
874
875 /* If INNER is a constant or PLUS, then INNER will need reloading. */
876 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
877 return true;
878
879 /* If INNER is not a hard register, then INNER will not need reloading. */
880 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
881 return false;
882
883 /* If INNER is not ok for MODE, then INNER will need reloading. */
884 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
885 return true;
886
887 /* If this is for an output, and the outer part is a word or smaller,
888 INNER is larger than a word and the number of registers in INNER is
889 not the same as the number of words in INNER, then INNER will need
890 reloading (with an in-out reload). */
891 return (output
892 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
893 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
894 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
895 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
896 }
897
898 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
899 requiring an extra reload register. The caller has already found that
900 IN contains some reference to REGNO, so check that we can produce the
901 new value in a single step. E.g. if we have
902 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
903 instruction that adds one to a register, this should succeed.
904 However, if we have something like
905 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
906 needs to be loaded into a register first, we need a separate reload
907 register.
908 Such PLUS reloads are generated by find_reloads_address_part.
909 The out-of-range PLUS expressions are usually introduced in the instruction
910 patterns by register elimination and substituting pseudos without a home
911 by their function-invariant equivalences. */
912 static int
913 can_reload_into (rtx in, int regno, machine_mode mode)
914 {
915 rtx dst;
916 rtx_insn *test_insn;
917 int r = 0;
918 struct recog_data_d save_recog_data;
919
920 /* For matching constraints, we often get notional input reloads where
921 we want to use the original register as the reload register. I.e.
922 technically this is a non-optional input-output reload, but IN is
923 already a valid register, and has been chosen as the reload register.
924 Speed this up, since it trivially works. */
925 if (REG_P (in))
926 return 1;
927
928 /* To test MEMs properly, we'd have to take into account all the reloads
929 that are already scheduled, which can become quite complicated.
930 And since we've already handled address reloads for this MEM, it
931 should always succeed anyway. */
932 if (MEM_P (in))
933 return 1;
934
935 /* If we can make a simple SET insn that does the job, everything should
936 be fine. */
937 dst = gen_rtx_REG (mode, regno);
938 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
939 save_recog_data = recog_data;
940 if (recog_memoized (test_insn) >= 0)
941 {
942 extract_insn (test_insn);
943 r = constrain_operands (1, get_enabled_alternatives (test_insn));
944 }
945 recog_data = save_recog_data;
946 return r;
947 }
948
949 /* Record one reload that needs to be performed.
950 IN is an rtx saying where the data are to be found before this instruction.
951 OUT says where they must be stored after the instruction.
952 (IN is zero for data not read, and OUT is zero for data not written.)
953 INLOC and OUTLOC point to the places in the instructions where
954 IN and OUT were found.
955 If IN and OUT are both nonzero, it means the same register must be used
956 to reload both IN and OUT.
957
958 RCLASS is a register class required for the reloaded data.
959 INMODE is the machine mode that the instruction requires
960 for the reg that replaces IN and OUTMODE is likewise for OUT.
961
962 If IN is zero, then OUT's location and mode should be passed as
963 INLOC and INMODE.
964
965 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
966
967 OPTIONAL nonzero means this reload does not need to be performed:
968 it can be discarded if that is more convenient.
969
970 OPNUM and TYPE say what the purpose of this reload is.
971
972 The return value is the reload-number for this reload.
973
974 If both IN and OUT are nonzero, in some rare cases we might
975 want to make two separate reloads. (Actually we never do this now.)
976 Therefore, the reload-number for OUT is stored in
977 output_reloadnum when we return; the return value applies to IN.
978 Usually (presently always), when IN and OUT are nonzero,
979 the two reload-numbers are equal, but the caller should be careful to
980 distinguish them. */
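
/* A minimal and purely hypothetical call from an operand scan might be

     push_reload (recog_data.operand[i], NULL_RTX,
                  recog_data.operand_loc[i], (rtx *) 0,
                  GENERAL_REGS, GET_MODE (recog_data.operand[i]), VOIDmode,
                  0, 0, i, RELOAD_FOR_INPUT);

   i.e. an input-only reload of operand I into GENERAL_REGS; the class,
   operand number and reload type are only examples, in reality
   find_reloads derives them from the constraints.  */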
981
982 int
983 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
984 enum reg_class rclass, machine_mode inmode,
985 machine_mode outmode, int strict_low, int optional,
986 int opnum, enum reload_type type)
987 {
988 int i;
989 int dont_share = 0;
990 int dont_remove_subreg = 0;
991 #ifdef LIMIT_RELOAD_CLASS
992 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
993 #endif
994 int secondary_in_reload = -1, secondary_out_reload = -1;
995 enum insn_code secondary_in_icode = CODE_FOR_nothing;
996 enum insn_code secondary_out_icode = CODE_FOR_nothing;
997 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
998 subreg_in_class = NO_REGS;
999
1000 /* INMODE and/or OUTMODE could be VOIDmode if no mode
1001 has been specified for the operand. In that case,
1002 use the operand's mode as the mode to reload. */
1003 if (inmode == VOIDmode && in != 0)
1004 inmode = GET_MODE (in);
1005 if (outmode == VOIDmode && out != 0)
1006 outmode = GET_MODE (out);
1007
1008 /* If find_reloads and friends have so far failed to replace a pseudo
1009 with its reg_equiv_constant, something went wrong
1010 beforehand.
1011 Note that it can't simply be done here if we missed it earlier
1012 since the constant might need to be pushed into the literal pool
1013 and the resulting memref would probably need further
1014 reloading. */
1015 if (in != 0 && REG_P (in))
1016 {
1017 int regno = REGNO (in);
1018
1019 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1020 || reg_renumber[regno] >= 0
1021 || reg_equiv_constant (regno) == NULL_RTX);
1022 }
1023
1024 /* reg_equiv_constant only contains constants which are obviously
1025 not appropriate as destination. So if we would need to replace
1026 the destination pseudo with a constant we are in real
1027 trouble. */
1028 if (out != 0 && REG_P (out))
1029 {
1030 int regno = REGNO (out);
1031
1032 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1033 || reg_renumber[regno] >= 0
1034 || reg_equiv_constant (regno) == NULL_RTX);
1035 }
1036
1037 /* If we have a read-write operand with an address side-effect,
1038 change either IN or OUT so the side-effect happens only once. */
1039 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1040 switch (GET_CODE (XEXP (in, 0)))
1041 {
1042 case POST_INC: case POST_DEC: case POST_MODIFY:
1043 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1044 break;
1045
1046 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1047 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1048 break;
1049
1050 default:
1051 break;
1052 }
1053
1054 /* If we are reloading a (SUBREG constant ...), really reload just the
1055 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1056 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1057 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1058 register is a pseudo, also reload the inside expression.
1059 For machines that extend byte loads, do this for any SUBREG of a pseudo
1060 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1061 M2 is an integral mode that gets extended when loaded.
1062 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1063 where either M1 is not valid for R or M2 is wider than a word but we
1064 only need one register to store an M2-sized quantity in R.
1065 (However, if OUT is nonzero, we need to reload the reg *and*
1066 the subreg, so do nothing here, and let following statement handle it.)
1067
1068 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1069 we can't handle it here because CONST_INT does not indicate a mode.
1070
1071 Similarly, we must reload the inside expression if we have a
1072 STRICT_LOW_PART (presumably, in == out in this case).
1073
1074 Also reload the inner expression if it does not require a secondary
1075 reload but the SUBREG does.
1076
1077 Finally, reload the inner expression if it is a register that is in
1078 the class whose registers cannot be referenced in a different size
1079 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1080 cannot reload just the inside since we might end up with the wrong
1081 register class. But if it is inside a STRICT_LOW_PART, we have
1082 no choice, so we hope we do get the right register class there. */
1083
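  /* One concrete, purely illustrative case of the above: on a machine that
     extends QImode loads, reloading (subreg:SI (reg:QI 65) 0) where
     (reg:QI 65) got no hard register reloads just the inner QImode value in
     its own mode, relying on the extending load to supply the SImode
     bits.  */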
1084 if (in != 0 && GET_CODE (in) == SUBREG
1085 && (subreg_lowpart_p (in) || strict_low)
1086 #ifdef CANNOT_CHANGE_MODE_CLASS
1087 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1088 #endif
1089 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1090 && (CONSTANT_P (SUBREG_REG (in))
1091 || GET_CODE (SUBREG_REG (in)) == PLUS
1092 || strict_low
1093 || (((REG_P (SUBREG_REG (in))
1094 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1095 || MEM_P (SUBREG_REG (in)))
1096 && ((GET_MODE_PRECISION (inmode)
1097 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1098 #ifdef LOAD_EXTEND_OP
1099 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1100 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1101 <= UNITS_PER_WORD)
1102 && (GET_MODE_PRECISION (inmode)
1103 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1104 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1105 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1106 #endif
1107 #ifdef WORD_REGISTER_OPERATIONS
1108 || ((GET_MODE_PRECISION (inmode)
1109 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1110 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1111 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1112 / UNITS_PER_WORD)))
1113 #endif
1114 ))
1115 || (REG_P (SUBREG_REG (in))
1116 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1117 /* The case where out is nonzero
1118 is handled differently in the following statement. */
1119 && (out == 0 || subreg_lowpart_p (in))
1120 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1121 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1122 > UNITS_PER_WORD)
1123 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1124 / UNITS_PER_WORD)
1125 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1126 [GET_MODE (SUBREG_REG (in))]))
1127 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1128 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1129 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1130 SUBREG_REG (in))
1131 == NO_REGS))
1132 #ifdef CANNOT_CHANGE_MODE_CLASS
1133 || (REG_P (SUBREG_REG (in))
1134 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1135 && REG_CANNOT_CHANGE_MODE_P
1136 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1137 #endif
1138 ))
1139 {
1140 #ifdef LIMIT_RELOAD_CLASS
1141 in_subreg_loc = inloc;
1142 #endif
1143 inloc = &SUBREG_REG (in);
1144 in = *inloc;
1145 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1146 if (MEM_P (in))
1147 /* This is supposed to happen only for paradoxical subregs made by
1148 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1149 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1150 #endif
1151 inmode = GET_MODE (in);
1152 }
1153
1154 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1155 where M1 is not valid for R if it was not handled by the code above.
1156
1157 Similar issue for (SUBREG constant ...) if it was not handled by the
1158 code above. This can happen if SUBREG_BYTE != 0.
1159
1160 However, we must reload the inner reg *as well as* the subreg in
1161 that case. */
1162
1163 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1164 {
1165 if (REG_P (SUBREG_REG (in)))
1166 subreg_in_class
1167 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1168 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1169 GET_MODE (SUBREG_REG (in)),
1170 SUBREG_BYTE (in),
1171 GET_MODE (in)),
1172 REGNO (SUBREG_REG (in)));
1173 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1174 subreg_in_class = find_valid_class_1 (inmode,
1175 GET_MODE (SUBREG_REG (in)),
1176 rclass);
1177
1178 /* This relies on the fact that emit_reload_insns outputs the
1179 instructions for input reloads of type RELOAD_OTHER in the same
1180 order as the reloads. Thus if the outer reload is also of type
1181 RELOAD_OTHER, we are guaranteed that this inner reload will be
1182 output before the outer reload. */
1183 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1184 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1185 dont_remove_subreg = 1;
1186 }
1187
1188 /* Similarly for paradoxical and problematical SUBREGs on the output.
1189 Note that there is no reason we need worry about the previous value
1190 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1191 entitled to clobber it all (except in the case of a word mode subreg
1192 or of a STRICT_LOW_PART, in that latter case the constraint should
1193 label it input-output.) */
1194 if (out != 0 && GET_CODE (out) == SUBREG
1195 && (subreg_lowpart_p (out) || strict_low)
1196 #ifdef CANNOT_CHANGE_MODE_CLASS
1197 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1198 #endif
1199 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1200 && (CONSTANT_P (SUBREG_REG (out))
1201 || strict_low
1202 || (((REG_P (SUBREG_REG (out))
1203 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1204 || MEM_P (SUBREG_REG (out)))
1205 && ((GET_MODE_PRECISION (outmode)
1206 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1207 #ifdef WORD_REGISTER_OPERATIONS
1208 || ((GET_MODE_PRECISION (outmode)
1209 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1210 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1211 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1212 / UNITS_PER_WORD)))
1213 #endif
1214 ))
1215 || (REG_P (SUBREG_REG (out))
1216 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1217 /* The case of a word mode subreg
1218 is handled differently in the following statement. */
1219 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1220 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1221 > UNITS_PER_WORD))
1222 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1223 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1224 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1225 SUBREG_REG (out))
1226 == NO_REGS))
1227 #ifdef CANNOT_CHANGE_MODE_CLASS
1228 || (REG_P (SUBREG_REG (out))
1229 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1230 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1231 GET_MODE (SUBREG_REG (out)),
1232 outmode))
1233 #endif
1234 ))
1235 {
1236 #ifdef LIMIT_RELOAD_CLASS
1237 out_subreg_loc = outloc;
1238 #endif
1239 outloc = &SUBREG_REG (out);
1240 out = *outloc;
1241 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1242 gcc_assert (!MEM_P (out)
1243 || GET_MODE_SIZE (GET_MODE (out))
1244 <= GET_MODE_SIZE (outmode));
1245 #endif
1246 outmode = GET_MODE (out);
1247 }
1248
1249 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1250 where either M1 is not valid for R or M2 is wider than a word but we
1251 only need one register to store an M2-sized quantity in R.
1252
1253 However, we must reload the inner reg *as well as* the subreg in
1254 that case and the inner reg is an in-out reload. */
1255
1256 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1257 {
1258 enum reg_class in_out_class
1259 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1260 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1261 GET_MODE (SUBREG_REG (out)),
1262 SUBREG_BYTE (out),
1263 GET_MODE (out)),
1264 REGNO (SUBREG_REG (out)));
1265
1266 /* This relies on the fact that emit_reload_insns outputs the
1267 instructions for output reloads of type RELOAD_OTHER in reverse
1268 order of the reloads. Thus if the outer reload is also of type
1269 RELOAD_OTHER, we are guaranteed that this inner reload will be
1270 output after the outer reload. */
1271 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1272 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1273 0, 0, opnum, RELOAD_OTHER);
1274 dont_remove_subreg = 1;
1275 }
1276
1277 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1278 if (in != 0 && out != 0 && MEM_P (out)
1279 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1280 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1281 dont_share = 1;
1282
1283 /* If IN is a SUBREG of a hard register, make a new REG. This
1284 simplifies some of the cases below. */
1285
1286 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1287 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1288 && ! dont_remove_subreg)
1289 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1290
1291 /* Similarly for OUT. */
1292 if (out != 0 && GET_CODE (out) == SUBREG
1293 && REG_P (SUBREG_REG (out))
1294 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1295 && ! dont_remove_subreg)
1296 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1297
1298 /* Narrow down the class of register wanted if that is
1299 desirable on this machine for efficiency. */
1300 {
1301 reg_class_t preferred_class = rclass;
1302
1303 if (in != 0)
1304 preferred_class = targetm.preferred_reload_class (in, rclass);
1305
1306 /* Output reloads may need analogous treatment, different in detail. */
1307 if (out != 0)
1308 preferred_class
1309 = targetm.preferred_output_reload_class (out, preferred_class);
1310
1311 /* Discard what the target said if we cannot do it. */
1312 if (preferred_class != NO_REGS
1313 || (optional && type == RELOAD_FOR_OUTPUT))
1314 rclass = (enum reg_class) preferred_class;
1315 }
1316
1317 /* Make sure we use a class that can handle the actual pseudo
1318 inside any subreg. For example, on the 386, QImode regs
1319 can appear within SImode subregs. Although GENERAL_REGS
1320 can handle SImode, QImode needs a smaller class. */
1321 #ifdef LIMIT_RELOAD_CLASS
1322 if (in_subreg_loc)
1323 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1324 else if (in != 0 && GET_CODE (in) == SUBREG)
1325 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1326
1327 if (out_subreg_loc)
1328 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1329 if (out != 0 && GET_CODE (out) == SUBREG)
1330 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1331 #endif
1332
1333 /* Verify that this class is at least possible for the mode that
1334 is specified. */
1335 if (this_insn_is_asm)
1336 {
1337 machine_mode mode;
1338 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1339 mode = inmode;
1340 else
1341 mode = outmode;
1342 if (mode == VOIDmode)
1343 {
1344 error_for_asm (this_insn, "cannot reload integer constant "
1345 "operand in %<asm%>");
1346 mode = word_mode;
1347 if (in != 0)
1348 inmode = word_mode;
1349 if (out != 0)
1350 outmode = word_mode;
1351 }
1352 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1353 if (HARD_REGNO_MODE_OK (i, mode)
1354 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1355 break;
1356 if (i == FIRST_PSEUDO_REGISTER)
1357 {
1358 error_for_asm (this_insn, "impossible register constraint "
1359 "in %<asm%>");
1360 /* Avoid further trouble with this insn. */
1361 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1362 /* We used to continue here setting class to ALL_REGS, but it triggers
1363 a sanity check on i386 for:
1364 void foo(long double d)
1365 {
1366 asm("" :: "a" (d));
1367 }
1368 Returning zero here ought to be safe as we take care in
1369 find_reloads to not process the reloads when the instruction was
1370 replaced by USE. */
1371
1372 return 0;
1373 }
1374 }
1375
1376 /* Optional output reloads are always OK even if we have no register class,
1377 since the function of these reloads is only to have spill_reg_store etc.
1378 set, so that the storing insn can be deleted later. */
1379 gcc_assert (rclass != NO_REGS
1380 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1381
1382 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1383
1384 if (i == n_reloads)
1385 {
1386 /* See if we need a secondary reload register to move between CLASS
1387 and IN or CLASS and OUT. Get the icode and push any required reloads
1388 needed for each of them if so. */
1389
1390 if (in != 0)
1391 secondary_in_reload
1392 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1393 &secondary_in_icode, NULL);
1394 if (out != 0 && GET_CODE (out) != SCRATCH)
1395 secondary_out_reload
1396 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1397 type, &secondary_out_icode, NULL);
1398
1399 /* We found no existing reload suitable for re-use.
1400 So add an additional reload. */
1401
1402 #ifdef SECONDARY_MEMORY_NEEDED
1403 if (subreg_in_class == NO_REGS
1404 && in != 0
1405 && (REG_P (in)
1406 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1407 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1408 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1409 /* If a memory location is needed for the copy, make one. */
1410 if (subreg_in_class != NO_REGS
1411 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1412 get_secondary_mem (in, inmode, opnum, type);
1413 #endif
1414
1415 i = n_reloads;
1416 rld[i].in = in;
1417 rld[i].out = out;
1418 rld[i].rclass = rclass;
1419 rld[i].inmode = inmode;
1420 rld[i].outmode = outmode;
1421 rld[i].reg_rtx = 0;
1422 rld[i].optional = optional;
1423 rld[i].inc = 0;
1424 rld[i].nocombine = 0;
1425 rld[i].in_reg = inloc ? *inloc : 0;
1426 rld[i].out_reg = outloc ? *outloc : 0;
1427 rld[i].opnum = opnum;
1428 rld[i].when_needed = type;
1429 rld[i].secondary_in_reload = secondary_in_reload;
1430 rld[i].secondary_out_reload = secondary_out_reload;
1431 rld[i].secondary_in_icode = secondary_in_icode;
1432 rld[i].secondary_out_icode = secondary_out_icode;
1433 rld[i].secondary_p = 0;
1434
1435 n_reloads++;
1436
1437 #ifdef SECONDARY_MEMORY_NEEDED
1438 if (out != 0
1439 && (REG_P (out)
1440 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1441 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1442 && SECONDARY_MEMORY_NEEDED (rclass,
1443 REGNO_REG_CLASS (reg_or_subregno (out)),
1444 outmode))
1445 get_secondary_mem (out, outmode, opnum, type);
1446 #endif
1447 }
1448 else
1449 {
1450 /* We are reusing an existing reload,
1451 but we may have additional information for it.
1452 For example, we may now have both IN and OUT
1453 while the old one may have just one of them. */
1454
1455 /* The modes can be different. If they are, we want to reload in
1456 the larger mode, so that the value is valid for both modes. */
1457 if (inmode != VOIDmode
1458 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1459 rld[i].inmode = inmode;
1460 if (outmode != VOIDmode
1461 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1462 rld[i].outmode = outmode;
1463 if (in != 0)
1464 {
1465 rtx in_reg = inloc ? *inloc : 0;
1466 /* If we merge reloads for two distinct rtl expressions that
1467 are identical in content, there might be duplicate address
1468 reloads. Remove the extra set now, so that if we later find
1469 that we can inherit this reload, we can get rid of the
1470 address reloads altogether.
1471
1472 Do not do this if both reloads are optional since the result
1473 would be an optional reload which could potentially leave
1474 unresolved address replacements.
1475
1476 It is not sufficient to call transfer_replacements since
1477 choose_reload_regs will remove the replacements for address
1478 reloads of inherited reloads which results in the same
1479 problem. */
1480 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1481 && ! (rld[i].optional && optional))
1482 {
1483 /* We must keep the address reload with the lower operand
1484 number alive. */
1485 if (opnum > rld[i].opnum)
1486 {
1487 remove_address_replacements (in);
1488 in = rld[i].in;
1489 in_reg = rld[i].in_reg;
1490 }
1491 else
1492 remove_address_replacements (rld[i].in);
1493 }
1494 /* When emitting reloads we don't look only at the in- and outmode,
1495 but also directly at the operands (in and out).
1496 So we can't simply overwrite them with whatever we have found
1497 for this (to-be-merged) reload; we have to "merge" that too.
1498 Reusing another reload already verified that we deal with the
1499 same operands, just possibly in different modes. So we
1500 overwrite the operands only when the new mode is larger.
1501 See also PR33613. */
1502 if (!rld[i].in
1503 || GET_MODE_SIZE (GET_MODE (in))
1504 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1505 rld[i].in = in;
1506 if (!rld[i].in_reg
1507 || (in_reg
1508 && GET_MODE_SIZE (GET_MODE (in_reg))
1509 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1510 rld[i].in_reg = in_reg;
1511 }
1512 if (out != 0)
1513 {
1514 if (!rld[i].out
1515 || (out
1516 && GET_MODE_SIZE (GET_MODE (out))
1517 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1518 rld[i].out = out;
1519 if (outloc
1520 && (!rld[i].out_reg
1521 || GET_MODE_SIZE (GET_MODE (*outloc))
1522 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1523 rld[i].out_reg = *outloc;
1524 }
1525 if (reg_class_subset_p (rclass, rld[i].rclass))
1526 rld[i].rclass = rclass;
1527 rld[i].optional &= optional;
1528 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1529 opnum, rld[i].opnum))
1530 rld[i].when_needed = RELOAD_OTHER;
1531 rld[i].opnum = MIN (rld[i].opnum, opnum);
1532 }
1533
1534 /* If the ostensible rtx being reloaded differs from the rtx found
1535 in the location to substitute, this reload is not safe to combine
1536 because we cannot reliably tell whether it appears in the insn. */
1537
1538 if (in != 0 && in != *inloc)
1539 rld[i].nocombine = 1;
1540
1541 #if 0
1542 /* This was replaced by changes in find_reloads_address_1 and the new
1543 function inc_for_reload, which go with a new meaning of reload_inc. */
1544
1545 /* If this is an IN/OUT reload in an insn that sets the CC,
1546 it must be for an autoincrement. It doesn't work to store
1547 the incremented value after the insn because that would clobber the CC.
1548 So we must handle the increment on the value reloaded from:
1549 increment it, store it back, then decrement it again. */
1550 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1551 {
1552 out = 0;
1553 rld[i].out = 0;
1554 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1555 /* If we did not find a nonzero amount-to-increment-by,
1556 that contradicts the belief that IN is being incremented
1557 in an address in this insn. */
1558 gcc_assert (rld[i].inc != 0);
1559 }
1560 #endif
1561
1562 /* If we will replace IN and OUT with the reload-reg,
1563 record where they are located so that substitution need
1564 not do a tree walk. */
1565
1566 if (replace_reloads)
1567 {
1568 if (inloc != 0)
1569 {
1570 struct replacement *r = &replacements[n_replacements++];
1571 r->what = i;
1572 r->where = inloc;
1573 r->mode = inmode;
1574 }
1575 if (outloc != 0 && outloc != inloc)
1576 {
1577 struct replacement *r = &replacements[n_replacements++];
1578 r->what = i;
1579 r->where = outloc;
1580 r->mode = outmode;
1581 }
1582 }
1583
1584 /* If this reload is just being introduced and it has both
1585 an incoming quantity and an outgoing quantity that are
1586 supposed to be made to match, see if either one of the two
1587 can serve as the place to reload into.
1588
1589 If one of them is acceptable, set rld[i].reg_rtx
1590 to that one. */
1591
1592 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1593 {
1594 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1595 inmode, outmode,
1596 rld[i].rclass, i,
1597 earlyclobber_operand_p (out));
1598
1599 /* If the outgoing register already contains the same value
1600 as the incoming one, we can dispense with loading it.
1601 The easiest way to tell the caller that is to give a phony
1602 value for the incoming operand (same as outgoing one). */
1603 if (rld[i].reg_rtx == out
1604 && (REG_P (in) || CONSTANT_P (in))
1605 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1606 static_reload_reg_p, i, inmode))
1607 rld[i].in = out;
1608 }
1609
1610 /* If this is an input reload and the operand contains a register that
1611 dies in this insn and is used nowhere else, see if it is the right class
1612 to be used for this reload. Use it if so. (This occurs most commonly
1613 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1614 this if it is also an output reload that mentions the register unless
1615 the output is a SUBREG that clobbers an entire register.
1616
1617 Note that the operand might be one of the spill regs, if it is a
1618 pseudo reg and we are in a block where spilling has not taken place.
1619 But if there is no spilling in this block, that is OK.
1620 An explicitly used hard reg cannot be a spill reg. */
1621
1622 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1623 {
1624 rtx note;
1625 int regno;
1626 machine_mode rel_mode = inmode;
1627
1628 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1629 rel_mode = outmode;
1630
1631 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1632 if (REG_NOTE_KIND (note) == REG_DEAD
1633 && REG_P (XEXP (note, 0))
1634 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1635 && reg_mentioned_p (XEXP (note, 0), in)
1636 /* Check that a former pseudo is valid; see find_dummy_reload. */
1637 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1638 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1639 ORIGINAL_REGNO (XEXP (note, 0)))
1640 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1641 && ! refers_to_regno_for_reload_p (regno,
1642 end_hard_regno (rel_mode,
1643 regno),
1644 PATTERN (this_insn), inloc)
1645 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1646 /* If this is also an output reload, IN cannot be used as
1647 the reload register if it is set in this insn unless IN
1648 is also OUT. */
1649 && (out == 0 || in == out
1650 || ! hard_reg_set_here_p (regno,
1651 end_hard_regno (rel_mode, regno),
1652 PATTERN (this_insn)))
1653 /* ??? Why is this code so different from the previous?
1654 Is there any simple coherent way to describe the two together?
1655 What's going on here? */
1656 && (in != out
1657 || (GET_CODE (in) == SUBREG
1658 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1659 / UNITS_PER_WORD)
1660 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1661 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1662 /* Make sure the operand fits in the reg that dies. */
1663 && (GET_MODE_SIZE (rel_mode)
1664 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1665 && HARD_REGNO_MODE_OK (regno, inmode)
1666 && HARD_REGNO_MODE_OK (regno, outmode))
1667 {
1668 unsigned int offs;
1669 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1670 hard_regno_nregs[regno][outmode]);
1671
1672 for (offs = 0; offs < nregs; offs++)
1673 if (fixed_regs[regno + offs]
1674 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1675 regno + offs))
1676 break;
1677
1678 if (offs == nregs
1679 && (! (refers_to_regno_for_reload_p
1680 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1681 || can_reload_into (in, regno, inmode)))
1682 {
1683 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1684 break;
1685 }
1686 }
1687 }
1688
1689 if (out)
1690 output_reloadnum = i;
1691
1692 return i;
1693 }
1694
1695 /* Record an additional place we must replace a value
1696 for which we have already recorded a reload.
1697 RELOADNUM is the value returned by push_reload
1698 when the reload was recorded.
1699 This is used in insn patterns that use match_dup. */
1700
1701 static void
1702 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1703 {
1704 if (replace_reloads)
1705 {
1706 struct replacement *r = &replacements[n_replacements++];
1707 r->what = reloadnum;
1708 r->where = loc;
1709 r->mode = mode;
1710 }
1711 }
1712
1713 /* Duplicate any replacement we have recorded to apply at
1714 location ORIG_LOC to also be performed at DUP_LOC.
1715 This is used in insn patterns that use match_dup. */
1716
1717 static void
1718 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1719 {
1720 int i, n = n_replacements;
1721
1722 for (i = 0; i < n; i++)
1723 {
1724 struct replacement *r = &replacements[i];
1725 if (r->where == orig_loc)
1726 push_replacement (dup_loc, r->what, r->mode);
1727 }
1728 }
1729 \f
1730 /* Transfer all replacements that used to be in reload FROM to be in
1731 reload TO. */
1732
1733 void
1734 transfer_replacements (int to, int from)
1735 {
1736 int i;
1737
1738 for (i = 0; i < n_replacements; i++)
1739 if (replacements[i].what == from)
1740 replacements[i].what = to;
1741 }
1742 \f
1743 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1744 or a subpart of it. If we have any replacements registered for IN_RTX,
1745 cancel the reloads that were supposed to load them.
1746 Return nonzero if we canceled any reloads. */
1747 int
1748 remove_address_replacements (rtx in_rtx)
1749 {
1750 int i, j;
1751 char reload_flags[MAX_RELOADS];
1752 int something_changed = 0;
1753
1754 memset (reload_flags, 0, sizeof reload_flags);
1755 for (i = 0, j = 0; i < n_replacements; i++)
1756 {
1757 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1758 reload_flags[replacements[i].what] |= 1;
1759 else
1760 {
1761 replacements[j++] = replacements[i];
1762 reload_flags[replacements[i].what] |= 2;
1763 }
1764 }
1765 /* Note that the following store must be done before the recursive calls. */
1766 n_replacements = j;
1767
1768 for (i = n_reloads - 1; i >= 0; i--)
1769 {
1770 if (reload_flags[i] == 1)
1771 {
1772 deallocate_reload_reg (i);
1773 remove_address_replacements (rld[i].in);
1774 rld[i].in = 0;
1775 something_changed = 1;
1776 }
1777 }
1778 return something_changed;
1779 }
1780 \f
1781 /* If there is only one output reload, and it is not for an earlyclobber
1782 operand, try to combine it with a (logically unrelated) input reload
1783 to reduce the number of reload registers needed.
1784
1785 This is safe if the input reload does not appear in
1786 the value being output-reloaded, because this implies
1787 it is not needed any more once the original insn completes.
1788
1789 If that doesn't work, see if we can use any of the registers that
1790 die in this insn as a reload register. We can if it is of the right
1791 class and does not appear in the value being output-reloaded. */
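/* Hedged illustration (hypothetical pseudo numbers, assuming the pattern
   allows distinct input and output registers): in
   (set (reg:SI 100) (neg:SI (reg:SI 101))), where neither pseudo received a
   hard register, the input reload for operand 1 and the output reload for
   operand 0 can share one reload register R: R is loaded from 101's stack
   slot, the insn negates R in place, and R is then stored to 100's stack
   slot.  This is safe precisely because (reg:SI 101) does not appear in the
   value being output-reloaded.  */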
1792
1793 static void
1794 combine_reloads (void)
1795 {
1796 int i, regno;
1797 int output_reload = -1;
1798 int secondary_out = -1;
1799 rtx note;
1800
1801 /* Find the output reload; return unless there is exactly one
1802 and that one is mandatory. */
1803
1804 for (i = 0; i < n_reloads; i++)
1805 if (rld[i].out != 0)
1806 {
1807 if (output_reload >= 0)
1808 return;
1809 output_reload = i;
1810 }
1811
1812 if (output_reload < 0 || rld[output_reload].optional)
1813 return;
1814
1815 /* An input-output reload isn't combinable. */
1816
1817 if (rld[output_reload].in != 0)
1818 return;
1819
1820 /* If this reload is for an earlyclobber operand, we can't do anything. */
1821 if (earlyclobber_operand_p (rld[output_reload].out))
1822 return;
1823
1824 /* If there is a reload for part of the address of this operand, we would
1825 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1826 its life to the point where doing this combine would not lower the
1827 number of spill registers needed. */
1828 for (i = 0; i < n_reloads; i++)
1829 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1830 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1831 && rld[i].opnum == rld[output_reload].opnum)
1832 return;
1833
1834 /* Check each input reload; can we combine it? */
1835
1836 for (i = 0; i < n_reloads; i++)
1837 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1838 /* Life span of this reload must not extend past main insn. */
1839 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1840 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1841 && rld[i].when_needed != RELOAD_OTHER
1842 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1843 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1844 [(int) rld[output_reload].outmode])
1845 && rld[i].inc == 0
1846 && rld[i].reg_rtx == 0
1847 #ifdef SECONDARY_MEMORY_NEEDED
1848 /* Don't combine two reloads with different secondary
1849 memory locations. */
1850 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1851 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1852 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1853 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1854 #endif
1855 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1856 ? (rld[i].rclass == rld[output_reload].rclass)
1857 : (reg_class_subset_p (rld[i].rclass,
1858 rld[output_reload].rclass)
1859 || reg_class_subset_p (rld[output_reload].rclass,
1860 rld[i].rclass)))
1861 && (MATCHES (rld[i].in, rld[output_reload].out)
1862 /* Args reversed because the first arg seems to be
1863 the one that we imagine being modified
1864 while the second is the one that might be affected. */
1865 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1866 rld[i].in)
1867 /* However, if the input is a register that appears inside
1868 the output, then we also can't share.
1869 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1870 If the same reload reg is used for both reg 69 and the
1871 result to be stored in memory, then that result
1872 will clobber the address of the memory ref. */
1873 && ! (REG_P (rld[i].in)
1874 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1875 rld[output_reload].out))))
1876 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1877 rld[i].when_needed != RELOAD_FOR_INPUT)
1878 && (reg_class_size[(int) rld[i].rclass]
1879 || targetm.small_register_classes_for_mode_p (VOIDmode))
1880 /* We will allow making things slightly worse by combining an
1881 input and an output, but no worse than that. */
1882 && (rld[i].when_needed == RELOAD_FOR_INPUT
1883 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1884 {
1885 int j;
1886
1887 /* We have found a reload to combine with! */
1888 rld[i].out = rld[output_reload].out;
1889 rld[i].out_reg = rld[output_reload].out_reg;
1890 rld[i].outmode = rld[output_reload].outmode;
1891 /* Mark the old output reload as inoperative. */
1892 rld[output_reload].out = 0;
1893 /* The combined reload is needed for the entire insn. */
1894 rld[i].when_needed = RELOAD_OTHER;
1895 /* If the output reload had a secondary reload, copy it. */
1896 if (rld[output_reload].secondary_out_reload != -1)
1897 {
1898 rld[i].secondary_out_reload
1899 = rld[output_reload].secondary_out_reload;
1900 rld[i].secondary_out_icode
1901 = rld[output_reload].secondary_out_icode;
1902 }
1903
1904 #ifdef SECONDARY_MEMORY_NEEDED
1905 /* Copy any secondary MEM. */
1906 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1907 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1908 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1909 #endif
1910 /* If required, minimize the register class. */
1911 if (reg_class_subset_p (rld[output_reload].rclass,
1912 rld[i].rclass))
1913 rld[i].rclass = rld[output_reload].rclass;
1914
1915 /* Transfer all replacements from the old reload to the combined. */
1916 for (j = 0; j < n_replacements; j++)
1917 if (replacements[j].what == output_reload)
1918 replacements[j].what = i;
1919
1920 return;
1921 }
1922
1923 /* If this insn has only one operand that is modified or written (assumed
1924 to be the first), it must be the one corresponding to this reload. It
1925 is safe to use anything that dies in this insn for that output provided
1926 that it does not occur in the output (we already know it isn't an
1927 earlyclobber). If this is an asm insn, give up. */
1928
1929 if (INSN_CODE (this_insn) == -1)
1930 return;
1931
1932 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1933 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1934 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1935 return;
1936
1937 /* See if some hard register that dies in this insn and is not used in
1938 the output is the right class. Only works if the register we pick
1939 up can fully hold our output reload. */
1940 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1941 if (REG_NOTE_KIND (note) == REG_DEAD
1942 && REG_P (XEXP (note, 0))
1943 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1944 rld[output_reload].out)
1945 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1946 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1947 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1948 regno)
1949 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1950 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1951 /* Ensure that a secondary or tertiary reload for this output
1952 won't want this register. */
1953 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1954 || (!(TEST_HARD_REG_BIT
1955 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1956 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1957 || !(TEST_HARD_REG_BIT
1958 (reg_class_contents[(int) rld[secondary_out].rclass],
1959 regno)))))
1960 && !fixed_regs[regno]
1961 /* Check that a former pseudo is valid; see find_dummy_reload. */
1962 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1963 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1964 ORIGINAL_REGNO (XEXP (note, 0)))
1965 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1966 {
1967 rld[output_reload].reg_rtx
1968 = gen_rtx_REG (rld[output_reload].outmode, regno);
1969 return;
1970 }
1971 }
1972 \f
1973 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1974 See if one of IN and OUT is a register that may be used;
1975 this is desirable since a spill-register won't be needed.
1976 If so, return the register rtx that proves acceptable.
1977
1978 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1979 RCLASS is the register class required for the reload.
1980
1981 If FOR_REAL is >= 0, it is the number of the reload,
1982 and in some cases when it can be discovered that OUT doesn't need
1983 to be computed, clear out rld[FOR_REAL].out.
1984
1985 If FOR_REAL is -1, this should not be done, because this call
1986 is just to see if a register can be found, not to find and install it.
1987
1988 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1989 puts an additional constraint on being able to use IN for OUT since
1990 IN must not appear elsewhere in the insn (it is assumed that IN itself
1991 is safe from the earlyclobber). */
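/* Hedged illustration (hypothetical register numbers): for a two-address
   insn (set (reg:SI 2) (plus:SI (reg:SI 100) (const_int 1))) whose
   constraints require operand 1 to match operand 0, the in-out reload has
   IN = (reg:SI 100) and OUT = (reg:SI 2).  If hard register 2 is in RCLASS
   and is not otherwise used in the insn, find_dummy_reload returns
   (reg:SI 2), so pseudo 100 is loaded straight into reg 2 and no separate
   spill register is needed.  */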
1992
1993 static rtx
1994 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1995 machine_mode inmode, machine_mode outmode,
1996 reg_class_t rclass, int for_real, int earlyclobber)
1997 {
1998 rtx in = real_in;
1999 rtx out = real_out;
2000 int in_offset = 0;
2001 int out_offset = 0;
2002 rtx value = 0;
2003
2004 /* If operands exceed a word, we can't use either of them
2005 unless they have the same size. */
2006 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
2007 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
2008 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
2009 return 0;
2010
2011 /* Note that {in,out}_offset are needed only when 'in' or 'out'
2012 respectively refers to a hard register. */
2013
2014 /* Find the inside of any subregs. */
2015 while (GET_CODE (out) == SUBREG)
2016 {
2017 if (REG_P (SUBREG_REG (out))
2018 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
2019 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
2020 GET_MODE (SUBREG_REG (out)),
2021 SUBREG_BYTE (out),
2022 GET_MODE (out));
2023 out = SUBREG_REG (out);
2024 }
2025 while (GET_CODE (in) == SUBREG)
2026 {
2027 if (REG_P (SUBREG_REG (in))
2028 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2029 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2030 GET_MODE (SUBREG_REG (in)),
2031 SUBREG_BYTE (in),
2032 GET_MODE (in));
2033 in = SUBREG_REG (in);
2034 }
2035
2036 /* Narrow down the reg class, the same way push_reload will;
2037 otherwise we might find a dummy now, but push_reload won't. */
2038 {
2039 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2040 if (preferred_class != NO_REGS)
2041 rclass = (enum reg_class) preferred_class;
2042 }
2043
2044 /* See if OUT will do. */
2045 if (REG_P (out)
2046 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2047 {
2048 unsigned int regno = REGNO (out) + out_offset;
2049 unsigned int nwords = hard_regno_nregs[regno][outmode];
2050 rtx saved_rtx;
2051
2052 /* When we consider whether the insn uses OUT,
2053 ignore references within IN. They don't prevent us
2054 from copying IN into OUT, because those refs would
2055 move into the insn that reloads IN.
2056
2057 However, we only ignore IN in its role as this reload.
2058 If the insn uses IN elsewhere and it contains OUT,
2059 that counts. We can't be sure it's the "same" operand
2060 so it might not go through this reload.
2061
2062 We also need to avoid using OUT if it, or part of it, is a
2063 fixed register. Modifying such registers, even transiently,
2064 may have undefined effects on the machine, such as modifying
2065 the stack pointer. */
2066 saved_rtx = *inloc;
2067 *inloc = const0_rtx;
2068
2069 if (regno < FIRST_PSEUDO_REGISTER
2070 && HARD_REGNO_MODE_OK (regno, outmode)
2071 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2072 PATTERN (this_insn), outloc))
2073 {
2074 unsigned int i;
2075
2076 for (i = 0; i < nwords; i++)
2077 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2078 regno + i)
2079 || fixed_regs[regno + i])
2080 break;
2081
2082 if (i == nwords)
2083 {
2084 if (REG_P (real_out))
2085 value = real_out;
2086 else
2087 value = gen_rtx_REG (outmode, regno);
2088 }
2089 }
2090
2091 *inloc = saved_rtx;
2092 }
2093
2094 /* Consider using IN if OUT was not acceptable
2095 or if OUT dies in this insn (like the quotient in a divmod insn).
2096 We can't use IN unless it dies in this insn,
2097 which means we must know accurately which hard regs are live.
2098 Also, the result can't go in IN if IN is used within OUT,
2099 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2100 if (hard_regs_live_known
2101 && REG_P (in)
2102 && REGNO (in) < FIRST_PSEUDO_REGISTER
2103 && (value == 0
2104 || find_reg_note (this_insn, REG_UNUSED, real_out))
2105 && find_reg_note (this_insn, REG_DEAD, real_in)
2106 && !fixed_regs[REGNO (in)]
2107 && HARD_REGNO_MODE_OK (REGNO (in),
2108 /* The only case where out and real_out might
2109 have different modes is where real_out
2110 is a subreg, and in that case, out
2111 has a real mode. */
2112 (GET_MODE (out) != VOIDmode
2113 ? GET_MODE (out) : outmode))
2114 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2115 /* However only do this if we can be sure that this input
2116 operand doesn't correspond with an uninitialized pseudo.
2117 global can assign some hardreg to it that is the same as
2118 the one assigned to a different, also live pseudo (as it
2119 can ignore the conflict). We must never introduce writes
2120 to such hardregs, as they would clobber the other live
2121 pseudo. See PR 20973. */
2122 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2123 ORIGINAL_REGNO (in))
2124 /* Similarly, only do this if we can be sure that the death
2125 note is still valid. global can assign some hardreg to
2126 the pseudo referenced in the note and simultaneously a
2127 subword of this hardreg to a different, also live pseudo,
2128 because only another subword of the hardreg is actually
2129 used in the insn. This cannot happen if the pseudo has
2130 been assigned exactly one hardreg. See PR 33732. */
2131 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2132 {
2133 unsigned int regno = REGNO (in) + in_offset;
2134 unsigned int nwords = hard_regno_nregs[regno][inmode];
2135
2136 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2137 && ! hard_reg_set_here_p (regno, regno + nwords,
2138 PATTERN (this_insn))
2139 && (! earlyclobber
2140 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2141 PATTERN (this_insn), inloc)))
2142 {
2143 unsigned int i;
2144
2145 for (i = 0; i < nwords; i++)
2146 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2147 regno + i))
2148 break;
2149
2150 if (i == nwords)
2151 {
2152 /* If we were going to use OUT as the reload reg
2153 and changed our mind, it means OUT is a dummy that
2154 dies here. So don't bother copying value to it. */
2155 if (for_real >= 0 && value == real_out)
2156 rld[for_real].out = 0;
2157 if (REG_P (real_in))
2158 value = real_in;
2159 else
2160 value = gen_rtx_REG (inmode, regno);
2161 }
2162 }
2163 }
2164
2165 return value;
2166 }
2167 \f
2168 /* This page contains subroutines used mainly for determining
2169 whether the IN or an OUT of a reload can serve as the
2170 reload register. */
2171
2172 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2173
2174 int
2175 earlyclobber_operand_p (rtx x)
2176 {
2177 int i;
2178
2179 for (i = 0; i < n_earlyclobbers; i++)
2180 if (reload_earlyclobbers[i] == x)
2181 return 1;
2182
2183 return 0;
2184 }
2185
2186 /* Return 1 if expression X alters a hard reg in the range
2187 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2188 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2189 X should be the body of an instruction. */
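/* For example (illustrative, assuming SImode occupies one hard register):
   with BEG_REGNO = 2 and END_REGNO = 3, a body of
   (set (reg:SI 2) (const_int 0)) or (clobber (reg:SI 2)) yields 1, whereas
   (set (reg:SI 4) (reg:SI 2)) yields 0, because there reg 2 is only used,
   not altered.  */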
2190
2191 static int
2192 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2193 {
2194 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2195 {
2196 rtx op0 = SET_DEST (x);
2197
2198 while (GET_CODE (op0) == SUBREG)
2199 op0 = SUBREG_REG (op0);
2200 if (REG_P (op0))
2201 {
2202 unsigned int r = REGNO (op0);
2203
2204 /* See if this reg overlaps range under consideration. */
2205 if (r < end_regno
2206 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2207 return 1;
2208 }
2209 }
2210 else if (GET_CODE (x) == PARALLEL)
2211 {
2212 int i = XVECLEN (x, 0) - 1;
2213
2214 for (; i >= 0; i--)
2215 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2216 return 1;
2217 }
2218
2219 return 0;
2220 }
2221
2222 /* Return 1 if ADDR is a valid memory address for mode MODE
2223 in address space AS, and check that each pseudo reg has the
2224 proper kind of hard reg. */
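/* "Strict" checking differs from the checking done while RTL is first
   generated: a base or index register must now be a hard register valid for
   that use, or a pseudo that has been assigned such a hard register.  For
   example (illustrative), (mem:SI (reg:SI 70)) with pseudo 70 left without
   a hard register may pass non-strict checking but is rejected here.  */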
2225
2226 int
2227 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2228 rtx addr, addr_space_t as)
2229 {
2230 #ifdef GO_IF_LEGITIMATE_ADDRESS
2231 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2232 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2233 return 0;
2234
2235 win:
2236 return 1;
2237 #else
2238 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2239 #endif
2240 }
2241 \f
2242 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2243 if they are the same hard reg, and has special hacks for
2244 autoincrement and autodecrement.
2245 This is specifically intended for find_reloads to use
2246 in determining whether two operands match.
2247 X is the operand whose number is the lower of the two.
2248
2249 The value is 2 if Y contains a pre-increment that matches
2250 a non-incrementing address in X. */
2251
2252 /* ??? To be completely correct, we should arrange to pass
2253 for X the output operand and for Y the input operand.
2254 For now, we assume that the output operand has the lower number
2255 because that is natural in (SET output (... input ...)). */
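/* Worked example (illustrative): with X = (mem:SI (reg:SI 2)) and
   Y = (mem:SI (pre_inc (reg:SI 2))), the recursion over the addresses
   reaches the PRE_INC case below and the overall result is 2 rather
   than 1, which callers use to handle the pre-increment specially.  */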
2256
2257 int
2258 operands_match_p (rtx x, rtx y)
2259 {
2260 int i;
2261 RTX_CODE code = GET_CODE (x);
2262 const char *fmt;
2263 int success_2;
2264
2265 if (x == y)
2266 return 1;
2267 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2268 && (REG_P (y) || (GET_CODE (y) == SUBREG
2269 && REG_P (SUBREG_REG (y)))))
2270 {
2271 int j;
2272
2273 if (code == SUBREG)
2274 {
2275 i = REGNO (SUBREG_REG (x));
2276 if (i >= FIRST_PSEUDO_REGISTER)
2277 goto slow;
2278 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2279 GET_MODE (SUBREG_REG (x)),
2280 SUBREG_BYTE (x),
2281 GET_MODE (x));
2282 }
2283 else
2284 i = REGNO (x);
2285
2286 if (GET_CODE (y) == SUBREG)
2287 {
2288 j = REGNO (SUBREG_REG (y));
2289 if (j >= FIRST_PSEUDO_REGISTER)
2290 goto slow;
2291 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2292 GET_MODE (SUBREG_REG (y)),
2293 SUBREG_BYTE (y),
2294 GET_MODE (y));
2295 }
2296 else
2297 j = REGNO (y);
2298
2299 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2300 multiple hard register group of scalar integer registers, so that
2301 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2302 register. */
2303 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2304 && SCALAR_INT_MODE_P (GET_MODE (x))
2305 && i < FIRST_PSEUDO_REGISTER)
2306 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2307 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2308 && SCALAR_INT_MODE_P (GET_MODE (y))
2309 && j < FIRST_PSEUDO_REGISTER)
2310 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2311
2312 return i == j;
2313 }
2314 /* If two operands must match, because they are really a single
2315 operand of an assembler insn, then two postincrements are invalid
2316 because the assembler insn would increment only once.
2317 On the other hand, a postincrement matches ordinary indexing
2318 if the postincrement is the output operand. */
2319 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2320 return operands_match_p (XEXP (x, 0), y);
2321 /* Two preincrements are invalid
2322 because the assembler insn would increment only once.
2323 On the other hand, a preincrement matches ordinary indexing
2324 if the preincrement is the input operand.
2325 In this case, return 2, since some callers need to do special
2326 things when this happens. */
2327 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2328 || GET_CODE (y) == PRE_MODIFY)
2329 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2330
2331 slow:
2332
2333 /* Now we have disposed of all the cases in which different rtx codes
2334 can match. */
2335 if (code != GET_CODE (y))
2336 return 0;
2337
2338 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2339 if (GET_MODE (x) != GET_MODE (y))
2340 return 0;
2341
2342 /* MEMs referring to different address space are not equivalent. */
2343 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2344 return 0;
2345
2346 switch (code)
2347 {
2348 CASE_CONST_UNIQUE:
2349 return 0;
2350
2351 case LABEL_REF:
2352 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2353 case SYMBOL_REF:
2354 return XSTR (x, 0) == XSTR (y, 0);
2355
2356 default:
2357 break;
2358 }
2359
2360 /* Compare the elements. If any pair of corresponding elements
2361 fails to match, return 0 for the whole thing. */
2362
2363 success_2 = 0;
2364 fmt = GET_RTX_FORMAT (code);
2365 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2366 {
2367 int val, j;
2368 switch (fmt[i])
2369 {
2370 case 'w':
2371 if (XWINT (x, i) != XWINT (y, i))
2372 return 0;
2373 break;
2374
2375 case 'i':
2376 if (XINT (x, i) != XINT (y, i))
2377 return 0;
2378 break;
2379
2380 case 'e':
2381 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2382 if (val == 0)
2383 return 0;
2384 /* If any subexpression returns 2,
2385 we should return 2 if we are successful. */
2386 if (val == 2)
2387 success_2 = 1;
2388 break;
2389
2390 case '0':
2391 break;
2392
2393 case 'E':
2394 if (XVECLEN (x, i) != XVECLEN (y, i))
2395 return 0;
2396 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2397 {
2398 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2399 if (val == 0)
2400 return 0;
2401 if (val == 2)
2402 success_2 = 1;
2403 }
2404 break;
2405
2406 /* It is believed that rtx's at this level will never
2407 contain anything but integers and other rtx's,
2408 except for within LABEL_REFs and SYMBOL_REFs. */
2409 default:
2410 gcc_unreachable ();
2411 }
2412 }
2413 return 1 + success_2;
2414 }
2415 \f
2416 /* Describe the range of registers or memory referenced by X.
2417 If X is a register, set REG_FLAG and put the first register
2418 number into START and the last plus one into END.
2419 If X is a memory reference, put a base address into BASE
2420 and a range of integer offsets into START and END.
2421 If X is pushing on the stack, we can assume it causes no trouble,
2422 so we set the SAFE field. */
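/* Example of the result (illustrative, assuming 4-byte SImode): for
   (mem:SI (plus (reg:SI 10) (const_int 4))) the decomposition has
   base = (reg:SI 10), start = 4 and end = 8; for a hard register operand
   such as (reg:SI 3) it has reg_flag set, start = 3 and end just past the
   last hard register occupied.  */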
2423
2424 static struct decomposition
2425 decompose (rtx x)
2426 {
2427 struct decomposition val;
2428 int all_const = 0;
2429
2430 memset (&val, 0, sizeof (val));
2431
2432 switch (GET_CODE (x))
2433 {
2434 case MEM:
2435 {
2436 rtx base = NULL_RTX, offset = 0;
2437 rtx addr = XEXP (x, 0);
2438
2439 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2440 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2441 {
2442 val.base = XEXP (addr, 0);
2443 val.start = -GET_MODE_SIZE (GET_MODE (x));
2444 val.end = GET_MODE_SIZE (GET_MODE (x));
2445 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2446 return val;
2447 }
2448
2449 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2450 {
2451 if (GET_CODE (XEXP (addr, 1)) == PLUS
2452 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2453 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2454 {
2455 val.base = XEXP (addr, 0);
2456 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2457 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2458 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2459 return val;
2460 }
2461 }
2462
2463 if (GET_CODE (addr) == CONST)
2464 {
2465 addr = XEXP (addr, 0);
2466 all_const = 1;
2467 }
2468 if (GET_CODE (addr) == PLUS)
2469 {
2470 if (CONSTANT_P (XEXP (addr, 0)))
2471 {
2472 base = XEXP (addr, 1);
2473 offset = XEXP (addr, 0);
2474 }
2475 else if (CONSTANT_P (XEXP (addr, 1)))
2476 {
2477 base = XEXP (addr, 0);
2478 offset = XEXP (addr, 1);
2479 }
2480 }
2481
2482 if (offset == 0)
2483 {
2484 base = addr;
2485 offset = const0_rtx;
2486 }
2487 if (GET_CODE (offset) == CONST)
2488 offset = XEXP (offset, 0);
2489 if (GET_CODE (offset) == PLUS)
2490 {
2491 if (CONST_INT_P (XEXP (offset, 0)))
2492 {
2493 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2494 offset = XEXP (offset, 0);
2495 }
2496 else if (CONST_INT_P (XEXP (offset, 1)))
2497 {
2498 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2499 offset = XEXP (offset, 1);
2500 }
2501 else
2502 {
2503 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2504 offset = const0_rtx;
2505 }
2506 }
2507 else if (!CONST_INT_P (offset))
2508 {
2509 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2510 offset = const0_rtx;
2511 }
2512
2513 if (all_const && GET_CODE (base) == PLUS)
2514 base = gen_rtx_CONST (GET_MODE (base), base);
2515
2516 gcc_assert (CONST_INT_P (offset));
2517
2518 val.start = INTVAL (offset);
2519 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2520 val.base = base;
2521 }
2522 break;
2523
2524 case REG:
2525 val.reg_flag = 1;
2526 val.start = true_regnum (x);
2527 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2528 {
2529 /* A pseudo with no hard reg. */
2530 val.start = REGNO (x);
2531 val.end = val.start + 1;
2532 }
2533 else
2534 /* A hard reg. */
2535 val.end = end_hard_regno (GET_MODE (x), val.start);
2536 break;
2537
2538 case SUBREG:
2539 if (!REG_P (SUBREG_REG (x)))
2540 /* This could be more precise, but it's good enough. */
2541 return decompose (SUBREG_REG (x));
2542 val.reg_flag = 1;
2543 val.start = true_regnum (x);
2544 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2545 return decompose (SUBREG_REG (x));
2546 else
2547 /* A hard reg. */
2548 val.end = val.start + subreg_nregs (x);
2549 break;
2550
2551 case SCRATCH:
2552 /* This hasn't been assigned yet, so it can't conflict yet. */
2553 val.safe = 1;
2554 break;
2555
2556 default:
2557 gcc_assert (CONSTANT_P (x));
2558 val.safe = 1;
2559 break;
2560 }
2561 return val;
2562 }
2563
2564 /* Return 1 if altering Y will not modify the value of X.
2565 Y is also described by YDATA, which should be decompose (Y). */
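/* For instance (illustrative, assuming 4-byte SImode and sp denoting the
   stack pointer register): a store to (mem:SI (plus sp (const_int 8)))
   cannot alter (mem:SI (plus sp (const_int 16))); both decompose to the
   same base with byte ranges [8,12) and [16,20), which do not overlap, so
   immune_p returns 1.  */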
2566
2567 static int
2568 immune_p (rtx x, rtx y, struct decomposition ydata)
2569 {
2570 struct decomposition xdata;
2571
2572 if (ydata.reg_flag)
2573 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2574 if (ydata.safe)
2575 return 1;
2576
2577 gcc_assert (MEM_P (y));
2578 /* If Y is memory and X is not, Y can't affect X. */
2579 if (!MEM_P (x))
2580 return 1;
2581
2582 xdata = decompose (x);
2583
2584 if (! rtx_equal_p (xdata.base, ydata.base))
2585 {
2586 /* If bases are distinct symbolic constants, there is no overlap. */
2587 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2588 return 1;
2589 /* Constants and stack slots never overlap. */
2590 if (CONSTANT_P (xdata.base)
2591 && (ydata.base == frame_pointer_rtx
2592 || ydata.base == hard_frame_pointer_rtx
2593 || ydata.base == stack_pointer_rtx))
2594 return 1;
2595 if (CONSTANT_P (ydata.base)
2596 && (xdata.base == frame_pointer_rtx
2597 || xdata.base == hard_frame_pointer_rtx
2598 || xdata.base == stack_pointer_rtx))
2599 return 1;
2600 /* If either base is variable, we don't know anything. */
2601 return 0;
2602 }
2603
2604 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2605 }
2606
2607 /* Similar, but calls decompose. */
2608
2609 int
2610 safe_from_earlyclobber (rtx op, rtx clobber)
2611 {
2612 struct decomposition early_data;
2613
2614 early_data = decompose (clobber);
2615 return immune_p (op, clobber, early_data);
2616 }
2617 \f
2618 /* Main entry point of this file: search the body of INSN
2619 for values that need reloading and record them with push_reload.
2620 REPLACE nonzero means record also where the values occur
2621 so that subst_reloads can be used.
2622
2623 IND_LEVELS says how many levels of indirection are supported by this
2624 machine; a value of zero means that a memory reference is not a valid
2625 memory address.
2626
2627 LIVE_KNOWN says we have valid information about which hard
2628 regs are live at each point in the program; this is true when
2629 we are called from global_alloc but false when stupid register
2630 allocation has been done.
2631
2632 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2633 which is nonnegative if the reg has been commandeered for reloading into.
2634 It is copied into STATIC_RELOAD_REG_P and referenced from there
2635 by various subroutines.
2636
2637 Return TRUE if some operands need to be changed, because of swapping
2638 commutative operands, reg_equiv_address substitution, or whatever. */
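/* A hedged usage sketch (hypothetical caller, for illustration only):
   calling find_reloads (insn, 0, ind_levels, live_known, reload_reg_p)
   records the reloads in rld[] without registering replacements; a nonzero
   return value means some operands were changed (for instance by a
   commutative swap) and the insn may need to be re-recognized.  With
   REPLACE nonzero, the recorded replacements are later applied by
   subst_reloads once reload registers have been chosen.  */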
2639
2640 int
2641 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2642 short *reload_reg_p)
2643 {
2644 int insn_code_number;
2645 int i, j;
2646 int noperands;
2647 /* These start out as the constraints for the insn
2648 and they are chewed up as we consider alternatives. */
2649 const char *constraints[MAX_RECOG_OPERANDS];
2650 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2651 a register. */
2652 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2653 char pref_or_nothing[MAX_RECOG_OPERANDS];
2654 /* Nonzero for a MEM operand whose entire address needs a reload.
2655 May be -1 to indicate the entire address may or may not need a reload. */
2656 int address_reloaded[MAX_RECOG_OPERANDS];
2657 /* Nonzero for an address operand that needs to be completely reloaded.
2658 May be -1 to indicate the entire operand may or may not need a reload. */
2659 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2660 /* Value of enum reload_type to use for operand. */
2661 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2662 /* Value of enum reload_type to use within address of operand. */
2663 enum reload_type address_type[MAX_RECOG_OPERANDS];
2664 /* Save the usage of each operand. */
2665 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2666 int no_input_reloads = 0, no_output_reloads = 0;
2667 int n_alternatives;
2668 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2669 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2670 char this_alternative_win[MAX_RECOG_OPERANDS];
2671 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2672 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2673 int this_alternative_matches[MAX_RECOG_OPERANDS];
2674 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2675 int this_alternative_number;
2676 int goal_alternative_number = 0;
2677 int operand_reloadnum[MAX_RECOG_OPERANDS];
2678 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2679 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2680 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2681 char goal_alternative_win[MAX_RECOG_OPERANDS];
2682 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2683 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2684 int goal_alternative_swapped;
2685 int best;
2686 int commutative;
2687 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2688 rtx substed_operand[MAX_RECOG_OPERANDS];
2689 rtx body = PATTERN (insn);
2690 rtx set = single_set (insn);
2691 int goal_earlyclobber = 0, this_earlyclobber;
2692 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2693 int retval = 0;
2694
2695 this_insn = insn;
2696 n_reloads = 0;
2697 n_replacements = 0;
2698 n_earlyclobbers = 0;
2699 replace_reloads = replace;
2700 hard_regs_live_known = live_known;
2701 static_reload_reg_p = reload_reg_p;
2702
2703 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2704 neither are insns that SET cc0. Insns that use CC0 are not allowed
2705 to have any input reloads. */
2706 if (JUMP_P (insn) || CALL_P (insn))
2707 no_output_reloads = 1;
2708
2709 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2710 no_input_reloads = 1;
2711 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2712 no_output_reloads = 1;
2713
2714 #ifdef SECONDARY_MEMORY_NEEDED
2715 /* The eliminated forms of any secondary memory locations are per-insn, so
2716 clear them out here. */
2717
2718 if (secondary_memlocs_elim_used)
2719 {
2720 memset (secondary_memlocs_elim, 0,
2721 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2722 secondary_memlocs_elim_used = 0;
2723 }
2724 #endif
2725
2726 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2727 is cheap to move between them. If it is not, there may not be an insn
2728 to do the copy, so we may need a reload. */
2729 if (GET_CODE (body) == SET
2730 && REG_P (SET_DEST (body))
2731 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2732 && REG_P (SET_SRC (body))
2733 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2734 && register_move_cost (GET_MODE (SET_SRC (body)),
2735 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2736 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2737 return 0;
2738
2739 extract_insn (insn);
2740
2741 noperands = reload_n_operands = recog_data.n_operands;
2742 n_alternatives = recog_data.n_alternatives;
2743
2744 /* Just return "no reloads" if insn has no operands with constraints. */
2745 if (noperands == 0 || n_alternatives == 0)
2746 return 0;
2747
2748 insn_code_number = INSN_CODE (insn);
2749 this_insn_is_asm = insn_code_number < 0;
2750
2751 memcpy (operand_mode, recog_data.operand_mode,
2752 noperands * sizeof (machine_mode));
2753 memcpy (constraints, recog_data.constraints,
2754 noperands * sizeof (const char *));
2755
2756 commutative = -1;
2757
2758 /* If we will need to know, later, whether some pair of operands
2759 are the same, we must compare them now and save the result.
2760 Reloading the base and index registers will clobber them
2761 and afterward they will fail to match. */
2762
2763 for (i = 0; i < noperands; i++)
2764 {
2765 const char *p;
2766 int c;
2767 char *end;
2768
2769 substed_operand[i] = recog_data.operand[i];
2770 p = constraints[i];
2771
2772 modified[i] = RELOAD_READ;
2773
2774 /* Scan this operand's constraint to see if it is an output operand,
2775 an in-out operand, is commutative, or should match another. */
2776
2777 while ((c = *p))
2778 {
2779 p += CONSTRAINT_LEN (c, p);
2780 switch (c)
2781 {
2782 case '=':
2783 modified[i] = RELOAD_WRITE;
2784 break;
2785 case '+':
2786 modified[i] = RELOAD_READ_WRITE;
2787 break;
2788 case '%':
2789 {
2790 /* The last operand should not be marked commutative. */
2791 gcc_assert (i != noperands - 1);
2792
2793 /* We currently only support one commutative pair of
2794 operands. Some existing asm code currently uses more
2795 than one pair. Previously, that would usually work,
2796 but sometimes it would crash the compiler. We
2797 continue supporting that case as well as we can by
2798 silently ignoring all but the first pair. In the
2799 future we may handle it correctly. */
2800 if (commutative < 0)
2801 commutative = i;
2802 else
2803 gcc_assert (this_insn_is_asm);
2804 }
2805 break;
2806 /* Use of ISDIGIT is tempting here, but it may get expensive because
2807 of locale support we don't want. */
2808 case '0': case '1': case '2': case '3': case '4':
2809 case '5': case '6': case '7': case '8': case '9':
2810 {
2811 c = strtoul (p - 1, &end, 10);
2812 p = end;
2813
2814 operands_match[c][i]
2815 = operands_match_p (recog_data.operand[c],
2816 recog_data.operand[i]);
2817
2818 /* An operand may not match itself. */
2819 gcc_assert (c != i);
2820
2821 /* If C can be commuted with C+1, and C might need to match I,
2822 then C+1 might also need to match I. */
2823 if (commutative >= 0)
2824 {
2825 if (c == commutative || c == commutative + 1)
2826 {
2827 int other = c + (c == commutative ? 1 : -1);
2828 operands_match[other][i]
2829 = operands_match_p (recog_data.operand[other],
2830 recog_data.operand[i]);
2831 }
2832 if (i == commutative || i == commutative + 1)
2833 {
2834 int other = i + (i == commutative ? 1 : -1);
2835 operands_match[c][other]
2836 = operands_match_p (recog_data.operand[c],
2837 recog_data.operand[other]);
2838 }
2839 /* Note that C is supposed to be less than I.
2840 No need to consider altering both C and I because in
2841 that case we would alter one into the other. */
2842 }
2843 }
2844 }
2845 }
2846 }
2847
2848 /* Examine each operand that is a memory reference or memory address
2849 and reload parts of the addresses into index registers.
2850 Also here any references to pseudo regs that didn't get hard regs
2851 but are equivalent to constants get replaced in the insn itself
2852 with those constants. Nobody will ever see them again.
2853
2854 Finally, set up the preferred classes of each operand. */
2855
2856 for (i = 0; i < noperands; i++)
2857 {
2858 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2859
2860 address_reloaded[i] = 0;
2861 address_operand_reloaded[i] = 0;
2862 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2863 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2864 : RELOAD_OTHER);
2865 address_type[i]
2866 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2867 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2868 : RELOAD_OTHER);
2869
2870 if (*constraints[i] == 0)
2871 /* Ignore things like match_operator operands. */
2872 ;
2873 else if (insn_extra_address_constraint
2874 (lookup_constraint (constraints[i])))
2875 {
2876 address_operand_reloaded[i]
2877 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2878 recog_data.operand[i],
2879 recog_data.operand_loc[i],
2880 i, operand_type[i], ind_levels, insn);
2881
2882 /* If we now have a simple operand where we used to have a
2883 PLUS or MULT, re-recognize and try again. */
2884 if ((OBJECT_P (*recog_data.operand_loc[i])
2885 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2886 && (GET_CODE (recog_data.operand[i]) == MULT
2887 || GET_CODE (recog_data.operand[i]) == PLUS))
2888 {
2889 INSN_CODE (insn) = -1;
2890 retval = find_reloads (insn, replace, ind_levels, live_known,
2891 reload_reg_p);
2892 return retval;
2893 }
2894
2895 recog_data.operand[i] = *recog_data.operand_loc[i];
2896 substed_operand[i] = recog_data.operand[i];
2897
2898 /* Address operands are reloaded in their existing mode,
2899 no matter what is specified in the machine description. */
2900 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2901
2902 /* If the address is a single CONST_INT, pick the address mode
2903 instead; otherwise we will later not know in which mode
2904 the reload should be performed. */
2905 if (operand_mode[i] == VOIDmode)
2906 operand_mode[i] = Pmode;
2907
2908 }
2909 else if (code == MEM)
2910 {
2911 address_reloaded[i]
2912 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2913 recog_data.operand_loc[i],
2914 XEXP (recog_data.operand[i], 0),
2915 &XEXP (recog_data.operand[i], 0),
2916 i, address_type[i], ind_levels, insn);
2917 recog_data.operand[i] = *recog_data.operand_loc[i];
2918 substed_operand[i] = recog_data.operand[i];
2919 }
2920 else if (code == SUBREG)
2921 {
2922 rtx reg = SUBREG_REG (recog_data.operand[i]);
2923 rtx op
2924 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2925 ind_levels,
2926 set != 0
2927 && &SET_DEST (set) == recog_data.operand_loc[i],
2928 insn,
2929 &address_reloaded[i]);
2930
2931 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2932 that didn't get a hard register, emit a USE with a REG_EQUAL
2933 note in front so that we might inherit a previous, possibly
2934 wider reload. */
2935
2936 if (replace
2937 && MEM_P (op)
2938 && REG_P (reg)
2939 && (GET_MODE_SIZE (GET_MODE (reg))
2940 >= GET_MODE_SIZE (GET_MODE (op)))
2941 && reg_equiv_constant (REGNO (reg)) == 0)
2942 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2943 insn),
2944 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2945
2946 substed_operand[i] = recog_data.operand[i] = op;
2947 }
2948 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2949 /* We can get a PLUS as an "operand" as a result of register
2950 elimination. See eliminate_regs and gen_reload. We handle
2951 a unary operator by reloading the operand. */
2952 substed_operand[i] = recog_data.operand[i]
2953 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2954 ind_levels, 0, insn,
2955 &address_reloaded[i]);
2956 else if (code == REG)
2957 {
2958 /* This is equivalent to calling find_reloads_toplev.
2959 The code is duplicated for speed.
2960 When we find a pseudo always equivalent to a constant,
2961 we replace it by the constant. We must be sure, however,
2962 that we don't try to replace it in the insn in which it
2963 is being set. */
2964 int regno = REGNO (recog_data.operand[i]);
2965 if (reg_equiv_constant (regno) != 0
2966 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2967 {
2968 /* Record the existing mode so that the check whether constants are
2969 allowed will work when operand_mode isn't specified. */
2970
2971 if (operand_mode[i] == VOIDmode)
2972 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2973
2974 substed_operand[i] = recog_data.operand[i]
2975 = reg_equiv_constant (regno);
2976 }
2977 if (reg_equiv_memory_loc (regno) != 0
2978 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2979 /* We need not give a valid is_set_dest argument since the case
2980 of a constant equivalence was checked above. */
2981 substed_operand[i] = recog_data.operand[i]
2982 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2983 ind_levels, 0, insn,
2984 &address_reloaded[i]);
2985 }
2986 /* If the operand is still a register (we didn't replace it with an
2987 equivalent), get the preferred class to reload it into. */
2988 code = GET_CODE (recog_data.operand[i]);
2989 preferred_class[i]
2990 = ((code == REG && REGNO (recog_data.operand[i])
2991 >= FIRST_PSEUDO_REGISTER)
2992 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2993 : NO_REGS);
2994 pref_or_nothing[i]
2995 = (code == REG
2996 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2997 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2998 }
2999
3000 /* If this is simply a copy from operand 1 to operand 0, merge the
3001 preferred classes for the operands. */
3002 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
3003 && recog_data.operand[1] == SET_SRC (set))
3004 {
3005 preferred_class[0] = preferred_class[1]
3006 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
3007 pref_or_nothing[0] |= pref_or_nothing[1];
3008 pref_or_nothing[1] |= pref_or_nothing[0];
3009 }
3010
3011 /* Now see what we need for pseudo-regs that didn't get hard regs
3012 or got the wrong kind of hard reg. For this, we must consider
3013 all the operands together against the register constraints. */
3014
3015 best = MAX_RECOG_OPERANDS * 2 + 600;
3016
3017 goal_alternative_swapped = 0;
3018
3019 /* The constraints are made of several alternatives.
3020 Each operand's constraint looks like foo,bar,... with commas
3021 separating the alternatives. The first alternatives for all
3022 operands go together, the second alternatives go together, etc.
3023
3024 First loop over alternatives. */
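/* For example (illustrative): if operand 0's constraint string is "=r,m"
   and operand 1's is "m,r", then alternative 0 pairs a register
   destination with a memory source and alternative 1 pairs a memory
   destination with a register source; each iteration below considers one
   such column.  */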
3025
3026 alternative_mask enabled = get_enabled_alternatives (insn);
3027 for (this_alternative_number = 0;
3028 this_alternative_number < n_alternatives;
3029 this_alternative_number++)
3030 {
3031 int swapped;
3032
3033 if (!TEST_BIT (enabled, this_alternative_number))
3034 {
3035 int i;
3036
3037 for (i = 0; i < recog_data.n_operands; i++)
3038 constraints[i] = skip_alternative (constraints[i]);
3039
3040 continue;
3041 }
3042
3043 /* If insn is commutative (it's safe to exchange a certain pair
3044 of operands) then we need to try each alternative twice, the
3045 second time matching those two operands as if we had
3046 exchanged them. To do this, really exchange them in
3047 operands. */
3048 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3049 {
3050 /* Loop over operands for one constraint alternative. */
3051 /* LOSERS counts those that don't fit this alternative
3052 and would require loading. */
3053 int losers = 0;
3054 /* BAD is set to 1 if some operand can't fit this alternative
3055 even after reloading. */
3056 int bad = 0;
3057 /* REJECT is a count of how undesirable this alternative says it is
3058 if any reloading is required. If the alternative matches exactly
3059 then REJECT is ignored, but otherwise it gets this much
3060 counted against it in addition to the reloading needed. Each
3061 ? counts three times here since we want the disparagement caused by
3062 a bad register class to count only 1/3 as much. */
3063 int reject = 0;
3064
3065 if (swapped)
3066 {
3067 recog_data.operand[commutative] = substed_operand[commutative + 1];
3068 recog_data.operand[commutative + 1] = substed_operand[commutative];
3069 /* Swap the duplicates too. */
3070 for (i = 0; i < recog_data.n_dups; i++)
3071 if (recog_data.dup_num[i] == commutative
3072 || recog_data.dup_num[i] == commutative + 1)
3073 *recog_data.dup_loc[i]
3074 = recog_data.operand[(int) recog_data.dup_num[i]];
3075
3076 std::swap (preferred_class[commutative],
3077 preferred_class[commutative + 1]);
3078 std::swap (pref_or_nothing[commutative],
3079 pref_or_nothing[commutative + 1]);
3080 std::swap (address_reloaded[commutative],
3081 address_reloaded[commutative + 1]);
3082 }
3083
3084 this_earlyclobber = 0;
3085
3086 for (i = 0; i < noperands; i++)
3087 {
3088 const char *p = constraints[i];
3089 char *end;
3090 int len;
3091 int win = 0;
3092 int did_match = 0;
3093 /* 0 => this operand can be reloaded somehow for this alternative. */
3094 int badop = 1;
3095 /* 0 => this operand can be reloaded if the alternative allows regs. */
3096 int winreg = 0;
3097 int c;
3098 int m;
3099 rtx operand = recog_data.operand[i];
3100 int offset = 0;
3101 /* Nonzero means this is a MEM that must be reloaded into a reg
3102 regardless of what the constraint says. */
3103 int force_reload = 0;
3104 int offmemok = 0;
3105 /* Nonzero if a constant forced into memory would be OK for this
3106 operand. */
3107 int constmemok = 0;
3108 int earlyclobber = 0;
3109 enum constraint_num cn;
3110 enum reg_class cl;
3111
3112 /* If the predicate accepts a unary operator, it means that
3113 we need to reload the operand, but do not do this for
3114 match_operator and friends. */
3115 if (UNARY_P (operand) && *p != 0)
3116 operand = XEXP (operand, 0);
3117
3118 /* If the operand is a SUBREG, extract
3119 the REG or MEM (or maybe even a constant) within.
3120 (Constants can occur as a result of reg_equiv_constant.) */
3121
3122 while (GET_CODE (operand) == SUBREG)
3123 {
3124 /* Offset only matters when operand is a REG and
3125 it is a hard reg. This is because it is passed
3126 to reg_fits_class_p if it is a REG and all pseudos
3127 return 0 from that function. */
3128 if (REG_P (SUBREG_REG (operand))
3129 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3130 {
3131 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3132 GET_MODE (SUBREG_REG (operand)),
3133 SUBREG_BYTE (operand),
3134 GET_MODE (operand)) < 0)
3135 force_reload = 1;
3136 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3137 GET_MODE (SUBREG_REG (operand)),
3138 SUBREG_BYTE (operand),
3139 GET_MODE (operand));
3140 }
3141 operand = SUBREG_REG (operand);
3142 /* Force reload if this is a constant or PLUS or if there may
3143 be a problem accessing OPERAND in the outer mode. */
3144 if (CONSTANT_P (operand)
3145 || GET_CODE (operand) == PLUS
3146 /* We must force a reload of paradoxical SUBREGs
3147 of a MEM because the alignment of the inner value
3148 may not be enough to do the outer reference. On
3149 big-endian machines, it may also reference outside
3150 the object.
3151
3152 On machines that extend byte operations, if we have a
3153 SUBREG where both the inner and outer modes are no wider
3154 than a word and the inner mode is narrower, is integral,
3155 and gets extended when loaded from memory, combine.c has
3156 made assumptions about the behavior of the machine in such
3157 register accesses. If the data is, in fact, in memory we
3158 must always load using the size assumed to be in the
3159 register and let the insn do the different-sized
3160 accesses.
3161
3162 This is doubly true if WORD_REGISTER_OPERATIONS. In
3163 this case eliminate_regs has left non-paradoxical
3164 subregs for push_reload to see. Make sure it does
3165 by forcing the reload.
3166
3167 ??? When is it right at this stage to have a subreg
3168 of a mem that is _not_ to be handled specially? IMO
3169 those should have been reduced to just a mem. */
3170 || ((MEM_P (operand)
3171 || (REG_P (operand)
3172 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3173 #ifndef WORD_REGISTER_OPERATIONS
3174 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3175 < BIGGEST_ALIGNMENT)
3176 && (GET_MODE_SIZE (operand_mode[i])
3177 > GET_MODE_SIZE (GET_MODE (operand))))
3178 || BYTES_BIG_ENDIAN
3179 #ifdef LOAD_EXTEND_OP
3180 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3181 && (GET_MODE_SIZE (GET_MODE (operand))
3182 <= UNITS_PER_WORD)
3183 && (GET_MODE_SIZE (operand_mode[i])
3184 > GET_MODE_SIZE (GET_MODE (operand)))
3185 && INTEGRAL_MODE_P (GET_MODE (operand))
3186 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3187 #endif
3188 )
3189 #endif
3190 )
3191 )
3192 force_reload = 1;
3193 }
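/* For instance, a paradoxical (subreg:SI (reg:QI N) 0) where pseudo N
ended up in memory on a LOAD_EXTEND_OP target falls under the test
above: we force a reload so the value is loaded in QImode and extended
the way combine.c assumed, instead of letting the insn do an SImode
access to the narrower memory slot. */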
3194
3195 this_alternative[i] = NO_REGS;
3196 this_alternative_win[i] = 0;
3197 this_alternative_match_win[i] = 0;
3198 this_alternative_offmemok[i] = 0;
3199 this_alternative_earlyclobber[i] = 0;
3200 this_alternative_matches[i] = -1;
3201
3202 /* An empty constraint or empty alternative
3203 allows anything which matched the pattern. */
3204 if (*p == 0 || *p == ',')
3205 win = 1, badop = 0;
3206
3207 /* Scan this alternative's specs for this operand;
3208 set WIN if the operand fits any letter in this alternative.
3209 Otherwise, clear BADOP if this operand could
3210 fit some letter after reloads,
3211 or set WINREG if this operand could fit after reloads
3212 provided the constraint allows some registers. */
3213
3214 do
3215 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3216 {
3217 case '\0':
3218 len = 0;
3219 break;
3220 case ',':
3221 c = '\0';
3222 break;
3223
3224 case '?':
3225 reject += 6;
3226 break;
3227
3228 case '!':
3229 reject = 600;
3230 break;
3231
3232 case '#':
3233 /* Ignore rest of this alternative as far as
3234 reloading is concerned. */
3235 do
3236 p++;
3237 while (*p && *p != ',');
3238 len = 0;
3239 break;
3240
3241 case '0': case '1': case '2': case '3': case '4':
3242 case '5': case '6': case '7': case '8': case '9':
3243 m = strtoul (p, &end, 10);
3244 p = end;
3245 len = 0;
3246
3247 this_alternative_matches[i] = m;
3248 /* We are supposed to match a previous operand.
3249 If we do, we win if that one did.
3250 If we do not, count both of the operands as losers.
3251 (This is too conservative, since most of the time
3252 only a single reload insn will be needed to make
3253 the two operands win. As a result, this alternative
3254 may be rejected when it is actually desirable.) */
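/* Illustrative example: with commutative == 1, a "0" constraint on
operand 2 gives m == 0 and i == 2. In the swapped pass slot 2 holds
what was originally operand 1, and operands_match was computed before
the swap, so the lookup below maps i to 2 * commutative + 1 - i == 1
and consults operands_match[0][1]. */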
3255 if ((swapped && (m != commutative || i != commutative + 1))
3256 /* If we are matching as if two operands were swapped,
3257 also pretend that operands_match had been computed
3258 with swapped.
3259 But if I is the second of those and C is the first,
3260 don't exchange them, because operands_match is valid
3261 only on one side of its diagonal. */
3262 ? (operands_match
3263 [(m == commutative || m == commutative + 1)
3264 ? 2 * commutative + 1 - m : m]
3265 [(i == commutative || i == commutative + 1)
3266 ? 2 * commutative + 1 - i : i])
3267 : operands_match[m][i])
3268 {
3269 /* If we are matching a non-offsettable address where an
3270 offsettable address was expected, then we must reject
3271 this combination, because we can't reload it. */
3272 if (this_alternative_offmemok[m]
3273 && MEM_P (recog_data.operand[m])
3274 && this_alternative[m] == NO_REGS
3275 && ! this_alternative_win[m])
3276 bad = 1;
3277
3278 did_match = this_alternative_win[m];
3279 }
3280 else
3281 {
3282 /* Operands don't match. */
3283 rtx value;
3284 int loc1, loc2;
3285 /* Retroactively mark the operand we had to match
3286 as a loser, if it wasn't already. */
3287 if (this_alternative_win[m])
3288 losers++;
3289 this_alternative_win[m] = 0;
3290 if (this_alternative[m] == NO_REGS)
3291 bad = 1;
3292 /* But count the pair only once in the total badness of
3293 this alternative, if the pair can be a dummy reload.
3294 The pointers in operand_loc are not swapped; swap
3295 them by hand if necessary. */
3296 if (swapped && i == commutative)
3297 loc1 = commutative + 1;
3298 else if (swapped && i == commutative + 1)
3299 loc1 = commutative;
3300 else
3301 loc1 = i;
3302 if (swapped && m == commutative)
3303 loc2 = commutative + 1;
3304 else if (swapped && m == commutative + 1)
3305 loc2 = commutative;
3306 else
3307 loc2 = m;
3308 value
3309 = find_dummy_reload (recog_data.operand[i],
3310 recog_data.operand[m],
3311 recog_data.operand_loc[loc1],
3312 recog_data.operand_loc[loc2],
3313 operand_mode[i], operand_mode[m],
3314 this_alternative[m], -1,
3315 this_alternative_earlyclobber[m]);
3316
3317 if (value != 0)
3318 losers--;
3319 }
3320 /* This can be fixed with reloads if the operand
3321 we are supposed to match can be fixed with reloads. */
3322 badop = 0;
3323 this_alternative[i] = this_alternative[m];
3324
3325 /* If we have to reload this operand and some previous
3326 operand also had to match the same thing as this
3327 operand, we don't know how to do that. So reject this
3328 alternative. */
3329 if (! did_match || force_reload)
3330 for (j = 0; j < i; j++)
3331 if (this_alternative_matches[j]
3332 == this_alternative_matches[i])
3333 {
3334 badop = 1;
3335 break;
3336 }
3337 break;
3338
3339 case 'p':
3340 /* All necessary reloads for an address_operand
3341 were handled in find_reloads_address. */
3342 this_alternative[i]
3343 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3344 ADDRESS, SCRATCH);
3345 win = 1;
3346 badop = 0;
3347 break;
3348
3349 case TARGET_MEM_CONSTRAINT:
3350 if (force_reload)
3351 break;
3352 if (MEM_P (operand)
3353 || (REG_P (operand)
3354 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3355 && reg_renumber[REGNO (operand)] < 0))
3356 win = 1;
3357 if (CONST_POOL_OK_P (operand_mode[i], operand))
3358 badop = 0;
3359 constmemok = 1;
3360 break;
3361
3362 case '<':
3363 if (MEM_P (operand)
3364 && ! address_reloaded[i]
3365 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3366 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3367 win = 1;
3368 break;
3369
3370 case '>':
3371 if (MEM_P (operand)
3372 && ! address_reloaded[i]
3373 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3374 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3375 win = 1;
3376 break;
3377
3378 /* Memory operand whose address is not offsettable. */
3379 case 'V':
3380 if (force_reload)
3381 break;
3382 if (MEM_P (operand)
3383 && ! (ind_levels ? offsettable_memref_p (operand)
3384 : offsettable_nonstrict_memref_p (operand))
3385 /* Certain mem addresses will become offsettable
3386 after they themselves are reloaded. This is important;
3387 we don't want our own handling of unoffsettables
3388 to override the handling of reg_equiv_address. */
3389 && !(REG_P (XEXP (operand, 0))
3390 && (ind_levels == 0
3391 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3392 win = 1;
3393 break;
3394
3395 /* Memory operand whose address is offsettable. */
3396 case 'o':
3397 if (force_reload)
3398 break;
3399 if ((MEM_P (operand)
3400 /* If IND_LEVELS, find_reloads_address won't reload a
3401 pseudo that didn't get a hard reg, so we have to
3402 reject that case. */
3403 && ((ind_levels ? offsettable_memref_p (operand)
3404 : offsettable_nonstrict_memref_p (operand))
3405 /* A reloaded address is offsettable because it is now
3406 just a simple register indirect. */
3407 || address_reloaded[i] == 1))
3408 || (REG_P (operand)
3409 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3410 && reg_renumber[REGNO (operand)] < 0
3411 /* If reg_equiv_address is nonzero, we will be
3412 loading it into a register; hence it will be
3413 offsettable, but we cannot say that reg_equiv_mem
3414 is offsettable without checking. */
3415 && ((reg_equiv_mem (REGNO (operand)) != 0
3416 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3417 || (reg_equiv_address (REGNO (operand)) != 0))))
3418 win = 1;
3419 if (CONST_POOL_OK_P (operand_mode[i], operand)
3420 || MEM_P (operand))
3421 badop = 0;
3422 constmemok = 1;
3423 offmemok = 1;
3424 break;
3425
3426 case '&':
3427 /* Output operand that is stored before the need for the
3428 input operands (and their index registers) is over. */
3429 earlyclobber = 1, this_earlyclobber = 1;
3430 break;
3431
3432 case 'X':
3433 force_reload = 0;
3434 win = 1;
3435 break;
3436
3437 case 'g':
3438 if (! force_reload
3439 /* A PLUS is never a valid operand, but reload can make
3440 it from a register when eliminating registers. */
3441 && GET_CODE (operand) != PLUS
3442 /* A SCRATCH is not a valid operand. */
3443 && GET_CODE (operand) != SCRATCH
3444 && (! CONSTANT_P (operand)
3445 || ! flag_pic
3446 || LEGITIMATE_PIC_OPERAND_P (operand))
3447 && (GENERAL_REGS == ALL_REGS
3448 || !REG_P (operand)
3449 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3450 && reg_renumber[REGNO (operand)] < 0)))
3451 win = 1;
3452 cl = GENERAL_REGS;
3453 goto reg;
3454
3455 default:
3456 cn = lookup_constraint (p);
3457 switch (get_constraint_type (cn))
3458 {
3459 case CT_REGISTER:
3460 cl = reg_class_for_constraint (cn);
3461 if (cl != NO_REGS)
3462 goto reg;
3463 break;
3464
3465 case CT_CONST_INT:
3466 if (CONST_INT_P (operand)
3467 && (insn_const_int_ok_for_constraint
3468 (INTVAL (operand), cn)))
3469 win = true;
3470 break;
3471
3472 case CT_MEMORY:
3473 if (force_reload)
3474 break;
3475 if (constraint_satisfied_p (operand, cn))
3476 win = 1;
3477 /* If the address was already reloaded,
3478 we win as well. */
3479 else if (MEM_P (operand) && address_reloaded[i] == 1)
3480 win = 1;
3481 /* Likewise if the address will be reloaded because
3482 reg_equiv_address is nonzero. For reg_equiv_mem
3483 we have to check. */
3484 else if (REG_P (operand)
3485 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3486 && reg_renumber[REGNO (operand)] < 0
3487 && ((reg_equiv_mem (REGNO (operand)) != 0
3488 && (constraint_satisfied_p
3489 (reg_equiv_mem (REGNO (operand)),
3490 cn)))
3491 || (reg_equiv_address (REGNO (operand))
3492 != 0)))
3493 win = 1;
3494
3495 /* If we didn't already win, we can reload
3496 constants via force_const_mem, and other
3497 MEMs by reloading the address like for 'o'. */
3498 if (CONST_POOL_OK_P (operand_mode[i], operand)
3499 || MEM_P (operand))
3500 badop = 0;
3501 constmemok = 1;
3502 offmemok = 1;
3503 break;
3504
3505 case CT_ADDRESS:
3506 if (constraint_satisfied_p (operand, cn))
3507 win = 1;
3508
3509 /* If we didn't already win, we can reload
3510 the address into a base register. */
3511 this_alternative[i]
3512 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3513 ADDRESS, SCRATCH);
3514 badop = 0;
3515 break;
3516
3517 case CT_FIXED_FORM:
3518 if (constraint_satisfied_p (operand, cn))
3519 win = 1;
3520 break;
3521 }
3522 break;
3523
3524 reg:
3525 this_alternative[i]
3526 = reg_class_subunion[this_alternative[i]][cl];
3527 if (GET_MODE (operand) == BLKmode)
3528 break;
3529 winreg = 1;
3530 if (REG_P (operand)
3531 && reg_fits_class_p (operand, this_alternative[i],
3532 offset, GET_MODE (recog_data.operand[i])))
3533 win = 1;
3534 break;
3535 }
3536 while ((p += len), c);
3537
3538 if (swapped == (commutative >= 0 ? 1 : 0))
3539 constraints[i] = p;
3540
3541 /* If this operand could be handled with a reg,
3542 and some reg is allowed, then this operand can be handled. */
3543 if (winreg && this_alternative[i] != NO_REGS
3544 && (win || !class_only_fixed_regs[this_alternative[i]]))
3545 badop = 0;
3546
3547 /* Record which operands fit this alternative. */
3548 this_alternative_earlyclobber[i] = earlyclobber;
3549 if (win && ! force_reload)
3550 this_alternative_win[i] = 1;
3551 else if (did_match && ! force_reload)
3552 this_alternative_match_win[i] = 1;
3553 else
3554 {
3555 int const_to_mem = 0;
3556
3557 this_alternative_offmemok[i] = offmemok;
3558 losers++;
3559 if (badop)
3560 bad = 1;
3561 /* Alternative loses if it has no regs for a reg operand. */
3562 if (REG_P (operand)
3563 && this_alternative[i] == NO_REGS
3564 && this_alternative_matches[i] < 0)
3565 bad = 1;
3566
3567 /* If this is a constant that is reloaded into the desired
3568 class by copying it to memory first, count that as another
3569 reload. This is consistent with other code and is
3570 required to avoid choosing another alternative when
3571 the constant is moved into memory by this function on
3572 an early reload pass. Note that the test here is
3573 precisely the same as in the code below that calls
3574 force_const_mem. */
3575 if (CONST_POOL_OK_P (operand_mode[i], operand)
3576 && ((targetm.preferred_reload_class (operand,
3577 this_alternative[i])
3578 == NO_REGS)
3579 || no_input_reloads))
3580 {
3581 const_to_mem = 1;
3582 if (this_alternative[i] != NO_REGS)
3583 losers++;
3584 }
3585
3586 /* Alternative loses if it requires a type of reload not
3587 permitted for this insn. We can always reload SCRATCH
3588 and objects with a REG_UNUSED note. */
3589 if (GET_CODE (operand) != SCRATCH
3590 && modified[i] != RELOAD_READ && no_output_reloads
3591 && ! find_reg_note (insn, REG_UNUSED, operand))
3592 bad = 1;
3593 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3594 && ! const_to_mem)
3595 bad = 1;
3596
3597 /* If we can't reload this value at all, reject this
3598 alternative. Note that we could also lose due to
3599 LIMIT_RELOAD_CLASS, but we don't check that
3600 here. */
3601
3602 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3603 {
3604 if (targetm.preferred_reload_class (operand,
3605 this_alternative[i])
3606 == NO_REGS)
3607 reject = 600;
3608
3609 if (operand_type[i] == RELOAD_FOR_OUTPUT
3610 && (targetm.preferred_output_reload_class (operand,
3611 this_alternative[i])
3612 == NO_REGS))
3613 reject = 600;
3614 }
3615
3616 /* We prefer to reload pseudos over reloading other things,
3617 since such reloads may be eliminated later.
3618 If we are reloading a SCRATCH, we won't be generating any
3619 insns, just using a register, so it is also preferred.
3620 So bump REJECT in other cases. Don't do this in the
3621 case where we are forcing a constant into memory and
3622 it will then win, since we don't want a different
3623 alternative to match in that case. */
3624 if (! (REG_P (operand)
3625 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3626 && GET_CODE (operand) != SCRATCH
3627 && ! (const_to_mem && constmemok))
3628 reject += 2;
3629
3630 /* Input reloads can be inherited more often than output
3631 reloads can be removed, so penalize output reloads. */
3632 if (operand_type[i] != RELOAD_FOR_INPUT
3633 && GET_CODE (operand) != SCRATCH)
3634 reject++;
3635 }
3636
3637 /* If this operand is a pseudo register that didn't get
3638 a hard reg and this alternative accepts some
3639 register, see if the class that we want is a subset
3640 of the preferred class for this register. If not,
3641 but it intersects that class, use the preferred class
3642 instead. If it does not intersect the preferred
3643 class, show that usage of this alternative should be
3644 discouraged; it will be discouraged more still if the
3645 register is `preferred or nothing'. We do this
3646 because it increases the chance of reusing our spill
3647 register in a later insn and avoiding a pair of
3648 memory stores and loads.
3649
3650 Don't bother with this if this alternative will
3651 accept this operand.
3652
3653 Don't do this for a multiword operand, since it is
3654 only a small win and has the risk of requiring more
3655 spill registers, which could cause a large loss.
3656
3657 Don't do this if the preferred class has only one
3658 register because we might otherwise exhaust the
3659 class. */
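/* A sketch of the common case: on a target where the classes differ,
if the alternative allows ALL_REGS but the pseudo's preferred class is
GENERAL_REGS, the preferred class is a subset of the allowed one, so
this_alternative[i] is narrowed to GENERAL_REGS; if neither class
contains the other, the alternative is merely disparaged via REJECT. */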
3660
3661 if (! win && ! did_match
3662 && this_alternative[i] != NO_REGS
3663 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3664 && reg_class_size [(int) preferred_class[i]] > 0
3665 && ! small_register_class_p (preferred_class[i]))
3666 {
3667 if (! reg_class_subset_p (this_alternative[i],
3668 preferred_class[i]))
3669 {
3670 /* Since we don't have a way of forming the intersection,
3671 we just do something special if the preferred class
3672 is a subset of the class we have; that's the most
3673 common case anyway. */
3674 if (reg_class_subset_p (preferred_class[i],
3675 this_alternative[i]))
3676 this_alternative[i] = preferred_class[i];
3677 else
3678 reject += (2 + 2 * pref_or_nothing[i]);
3679 }
3680 }
3681 }
3682
3683 /* Now see if any output operands that are marked "earlyclobber"
3684 in this alternative conflict with any input operands
3685 or any memory addresses. */
3686
3687 for (i = 0; i < noperands; i++)
3688 if (this_alternative_earlyclobber[i]
3689 && (this_alternative_win[i] || this_alternative_match_win[i]))
3690 {
3691 struct decomposition early_data;
3692
3693 early_data = decompose (recog_data.operand[i]);
3694
3695 gcc_assert (modified[i] != RELOAD_READ);
3696
3697 if (this_alternative[i] == NO_REGS)
3698 {
3699 this_alternative_earlyclobber[i] = 0;
3700 gcc_assert (this_insn_is_asm);
3701 error_for_asm (this_insn,
3702 "%<&%> constraint used with no register class");
3703 }
3704
3705 for (j = 0; j < noperands; j++)
3706 /* Is this an input operand or a memory ref? */
3707 if ((MEM_P (recog_data.operand[j])
3708 || modified[j] != RELOAD_WRITE)
3709 && j != i
3710 /* Ignore things like match_operator operands. */
3711 && !recog_data.is_operator[j]
3712 /* Don't count an input operand that is constrained to match
3713 the early clobber operand. */
3714 && ! (this_alternative_matches[j] == i
3715 && rtx_equal_p (recog_data.operand[i],
3716 recog_data.operand[j]))
3717 /* Is it altered by storing the earlyclobber operand? */
3718 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3719 early_data))
3720 {
3721 /* If the output is in a non-empty few-regs class,
3722 it's costly to reload it, so reload the input instead. */
3723 if (small_register_class_p (this_alternative[i])
3724 && (REG_P (recog_data.operand[j])
3725 || GET_CODE (recog_data.operand[j]) == SUBREG))
3726 {
3727 losers++;
3728 this_alternative_win[j] = 0;
3729 this_alternative_match_win[j] = 0;
3730 }
3731 else
3732 break;
3733 }
3734 /* If an earlyclobber operand conflicts with something,
3735 it must be reloaded, so request this and count the cost. */
3736 if (j != noperands)
3737 {
3738 losers++;
3739 this_alternative_win[i] = 0;
3740 this_alternative_match_win[j] = 0;
3741 for (j = 0; j < noperands; j++)
3742 if (this_alternative_matches[j] == i
3743 && this_alternative_match_win[j])
3744 {
3745 this_alternative_win[j] = 0;
3746 this_alternative_match_win[j] = 0;
3747 losers++;
3748 }
3749 }
3750 }
3751
3752 /* If one alternative accepts all the operands, no reload required,
3753 choose that alternative; don't consider the remaining ones. */
3754 if (losers == 0)
3755 {
3756 /* Unswap these so that they are never swapped at `finish'. */
3757 if (swapped)
3758 {
3759 recog_data.operand[commutative] = substed_operand[commutative];
3760 recog_data.operand[commutative + 1]
3761 = substed_operand[commutative + 1];
3762 }
3763 for (i = 0; i < noperands; i++)
3764 {
3765 goal_alternative_win[i] = this_alternative_win[i];
3766 goal_alternative_match_win[i] = this_alternative_match_win[i];
3767 goal_alternative[i] = this_alternative[i];
3768 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3769 goal_alternative_matches[i] = this_alternative_matches[i];
3770 goal_alternative_earlyclobber[i]
3771 = this_alternative_earlyclobber[i];
3772 }
3773 goal_alternative_number = this_alternative_number;
3774 goal_alternative_swapped = swapped;
3775 goal_earlyclobber = this_earlyclobber;
3776 goto finish;
3777 }
3778
3779 /* REJECT, set by the ! and ? constraint characters and when a register
3780 would be reloaded into a non-preferred class, discourages the use of
3781 this alternative for a reload goal. REJECT is incremented by six
3782 for each ? and two for each non-preferred class. */
3783 losers = losers * 6 + reject;
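/* A worked example of this scoring: an alternative that needs one
reload and carries one '?' scores 1 * 6 + 6 = 12, the same as an
alternative that needs two reloads and no '?'. A '!' sets REJECT to
600, so such an alternative is chosen only when nothing cheaper
works. */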
3784
3785 /* If this alternative can be made to work by reloading,
3786 and it needs less reloading than the others checked so far,
3787 record it as the chosen goal for reloading. */
3788 if (! bad)
3789 {
3790 if (best > losers)
3791 {
3792 for (i = 0; i < noperands; i++)
3793 {
3794 goal_alternative[i] = this_alternative[i];
3795 goal_alternative_win[i] = this_alternative_win[i];
3796 goal_alternative_match_win[i]
3797 = this_alternative_match_win[i];
3798 goal_alternative_offmemok[i]
3799 = this_alternative_offmemok[i];
3800 goal_alternative_matches[i] = this_alternative_matches[i];
3801 goal_alternative_earlyclobber[i]
3802 = this_alternative_earlyclobber[i];
3803 }
3804 goal_alternative_swapped = swapped;
3805 best = losers;
3806 goal_alternative_number = this_alternative_number;
3807 goal_earlyclobber = this_earlyclobber;
3808 }
3809 }
3810
3811 if (swapped)
3812 {
3813 /* If the commutative operands have been swapped, swap
3814 them back in order to check the next alternative. */
3815 recog_data.operand[commutative] = substed_operand[commutative];
3816 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3817 /* Unswap the duplicates too. */
3818 for (i = 0; i < recog_data.n_dups; i++)
3819 if (recog_data.dup_num[i] == commutative
3820 || recog_data.dup_num[i] == commutative + 1)
3821 *recog_data.dup_loc[i]
3822 = recog_data.operand[(int) recog_data.dup_num[i]];
3823
3824 /* Unswap the operand related information as well. */
3825 std::swap (preferred_class[commutative],
3826 preferred_class[commutative + 1]);
3827 std::swap (pref_or_nothing[commutative],
3828 pref_or_nothing[commutative + 1]);
3829 std::swap (address_reloaded[commutative],
3830 address_reloaded[commutative + 1]);
3831 }
3832 }
3833 }
3834
3835 /* The operands don't meet the constraints.
3836 goal_alternative describes the alternative
3837 that we could reach by reloading the fewest operands.
3838 Reload so as to fit it. */
3839
3840 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3841 {
3842 /* No alternative works with reloads?? */
3843 if (insn_code_number >= 0)
3844 fatal_insn ("unable to generate reloads for:", insn);
3845 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3846 /* Avoid further trouble with this insn. */
3847 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3848 n_reloads = 0;
3849 return 0;
3850 }
3851
3852 /* Jump to `finish' from above if all operands are valid already.
3853 In that case, goal_alternative_win is all 1. */
3854 finish:
3855
3856 /* Right now, for any pair of operands I and J that are required to match,
3857 with I < J,
3858 goal_alternative_matches[J] is I.
3859 Set up goal_alternative_matched as the inverse function:
3860 goal_alternative_matched[I] = J. */
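/* For example, if operand 2 had the constraint "0" and lost,
goal_alternative_matches[2] == 0, and the loops below record
goal_alternative_matched[0] = 2. */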
3861
3862 for (i = 0; i < noperands; i++)
3863 goal_alternative_matched[i] = -1;
3864
3865 for (i = 0; i < noperands; i++)
3866 if (! goal_alternative_win[i]
3867 && goal_alternative_matches[i] >= 0)
3868 goal_alternative_matched[goal_alternative_matches[i]] = i;
3869
3870 for (i = 0; i < noperands; i++)
3871 goal_alternative_win[i] |= goal_alternative_match_win[i];
3872
3873 /* If the best alternative is with operands 1 and 2 swapped,
3874 consider them swapped before reporting the reloads. Update the
3875 operand numbers of any reloads already pushed. */
3876
3877 if (goal_alternative_swapped)
3878 {
3879 std::swap (substed_operand[commutative],
3880 substed_operand[commutative + 1]);
3881 std::swap (recog_data.operand[commutative],
3882 recog_data.operand[commutative + 1]);
3883 std::swap (*recog_data.operand_loc[commutative],
3884 *recog_data.operand_loc[commutative + 1]);
3885
3886 for (i = 0; i < n_reloads; i++)
3887 {
3888 if (rld[i].opnum == commutative)
3889 rld[i].opnum = commutative + 1;
3890 else if (rld[i].opnum == commutative + 1)
3891 rld[i].opnum = commutative;
3892 }
3893 }
3894
3895 for (i = 0; i < noperands; i++)
3896 {
3897 operand_reloadnum[i] = -1;
3898
3899 /* If this is an earlyclobber operand, we need to widen the scope.
3900 The reload must remain valid from the start of the insn being
3901 reloaded until after the operand is stored into its destination.
3902 We approximate this with RELOAD_OTHER even though we know that we
3903 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3904
3905 One special case that is worth checking is when we have an
3906 output that is earlyclobber but isn't used past the insn (typically
3907 a SCRATCH). In this case, we only need to have the reload live
3908 through the insn itself, not for any of our input or output
3909 reloads.
3910 But we must not accidentally narrow the scope of an existing
3911 RELOAD_OTHER reload - leave these alone.
3912
3913 In any case, anything needed to address this operand can remain
3914 however it was previously categorized. */
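/* Concretely: an earlyclobbered output that carries a REG_UNUSED note
(typically a SCRATCH) only needs RELOAD_FOR_INSN, while any other
earlyclobbered output is widened to RELOAD_OTHER below. */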
3915
3916 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3917 operand_type[i]
3918 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3919 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3920 }
3921
3922 /* Any constants that aren't allowed and can't be reloaded
3923 into registers are here changed into memory references. */
3924 for (i = 0; i < noperands; i++)
3925 if (! goal_alternative_win[i])
3926 {
3927 rtx op = recog_data.operand[i];
3928 rtx subreg = NULL_RTX;
3929 rtx plus = NULL_RTX;
3930 machine_mode mode = operand_mode[i];
3931
3932 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3933 push_reload so we have to let them pass here. */
3934 if (GET_CODE (op) == SUBREG)
3935 {
3936 subreg = op;
3937 op = SUBREG_REG (op);
3938 mode = GET_MODE (op);
3939 }
3940
3941 if (GET_CODE (op) == PLUS)
3942 {
3943 plus = op;
3944 op = XEXP (op, 1);
3945 }
3946
3947 if (CONST_POOL_OK_P (mode, op)
3948 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3949 == NO_REGS)
3950 || no_input_reloads))
3951 {
3952 int this_address_reloaded;
3953 rtx tem = force_const_mem (mode, op);
3954
3955 /* If we stripped a SUBREG or a PLUS above add it back. */
3956 if (plus != NULL_RTX)
3957 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3958
3959 if (subreg != NULL_RTX)
3960 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3961
3962 this_address_reloaded = 0;
3963 substed_operand[i] = recog_data.operand[i]
3964 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3965 0, insn, &this_address_reloaded);
3966
3967 /* If the alternative accepts constant pool refs directly
3968 there will be no reload needed at all. */
3969 if (plus == NULL_RTX
3970 && subreg == NULL_RTX
3971 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3972 ? substed_operand[i]
3973 : NULL,
3974 recog_data.constraints[i],
3975 goal_alternative_number))
3976 goal_alternative_win[i] = 1;
3977 }
3978 }
3979
3980 /* Record the values of the earlyclobber operands for the caller. */
3981 if (goal_earlyclobber)
3982 for (i = 0; i < noperands; i++)
3983 if (goal_alternative_earlyclobber[i])
3984 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3985
3986 /* Now record reloads for all the operands that need them. */
3987 for (i = 0; i < noperands; i++)
3988 if (! goal_alternative_win[i])
3989 {
3990 /* Operands that match previous ones have already been handled. */
3991 if (goal_alternative_matches[i] >= 0)
3992 ;
3993 /* Handle an operand with a nonoffsettable address
3994 appearing where an offsettable address will do
3995 by reloading the address into a base register.
3996
3997 ??? We can also do this when the operand is a register and
3998 reg_equiv_mem is not offsettable, but this is a bit tricky,
3999 so we don't bother with it. It may not be worth doing. */
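/* A small sketch of this case: (mem (plus (reg X) (reg Y))) is
typically not offsettable, but reloading its address into a base
register leaves (mem (reg BASE)), a simple register indirect, which
is. That is what the push_reload call below arranges. */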
4000 else if (goal_alternative_matched[i] == -1
4001 && goal_alternative_offmemok[i]
4002 && MEM_P (recog_data.operand[i]))
4003 {
4004 /* If the address to be reloaded is a VOIDmode constant,
4005 use the default address mode as mode of the reload register,
4006 as would have been done by find_reloads_address. */
4007 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4008 machine_mode address_mode;
4009
4010 address_mode = get_address_mode (recog_data.operand[i]);
4011 operand_reloadnum[i]
4012 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4013 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4014 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4015 address_mode,
4016 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4017 rld[operand_reloadnum[i]].inc
4018 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4019
4020 /* If this operand is an output, we will have made any
4021 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4022 now we are treating part of the operand as an input, so
4023 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4024
4025 if (modified[i] == RELOAD_WRITE)
4026 {
4027 for (j = 0; j < n_reloads; j++)
4028 {
4029 if (rld[j].opnum == i)
4030 {
4031 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4032 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4033 else if (rld[j].when_needed
4034 == RELOAD_FOR_OUTADDR_ADDRESS)
4035 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4036 }
4037 }
4038 }
4039 }
4040 else if (goal_alternative_matched[i] == -1)
4041 {
4042 operand_reloadnum[i]
4043 = push_reload ((modified[i] != RELOAD_WRITE
4044 ? recog_data.operand[i] : 0),
4045 (modified[i] != RELOAD_READ
4046 ? recog_data.operand[i] : 0),
4047 (modified[i] != RELOAD_WRITE
4048 ? recog_data.operand_loc[i] : 0),
4049 (modified[i] != RELOAD_READ
4050 ? recog_data.operand_loc[i] : 0),
4051 (enum reg_class) goal_alternative[i],
4052 (modified[i] == RELOAD_WRITE
4053 ? VOIDmode : operand_mode[i]),
4054 (modified[i] == RELOAD_READ
4055 ? VOIDmode : operand_mode[i]),
4056 (insn_code_number < 0 ? 0
4057 : insn_data[insn_code_number].operand[i].strict_low),
4058 0, i, operand_type[i]);
4059 }
4060 /* In a matching pair of operands, one must be input only
4061 and the other must be output only.
4062 Pass the input operand as IN and the other as OUT. */
4063 else if (modified[i] == RELOAD_READ
4064 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4065 {
4066 operand_reloadnum[i]
4067 = push_reload (recog_data.operand[i],
4068 recog_data.operand[goal_alternative_matched[i]],
4069 recog_data.operand_loc[i],
4070 recog_data.operand_loc[goal_alternative_matched[i]],
4071 (enum reg_class) goal_alternative[i],
4072 operand_mode[i],
4073 operand_mode[goal_alternative_matched[i]],
4074 0, 0, i, RELOAD_OTHER);
4075 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4076 }
4077 else if (modified[i] == RELOAD_WRITE
4078 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4079 {
4080 operand_reloadnum[goal_alternative_matched[i]]
4081 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4082 recog_data.operand[i],
4083 recog_data.operand_loc[goal_alternative_matched[i]],
4084 recog_data.operand_loc[i],
4085 (enum reg_class) goal_alternative[i],
4086 operand_mode[goal_alternative_matched[i]],
4087 operand_mode[i],
4088 0, 0, i, RELOAD_OTHER);
4089 operand_reloadnum[i] = output_reloadnum;
4090 }
4091 else
4092 {
4093 gcc_assert (insn_code_number < 0);
4094 error_for_asm (insn, "inconsistent operand constraints "
4095 "in an %<asm%>");
4096 /* Avoid further trouble with this insn. */
4097 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4098 n_reloads = 0;
4099 return 0;
4100 }
4101 }
4102 else if (goal_alternative_matched[i] < 0
4103 && goal_alternative_matches[i] < 0
4104 && address_operand_reloaded[i] != 1
4105 && optimize)
4106 {
4107 /* For each non-matching operand that's a MEM or a pseudo-register
4108 that didn't get a hard register, make an optional reload.
4109 This may get done even if the insn needs no reloads otherwise. */
4110
4111 rtx operand = recog_data.operand[i];
4112
4113 while (GET_CODE (operand) == SUBREG)
4114 operand = SUBREG_REG (operand);
4115 if ((MEM_P (operand)
4116 || (REG_P (operand)
4117 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4118 /* If this is only for an output, the optional reload would not
4119 actually cause us to use a register now, just note that
4120 something is stored here. */
4121 && (goal_alternative[i] != NO_REGS
4122 || modified[i] == RELOAD_WRITE)
4123 && ! no_input_reloads
4124 /* An optional output reload might make it possible to delete INSN later.
4125 We mustn't make in-out reloads on insns for which output reloads
4126 are not permitted.
4127 If this is an asm, we can't delete it; we must not even call
4128 push_reload for an optional output reload in this case,
4129 because we can't be sure that the constraint allows a register,
4130 and push_reload verifies the constraints for asms. */
4131 && (modified[i] == RELOAD_READ
4132 || (! no_output_reloads && ! this_insn_is_asm)))
4133 operand_reloadnum[i]
4134 = push_reload ((modified[i] != RELOAD_WRITE
4135 ? recog_data.operand[i] : 0),
4136 (modified[i] != RELOAD_READ
4137 ? recog_data.operand[i] : 0),
4138 (modified[i] != RELOAD_WRITE
4139 ? recog_data.operand_loc[i] : 0),
4140 (modified[i] != RELOAD_READ
4141 ? recog_data.operand_loc[i] : 0),
4142 (enum reg_class) goal_alternative[i],
4143 (modified[i] == RELOAD_WRITE
4144 ? VOIDmode : operand_mode[i]),
4145 (modified[i] == RELOAD_READ
4146 ? VOIDmode : operand_mode[i]),
4147 (insn_code_number < 0 ? 0
4148 : insn_data[insn_code_number].operand[i].strict_low),
4149 1, i, operand_type[i]);
4150 /* If a memory reference remains (either as a MEM or a pseudo that
4151 did not get a hard register), yet we can't make an optional
4152 reload, check if this is actually a pseudo register reference;
4153 we then need to emit a USE and/or a CLOBBER so that reload
4154 inheritance will do the right thing. */
4155 else if (replace
4156 && (MEM_P (operand)
4157 || (REG_P (operand)
4158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4159 && reg_renumber [REGNO (operand)] < 0)))
4160 {
4161 operand = *recog_data.operand_loc[i];
4162
4163 while (GET_CODE (operand) == SUBREG)
4164 operand = SUBREG_REG (operand);
4165 if (REG_P (operand))
4166 {
4167 if (modified[i] != RELOAD_WRITE)
4168 /* We mark the USE with QImode so that we recognize
4169 it as one that can be safely deleted at the end
4170 of reload. */
4171 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4172 insn), QImode);
4173 if (modified[i] != RELOAD_READ)
4174 emit_insn_after (gen_clobber (operand), insn);
4175 }
4176 }
4177 }
4178 else if (goal_alternative_matches[i] >= 0
4179 && goal_alternative_win[goal_alternative_matches[i]]
4180 && modified[i] == RELOAD_READ
4181 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4182 && ! no_input_reloads && ! no_output_reloads
4183 && optimize)
4184 {
4185 /* Similarly, make an optional reload for a pair of matching
4186 objects that are in MEM or a pseudo that didn't get a hard reg. */
4187
4188 rtx operand = recog_data.operand[i];
4189
4190 while (GET_CODE (operand) == SUBREG)
4191 operand = SUBREG_REG (operand);
4192 if ((MEM_P (operand)
4193 || (REG_P (operand)
4194 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4195 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4196 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4197 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4198 recog_data.operand[i],
4199 recog_data.operand_loc[goal_alternative_matches[i]],
4200 recog_data.operand_loc[i],
4201 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4202 operand_mode[goal_alternative_matches[i]],
4203 operand_mode[i],
4204 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4205 }
4206
4207 /* Perform whatever substitutions on the operands we are supposed
4208 to make due to commutativity or replacement of registers
4209 with equivalent constants or memory slots. */
4210
4211 for (i = 0; i < noperands; i++)
4212 {
4213 /* We only do this on the last pass through reload, because it is
4214 possible for some data (like reg_equiv_address) to be changed during
4215 later passes. Moreover, we lose the opportunity to get a useful
4216 reload_{in,out}_reg when we do these replacements. */
4217
4218 if (replace)
4219 {
4220 rtx substitution = substed_operand[i];
4221
4222 *recog_data.operand_loc[i] = substitution;
4223
4224 /* If we're replacing an operand with a LABEL_REF, we need to
4225 make sure that there's a REG_LABEL_OPERAND note attached to
4226 this instruction. */
4227 if (GET_CODE (substitution) == LABEL_REF
4228 && !find_reg_note (insn, REG_LABEL_OPERAND,
4229 LABEL_REF_LABEL (substitution))
4230 /* For a JUMP_P, if it was a branch target it must have
4231 already been recorded as such. */
4232 && (!JUMP_P (insn)
4233 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4234 insn)))
4235 {
4236 add_reg_note (insn, REG_LABEL_OPERAND,
4237 LABEL_REF_LABEL (substitution));
4238 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4239 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4240 }
4241
4242 }
4243 else
4244 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4245 }
4246
4247 /* If this insn pattern contains any MATCH_DUP's, make sure that
4248 they will be substituted if the operands they match are substituted.
4249 Also do now any substitutions we already did on the operands.
4250
4251 Don't do this if we aren't making replacements because we might be
4252 propagating things allocated by frame pointer elimination into places
4253 it doesn't expect. */
4254
4255 if (insn_code_number >= 0 && replace)
4256 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4257 {
4258 int opno = recog_data.dup_num[i];
4259 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4260 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4261 }
4262
4263 #if 0
4264 /* This loses because reloading of prior insns can invalidate the equivalence
4265 (or at least find_equiv_reg isn't smart enough to find it any more),
4266 causing this insn to need more reload regs than it needed before.
4267 It may be too late to make the reload regs available.
4268 Now this optimization is done safely in choose_reload_regs. */
4269
4270 /* For each reload of a reg into some other class of reg,
4271 search for an existing equivalent reg (same value now) in the right class.
4272 We can use it as long as we don't need to change its contents. */
4273 for (i = 0; i < n_reloads; i++)
4274 if (rld[i].reg_rtx == 0
4275 && rld[i].in != 0
4276 && REG_P (rld[i].in)
4277 && rld[i].out == 0)
4278 {
4279 rld[i].reg_rtx
4280 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4281 static_reload_reg_p, 0, rld[i].inmode);
4282 /* Prevent generation of insn to load the value
4283 because the one we found already has the value. */
4284 if (rld[i].reg_rtx)
4285 rld[i].in = rld[i].reg_rtx;
4286 }
4287 #endif
4288
4289 /* If we detected an error and replaced the asm instruction with a USE,
4290 forget about the reloads. */
4291 if (GET_CODE (PATTERN (insn)) == USE
4292 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4293 n_reloads = 0;
4294
4295 /* Perhaps an output reload can be combined with another
4296 to reduce needs by one. */
4297 if (!goal_earlyclobber)
4298 combine_reloads ();
4299
4300 /* If we have a pair of reloads for parts of an address, they are reloading
4301 the same object, the operands themselves were not reloaded, and they
4302 are for two operands that are supposed to match, merge the reloads and
4303 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4304
4305 for (i = 0; i < n_reloads; i++)
4306 {
4307 int k;
4308
4309 for (j = i + 1; j < n_reloads; j++)
4310 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4311 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4312 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4315 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4316 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4317 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4318 && rtx_equal_p (rld[i].in, rld[j].in)
4319 && (operand_reloadnum[rld[i].opnum] < 0
4320 || rld[operand_reloadnum[rld[i].opnum]].optional)
4321 && (operand_reloadnum[rld[j].opnum] < 0
4322 || rld[operand_reloadnum[rld[j].opnum]].optional)
4323 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4324 || (goal_alternative_matches[rld[j].opnum]
4325 == rld[i].opnum)))
4326 {
4327 for (k = 0; k < n_replacements; k++)
4328 if (replacements[k].what == j)
4329 replacements[k].what = i;
4330
4331 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4333 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4334 else
4335 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4336 rld[j].in = 0;
4337 }
4338 }
4339
4340 /* Scan all the reloads and update their type.
4341 If a reload is for the address of an operand and we didn't reload
4342 that operand, change the type. Similarly, change the operand number
4343 of a reload when two operands match. If a reload is optional, treat it
4344 as though the operand isn't reloaded.
4345
4346 ??? This latter case is somewhat odd because if we do the optional
4347 reload, it means the object is hanging around. Thus we need only
4348 do the address reload if the optional reload was NOT done.
4349
4350 Change secondary reloads to be the address type of their operand, not
4351 the normal type.
4352
4353 If an operand's reload is now RELOAD_OTHER, change any
4354 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4355 RELOAD_FOR_OTHER_ADDRESS. */
4356
4357 for (i = 0; i < n_reloads; i++)
4358 {
4359 if (rld[i].secondary_p
4360 && rld[i].when_needed == operand_type[rld[i].opnum])
4361 rld[i].when_needed = address_type[rld[i].opnum];
4362
4363 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4364 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4365 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4367 && (operand_reloadnum[rld[i].opnum] < 0
4368 || rld[operand_reloadnum[rld[i].opnum]].optional))
4369 {
4370 /* If we have a secondary reload to go along with this reload,
4371 change its type to RELOAD_FOR_OPADDR_ADDR. */
4372
4373 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4374 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4375 && rld[i].secondary_in_reload != -1)
4376 {
4377 int secondary_in_reload = rld[i].secondary_in_reload;
4378
4379 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4380
4381 /* If there's a tertiary reload we have to change it also. */
4382 if (secondary_in_reload > 0
4383 && rld[secondary_in_reload].secondary_in_reload != -1)
4384 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4385 = RELOAD_FOR_OPADDR_ADDR;
4386 }
4387
4388 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4389 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4390 && rld[i].secondary_out_reload != -1)
4391 {
4392 int secondary_out_reload = rld[i].secondary_out_reload;
4393
4394 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4395
4396 /* If there's a tertiary reload we have to change it also. */
4397 if (secondary_out_reload
4398 && rld[secondary_out_reload].secondary_out_reload != -1)
4399 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4400 = RELOAD_FOR_OPADDR_ADDR;
4401 }
4402
4403 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4405 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4406 else
4407 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4408 }
4409
4410 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4411 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4412 && operand_reloadnum[rld[i].opnum] >= 0
4413 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4414 == RELOAD_OTHER))
4415 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4416
4417 if (goal_alternative_matches[rld[i].opnum] >= 0)
4418 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4419 }
4420
4421 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4422 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4423 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4424
4425 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4426 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4427 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4428 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4429 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4430 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4431 This is complicated by the fact that a single operand can have more
4432 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4433 choose_reload_regs without affecting code quality, and cases that
4434 actually fail are extremely rare, so it turns out to be better to fix
4435 the problem here by not generating cases that choose_reload_regs will
4436 fail for. */
4437 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4438 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4439 a single operand.
4440 We can reduce the register pressure by exploiting the fact that a
4441 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4442 does not conflict with any of them, provided it is only used for the
4443 first of the RELOAD_FOR_X_ADDRESS reloads. */
4444 {
4445 int first_op_addr_num = -2;
4446 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4447 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4448 int need_change = 0;
4449 /* We use first_op_addr_num and the contents of the above arrays
4450 first as flags: -2 means no instance encountered, -1 means exactly
4451 one instance encountered.
4452 If more than one instance has been encountered, we store the reload
4453 number of the first reload of the kind in question; reload numbers
4454 are known to be non-negative. */
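/* For instance, if operand 0 ends up with two RELOAD_FOR_INPUT_ADDRESS
reloads numbered 3 and 5, the backward scan below leaves
first_inpaddr_num[0] == 3 and sets NEED_CHANGE; the pass after it then
turns a RELOAD_FOR_INPADDR_ADDRESS reload for that operand back into
RELOAD_FOR_INPUT_ADDRESS unless only reload 3, the first of the pair,
uses it. */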
4455 for (i = 0; i < noperands; i++)
4456 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4457 for (i = n_reloads - 1; i >= 0; i--)
4458 {
4459 switch (rld[i].when_needed)
4460 {
4461 case RELOAD_FOR_OPERAND_ADDRESS:
4462 if (++first_op_addr_num >= 0)
4463 {
4464 first_op_addr_num = i;
4465 need_change = 1;
4466 }
4467 break;
4468 case RELOAD_FOR_INPUT_ADDRESS:
4469 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4470 {
4471 first_inpaddr_num[rld[i].opnum] = i;
4472 need_change = 1;
4473 }
4474 break;
4475 case RELOAD_FOR_OUTPUT_ADDRESS:
4476 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4477 {
4478 first_outpaddr_num[rld[i].opnum] = i;
4479 need_change = 1;
4480 }
4481 break;
4482 default:
4483 break;
4484 }
4485 }
4486
4487 if (need_change)
4488 {
4489 for (i = 0; i < n_reloads; i++)
4490 {
4491 int first_num;
4492 enum reload_type type;
4493
4494 switch (rld[i].when_needed)
4495 {
4496 case RELOAD_FOR_OPADDR_ADDR:
4497 first_num = first_op_addr_num;
4498 type = RELOAD_FOR_OPERAND_ADDRESS;
4499 break;
4500 case RELOAD_FOR_INPADDR_ADDRESS:
4501 first_num = first_inpaddr_num[rld[i].opnum];
4502 type = RELOAD_FOR_INPUT_ADDRESS;
4503 break;
4504 case RELOAD_FOR_OUTADDR_ADDRESS:
4505 first_num = first_outpaddr_num[rld[i].opnum];
4506 type = RELOAD_FOR_OUTPUT_ADDRESS;
4507 break;
4508 default:
4509 continue;
4510 }
4511 if (first_num < 0)
4512 continue;
4513 else if (i > first_num)
4514 rld[i].when_needed = type;
4515 else
4516 {
4517 /* Check if the only TYPE reload that uses reload I is
4518 reload FIRST_NUM. */
4519 for (j = n_reloads - 1; j > first_num; j--)
4520 {
4521 if (rld[j].when_needed == type
4522 && (rld[i].secondary_p
4523 ? rld[j].secondary_in_reload == i
4524 : reg_mentioned_p (rld[i].in, rld[j].in)))
4525 {
4526 rld[i].when_needed = type;
4527 break;
4528 }
4529 }
4530 }
4531 }
4532 }
4533 }
4534
4535 /* See if we have any reloads that are now allowed to be merged
4536 because we've changed when the reload is needed to
4537 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4538 check for the most common cases. */
4539
4540 for (i = 0; i < n_reloads; i++)
4541 if (rld[i].in != 0 && rld[i].out == 0
4542 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4543 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4544 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4545 for (j = 0; j < n_reloads; j++)
4546 if (i != j && rld[j].in != 0 && rld[j].out == 0
4547 && rld[j].when_needed == rld[i].when_needed
4548 && MATCHES (rld[i].in, rld[j].in)
4549 && rld[i].rclass == rld[j].rclass
4550 && !rld[i].nocombine && !rld[j].nocombine
4551 && rld[i].reg_rtx == rld[j].reg_rtx)
4552 {
4553 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4554 transfer_replacements (i, j);
4555 rld[j].in = 0;
4556 }
4557
4558 /* If we made any reloads for addresses, see if they violate a
4559 "no input reloads" requirement for this insn. But loads that we
4560 do after the insn (such as for output addresses) are fine. */
4561 if (HAVE_cc0 && no_input_reloads)
4562 for (i = 0; i < n_reloads; i++)
4563 gcc_assert (rld[i].in == 0
4564 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4565 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4566
4567 /* Compute reload_mode and reload_nregs. */
4568 for (i = 0; i < n_reloads; i++)
4569 {
4570 rld[i].mode
4571 = (rld[i].inmode == VOIDmode
4572 || (GET_MODE_SIZE (rld[i].outmode)
4573 > GET_MODE_SIZE (rld[i].inmode)))
4574 ? rld[i].outmode : rld[i].inmode;
4575
4576 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4577 }
4578
4579 /* Special case a simple move with an input reload and a
4580 destination of a hard reg: if the hard reg is ok, use it. */
4581 for (i = 0; i < n_reloads; i++)
4582 if (rld[i].when_needed == RELOAD_FOR_INPUT
4583 && GET_CODE (PATTERN (insn)) == SET
4584 && REG_P (SET_DEST (PATTERN (insn)))
4585 && (SET_SRC (PATTERN (insn)) == rld[i].in
4586 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4587 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4588 {
4589 rtx dest = SET_DEST (PATTERN (insn));
4590 unsigned int regno = REGNO (dest);
4591
4592 if (regno < FIRST_PSEUDO_REGISTER
4593 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4594 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4595 {
4596 int nr = hard_regno_nregs[regno][rld[i].mode];
4597 int ok = 1, nri;
4598
4599 for (nri = 1; nri < nr; nri ++)
4600 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4601 {
4602 ok = 0;
4603 break;
4604 }
4605
4606 if (ok)
4607 rld[i].reg_rtx = dest;
4608 }
4609 }
4610
4611 return retval;
4612 }
4613
4614 /* Return true if alternative number ALTNUM in constraint-string
4615 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4616 MEM gives the reference if it didn't need any reloads, otherwise it
4617 is null. */
4618
4619 static bool
4620 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4621 const char *constraint, int altnum)
4622 {
4623 int c;
4624
4625 /* Skip alternatives before the one requested. */
4626 while (altnum > 0)
4627 {
4628 while (*constraint++ != ',')
4629 ;
4630 altnum--;
4631 }
4632 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4633 If one of them is present, this alternative accepts the result of
4634 passing a constant-pool reference through find_reloads_toplev.
4635
4636 The same is true of extra memory constraints if the address
4637 was reloaded into a register. However, the target may elect
4638 to disallow the original constant address, forcing it to be
4639 reloaded into a register instead. */
4640 for (; (c = *constraint) && c != ',' && c != '#';
4641 constraint += CONSTRAINT_LEN (c, constraint))
4642 {
4643 enum constraint_num cn = lookup_constraint (constraint);
4644 if (insn_extra_memory_constraint (cn)
4645 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4646 return true;
4647 }
4648 return false;
4649 }
4650 \f
4651 /* Scan X for memory references and scan the addresses for reloading.
4652 Also checks for references to "constant" regs that we want to eliminate
4653 and replaces them with the values they stand for.
4654 We may alter X destructively if it contains a reference to such.
4655 If X is just a constant reg, we return the equivalent value
4656 instead of X.
4657
4658 IND_LEVELS says how many levels of indirect addressing this machine
4659 supports.
4660
4661 OPNUM and TYPE identify the purpose of the reload.
4662
4663 IS_SET_DEST is true if X is the destination of a SET, which is not
4664 appropriate to be replaced by a constant.
4665
4666 INSN, if nonzero, is the insn in which we do the reload. It is used
4667 to determine if we may generate output reloads, and where to put USEs
4668 for pseudos that we have to replace with stack slots.
4669
4670 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4671 result of find_reloads_address. */
4672
4673 static rtx
4674 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4675 int ind_levels, int is_set_dest, rtx_insn *insn,
4676 int *address_reloaded)
4677 {
4678 RTX_CODE code = GET_CODE (x);
4679
4680 const char *fmt = GET_RTX_FORMAT (code);
4681 int i;
4682 int copied;
4683
4684 if (code == REG)
4685 {
4686 /* This code is duplicated for speed in find_reloads. */
4687 int regno = REGNO (x);
4688 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4689 x = reg_equiv_constant (regno);
4690 #if 0
4691 /* This creates (subreg (mem...)) which would cause an unnecessary
4692 reload of the mem. */
4693 else if (reg_equiv_mem (regno) != 0)
4694 x = reg_equiv_mem (regno);
4695 #endif
4696 else if (reg_equiv_memory_loc (regno)
4697 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4698 {
4699 rtx mem = make_memloc (x, regno);
4700 if (reg_equiv_address (regno)
4701 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4702 {
4703 /* If this is not a toplevel operand, find_reloads doesn't see
4704 this substitution. We have to emit a USE of the pseudo so
4705 that delete_output_reload can see it. */
4706 if (replace_reloads && recog_data.operand[opnum] != x)
4707 /* We mark the USE with QImode so that we recognize it
4708 as one that can be safely deleted at the end of
4709 reload. */
4710 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4711 QImode);
4712 x = mem;
4713 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4714 opnum, type, ind_levels, insn);
4715 if (!rtx_equal_p (x, mem))
4716 push_reg_equiv_alt_mem (regno, x);
4717 if (address_reloaded)
4718 *address_reloaded = i;
4719 }
4720 }
4721 return x;
4722 }
4723 if (code == MEM)
4724 {
4725 rtx tem = x;
4726
4727 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4728 opnum, type, ind_levels, insn);
4729 if (address_reloaded)
4730 *address_reloaded = i;
4731
4732 return tem;
4733 }
4734
4735 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4736 {
4737 /* Check for SUBREG containing a REG that's equivalent to a
4738 constant. If the constant has a known value, truncate it
4739 right now. Similarly if we are extracting a single-word of a
4740 multi-word constant. If the constant is symbolic, allow it
4741 to be substituted normally. push_reload will strip the
4742 subreg later. The constant must not be VOIDmode, because we
4743 will lose the mode of the register (this should never happen
4744 because one of the cases above should handle it). */
4745
4746 int regno = REGNO (SUBREG_REG (x));
4747 rtx tem;
4748
4749 if (regno >= FIRST_PSEUDO_REGISTER
4750 && reg_renumber[regno] < 0
4751 && reg_equiv_constant (regno) != 0)
4752 {
4753 tem =
4754 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4755 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4756 gcc_assert (tem);
4757 if (CONSTANT_P (tem)
4758 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4759 {
4760 tem = force_const_mem (GET_MODE (x), tem);
4761 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4762 &XEXP (tem, 0), opnum, type,
4763 ind_levels, insn);
4764 if (address_reloaded)
4765 *address_reloaded = i;
4766 }
4767 return tem;
4768 }
4769
4770 /* If the subreg contains a reg that will be converted to a mem,
4771 attempt to convert the whole subreg to a (narrower or wider)
4772 memory reference instead. If this succeeds, we're done --
4773 otherwise fall through to check whether the inner reg still
4774 needs address reloads anyway. */
4775
4776 if (regno >= FIRST_PSEUDO_REGISTER
4777 && reg_equiv_memory_loc (regno) != 0)
4778 {
4779 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4780 insn, address_reloaded);
4781 if (tem)
4782 return tem;
4783 }
4784 }
4785
4786 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4787 {
4788 if (fmt[i] == 'e')
4789 {
4790 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4791 ind_levels, is_set_dest, insn,
4792 address_reloaded);
4793 /* If we have replaced a reg with its equivalent memory loc -
4794 that can still be handled here e.g. if it's in a paradoxical
4795 subreg - we must make the change in a copy, rather than using
4796 a destructive change. This way, find_reloads can still elect
4797 not to do the change. */
4798 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4799 {
4800 x = shallow_copy_rtx (x);
4801 copied = 1;
4802 }
4803 XEXP (x, i) = new_part;
4804 }
4805 }
4806 return x;
4807 }
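/* Illustrative sketch only, not part of the reload pass: a caller of
   find_reloads_toplev typically rewrites an operand in place and keeps
   the ADDRESS_RELOADED result around.  Variable names here are
   hypothetical.  */
#if 0
{
  int address_was_reloaded = 0;
  rtx op = recog_data.operand[opnum];

  op = find_reloads_toplev (op, opnum, RELOAD_OTHER, ind_levels,
			    /*is_set_dest=*/0, insn, &address_was_reloaded);
  /* OP may now be a constant, a MEM for a spilled pseudo, or a shallow
     copy with subexpressions replaced; ADDRESS_WAS_RELOADED holds the
     find_reloads_address result for any address inside it.  */
  recog_data.operand[opnum] = op;
}
#endif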
4808
4809 /* Return a mem ref for the memory equivalent of reg REGNO.
4810 This mem ref is not shared with anything. */
4811
4812 static rtx
4813 make_memloc (rtx ad, int regno)
4814 {
4815 /* We must rerun eliminate_regs, in case the elimination
4816 offsets have changed. */
4817 rtx tem
4818 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4819 0);
4820
4821 /* If TEM might contain a pseudo, we must copy it to avoid
4822 modifying it when we do the substitution for the reload. */
4823 if (rtx_varies_p (tem, 0))
4824 tem = copy_rtx (tem);
4825
4826 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4827 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4828
4829 /* Copy the result if it's still the same as the equivalence, to avoid
4830 modifying it when we do the substitution for the reload. */
4831 if (tem == reg_equiv_memory_loc (regno))
4832 tem = copy_rtx (tem);
4833 return tem;
4834 }
4835
4836 /* Returns true if AD could be turned into a valid memory reference
4837 to mode MODE in address space AS by reloading the part pointed to
4838 by PART into a register. */
4839
4840 static int
4841 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4842 addr_space_t as, rtx *part)
4843 {
4844 int retv;
4845 rtx tem = *part;
4846 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4847
4848 *part = reg;
4849 retv = memory_address_addr_space_p (mode, ad, as);
4850 *part = tem;
4851
4852 return retv;
4853 }
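/* Usage sketch, for illustration only: callers ask whether reloading a
   single subexpression into a fresh register would make the whole
   address valid, e.g. for an over-large displacement in
   (plus (reg) (const_int N)):  */
#if 0
if (! maybe_memory_address_addr_space_p (mode, ad, as, &XEXP (ad, 0)))
  {
    /* Even with the base register reloaded the address stays invalid,
       so the whole sum has to be reloaded instead.  */
  }
#endif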
4854
4855 /* Record all reloads needed for handling memory address AD
4856 which appears in *LOC in a memory reference to mode MODE
4857 which itself is found in location *MEMREFLOC.
4858 Note that we take shortcuts assuming that no multi-reg machine mode
4859 occurs as part of an address.
4860
4861 OPNUM and TYPE specify the purpose of this reload.
4862
4863 IND_LEVELS says how many levels of indirect addressing this machine
4864 supports.
4865
4866 INSN, if nonzero, is the insn in which we do the reload. It is used
4867 to determine if we may generate output reloads, and where to put USEs
4868 for pseudos that we have to replace with stack slots.
4869
4870 Value is one if this address is reloaded or replaced as a whole; it is
4871 zero if the top level of this address was not reloaded or replaced, and
4872 it is -1 if it may or may not have been reloaded or replaced.
4873
4874 Note that there is no verification that the address will be valid after
4875 this routine does its work. Instead, we rely on the fact that the address
4876 was valid when reload started. So we need only undo things that reload
4877 could have broken. These are wrong register types, pseudos not allocated
4878 to a hard register, and frame pointer elimination. */
4879
4880 static int
4881 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4882 rtx *loc, int opnum, enum reload_type type,
4883 int ind_levels, rtx_insn *insn)
4884 {
4885 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4886 : ADDR_SPACE_GENERIC;
4887 int regno;
4888 int removed_and = 0;
4889 int op_index;
4890 rtx tem;
4891
4892 /* If the address is a register, see if it is a legitimate address and
4893 reload if not. We first handle the cases where we need not reload
4894 or where we must reload in a non-standard way. */
4895
4896 if (REG_P (ad))
4897 {
4898 regno = REGNO (ad);
4899
4900 if (reg_equiv_constant (regno) != 0)
4901 {
4902 find_reloads_address_part (reg_equiv_constant (regno), loc,
4903 base_reg_class (mode, as, MEM, SCRATCH),
4904 GET_MODE (ad), opnum, type, ind_levels);
4905 return 1;
4906 }
4907
4908 tem = reg_equiv_memory_loc (regno);
4909 if (tem != 0)
4910 {
4911 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4912 {
4913 tem = make_memloc (ad, regno);
4914 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4915 XEXP (tem, 0),
4916 MEM_ADDR_SPACE (tem)))
4917 {
4918 rtx orig = tem;
4919
4920 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4921 &XEXP (tem, 0), opnum,
4922 ADDR_TYPE (type), ind_levels, insn);
4923 if (!rtx_equal_p (tem, orig))
4924 push_reg_equiv_alt_mem (regno, tem);
4925 }
4926 /* We can avoid a reload if the register's equivalent memory
4927 expression is valid as an indirect memory address.
4928 But not all addresses are valid in a mem used as an indirect
4929 address: only reg or reg+constant. */
4930
4931 if (ind_levels > 0
4932 && strict_memory_address_addr_space_p (mode, tem, as)
4933 && (REG_P (XEXP (tem, 0))
4934 || (GET_CODE (XEXP (tem, 0)) == PLUS
4935 && REG_P (XEXP (XEXP (tem, 0), 0))
4936 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4937 {
4938 /* If TEM is not the same as what we'll be replacing the
4939 pseudo with after reload, put a USE in front of INSN
4940 in the final reload pass. */
4941 if (replace_reloads
4942 && num_not_at_initial_offset
4943 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4944 {
4945 *loc = tem;
4946 /* We mark the USE with QImode so that we
4947 recognize it as one that can be safely
4948 deleted at the end of reload. */
4949 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4950 insn), QImode);
4951
4952 /* This doesn't really count as replacing the address
4953 as a whole, since it is still a memory access. */
4954 }
4955 return 0;
4956 }
4957 ad = tem;
4958 }
4959 }
4960
4961 /* The only remaining case where we can avoid a reload is if this is a
4962 hard register that is valid as a base register and which is not the
4963 subject of a CLOBBER in this insn. */
4964
4965 else if (regno < FIRST_PSEUDO_REGISTER
4966 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4967 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4968 return 0;
4969
4970 /* If we do not have one of the cases above, we must do the reload. */
4971 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4972 base_reg_class (mode, as, MEM, SCRATCH),
4973 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4974 return 1;
4975 }
4976
4977 if (strict_memory_address_addr_space_p (mode, ad, as))
4978 {
4979 /* The address appears valid, so reloads are not needed.
4980 But the address may contain an eliminable register.
4981 This can happen because a machine with indirect addressing
4982 may consider a pseudo register by itself a valid address even when
4983 it has failed to get a hard reg.
4984 So do a tree-walk to find and eliminate all such regs. */
4985
4986 /* But first quickly dispose of a common case. */
4987 if (GET_CODE (ad) == PLUS
4988 && CONST_INT_P (XEXP (ad, 1))
4989 && REG_P (XEXP (ad, 0))
4990 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4991 return 0;
4992
4993 subst_reg_equivs_changed = 0;
4994 *loc = subst_reg_equivs (ad, insn);
4995
4996 if (! subst_reg_equivs_changed)
4997 return 0;
4998
4999 /* Check result for validity after substitution. */
5000 if (strict_memory_address_addr_space_p (mode, ad, as))
5001 return 0;
5002 }
5003
5004 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5005 do
5006 {
5007 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5008 {
5009 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5010 ind_levels, win);
5011 }
5012 break;
5013 win:
5014 *memrefloc = copy_rtx (*memrefloc);
5015 XEXP (*memrefloc, 0) = ad;
5016 move_replacements (&ad, &XEXP (*memrefloc, 0));
5017 return -1;
5018 }
5019 while (0);
5020 #endif
5021
5022 /* The address is not valid. We have to figure out why. First see if
5023 we have an outer AND and remove it if so. Then analyze what's inside. */
5024
5025 if (GET_CODE (ad) == AND)
5026 {
5027 removed_and = 1;
5028 loc = &XEXP (ad, 0);
5029 ad = *loc;
5030 }
5031
5032 /* One possibility for why the address is invalid is that it is itself
5033 a MEM. This can happen when the frame pointer is being eliminated, a
5034 pseudo is not allocated to a hard register, and the offset between the
5035 frame and stack pointers is not its initial value. In that case the
5036 pseudo will have been replaced by a MEM referring to the
5037 stack pointer. */
5038 if (MEM_P (ad))
5039 {
5040 /* First ensure that the address in this MEM is valid. Then, unless
5041 indirect addresses are valid, reload the MEM into a register. */
5042 tem = ad;
5043 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5044 opnum, ADDR_TYPE (type),
5045 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5046
5047 /* If tem was changed, then we must create a new memory reference to
5048 hold it and store it back into memrefloc. */
5049 if (tem != ad && memrefloc)
5050 {
5051 *memrefloc = copy_rtx (*memrefloc);
5052 copy_replacements (tem, XEXP (*memrefloc, 0));
5053 loc = &XEXP (*memrefloc, 0);
5054 if (removed_and)
5055 loc = &XEXP (*loc, 0);
5056 }
5057
5058 /* Check similar cases as for indirect addresses as above except
5059 that we can allow pseudos and a MEM since they should have been
5060 taken care of above. */
5061
5062 if (ind_levels == 0
5063 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5064 || MEM_P (XEXP (tem, 0))
5065 || ! (REG_P (XEXP (tem, 0))
5066 || (GET_CODE (XEXP (tem, 0)) == PLUS
5067 && REG_P (XEXP (XEXP (tem, 0), 0))
5068 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5069 {
5070 /* Must use TEM here, not AD, since it is the one that will
5071 have any subexpressions reloaded, if needed. */
5072 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5073 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5074 VOIDmode, 0,
5075 0, opnum, type);
5076 return ! removed_and;
5077 }
5078 else
5079 return 0;
5080 }
5081
5082 /* If we have address of a stack slot but it's not valid because the
5083 displacement is too large, compute the sum in a register.
5084 Handle all base registers here, not just fp/ap/sp, because on some
5085 targets (namely SH) we can also get too large displacements from
5086 big-endian corrections. */
5087 else if (GET_CODE (ad) == PLUS
5088 && REG_P (XEXP (ad, 0))
5089 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5090 && CONST_INT_P (XEXP (ad, 1))
5091 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5092 CONST_INT)
5093 /* Similarly, if we were to reload the base register and the
5094 mem+offset address is still invalid, then we want to reload
5095 the whole address, not just the base register. */
5096 || ! maybe_memory_address_addr_space_p
5097 (mode, ad, as, &(XEXP (ad, 0)))))
5098
5099 {
5100 /* Unshare the MEM rtx so we can safely alter it. */
5101 if (memrefloc)
5102 {
5103 *memrefloc = copy_rtx (*memrefloc);
5104 loc = &XEXP (*memrefloc, 0);
5105 if (removed_and)
5106 loc = &XEXP (*loc, 0);
5107 }
5108
5109 if (double_reg_address_ok
5110 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5111 PLUS, CONST_INT))
5112 {
5113 /* Unshare the sum as well. */
5114 *loc = ad = copy_rtx (ad);
5115
5116 /* Reload the displacement into an index reg.
5117 We assume the frame pointer or arg pointer is a base reg. */
5118 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5119 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5120 type, ind_levels);
5121 return 0;
5122 }
5123 else
5124 {
5125 /* If the sum of two regs is not necessarily valid,
5126 reload the sum into a base reg.
5127 That will at least work. */
5128 find_reloads_address_part (ad, loc,
5129 base_reg_class (mode, as, MEM, SCRATCH),
5130 GET_MODE (ad), opnum, type, ind_levels);
5131 }
5132 return ! removed_and;
5133 }
5134
5135 /* If we have an indexed stack slot, there are three possible reasons why
5136 it might be invalid: The index might need to be reloaded, the address
5137 might have been made by frame pointer elimination and hence have a
5138 constant out of range, or both reasons might apply.
5139
5140 We can easily check for an index needing reload, but even if that is the
5141 case, we might also have an invalid constant. To avoid making the
5142 conservative assumption and requiring two reloads, we see if this address
5143 is valid when not interpreted strictly. If it is, the only problem is
5144 that the index needs a reload and find_reloads_address_1 will take care
5145 of it.
5146
5147 Handle all base registers here, not just fp/ap/sp, because on some
5148 targets (namely SPARC) we can also get invalid addresses from preventive
5149 subreg big-endian corrections made by find_reloads_toplev. We
5150 can also get expressions involving LO_SUM (rather than PLUS) from
5151 find_reloads_subreg_address.
5152
5153 If we decide to do something, it must be that `double_reg_address_ok'
5154 is true. We generate a reload of the base register + constant and
5155 rework the sum so that the reload register will be added to the index.
5156 This is safe because we know the address isn't shared.
5157
5158 We check for the base register as both the first and second operand of
5159 the innermost PLUS and/or LO_SUM. */
5160
5161 for (op_index = 0; op_index < 2; ++op_index)
5162 {
5163 rtx operand, addend;
5164 enum rtx_code inner_code;
5165
5166 if (GET_CODE (ad) != PLUS)
5167 continue;
5168
5169 inner_code = GET_CODE (XEXP (ad, 0));
5170 if (!(GET_CODE (ad) == PLUS
5171 && CONST_INT_P (XEXP (ad, 1))
5172 && (inner_code == PLUS || inner_code == LO_SUM)))
5173 continue;
5174
5175 operand = XEXP (XEXP (ad, 0), op_index);
5176 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5177 continue;
5178
5179 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5180
5181 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5182 GET_CODE (addend))
5183 || operand == frame_pointer_rtx
5184 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5185 || operand == hard_frame_pointer_rtx
5186 #endif
5187 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5188 || operand == arg_pointer_rtx
5189 #endif
5190 || operand == stack_pointer_rtx)
5191 && ! maybe_memory_address_addr_space_p
5192 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5193 {
5194 rtx offset_reg;
5195 enum reg_class cls;
5196
5197 offset_reg = plus_constant (GET_MODE (ad), operand,
5198 INTVAL (XEXP (ad, 1)));
5199
5200 /* Form the adjusted address. */
5201 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5202 ad = gen_rtx_PLUS (GET_MODE (ad),
5203 op_index == 0 ? offset_reg : addend,
5204 op_index == 0 ? addend : offset_reg);
5205 else
5206 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5207 op_index == 0 ? offset_reg : addend,
5208 op_index == 0 ? addend : offset_reg);
5209 *loc = ad;
5210
5211 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5212 find_reloads_address_part (XEXP (ad, op_index),
5213 &XEXP (ad, op_index), cls,
5214 GET_MODE (ad), opnum, type, ind_levels);
5215 find_reloads_address_1 (mode, as,
5216 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5217 GET_CODE (XEXP (ad, op_index)),
5218 &XEXP (ad, 1 - op_index), opnum,
5219 type, 0, insn);
5220
5221 return 0;
5222 }
5223 }
5224
5225 /* See if address becomes valid when an eliminable register
5226 in a sum is replaced. */
5227
5228 tem = ad;
5229 if (GET_CODE (ad) == PLUS)
5230 tem = subst_indexed_address (ad);
5231 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5232 {
5233 /* Ok, we win that way. Replace any additional eliminable
5234 registers. */
5235
5236 subst_reg_equivs_changed = 0;
5237 tem = subst_reg_equivs (tem, insn);
5238
5239 /* Make sure that didn't make the address invalid again. */
5240
5241 if (! subst_reg_equivs_changed
5242 || strict_memory_address_addr_space_p (mode, tem, as))
5243 {
5244 *loc = tem;
5245 return 0;
5246 }
5247 }
5248
5249 /* If constants aren't valid addresses, reload the constant address
5250 into a register. */
5251 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5252 {
5253 machine_mode address_mode = GET_MODE (ad);
5254 if (address_mode == VOIDmode)
5255 address_mode = targetm.addr_space.address_mode (as);
5256
5257 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5258 Unshare it so we can safely alter it. */
5259 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5260 && CONSTANT_POOL_ADDRESS_P (ad))
5261 {
5262 *memrefloc = copy_rtx (*memrefloc);
5263 loc = &XEXP (*memrefloc, 0);
5264 if (removed_and)
5265 loc = &XEXP (*loc, 0);
5266 }
5267
5268 find_reloads_address_part (ad, loc,
5269 base_reg_class (mode, as, MEM, SCRATCH),
5270 address_mode, opnum, type, ind_levels);
5271 return ! removed_and;
5272 }
5273
5274 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5275 opnum, type, ind_levels, insn);
5276 }
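/* Illustration only: the 1 / 0 / -1 value described above is normally
   just stored through an out-parameter, as find_reloads_toplev does for
   a MEM operand (names as in that routine):  */
#if 0
i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
			  opnum, type, ind_levels, insn);
if (address_reloaded)
  /* 1: address reloaded or replaced as a whole; 0: top level untouched;
     -1: unknown, e.g. after LEGITIMIZE_RELOAD_ADDRESS.  */
  *address_reloaded = i;
#endif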
5277 \f
5278 /* Find all pseudo regs appearing in AD
5279 that are eliminable in favor of equivalent values
5280 and do not have hard regs; replace them by their equivalents.
5281 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5282 front of it for pseudos that we have to replace with stack slots. */
5283
5284 static rtx
5285 subst_reg_equivs (rtx ad, rtx_insn *insn)
5286 {
5287 RTX_CODE code = GET_CODE (ad);
5288 int i;
5289 const char *fmt;
5290
5291 switch (code)
5292 {
5293 case HIGH:
5294 case CONST:
5295 CASE_CONST_ANY:
5296 case SYMBOL_REF:
5297 case LABEL_REF:
5298 case PC:
5299 case CC0:
5300 return ad;
5301
5302 case REG:
5303 {
5304 int regno = REGNO (ad);
5305
5306 if (reg_equiv_constant (regno) != 0)
5307 {
5308 subst_reg_equivs_changed = 1;
5309 return reg_equiv_constant (regno);
5310 }
5311 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5312 {
5313 rtx mem = make_memloc (ad, regno);
5314 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5315 {
5316 subst_reg_equivs_changed = 1;
5317 /* We mark the USE with QImode so that we recognize it
5318 as one that can be safely deleted at the end of
5319 reload. */
5320 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5321 QImode);
5322 return mem;
5323 }
5324 }
5325 }
5326 return ad;
5327
5328 case PLUS:
5329 /* Quickly dispose of a common case. */
5330 if (XEXP (ad, 0) == frame_pointer_rtx
5331 && CONST_INT_P (XEXP (ad, 1)))
5332 return ad;
5333 break;
5334
5335 default:
5336 break;
5337 }
5338
5339 fmt = GET_RTX_FORMAT (code);
5340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5341 if (fmt[i] == 'e')
5342 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5343 return ad;
5344 }
5345 \f
5346 /* Compute the sum of X and Y, making canonicalizations assumed in an
5347 address, namely: sum constant integers, surround the sum of two
5348 constants with a CONST, put the constant as the second operand, and
5349 group the constant on the outermost sum.
5350
5351 This routine assumes both inputs are already in canonical form. */
5352
5353 rtx
5354 form_sum (machine_mode mode, rtx x, rtx y)
5355 {
5356 rtx tem;
5357
5358 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5359 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5360
5361 if (CONST_INT_P (x))
5362 return plus_constant (mode, y, INTVAL (x));
5363 else if (CONST_INT_P (y))
5364 return plus_constant (mode, x, INTVAL (y));
5365 else if (CONSTANT_P (x))
5366 tem = x, x = y, y = tem;
5367
5368 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5369 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5370
5371 /* Note that if the operands of Y are specified in the opposite
5372 order in the recursive calls below, infinite recursion will occur. */
5373 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5374 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5375
5376 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5377 constant will have been placed second. */
5378 if (CONSTANT_P (x) && CONSTANT_P (y))
5379 {
5380 if (GET_CODE (x) == CONST)
5381 x = XEXP (x, 0);
5382 if (GET_CODE (y) == CONST)
5383 y = XEXP (y, 0);
5384
5385 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5386 }
5387
5388 return gen_rtx_PLUS (mode, x, y);
5389 }
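/* Worked example, for illustration only (REGNO_A is hypothetical):
   summing (plus (reg A) (const_int 4)) with (const_int 8) folds the
   constants and keeps the result canonical.  */
#if 0
rtx a = gen_rtx_REG (Pmode, REGNO_A);
rtx x = gen_rtx_PLUS (Pmode, a, GEN_INT (4));
rtx sum = form_sum (Pmode, x, GEN_INT (8));
/* SUM is (plus (reg A) (const_int 12)): the integer constants have been
   folded and the constant stays the second operand of the outer sum.  */
#endif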
5390 \f
5391 /* If ADDR is a sum containing a pseudo register that should be
5392 replaced with a constant (from reg_equiv_constant),
5393 return the result of doing so, and also apply the associative
5394 law so that the result is more likely to be a valid address.
5395 (But it is not guaranteed to be one.)
5396
5397 Note that at most one register is replaced, even if more are
5398 replaceable. Also, we try to put the result into a canonical form
5399 so it is more likely to be a valid address.
5400
5401 In all other cases, return ADDR. */
5402
5403 static rtx
5404 subst_indexed_address (rtx addr)
5405 {
5406 rtx op0 = 0, op1 = 0, op2 = 0;
5407 rtx tem;
5408 int regno;
5409
5410 if (GET_CODE (addr) == PLUS)
5411 {
5412 /* Try to find a register to replace. */
5413 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5414 if (REG_P (op0)
5415 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5416 && reg_renumber[regno] < 0
5417 && reg_equiv_constant (regno) != 0)
5418 op0 = reg_equiv_constant (regno);
5419 else if (REG_P (op1)
5420 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5421 && reg_renumber[regno] < 0
5422 && reg_equiv_constant (regno) != 0)
5423 op1 = reg_equiv_constant (regno);
5424 else if (GET_CODE (op0) == PLUS
5425 && (tem = subst_indexed_address (op0)) != op0)
5426 op0 = tem;
5427 else if (GET_CODE (op1) == PLUS
5428 && (tem = subst_indexed_address (op1)) != op1)
5429 op1 = tem;
5430 else
5431 return addr;
5432
5433 /* Pick out up to three things to add. */
5434 if (GET_CODE (op1) == PLUS)
5435 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5436 else if (GET_CODE (op0) == PLUS)
5437 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5438
5439 /* Compute the sum. */
5440 if (op2 != 0)
5441 op1 = form_sum (GET_MODE (addr), op1, op2);
5442 if (op1 != 0)
5443 op0 = form_sum (GET_MODE (addr), op0, op1);
5444
5445 return op0;
5446 }
5447 return addr;
5448 }
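/* Illustration only, with a hypothetical pseudo P whose
   reg_equiv_constant is (symbol_ref "var"): given
   ADDR = (plus (plus (reg P) (reg B)) (const_int 4)), the routine
   substitutes the constant and re-associates via form_sum.  */
#if 0
rtx new_addr = subst_indexed_address (addr);
/* NEW_ADDR is
   (plus (reg B) (const (plus (symbol_ref "var") (const_int 4)))),
   which is more likely (but not guaranteed) to be a valid address.  */
#endif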
5449 \f
5450 /* Update the REG_INC notes for an insn. It updates all REG_INC
5451 notes for the instruction which refer to REGNO to refer instead
5452 to the reload number.
5453
5454 INSN is the insn for which any REG_INC notes need updating.
5455
5456 REGNO is the register number which has been reloaded.
5457
5458 RELOADNUM is the reload number. */
5459
5460 static void
5461 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5462 int reloadnum ATTRIBUTE_UNUSED)
5463 {
5464 #ifdef AUTO_INC_DEC
5465 rtx link;
5466
5467 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5468 if (REG_NOTE_KIND (link) == REG_INC
5469 && (int) REGNO (XEXP (link, 0)) == regno)
5470 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5471 #endif
5472 }
5473 \f
5474 /* Record the pseudo registers we must reload into hard registers in a
5475 subexpression of a would-be memory address, X referring to a value
5476 in mode MODE. (This function is not called if the address we find
5477 is strictly valid.)
5478
5479 CONTEXT = 1 means we are considering regs as index regs,
5480 = 0 means we are considering them as base regs.
5481 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5482 or an autoinc code.
5483 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5484 is the code of the index part of the address. Otherwise, pass SCRATCH
5485 for this argument.
5486 OPNUM and TYPE specify the purpose of any reloads made.
5487
5488 IND_LEVELS says how many levels of indirect addressing are
5489 supported at this point in the address.
5490
5491 INSN, if nonzero, is the insn in which we do the reload. It is used
5492 to determine if we may generate output reloads.
5493
5494 We return nonzero if X, as a whole, is reloaded or replaced. */
5495
5496 /* Note that we take shortcuts assuming that no multi-reg machine mode
5497 occurs as part of an address.
5498 Also, this is not fully machine-customizable; it works for machines
5499 such as VAXen and 68000's and 32000's, but other possible machines
5500 could have addressing modes that this does not handle right.
5501 If you add push_reload calls here, you need to make sure gen_reload
5502 handles those cases gracefully. */
5503
5504 static int
5505 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5506 rtx x, int context,
5507 enum rtx_code outer_code, enum rtx_code index_code,
5508 rtx *loc, int opnum, enum reload_type type,
5509 int ind_levels, rtx_insn *insn)
5510 {
5511 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5512 ((CONTEXT) == 0 \
5513 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5514 : REGNO_OK_FOR_INDEX_P (REGNO))
5515
5516 enum reg_class context_reg_class;
5517 RTX_CODE code = GET_CODE (x);
5518 bool reloaded_inner_of_autoinc = false;
5519
5520 if (context == 1)
5521 context_reg_class = INDEX_REG_CLASS;
5522 else
5523 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5524
5525 switch (code)
5526 {
5527 case PLUS:
5528 {
5529 rtx orig_op0 = XEXP (x, 0);
5530 rtx orig_op1 = XEXP (x, 1);
5531 RTX_CODE code0 = GET_CODE (orig_op0);
5532 RTX_CODE code1 = GET_CODE (orig_op1);
5533 rtx op0 = orig_op0;
5534 rtx op1 = orig_op1;
5535
5536 if (GET_CODE (op0) == SUBREG)
5537 {
5538 op0 = SUBREG_REG (op0);
5539 code0 = GET_CODE (op0);
5540 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5541 op0 = gen_rtx_REG (word_mode,
5542 (REGNO (op0) +
5543 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5544 GET_MODE (SUBREG_REG (orig_op0)),
5545 SUBREG_BYTE (orig_op0),
5546 GET_MODE (orig_op0))));
5547 }
5548
5549 if (GET_CODE (op1) == SUBREG)
5550 {
5551 op1 = SUBREG_REG (op1);
5552 code1 = GET_CODE (op1);
5553 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5554 /* ??? Why is this given op1's mode and above for
5555 ??? op0 SUBREGs we use word_mode? */
5556 op1 = gen_rtx_REG (GET_MODE (op1),
5557 (REGNO (op1) +
5558 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5559 GET_MODE (SUBREG_REG (orig_op1)),
5560 SUBREG_BYTE (orig_op1),
5561 GET_MODE (orig_op1))));
5562 }
5563 /* A PLUS in the index register position can arise only as a result of
5564 register rematerialization for an expression like &localvar*4. Reload it.
5565 It may be possible to combine the displacement on the outer level,
5566 but it is probably not worthwhile to do so. */
5567 if (context == 1)
5568 {
5569 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5570 opnum, ADDR_TYPE (type), ind_levels, insn);
5571 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5572 context_reg_class,
5573 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5574 return 1;
5575 }
5576
5577 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5578 || code0 == ZERO_EXTEND || code1 == MEM)
5579 {
5580 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5581 &XEXP (x, 0), opnum, type, ind_levels,
5582 insn);
5583 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5584 &XEXP (x, 1), opnum, type, ind_levels,
5585 insn);
5586 }
5587
5588 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5589 || code1 == ZERO_EXTEND || code0 == MEM)
5590 {
5591 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5592 &XEXP (x, 0), opnum, type, ind_levels,
5593 insn);
5594 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5595 &XEXP (x, 1), opnum, type, ind_levels,
5596 insn);
5597 }
5598
5599 else if (code0 == CONST_INT || code0 == CONST
5600 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5601 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5602 &XEXP (x, 1), opnum, type, ind_levels,
5603 insn);
5604
5605 else if (code1 == CONST_INT || code1 == CONST
5606 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5607 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5608 &XEXP (x, 0), opnum, type, ind_levels,
5609 insn);
5610
5611 else if (code0 == REG && code1 == REG)
5612 {
5613 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5614 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5615 return 0;
5616 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5617 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5618 return 0;
5619 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5620 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5621 &XEXP (x, 1), opnum, type, ind_levels,
5622 insn);
5623 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5624 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5625 &XEXP (x, 0), opnum, type, ind_levels,
5626 insn);
5627 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5628 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5629 &XEXP (x, 0), opnum, type, ind_levels,
5630 insn);
5631 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5632 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5633 &XEXP (x, 1), opnum, type, ind_levels,
5634 insn);
5635 else
5636 {
5637 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5638 &XEXP (x, 0), opnum, type, ind_levels,
5639 insn);
5640 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5641 &XEXP (x, 1), opnum, type, ind_levels,
5642 insn);
5643 }
5644 }
5645
5646 else if (code0 == REG)
5647 {
5648 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5649 &XEXP (x, 0), opnum, type, ind_levels,
5650 insn);
5651 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5652 &XEXP (x, 1), opnum, type, ind_levels,
5653 insn);
5654 }
5655
5656 else if (code1 == REG)
5657 {
5658 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5659 &XEXP (x, 1), opnum, type, ind_levels,
5660 insn);
5661 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5662 &XEXP (x, 0), opnum, type, ind_levels,
5663 insn);
5664 }
5665 }
5666
5667 return 0;
5668
5669 case POST_MODIFY:
5670 case PRE_MODIFY:
5671 {
5672 rtx op0 = XEXP (x, 0);
5673 rtx op1 = XEXP (x, 1);
5674 enum rtx_code index_code;
5675 int regno;
5676 int reloadnum;
5677
5678 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5679 return 0;
5680
5681 /* Currently, we only support {PRE,POST}_MODIFY constructs
5682 where a base register is {inc,dec}remented by the contents
5683 of another register or by a constant value. Thus, these
5684 operands must match. */
5685 gcc_assert (op0 == XEXP (op1, 0));
5686
5687 /* Require index register (or constant). Let's just handle the
5688 register case in the meantime... If the target allows
5689 auto-modify by a constant then we could try replacing a pseudo
5690 register with its equivalent constant where applicable.
5691
5692 We also handle the case where the register was eliminated
5693 resulting in a PLUS subexpression.
5694
5695 If we later decide to reload the whole PRE_MODIFY or
5696 POST_MODIFY, inc_for_reload might clobber the reload register
5697 before reading the index. The index register might therefore
5698 need to live longer than a TYPE reload normally would, so be
5699 conservative and class it as RELOAD_OTHER. */
5700 if ((REG_P (XEXP (op1, 1))
5701 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5702 || GET_CODE (XEXP (op1, 1)) == PLUS)
5703 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5704 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5705 ind_levels, insn);
5706
5707 gcc_assert (REG_P (XEXP (op1, 0)));
5708
5709 regno = REGNO (XEXP (op1, 0));
5710 index_code = GET_CODE (XEXP (op1, 1));
5711
5712 /* A register that is incremented cannot be constant! */
5713 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5714 || reg_equiv_constant (regno) == 0);
5715
5716 /* Handle a register that is equivalent to a memory location
5717 which cannot be addressed directly. */
5718 if (reg_equiv_memory_loc (regno) != 0
5719 && (reg_equiv_address (regno) != 0
5720 || num_not_at_initial_offset))
5721 {
5722 rtx tem = make_memloc (XEXP (x, 0), regno);
5723
5724 if (reg_equiv_address (regno)
5725 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5726 {
5727 rtx orig = tem;
5728
5729 /* First reload the memory location's address.
5730 We can't use ADDR_TYPE (type) here, because we need to
5731 write back the value after reading it, hence we actually
5732 need two registers. */
5733 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5734 &XEXP (tem, 0), opnum,
5735 RELOAD_OTHER,
5736 ind_levels, insn);
5737
5738 if (!rtx_equal_p (tem, orig))
5739 push_reg_equiv_alt_mem (regno, tem);
5740
5741 /* Then reload the memory location into a base
5742 register. */
5743 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5744 &XEXP (op1, 0),
5745 base_reg_class (mode, as,
5746 code, index_code),
5747 GET_MODE (x), GET_MODE (x), 0,
5748 0, opnum, RELOAD_OTHER);
5749
5750 update_auto_inc_notes (this_insn, regno, reloadnum);
5751 return 0;
5752 }
5753 }
5754
5755 if (reg_renumber[regno] >= 0)
5756 regno = reg_renumber[regno];
5757
5758 /* We require a base register here... */
5759 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5760 {
5761 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5762 &XEXP (op1, 0), &XEXP (x, 0),
5763 base_reg_class (mode, as,
5764 code, index_code),
5765 GET_MODE (x), GET_MODE (x), 0, 0,
5766 opnum, RELOAD_OTHER);
5767
5768 update_auto_inc_notes (this_insn, regno, reloadnum);
5769 return 0;
5770 }
5771 }
5772 return 0;
5773
5774 case POST_INC:
5775 case POST_DEC:
5776 case PRE_INC:
5777 case PRE_DEC:
5778 if (REG_P (XEXP (x, 0)))
5779 {
5780 int regno = REGNO (XEXP (x, 0));
5781 int value = 0;
5782 rtx x_orig = x;
5783
5784 /* A register that is incremented cannot be constant! */
5785 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5786 || reg_equiv_constant (regno) == 0);
5787
5788 /* Handle a register that is equivalent to a memory location
5789 which cannot be addressed directly. */
5790 if (reg_equiv_memory_loc (regno) != 0
5791 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5792 {
5793 rtx tem = make_memloc (XEXP (x, 0), regno);
5794 if (reg_equiv_address (regno)
5795 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5796 {
5797 rtx orig = tem;
5798
5799 /* First reload the memory location's address.
5800 We can't use ADDR_TYPE (type) here, because we need to
5801 write back the value after reading it, hence we actually
5802 need two registers. */
5803 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5804 &XEXP (tem, 0), opnum, type,
5805 ind_levels, insn);
5806 reloaded_inner_of_autoinc = true;
5807 if (!rtx_equal_p (tem, orig))
5808 push_reg_equiv_alt_mem (regno, tem);
5809 /* Put this inside a new increment-expression. */
5810 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5811 /* Proceed to reload that, as if it contained a register. */
5812 }
5813 }
5814
5815 /* If we have a hard register that is ok in this incdec context,
5816 don't make a reload. If the register isn't nice enough for
5817 autoincdec, we can reload it. But if an autoincrement of a
5818 register that we verified here as acceptable still isn't "valid"
5819 in the enclosing context, it must be that no autoincrement is "valid".
5820 If that is true and something made an autoincrement anyway,
5821 this must be a special context where one is allowed.
5822 (For example, a "push" instruction.)
5823 We can't improve this address, so leave it alone. */
5824
5825 /* Otherwise, reload the autoincrement into a suitable hard reg
5826 and record how much to increment by. */
5827
5828 if (reg_renumber[regno] >= 0)
5829 regno = reg_renumber[regno];
5830 if (regno >= FIRST_PSEUDO_REGISTER
5831 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5832 index_code))
5833 {
5834 int reloadnum;
5835
5836 /* If we can output the register afterwards, do so, this
5837 saves the extra update.
5838 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5839 CALL_INSN - and it does not set CC0.
5840 But don't do this if we cannot directly address the
5841 memory location, since this will make it harder to
5842 reuse address reloads, and increases register pressure.
5843 Also don't do this if we can probably update x directly. */
5844 rtx equiv = (MEM_P (XEXP (x, 0))
5845 ? XEXP (x, 0)
5846 : reg_equiv_mem (regno));
5847 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5848 if (insn && NONJUMP_INSN_P (insn) && equiv
5849 && memory_operand (equiv, GET_MODE (equiv))
5850 #if HAVE_cc0
5851 && ! sets_cc0_p (PATTERN (insn))
5852 #endif
5853 && ! (icode != CODE_FOR_nothing
5854 && insn_operand_matches (icode, 0, equiv)
5855 && insn_operand_matches (icode, 1, equiv))
5856 /* Using RELOAD_OTHER means we emit this and the reload we
5857 made earlier in the wrong order. */
5858 && !reloaded_inner_of_autoinc)
5859 {
5860 /* We use the original pseudo for loc, so that
5861 emit_reload_insns() knows which pseudo this
5862 reload refers to and updates the pseudo rtx, not
5863 its equivalent memory location, as well as the
5864 corresponding entry in reg_last_reload_reg. */
5865 loc = &XEXP (x_orig, 0);
5866 x = XEXP (x, 0);
5867 reloadnum
5868 = push_reload (x, x, loc, loc,
5869 context_reg_class,
5870 GET_MODE (x), GET_MODE (x), 0, 0,
5871 opnum, RELOAD_OTHER);
5872 }
5873 else
5874 {
5875 reloadnum
5876 = push_reload (x, x, loc, (rtx*) 0,
5877 context_reg_class,
5878 GET_MODE (x), GET_MODE (x), 0, 0,
5879 opnum, type);
5880 rld[reloadnum].inc
5881 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5882
5883 value = 1;
5884 }
5885
5886 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5887 reloadnum);
5888 }
5889 return value;
5890 }
5891 return 0;
5892
5893 case TRUNCATE:
5894 case SIGN_EXTEND:
5895 case ZERO_EXTEND:
5896 /* Look for parts to reload in the inner expression and reload them
5897 too, in addition to this operation. Reloading all inner parts in
5898 addition to this one shouldn't be necessary, but at this point,
5899 we don't know if we can possibly omit any part that *can* be
5900 reloaded. Targets that are better off reloading just either part
5901 (or perhaps even a different part of an outer expression), should
5902 define LEGITIMIZE_RELOAD_ADDRESS. */
5903 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5904 context, code, SCRATCH, &XEXP (x, 0), opnum,
5905 type, ind_levels, insn);
5906 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5907 context_reg_class,
5908 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5909 return 1;
5910
5911 case MEM:
5912 /* This is probably the result of a substitution, by eliminate_regs, of
5913 an equivalent address for a pseudo that was not allocated to a hard
5914 register. Verify that the specified address is valid and reload it
5915 into a register.
5916
5917 Since we know we are going to reload this item, don't decrement
5918 IND_LEVELS for the extra indirection level.
5919
5920 Note that this is actually conservative: it would be slightly more
5921 efficient to use the value of SPILL_INDIRECT_LEVELS from
5922 reload1.c here. */
5923
5924 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5925 opnum, ADDR_TYPE (type), ind_levels, insn);
5926 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5927 context_reg_class,
5928 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5929 return 1;
5930
5931 case REG:
5932 {
5933 int regno = REGNO (x);
5934
5935 if (reg_equiv_constant (regno) != 0)
5936 {
5937 find_reloads_address_part (reg_equiv_constant (regno), loc,
5938 context_reg_class,
5939 GET_MODE (x), opnum, type, ind_levels);
5940 return 1;
5941 }
5942
5943 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5944 that feeds this insn. */
5945 if (reg_equiv_mem (regno) != 0)
5946 {
5947 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5948 context_reg_class,
5949 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5950 return 1;
5951 }
5952 #endif
5953
5954 if (reg_equiv_memory_loc (regno)
5955 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5956 {
5957 rtx tem = make_memloc (x, regno);
5958 if (reg_equiv_address (regno) != 0
5959 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5960 {
5961 x = tem;
5962 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5963 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5964 ind_levels, insn);
5965 if (!rtx_equal_p (x, tem))
5966 push_reg_equiv_alt_mem (regno, x);
5967 }
5968 }
5969
5970 if (reg_renumber[regno] >= 0)
5971 regno = reg_renumber[regno];
5972
5973 if (regno >= FIRST_PSEUDO_REGISTER
5974 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5975 index_code))
5976 {
5977 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5978 context_reg_class,
5979 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5980 return 1;
5981 }
5982
5983 /* If a register appearing in an address is the subject of a CLOBBER
5984 in this insn, reload it into some other register to be safe.
5985 The CLOBBER is supposed to make the register unavailable
5986 from before this insn to after it. */
5987 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5988 {
5989 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5990 context_reg_class,
5991 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5992 return 1;
5993 }
5994 }
5995 return 0;
5996
5997 case SUBREG:
5998 if (REG_P (SUBREG_REG (x)))
5999 {
6000 /* If this is a SUBREG of a hard register and the resulting register
6001 is of the wrong class, reload the whole SUBREG. This avoids
6002 needless copies if SUBREG_REG is multi-word. */
6003 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6004 {
6005 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6006
6007 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6008 index_code))
6009 {
6010 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6011 context_reg_class,
6012 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6013 return 1;
6014 }
6015 }
6016 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6017 is larger than the class size, then reload the whole SUBREG. */
6018 else
6019 {
6020 enum reg_class rclass = context_reg_class;
6021 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6022 > reg_class_size[(int) rclass])
6023 {
6024 /* If the inner register will be replaced by a memory
6025 reference, we can do this only if we can replace the
6026 whole subreg by a (narrower) memory reference. If
6027 this is not possible, fall through and reload just
6028 the inner register (including address reloads). */
6029 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6030 {
6031 rtx tem = find_reloads_subreg_address (x, opnum,
6032 ADDR_TYPE (type),
6033 ind_levels, insn,
6034 NULL);
6035 if (tem)
6036 {
6037 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6038 GET_MODE (tem), VOIDmode, 0, 0,
6039 opnum, type);
6040 return 1;
6041 }
6042 }
6043 else
6044 {
6045 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6046 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6047 return 1;
6048 }
6049 }
6050 }
6051 }
6052 break;
6053
6054 default:
6055 break;
6056 }
6057
6058 {
6059 const char *fmt = GET_RTX_FORMAT (code);
6060 int i;
6061
6062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6063 {
6064 if (fmt[i] == 'e')
6065 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6066 we get here. */
6067 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6068 code, SCRATCH, &XEXP (x, i),
6069 opnum, type, ind_levels, insn);
6070 }
6071 }
6072
6073 #undef REG_OK_FOR_CONTEXT
6074 return 0;
6075 }
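/* Illustration only: CONTEXT selects the register class that a REG
   appearing at this point of the address must be reloaded into,
   mirroring the choice made at the top of the routine above.  */
#if 0
enum reg_class cls
  = (context == 1
     ? INDEX_REG_CLASS				/* reg used as an index */
     : base_reg_class (mode, as, outer_code, index_code)); /* base reg */
#endif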
6076 \f
6077 /* X, which is found at *LOC, is a part of an address that needs to be
6078 reloaded into a register of class RCLASS. If X is a constant, or if
6079 X is a PLUS that contains a constant, check that the constant is a
6080 legitimate operand and that we are supposed to be able to load
6081 it into the register.
6082
6083 If not, force the constant into memory and reload the MEM instead.
6084
6085 MODE is the mode to use, in case X is an integer constant.
6086
6087 OPNUM and TYPE describe the purpose of any reloads made.
6088
6089 IND_LEVELS says how many levels of indirect addressing this machine
6090 supports. */
6091
6092 static void
6093 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6094 machine_mode mode, int opnum,
6095 enum reload_type type, int ind_levels)
6096 {
6097 if (CONSTANT_P (x)
6098 && (!targetm.legitimate_constant_p (mode, x)
6099 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6100 {
6101 x = force_const_mem (mode, x);
6102 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6103 opnum, type, ind_levels, 0);
6104 }
6105
6106 else if (GET_CODE (x) == PLUS
6107 && CONSTANT_P (XEXP (x, 1))
6108 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6109 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6110 == NO_REGS))
6111 {
6112 rtx tem;
6113
6114 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6115 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6116 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6117 opnum, type, ind_levels, 0);
6118 }
6119
6120 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6121 mode, VOIDmode, 0, 0, opnum, type);
6122 }
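/* Usage sketch, for illustration only, as when a pseudo used as an
   address is equivalent to a constant (see find_reloads_address):  */
#if 0
find_reloads_address_part (reg_equiv_constant (regno), loc,
			   base_reg_class (mode, as, MEM, SCRATCH),
			   GET_MODE (ad), opnum, type, ind_levels);
/* If the constant is not a legitimate operand for the register class,
   it is first forced into the constant pool and the resulting MEM,
   with its address reloaded, is what gets loaded into the register.  */
#endif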
6123 \f
6124 /* X, a subreg of a pseudo, is a part of an address that needs to be
6125 reloaded, and the pseudo is equivalent to a memory location.
6126
6127 Attempt to replace the whole subreg by a (possibly narrower or wider)
6128 memory reference. If this is possible, return this new memory
6129 reference, and push all required address reloads. Otherwise,
6130 return NULL.
6131
6132 OPNUM and TYPE identify the purpose of the reload.
6133
6134 IND_LEVELS says how many levels of indirect addressing are
6135 supported at this point in the address.
6136
6137 INSN, if nonzero, is the insn in which we do the reload. It is used
6138 to determine where to put USEs for pseudos that we have to replace with
6139 stack slots. */
6140
6141 static rtx
6142 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6143 int ind_levels, rtx_insn *insn,
6144 int *address_reloaded)
6145 {
6146 machine_mode outer_mode = GET_MODE (x);
6147 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6148 int regno = REGNO (SUBREG_REG (x));
6149 int reloaded = 0;
6150 rtx tem, orig;
6151 int offset;
6152
6153 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6154
6155 /* We cannot replace the subreg with a modified memory reference if:
6156
6157 - we have a paradoxical subreg that implicitly acts as a zero or
6158 sign extension operation due to LOAD_EXTEND_OP;
6159
6160 - we have a subreg that is implicitly supposed to act on the full
6161 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6162
6163 - the address of the equivalent memory location is mode-dependent; or
6164
6165 - we have a paradoxical subreg and the resulting memory is not
6166 sufficiently aligned to allow access in the wider mode.
6167
6168 In addition, we choose not to perform the replacement for *any*
6169 paradoxical subreg, even if it were possible in principle. This
6170 is to avoid generating wider memory references than necessary.
6171
6172 This corresponds to how previous versions of reload used to handle
6173 paradoxical subregs where no address reload was required. */
6174
6175 if (paradoxical_subreg_p (x))
6176 return NULL;
6177
6178 #ifdef WORD_REGISTER_OPERATIONS
6179 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6180 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6181 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6182 return NULL;
6183 #endif
6184
6185 /* Since we don't attempt to handle paradoxical subregs, we can just
6186 call into simplify_subreg, which will handle all remaining checks
6187 for us. */
6188 orig = make_memloc (SUBREG_REG (x), regno);
6189 offset = SUBREG_BYTE (x);
6190 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6191 if (!tem || !MEM_P (tem))
6192 return NULL;
6193
6194 /* Now push all required address reloads, if any. */
6195 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6196 XEXP (tem, 0), &XEXP (tem, 0),
6197 opnum, type, ind_levels, insn);
6198 /* ??? Do we need to handle nonzero offsets somehow? */
6199 if (!offset && !rtx_equal_p (tem, orig))
6200 push_reg_equiv_alt_mem (regno, tem);
6201
6202 /* For some processors an address may be valid in the original mode but
6203 not in a smaller mode. For example, ARM accepts a scaled index register
6204 in SImode but not in HImode. Note that this is only a problem if the
6205 address in reg_equiv_mem is already invalid in the new mode; other
6206 cases would be fixed by find_reloads_address as usual.
6207
6208 ??? We attempt to handle such cases here by doing an additional reload
6209 of the full address after the usual processing by find_reloads_address.
6210 Note that this may not work in the general case, but it seems to cover
6211 the cases where this situation currently occurs. A more general fix
6212 might be to reload the *value* instead of the address, but this would
6213 not be expected by the callers of this routine as-is.
6214
6215 If find_reloads_address already completely replaced the address, there
6216 is nothing further to do. */
6217 if (reloaded == 0
6218 && reg_equiv_mem (regno) != 0
6219 && !strict_memory_address_addr_space_p
6220 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6221 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6222 {
6223 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6224 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6225 MEM, SCRATCH),
6226 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6227 reloaded = 1;
6228 }
6229
6230 /* If this is not a toplevel operand, find_reloads doesn't see this
6231 substitution. We have to emit a USE of the pseudo so that
6232 delete_output_reload can see it. */
6233 if (replace_reloads && recog_data.operand[opnum] != x)
6234 /* We mark the USE with QImode so that we recognize it as one that
6235 can be safely deleted at the end of reload. */
6236 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6237 QImode);
6238
6239 if (address_reloaded)
6240 *address_reloaded = reloaded;
6241
6242 return tem;
6243 }
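/* Illustration only, with a hypothetical spilled pseudo P: for
   X = (subreg:HI (reg:SI P) 0) where P lives in a stack slot, the
   routine above rewrites the whole subreg as a narrower MEM.  */
#if 0
rtx mem = find_reloads_subreg_address (x, opnum, type, ind_levels,
				       insn, NULL);
/* MEM is something like (mem:HI (plus (reg fp) (const_int OFFSET)))
   with any required address reloads already pushed, or NULL when the
   replacement is not possible (e.g. for a paradoxical subreg).  */
#endif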
6244 \f
6245 /* Substitute into the current INSN the registers into which we have reloaded
6246 the things that need reloading. The array `replacements'
6247 contains the locations of all pointers that must be changed
6248 and says what to replace them with.
6249
6250 Return the rtx that X translates into; usually X, but modified. */
6251
6252 void
6253 subst_reloads (rtx_insn *insn)
6254 {
6255 int i;
6256
6257 for (i = 0; i < n_replacements; i++)
6258 {
6259 struct replacement *r = &replacements[i];
6260 rtx reloadreg = rld[r->what].reg_rtx;
6261 if (reloadreg)
6262 {
6263 #ifdef DEBUG_RELOAD
6264 /* This checking takes a very long time on some platforms
6265 causing the gcc.c-torture/compile/limits-fnargs.c test
6266 to time out during testing. See PR 31850.
6267
6268 Internal consistency test. Check that we don't modify
6269 anything in the equivalence arrays. Whenever something from
6270 those arrays needs to be reloaded, it must be unshared before
6271 being substituted into; the equivalence must not be modified.
6272 Otherwise, if the equivalence is used after that, it will
6273 have been modified, and the thing substituted (probably a
6274 register) is likely overwritten and not a usable equivalence. */
6275 int check_regno;
6276
6277 for (check_regno = 0; check_regno < max_regno; check_regno++)
6278 {
6279 #define CHECK_MODF(ARRAY) \
6280 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6281 || !loc_mentioned_in_p (r->where, \
6282 (*reg_equivs)[check_regno].ARRAY))
6283
6284 CHECK_MODF (constant);
6285 CHECK_MODF (memory_loc);
6286 CHECK_MODF (address);
6287 CHECK_MODF (mem);
6288 #undef CHECK_MODF
6289 }
6290 #endif /* DEBUG_RELOAD */
6291
6292 /* If we're replacing a LABEL_REF with a register, there must
6293 already be an indication (to e.g. flow) which label this
6294 register refers to. */
6295 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6296 || !JUMP_P (insn)
6297 || find_reg_note (insn,
6298 REG_LABEL_OPERAND,
6299 XEXP (*r->where, 0))
6300 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6301
6302 /* Encapsulate RELOADREG so its machine mode matches what
6303 used to be there. Note that gen_lowpart_common will
6304 do the wrong thing if RELOADREG is multi-word. RELOADREG
6305 will always be a REG here. */
6306 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6307 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6308
6309 *r->where = reloadreg;
6310 }
6311 /* If reload got no reg and isn't optional, something's wrong. */
6312 else
6313 gcc_assert (rld[r->what].optional);
6314 }
6315 }
6316 \f
6317 /* Make a copy of any replacements being done into X and move those
6318 copies to locations in Y, a copy of X. */
6319
6320 void
6321 copy_replacements (rtx x, rtx y)
6322 {
6323 copy_replacements_1 (&x, &y, n_replacements);
6324 }
6325
6326 static void
6327 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6328 {
6329 int i, j;
6330 rtx x, y;
6331 struct replacement *r;
6332 enum rtx_code code;
6333 const char *fmt;
6334
6335 for (j = 0; j < orig_replacements; j++)
6336 if (replacements[j].where == px)
6337 {
6338 r = &replacements[n_replacements++];
6339 r->where = py;
6340 r->what = replacements[j].what;
6341 r->mode = replacements[j].mode;
6342 }
6343
6344 x = *px;
6345 y = *py;
6346 code = GET_CODE (x);
6347 fmt = GET_RTX_FORMAT (code);
6348
6349 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6350 {
6351 if (fmt[i] == 'e')
6352 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6353 else if (fmt[i] == 'E')
6354 for (j = XVECLEN (x, i); --j >= 0; )
6355 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6356 orig_replacements);
6357 }
6358 }
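/* Usage sketch, for illustration only: after unsharing a MEM whose
   address already has replacements recorded, the pending replacements
   must be duplicated so they also apply to the copy, as done in
   find_reloads_address:  */
#if 0
*memrefloc = copy_rtx (*memrefloc);
copy_replacements (tem, XEXP (*memrefloc, 0));
#endif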
6359
6360 /* Change any replacements being done to *X to be done to *Y. */
6361
6362 void
6363 move_replacements (rtx *x, rtx *y)
6364 {
6365 int i;
6366
6367 for (i = 0; i < n_replacements; i++)
6368 if (replacements[i].where == x)
6369 replacements[i].where = y;
6370 }
6371 \f
6372 /* If LOC was scheduled to be replaced by something, return the replacement.
6373 Otherwise, return *LOC. */
6374
6375 rtx
6376 find_replacement (rtx *loc)
6377 {
6378 struct replacement *r;
6379
6380 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6381 {
6382 rtx reloadreg = rld[r->what].reg_rtx;
6383
6384 if (reloadreg && r->where == loc)
6385 {
6386 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6387 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6388
6389 return reloadreg;
6390 }
6391 else if (reloadreg && GET_CODE (*loc) == SUBREG
6392 && r->where == &SUBREG_REG (*loc))
6393 {
6394 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6395 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6396
6397 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6398 GET_MODE (SUBREG_REG (*loc)),
6399 SUBREG_BYTE (*loc));
6400 }
6401 }
6402
6403 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6404 what's inside and build a new rtx if so. */
6405 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6406 || GET_CODE (*loc) == MULT)
6407 {
6408 rtx x = find_replacement (&XEXP (*loc, 0));
6409 rtx y = find_replacement (&XEXP (*loc, 1));
6410
6411 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6412 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6413 }
6414
6415 return *loc;
6416 }
6417 \f
6418 /* Return nonzero if any register in the range [REGNO, ENDREGNO)
6419 appears either explicitly or implicitly in X
6420 other than being stored into (except for earlyclobber operands).
6421
6422 References contained within the substructure at LOC do not count.
6423 LOC may be zero, meaning don't ignore anything.
6424
6425 This is similar to refers_to_regno_p in rtlanal.c except that we
6426 look at equivalences for pseudos that didn't get hard registers. */
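/* Illustrative example: suppose pseudo 200 did not get a hard register and
   reg_equiv_memory_loc (200) is (mem:SI (plus:SI (reg:SI 6) (const_int 8))).
   Then a query for hard register 6 against (reg:SI 200) returns nonzero,
   because the equivalent memory address mentions register 6.  */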
6427
6428 static int
6429 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6430 rtx x, rtx *loc)
6431 {
6432 int i;
6433 unsigned int r;
6434 RTX_CODE code;
6435 const char *fmt;
6436
6437 if (x == 0)
6438 return 0;
6439
6440 repeat:
6441 code = GET_CODE (x);
6442
6443 switch (code)
6444 {
6445 case REG:
6446 r = REGNO (x);
6447
6448 /* If this is a pseudo, it cannot have been allocated a hard register.
6449 X must therefore either be a constant or be in memory. */
6450 if (r >= FIRST_PSEUDO_REGISTER)
6451 {
6452 if (reg_equiv_memory_loc (r))
6453 return refers_to_regno_for_reload_p (regno, endregno,
6454 reg_equiv_memory_loc (r),
6455 (rtx*) 0);
6456
6457 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6458 return 0;
6459 }
6460
6461 return (endregno > r
6462 && regno < r + (r < FIRST_PSEUDO_REGISTER
6463 ? hard_regno_nregs[r][GET_MODE (x)]
6464 : 1));
6465
6466 case SUBREG:
6467 /* If this is a SUBREG of a hard reg, we can see exactly which
6468 registers are being modified. Otherwise, handle normally. */
6469 if (REG_P (SUBREG_REG (x))
6470 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6471 {
6472 unsigned int inner_regno = subreg_regno (x);
6473 unsigned int inner_endregno
6474 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6475 ? subreg_nregs (x) : 1);
6476
6477 return endregno > inner_regno && regno < inner_endregno;
6478 }
6479 break;
6480
6481 case CLOBBER:
6482 case SET:
6483 if (&SET_DEST (x) != loc
6484 /* Note setting a SUBREG counts as referring to the REG it is in for
6485 a pseudo but not for hard registers since we can
6486 treat each word individually. */
6487 && ((GET_CODE (SET_DEST (x)) == SUBREG
6488 && loc != &SUBREG_REG (SET_DEST (x))
6489 && REG_P (SUBREG_REG (SET_DEST (x)))
6490 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6491 && refers_to_regno_for_reload_p (regno, endregno,
6492 SUBREG_REG (SET_DEST (x)),
6493 loc))
6494 /* If the output is an earlyclobber operand, this is
6495 a conflict. */
6496 || ((!REG_P (SET_DEST (x))
6497 || earlyclobber_operand_p (SET_DEST (x)))
6498 && refers_to_regno_for_reload_p (regno, endregno,
6499 SET_DEST (x), loc))))
6500 return 1;
6501
6502 if (code == CLOBBER || loc == &SET_SRC (x))
6503 return 0;
6504 x = SET_SRC (x);
6505 goto repeat;
6506
6507 default:
6508 break;
6509 }
6510
6511 /* X does not match, so try its subexpressions. */
6512
6513 fmt = GET_RTX_FORMAT (code);
6514 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6515 {
6516 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6517 {
6518 if (i == 0)
6519 {
6520 x = XEXP (x, 0);
6521 goto repeat;
6522 }
6523 else
6524 if (refers_to_regno_for_reload_p (regno, endregno,
6525 XEXP (x, i), loc))
6526 return 1;
6527 }
6528 else if (fmt[i] == 'E')
6529 {
6530 int j;
6531 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6532 if (loc != &XVECEXP (x, i, j)
6533 && refers_to_regno_for_reload_p (regno, endregno,
6534 XVECEXP (x, i, j), loc))
6535 return 1;
6536 }
6537 }
6538 return 0;
6539 }
6540
6541 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6542 we check if any register number in X conflicts with the relevant register
6543 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6544 contains a MEM (we don't bother checking for memory addresses that can't
6545 conflict because we expect this to be a rare case).
6546
6547 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6548 that we look at equivalences for pseudos that didn't get hard registers. */
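/* Illustrative example: if X is a pseudo whose equivalence is a memory
   location and IN is (mem:SI (reg:SI 2)), the result is nonzero, since
   storing into X could overlap the memory reference in IN.  */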
6549
6550 int
6551 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6552 {
6553 int regno, endregno;
6554
6555 /* Overly conservative. */
6556 if (GET_CODE (x) == STRICT_LOW_PART
6557 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6558 x = XEXP (x, 0);
6559
6560 /* If either argument is a constant, then modifying X cannot affect IN. */
6561 if (CONSTANT_P (x) || CONSTANT_P (in))
6562 return 0;
6563 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6564 return refers_to_mem_for_reload_p (in);
6565 else if (GET_CODE (x) == SUBREG)
6566 {
6567 regno = REGNO (SUBREG_REG (x));
6568 if (regno < FIRST_PSEUDO_REGISTER)
6569 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6570 GET_MODE (SUBREG_REG (x)),
6571 SUBREG_BYTE (x),
6572 GET_MODE (x));
6573 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6574 ? subreg_nregs (x) : 1);
6575
6576 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6577 }
6578 else if (REG_P (x))
6579 {
6580 regno = REGNO (x);
6581
6582 /* If this is a pseudo, it must not have been assigned a hard register.
6583 Therefore, it must either be in memory or be a constant. */
6584
6585 if (regno >= FIRST_PSEUDO_REGISTER)
6586 {
6587 if (reg_equiv_memory_loc (regno))
6588 return refers_to_mem_for_reload_p (in);
6589 gcc_assert (reg_equiv_constant (regno));
6590 return 0;
6591 }
6592
6593 endregno = END_REGNO (x);
6594
6595 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6596 }
6597 else if (MEM_P (x))
6598 return refers_to_mem_for_reload_p (in);
6599 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6600 || GET_CODE (x) == CC0)
6601 return reg_mentioned_p (x, in);
6602 else
6603 {
6604 gcc_assert (GET_CODE (x) == PLUS);
6605
6606 /* We actually want to know if X is mentioned somewhere inside IN.
6607 We must not say that (plus (sp) (const_int 124)) is in
6608 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6609 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6610 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6611 while (MEM_P (in))
6612 in = XEXP (in, 0);
6613 if (REG_P (in))
6614 return 0;
6615 else if (GET_CODE (in) == PLUS)
6616 return (rtx_equal_p (x, in)
6617 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6618 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6619 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6620 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6621 }
6622
6623 gcc_unreachable ();
6624 }
6625
6626 /* Return nonzero if anything in X contains a MEM, including pseudo
6627 registers that are equivalent to a memory location. */
6628
6629 static int
6630 refers_to_mem_for_reload_p (rtx x)
6631 {
6632 const char *fmt;
6633 int i;
6634
6635 if (MEM_P (x))
6636 return 1;
6637
6638 if (REG_P (x))
6639 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6640 && reg_equiv_memory_loc (REGNO (x)));
6641
6642 fmt = GET_RTX_FORMAT (GET_CODE (x));
6643 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6644 if (fmt[i] == 'e'
6645 && (MEM_P (XEXP (x, i))
6646 || refers_to_mem_for_reload_p (XEXP (x, i))))
6647 return 1;
6648
6649 return 0;
6650 }
6651 \f
6652 /* Check the insns before INSN to see if there is a suitable register
6653 containing the same value as GOAL.
6654 If OTHER is -1, look for a register in class RCLASS.
6655 Otherwise, just see if register number OTHER shares GOAL's value.
6656
6657 Return an rtx for the register found, or zero if none is found.
6658
6659 If RELOAD_REG_P is (short *)1,
6660 we reject any hard reg that appears in reload_reg_rtx
6661 because such a hard reg is also needed coming into this insn.
6662
6663 If RELOAD_REG_P is any other nonzero value,
6664 it is a vector indexed by hard reg number
6665 and we reject any hard reg whose element in the vector is nonnegative
6666 as well as any that appears in reload_reg_rtx.
6667
6668 If GOAL is zero, then GOALREG is a register number; we look
6669 for an equivalent for that register.
6670
6671 MODE is the machine mode of the value we want an equivalence for.
6672 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6673
6674 This function is used by jump.c as well as in the reload pass.
6675
6676 If GOAL is the sum of the stack pointer and a constant, we treat it
6677 as if it were a constant except that sp is required to be unchanging. */
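/* As an illustration (not an exhaustive description of the checks below):
   if an earlier insn was (set (reg:SI 3) (mem:SI (reg:SI 6))), GOAL is an
   rtx equal to that MEM, and nothing between that insn and INSN alters
   register 3, the memory location, or a register used in its address, then
   find_equiv_reg can return (reg:SI 3), letting the caller reuse the value
   instead of reloading it from memory.  */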
6678
6679 rtx
6680 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6681 short *reload_reg_p, int goalreg, machine_mode mode)
6682 {
6683 rtx_insn *p = insn;
6684 rtx goaltry, valtry, value;
6685 rtx_insn *where;
6686 rtx pat;
6687 int regno = -1;
6688 int valueno;
6689 int goal_mem = 0;
6690 int goal_const = 0;
6691 int goal_mem_addr_varies = 0;
6692 int need_stable_sp = 0;
6693 int nregs;
6694 int valuenregs;
6695 int num = 0;
6696
6697 if (goal == 0)
6698 regno = goalreg;
6699 else if (REG_P (goal))
6700 regno = REGNO (goal);
6701 else if (MEM_P (goal))
6702 {
6703 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6704 if (MEM_VOLATILE_P (goal))
6705 return 0;
6706 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6707 return 0;
6708 /* An address with side effects must be reexecuted. */
6709 switch (code)
6710 {
6711 case POST_INC:
6712 case PRE_INC:
6713 case POST_DEC:
6714 case PRE_DEC:
6715 case POST_MODIFY:
6716 case PRE_MODIFY:
6717 return 0;
6718 default:
6719 break;
6720 }
6721 goal_mem = 1;
6722 }
6723 else if (CONSTANT_P (goal))
6724 goal_const = 1;
6725 else if (GET_CODE (goal) == PLUS
6726 && XEXP (goal, 0) == stack_pointer_rtx
6727 && CONSTANT_P (XEXP (goal, 1)))
6728 goal_const = need_stable_sp = 1;
6729 else if (GET_CODE (goal) == PLUS
6730 && XEXP (goal, 0) == frame_pointer_rtx
6731 && CONSTANT_P (XEXP (goal, 1)))
6732 goal_const = 1;
6733 else
6734 return 0;
6735
6736 num = 0;
6737 /* Scan insns back from INSN, looking for one that copies
6738 a value into or out of GOAL.
6739 Stop and give up if we reach a label. */
6740
6741 while (1)
6742 {
6743 p = PREV_INSN (p);
6744 if (p && DEBUG_INSN_P (p))
6745 continue;
6746 num++;
6747 if (p == 0 || LABEL_P (p)
6748 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6749 return 0;
6750
6751 /* Don't reuse register contents from before a setjmp-type
6752 function call; on the second return (from the longjmp) it
6753 might have been clobbered by a later reuse. It doesn't
6754 seem worthwhile to go and check whether it is actually
6755 reused even if that information were readily available;
6756 just don't reuse it across the setjmp call. */
6757 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6758 return 0;
6759
6760 if (NONJUMP_INSN_P (p)
6761 /* If we don't want spill regs ... */
6762 && (! (reload_reg_p != 0
6763 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6764 /* ... then ignore insns introduced by reload; they aren't
6765 useful and can cause results in reload_as_needed to be
6766 different from what they were when calculating the need for
6767 spills. If we notice an input-reload insn here, we will
6768 reject it below, but it might hide a usable equivalent.
6769 That makes bad code. It may even fail: perhaps no reg was
6770 spilled for this insn because it was assumed we would find
6771 that equivalent. */
6772 || INSN_UID (p) < reload_first_uid))
6773 {
6774 rtx tem;
6775 pat = single_set (p);
6776
6777 /* First check for something that sets some reg equal to GOAL. */
6778 if (pat != 0
6779 && ((regno >= 0
6780 && true_regnum (SET_SRC (pat)) == regno
6781 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6782 ||
6783 (regno >= 0
6784 && true_regnum (SET_DEST (pat)) == regno
6785 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6786 ||
6787 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6788 /* When looking for stack pointer + const,
6789 make sure we don't use a stack adjust. */
6790 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6791 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6792 || (goal_mem
6793 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6794 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6795 || (goal_mem
6796 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6797 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6798 /* If we are looking for a constant,
6799 and something equivalent to that constant was copied
6800 into a reg, we can use that reg. */
6801 || (goal_const && REG_NOTES (p) != 0
6802 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6803 && ((rtx_equal_p (XEXP (tem, 0), goal)
6804 && (valueno
6805 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6806 || (REG_P (SET_DEST (pat))
6807 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6808 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6809 && CONST_INT_P (goal)
6810 && 0 != (goaltry
6811 = operand_subword (XEXP (tem, 0), 0, 0,
6812 VOIDmode))
6813 && rtx_equal_p (goal, goaltry)
6814 && (valtry
6815 = operand_subword (SET_DEST (pat), 0, 0,
6816 VOIDmode))
6817 && (valueno = true_regnum (valtry)) >= 0)))
6818 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6819 NULL_RTX))
6820 && REG_P (SET_DEST (pat))
6821 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6822 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6823 && CONST_INT_P (goal)
6824 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6825 VOIDmode))
6826 && rtx_equal_p (goal, goaltry)
6827 && (valtry
6828 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6829 && (valueno = true_regnum (valtry)) >= 0)))
6830 {
6831 if (other >= 0)
6832 {
6833 if (valueno != other)
6834 continue;
6835 }
6836 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6837 continue;
6838 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6839 mode, valueno))
6840 continue;
6841 value = valtry;
6842 where = p;
6843 break;
6844 }
6845 }
6846 }
6847
6848 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6849 (or copying VALUE into GOAL, if GOAL is also a register).
6850 Now verify that VALUE is really valid. */
6851
6852 /* VALUENO is the register number of VALUE; a hard register. */
6853
6854 /* Don't try to re-use something that is killed in this insn. We want
6855 to be able to trust REG_UNUSED notes. */
6856 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6857 return 0;
6858
6859 /* If we propose to get the value from the stack pointer or if GOAL is
6860 a MEM based on the stack pointer, we need a stable SP. */
6861 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6862 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6863 goal)))
6864 need_stable_sp = 1;
6865
6866 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6867 if (GET_MODE (value) != mode)
6868 return 0;
6869
6870 /* Reject VALUE if it was loaded from GOAL
6871 and is also a register that appears in the address of GOAL. */
6872
6873 if (goal_mem && value == SET_DEST (single_set (where))
6874 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6875 goal, (rtx*) 0))
6876 return 0;
6877
6878 /* Reject registers that overlap GOAL. */
6879
6880 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6881 nregs = hard_regno_nregs[regno][mode];
6882 else
6883 nregs = 1;
6884 valuenregs = hard_regno_nregs[valueno][mode];
6885
6886 if (!goal_mem && !goal_const
6887 && regno + nregs > valueno && regno < valueno + valuenregs)
6888 return 0;
6889
6890 /* Reject VALUE if it is one of the regs reserved for reloads.
6891 Reload1 knows how to reuse them anyway, and it would get
6892 confused if we allocated one without its knowledge.
6893 (Now that insns introduced by reload are ignored above,
6894 this case shouldn't happen, but I'm not positive.) */
6895
6896 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6897 {
6898 int i;
6899 for (i = 0; i < valuenregs; ++i)
6900 if (reload_reg_p[valueno + i] >= 0)
6901 return 0;
6902 }
6903
6904 /* Reject VALUE if it is a register being used for an input reload
6905 even if it is not one of those reserved. */
6906
6907 if (reload_reg_p != 0)
6908 {
6909 int i;
6910 for (i = 0; i < n_reloads; i++)
6911 if (rld[i].reg_rtx != 0 && rld[i].in)
6912 {
6913 int regno1 = REGNO (rld[i].reg_rtx);
6914 int nregs1 = hard_regno_nregs[regno1]
6915 [GET_MODE (rld[i].reg_rtx)];
6916 if (regno1 < valueno + valuenregs
6917 && regno1 + nregs1 > valueno)
6918 return 0;
6919 }
6920 }
6921
6922 if (goal_mem)
6923 /* We must treat the frame pointer as varying here,
6924 since it can vary, as in a nonlocal goto generated by expand_goto. */
6925 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6926
6927 /* Now verify that the values of GOAL and VALUE remain unaltered
6928 until INSN is reached. */
6929
6930 p = insn;
6931 while (1)
6932 {
6933 p = PREV_INSN (p);
6934 if (p == where)
6935 return value;
6936
6937 /* Don't trust the conversion past a function call if either
6938 GOAL or VALUE is in a call-clobbered register, or in memory. */
6939 if (CALL_P (p))
6940 {
6941 int i;
6942
6943 if (goal_mem || need_stable_sp)
6944 return 0;
6945
6946 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6947 for (i = 0; i < nregs; ++i)
6948 if (call_used_regs[regno + i]
6949 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6950 return 0;
6951
6952 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6953 for (i = 0; i < valuenregs; ++i)
6954 if (call_used_regs[valueno + i]
6955 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6956 return 0;
6957 }
6958
6959 if (INSN_P (p))
6960 {
6961 pat = PATTERN (p);
6962
6963 /* Watch out for unspec_volatile, and volatile asms. */
6964 if (volatile_insn_p (pat))
6965 return 0;
6966
6967 /* If this insn P stores in either GOAL or VALUE, return 0.
6968 If GOAL is a memory ref and this insn writes memory, return 0.
6969 If GOAL is a memory ref and its address is not constant,
6970 and this insn P changes a register used in GOAL, return 0. */
6971
6972 if (GET_CODE (pat) == COND_EXEC)
6973 pat = COND_EXEC_CODE (pat);
6974 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6975 {
6976 rtx dest = SET_DEST (pat);
6977 while (GET_CODE (dest) == SUBREG
6978 || GET_CODE (dest) == ZERO_EXTRACT
6979 || GET_CODE (dest) == STRICT_LOW_PART)
6980 dest = XEXP (dest, 0);
6981 if (REG_P (dest))
6982 {
6983 int xregno = REGNO (dest);
6984 int xnregs;
6985 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6986 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6987 else
6988 xnregs = 1;
6989 if (xregno < regno + nregs && xregno + xnregs > regno)
6990 return 0;
6991 if (xregno < valueno + valuenregs
6992 && xregno + xnregs > valueno)
6993 return 0;
6994 if (goal_mem_addr_varies
6995 && reg_overlap_mentioned_for_reload_p (dest, goal))
6996 return 0;
6997 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6998 return 0;
6999 }
7000 else if (goal_mem && MEM_P (dest)
7001 && ! push_operand (dest, GET_MODE (dest)))
7002 return 0;
7003 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7004 && reg_equiv_memory_loc (regno) != 0)
7005 return 0;
7006 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7007 return 0;
7008 }
7009 else if (GET_CODE (pat) == PARALLEL)
7010 {
7011 int i;
7012 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7013 {
7014 rtx v1 = XVECEXP (pat, 0, i);
7015 if (GET_CODE (v1) == COND_EXEC)
7016 v1 = COND_EXEC_CODE (v1);
7017 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7018 {
7019 rtx dest = SET_DEST (v1);
7020 while (GET_CODE (dest) == SUBREG
7021 || GET_CODE (dest) == ZERO_EXTRACT
7022 || GET_CODE (dest) == STRICT_LOW_PART)
7023 dest = XEXP (dest, 0);
7024 if (REG_P (dest))
7025 {
7026 int xregno = REGNO (dest);
7027 int xnregs;
7028 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7029 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7030 else
7031 xnregs = 1;
7032 if (xregno < regno + nregs
7033 && xregno + xnregs > regno)
7034 return 0;
7035 if (xregno < valueno + valuenregs
7036 && xregno + xnregs > valueno)
7037 return 0;
7038 if (goal_mem_addr_varies
7039 && reg_overlap_mentioned_for_reload_p (dest,
7040 goal))
7041 return 0;
7042 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7043 return 0;
7044 }
7045 else if (goal_mem && MEM_P (dest)
7046 && ! push_operand (dest, GET_MODE (dest)))
7047 return 0;
7048 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7049 && reg_equiv_memory_loc (regno) != 0)
7050 return 0;
7051 else if (need_stable_sp
7052 && push_operand (dest, GET_MODE (dest)))
7053 return 0;
7054 }
7055 }
7056 }
7057
7058 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7059 {
7060 rtx link;
7061
7062 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7063 link = XEXP (link, 1))
7064 {
7065 pat = XEXP (link, 0);
7066 if (GET_CODE (pat) == CLOBBER)
7067 {
7068 rtx dest = SET_DEST (pat);
7069
7070 if (REG_P (dest))
7071 {
7072 int xregno = REGNO (dest);
7073 int xnregs
7074 = hard_regno_nregs[xregno][GET_MODE (dest)];
7075
7076 if (xregno < regno + nregs
7077 && xregno + xnregs > regno)
7078 return 0;
7079 else if (xregno < valueno + valuenregs
7080 && xregno + xnregs > valueno)
7081 return 0;
7082 else if (goal_mem_addr_varies
7083 && reg_overlap_mentioned_for_reload_p (dest,
7084 goal))
7085 return 0;
7086 }
7087
7088 else if (goal_mem && MEM_P (dest)
7089 && ! push_operand (dest, GET_MODE (dest)))
7090 return 0;
7091 else if (need_stable_sp
7092 && push_operand (dest, GET_MODE (dest)))
7093 return 0;
7094 }
7095 }
7096 }
7097
7098 #ifdef AUTO_INC_DEC
7099 /* If this insn auto-increments or auto-decrements
7100 either regno or valueno, return 0 now.
7101 If GOAL is a memory ref and its address is not constant,
7102 and this insn P increments a register used in GOAL, return 0. */
7103 {
7104 rtx link;
7105
7106 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7107 if (REG_NOTE_KIND (link) == REG_INC
7108 && REG_P (XEXP (link, 0)))
7109 {
7110 int incno = REGNO (XEXP (link, 0));
7111 if (incno < regno + nregs && incno >= regno)
7112 return 0;
7113 if (incno < valueno + valuenregs && incno >= valueno)
7114 return 0;
7115 if (goal_mem_addr_varies
7116 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7117 goal))
7118 return 0;
7119 }
7120 }
7121 #endif
7122 }
7123 }
7124 }
7125 \f
7126 /* Find a place where INCED appears in an increment or decrement operator
7127 within X, and return the amount INCED is incremented or decremented by.
7128 The value is always positive. */
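/* Illustrative examples: for X = (mem:SI (post_inc:SI (reg:SI 5))) and
   INCED = (reg:SI 5) the result is GET_MODE_SIZE (SImode), i.e. 4 on a
   target with four-byte SImode; for a PRE_MODIFY address that adds
   (const_int -8) to INCED the result is 8.  */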
7129
7130 static int
7131 find_inc_amount (rtx x, rtx inced)
7132 {
7133 enum rtx_code code = GET_CODE (x);
7134 const char *fmt;
7135 int i;
7136
7137 if (code == MEM)
7138 {
7139 rtx addr = XEXP (x, 0);
7140 if ((GET_CODE (addr) == PRE_DEC
7141 || GET_CODE (addr) == POST_DEC
7142 || GET_CODE (addr) == PRE_INC
7143 || GET_CODE (addr) == POST_INC)
7144 && XEXP (addr, 0) == inced)
7145 return GET_MODE_SIZE (GET_MODE (x));
7146 else if ((GET_CODE (addr) == PRE_MODIFY
7147 || GET_CODE (addr) == POST_MODIFY)
7148 && GET_CODE (XEXP (addr, 1)) == PLUS
7149 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7150 && XEXP (addr, 0) == inced
7151 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7152 {
7153 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7154 return i < 0 ? -i : i;
7155 }
7156 }
7157
7158 fmt = GET_RTX_FORMAT (code);
7159 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7160 {
7161 if (fmt[i] == 'e')
7162 {
7163 int tem = find_inc_amount (XEXP (x, i), inced);
7164 if (tem != 0)
7165 return tem;
7166 }
7167 if (fmt[i] == 'E')
7168 {
7169 int j;
7170 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7171 {
7172 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7173 if (tem != 0)
7174 return tem;
7175 }
7176 }
7177 }
7178
7179 return 0;
7180 }
7181 \f
7182 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7183 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7184
7185 #ifdef AUTO_INC_DEC
7186 static int
7187 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7188 rtx insn)
7189 {
7190 rtx link;
7191
7192 gcc_assert (insn);
7193
7194 if (! INSN_P (insn))
7195 return 0;
7196
7197 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7198 if (REG_NOTE_KIND (link) == REG_INC)
7199 {
7200 unsigned int test = (int) REGNO (XEXP (link, 0));
7201 if (test >= regno && test < endregno)
7202 return 1;
7203 }
7204 return 0;
7205 }
7206 #else
7207
7208 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7209
7210 #endif
7211
7212 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7213 If SETS is 1, also consider SETs. If SETS is 2, also consider
7214 REG_INC notes. REGNO must refer to a hard register. */
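/* Illustrative example: for an INSN whose pattern is (clobber (reg:SI 1)),
   REGNO 0 and a MODE that needs hard registers 0 and 1, the result is 1,
   because the clobbered register 1 lies in the queried range
   [REGNO, ENDREGNO).  */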
7215
7216 int
7217 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7218 int sets)
7219 {
7220 unsigned int nregs, endregno;
7221
7222 /* regno must be a hard register. */
7223 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7224
7225 nregs = hard_regno_nregs[regno][mode];
7226 endregno = regno + nregs;
7227
7228 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7229 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7230 && REG_P (XEXP (PATTERN (insn), 0)))
7231 {
7232 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7233
7234 return test >= regno && test < endregno;
7235 }
7236
7237 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7238 return 1;
7239
7240 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7241 {
7242 int i = XVECLEN (PATTERN (insn), 0) - 1;
7243
7244 for (; i >= 0; i--)
7245 {
7246 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7247 if ((GET_CODE (elt) == CLOBBER
7248 || (sets == 1 && GET_CODE (elt) == SET))
7249 && REG_P (XEXP (elt, 0)))
7250 {
7251 unsigned int test = REGNO (XEXP (elt, 0));
7252
7253 if (test >= regno && test < endregno)
7254 return 1;
7255 }
7256 if (sets == 2
7257 && reg_inc_found_and_valid_p (regno, endregno, elt))
7258 return 1;
7259 }
7260 }
7261
7262 return 0;
7263 }
7264
7265 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
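/* Illustrative example: on a REG_WORDS_BIG_ENDIAN target where (reg:DI 4)
   occupies hard registers 4 and 5, the SImode low part is (reg:SI 5); when
   register words are little-endian it is (reg:SI 4).  (This assumes DImode
   needs two of these registers and SImode needs one.)  */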
7266 rtx
7267 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7268 {
7269 int regno;
7270
7271 if (GET_MODE (reloadreg) == mode)
7272 return reloadreg;
7273
7274 regno = REGNO (reloadreg);
7275
7276 if (REG_WORDS_BIG_ENDIAN)
7277 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7278 - (int) hard_regno_nregs[regno][mode];
7279
7280 return gen_rtx_REG (mode, regno);
7281 }
7282
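/* Names for the possible rld[].when_needed values, used only by the debug
   output below; the order of the strings must match the order of the
   corresponding enum values.  */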
7283 static const char *const reload_when_needed_name[] =
7284 {
7285 "RELOAD_FOR_INPUT",
7286 "RELOAD_FOR_OUTPUT",
7287 "RELOAD_FOR_INSN",
7288 "RELOAD_FOR_INPUT_ADDRESS",
7289 "RELOAD_FOR_INPADDR_ADDRESS",
7290 "RELOAD_FOR_OUTPUT_ADDRESS",
7291 "RELOAD_FOR_OUTADDR_ADDRESS",
7292 "RELOAD_FOR_OPERAND_ADDRESS",
7293 "RELOAD_FOR_OPADDR_ADDR",
7294 "RELOAD_OTHER",
7295 "RELOAD_FOR_OTHER_ADDRESS"
7296 };
7297
7298 /* These functions are used to print the variables set by 'find_reloads'. */
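/* A line of the output looks roughly like this (illustrative):

     Reload 0: reload_in (SI) = (reg:SI 123)
	GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 0), optional

   with further fields appended for secondary reloads when present.  */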
7299
7300 DEBUG_FUNCTION void
7301 debug_reload_to_stream (FILE *f)
7302 {
7303 int r;
7304 const char *prefix;
7305
7306 if (! f)
7307 f = stderr;
7308 for (r = 0; r < n_reloads; r++)
7309 {
7310 fprintf (f, "Reload %d: ", r);
7311
7312 if (rld[r].in != 0)
7313 {
7314 fprintf (f, "reload_in (%s) = ",
7315 GET_MODE_NAME (rld[r].inmode));
7316 print_inline_rtx (f, rld[r].in, 24);
7317 fprintf (f, "\n\t");
7318 }
7319
7320 if (rld[r].out != 0)
7321 {
7322 fprintf (f, "reload_out (%s) = ",
7323 GET_MODE_NAME (rld[r].outmode));
7324 print_inline_rtx (f, rld[r].out, 24);
7325 fprintf (f, "\n\t");
7326 }
7327
7328 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7329
7330 fprintf (f, "%s (opnum = %d)",
7331 reload_when_needed_name[(int) rld[r].when_needed],
7332 rld[r].opnum);
7333
7334 if (rld[r].optional)
7335 fprintf (f, ", optional");
7336
7337 if (rld[r].nongroup)
7338 fprintf (f, ", nongroup");
7339
7340 if (rld[r].inc != 0)
7341 fprintf (f, ", inc by %d", rld[r].inc);
7342
7343 if (rld[r].nocombine)
7344 fprintf (f, ", can't combine");
7345
7346 if (rld[r].secondary_p)
7347 fprintf (f, ", secondary_reload_p");
7348
7349 if (rld[r].in_reg != 0)
7350 {
7351 fprintf (f, "\n\treload_in_reg: ");
7352 print_inline_rtx (f, rld[r].in_reg, 24);
7353 }
7354
7355 if (rld[r].out_reg != 0)
7356 {
7357 fprintf (f, "\n\treload_out_reg: ");
7358 print_inline_rtx (f, rld[r].out_reg, 24);
7359 }
7360
7361 if (rld[r].reg_rtx != 0)
7362 {
7363 fprintf (f, "\n\treload_reg_rtx: ");
7364 print_inline_rtx (f, rld[r].reg_rtx, 24);
7365 }
7366
7367 prefix = "\n\t";
7368 if (rld[r].secondary_in_reload != -1)
7369 {
7370 fprintf (f, "%ssecondary_in_reload = %d",
7371 prefix, rld[r].secondary_in_reload);
7372 prefix = ", ";
7373 }
7374
7375 if (rld[r].secondary_out_reload != -1)
7376 fprintf (f, "%ssecondary_out_reload = %d\n",
7377 prefix, rld[r].secondary_out_reload);
7378
7379 prefix = "\n\t";
7380 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7381 {
7382 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7383 insn_data[rld[r].secondary_in_icode].name);
7384 prefix = ", ";
7385 }
7386
7387 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7388 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7389 insn_data[rld[r].secondary_out_icode].name);
7390
7391 fprintf (f, "\n");
7392 }
7393 }
7394
7395 DEBUG_FUNCTION void
7396 debug_reload (void)
7397 {
7398 debug_reload_to_stream (stderr);
7399 }