1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
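/* An illustrative sketch of how reload1.c drives this file; the body
   below is a simplification with placeholder identifiers, not the
   exact code of reload ():

     init_reload ();
     for (each insn needing reloads)
       {
         find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
         ... choose a hard reg for each rld[i], setting rld[i].reg_rtx,
             and emit the needed load/store insns ...
         subst_reloads (insn);
       }

   The real driver also handles register elimination, spilling and
   inheritance around these calls.  */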
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "function.h"
108 #include "params.h"
109 #include "target.h"
110 #include "ira.h"
111
112 /* True if X is a constant that can be forced into the constant pool.
113 MODE is the mode of the operand, or VOIDmode if not known. */
114 #define CONST_POOL_OK_P(MODE, X) \
115 ((MODE) != VOIDmode \
116 && CONSTANT_P (X) \
117 && GET_CODE (X) != HIGH \
118 && !targetm.cannot_force_const_mem (MODE, X))
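/* Illustrative examples (chosen here, not part of the original comment):
   with MODE == DFmode, X == (const_double:DF ...) normally satisfies
   CONST_POOL_OK_P and can be spilled to the constant pool, whereas
   X == (high:SI (symbol_ref "foo")), or any constant for which
   targetm.cannot_force_const_mem returns true, never does.  */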
119
120 /* True if RCLASS is a non-empty register class that has too few registers
121 to be safely used as a reload target class. */
122
123 static inline bool
124 small_register_class_p (reg_class_t rclass)
125 {
126 return (reg_class_size [(int) rclass] == 1
127 || (reg_class_size [(int) rclass] >= 1
128 && targetm.class_likely_spilled_p (rclass)));
129 }
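/* A hedged example (class names here are generic, not target-specific):
   a class containing a single hard register, or one that the target's
   class_likely_spilled_p hook flags (e.g. a dedicated shift-count or
   condition-code class), counts as small; a full GENERAL_REGS class on
   a typical 32-register target does not.  */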
130
131 \f
132 /* All reloads of the current insn are recorded here. See reload.h for
133 comments. */
134 int n_reloads;
135 struct reload rld[MAX_RELOADS];
136
137 /* All the "earlyclobber" operands of the current insn
138 are recorded here. */
139 int n_earlyclobbers;
140 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
141
142 int reload_n_operands;
143
144 /* Replacing reloads.
145
146 If `replace_reloads' is nonzero, then as each reload is recorded
147 an entry is made for it in the table `replacements'.
148 Then later `subst_reloads' can look through that table and
149 perform all the replacements needed. */
150
151 /* Nonzero means record the places to replace. */
152 static int replace_reloads;
153
154 /* Each replacement is recorded with a structure like this. */
155 struct replacement
156 {
157 rtx *where; /* Location to store in */
158 int what; /* which reload this is for */
159 enum machine_mode mode; /* mode it must have */
160 };
161
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
163
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
166
167 /* Used to track what is modified by an operand. */
168 struct decomposition
169 {
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
175 };
176
177 #ifdef SECONDARY_MEMORY_NEEDED
178
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
181
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that we reload
185 each one separately. */
186
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
191
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx this_insn;
195
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
198
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
203
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
209
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
212
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can be different from that for the input operand. */
215 static int output_reloadnum;
216
217 /* Compare two RTX's. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
222
223 /* Indicates if two reload purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
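/* Illustrative examples (not from the original sources): two
   RELOAD_FOR_INPUT reloads merge regardless of operand number, as does
   any reload paired with a RELOAD_OTHER one; by contrast, a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 0 and one for operand 1
   do not merge, since they belong to different parts of the insn.  */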
233
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
241
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
250
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, reg_class_t, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx);
274 static rtx subst_reg_equivs (rtx, rtx);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx, int, int);
277 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type,int, rtx);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
284 int, rtx, int *);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
290
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
293
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
296 {
297 rtx it;
298
299 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
302
303 reg_equiv_alt_mem_list (regno)
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list (regno));
306 }
307 \f
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
312
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
316
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
322 {
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 char letter;
332 secondary_reload_info sri;
333
334 if (type == RELOAD_FOR_INPUT_ADDRESS
335 || type == RELOAD_FOR_OUTPUT_ADDRESS
336 || type == RELOAD_FOR_INPADDR_ADDRESS
337 || type == RELOAD_FOR_OUTADDR_ADDRESS)
338 secondary_type = type;
339 else
340 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341
342 *picode = CODE_FOR_nothing;
343
344 /* If X is a paradoxical SUBREG, use the inner value to determine both the
345 mode and object being reloaded. */
346 if (paradoxical_subreg_p (x))
347 {
348 x = SUBREG_REG (x);
349 reload_mode = GET_MODE (x);
350 }
351
352 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
353 is still a pseudo-register by now, it *must* have an equivalent MEM
354 but we don't want to assume that), use that equivalent when seeing if
355 a secondary reload is needed since whether or not a reload is needed
356 might be sensitive to the form of the MEM. */
357
358 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
359 && reg_equiv_mem (REGNO (x)))
360 x = reg_equiv_mem (REGNO (x));
361
362 sri.icode = CODE_FOR_nothing;
363 sri.prev_sri = prev_sri;
364 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
365 reload_mode, &sri);
366 icode = (enum insn_code) sri.icode;
367
368 /* If we don't need any secondary registers, done. */
369 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
371
372 if (rclass != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
374 reload_mode, type, &t_icode, &sri);
375
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
378
379 if (icode != CODE_FOR_nothing)
380 {
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
385
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
390
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (rclass == NO_REGS);
396
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
405 scratch_constraint));
406
407 rclass = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
409 }
410
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
416
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from class. If you have such a case, you
419 cannot use secondary reloads; you must work around the problem some
420 other way.
421
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
424
425 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
427
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
432 || reg_class_subset_p (rld[s_reload].rclass, rclass))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (small_register_class_p (rclass)
440 || targetm.small_register_classes_for_mode_p (VOIDmode))
441 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
442 opnum, rld[s_reload].opnum))
443 {
444 if (in_p)
445 rld[s_reload].inmode = mode;
446 if (! in_p)
447 rld[s_reload].outmode = mode;
448
449 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
450 rld[s_reload].rclass = rclass;
451
452 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
453 rld[s_reload].optional &= optional;
454 rld[s_reload].secondary_p = 1;
455 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
456 opnum, rld[s_reload].opnum))
457 rld[s_reload].when_needed = RELOAD_OTHER;
458
459 break;
460 }
461
462 if (s_reload == n_reloads)
463 {
464 #ifdef SECONDARY_MEMORY_NEEDED
465 /* If we need a memory location to copy between the two reload regs,
466 set it up now. Note that we do the input case before making
467 the reload and the output case after. This is due to the
468 way reloads are output. */
469
470 if (in_p && icode == CODE_FOR_nothing
471 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
472 {
473 get_secondary_mem (x, reload_mode, opnum, type);
474
475 /* We may have just added new reloads. Make sure we add
476 the new reload at the end. */
477 s_reload = n_reloads;
478 }
479 #endif
480
481 /* We need to make a new secondary reload for this register class. */
482 rld[s_reload].in = rld[s_reload].out = 0;
483 rld[s_reload].rclass = rclass;
484
485 rld[s_reload].inmode = in_p ? mode : VOIDmode;
486 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
487 rld[s_reload].reg_rtx = 0;
488 rld[s_reload].optional = optional;
489 rld[s_reload].inc = 0;
490 /* Maybe we could combine these, but it seems too tricky. */
491 rld[s_reload].nocombine = 1;
492 rld[s_reload].in_reg = 0;
493 rld[s_reload].out_reg = 0;
494 rld[s_reload].opnum = opnum;
495 rld[s_reload].when_needed = secondary_type;
496 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
497 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
498 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
499 rld[s_reload].secondary_out_icode
500 = ! in_p ? t_icode : CODE_FOR_nothing;
501 rld[s_reload].secondary_p = 1;
502
503 n_reloads++;
504
505 #ifdef SECONDARY_MEMORY_NEEDED
506 if (! in_p && icode == CODE_FOR_nothing
507 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
508 get_secondary_mem (x, mode, opnum, type);
509 #endif
510 }
511
512 *picode = icode;
513 return s_reload;
514 }
515
516 /* If a secondary reload is needed, return its class. If both an intermediate
517 register and a scratch register are needed, we return the class of the
518 intermediate register. */
519 reg_class_t
520 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
521 rtx x)
522 {
523 enum insn_code icode;
524 secondary_reload_info sri;
525
526 sri.icode = CODE_FOR_nothing;
527 sri.prev_sri = NULL;
528 rclass
529 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
530 icode = (enum insn_code) sri.icode;
531
532 /* If there are no secondary reloads at all, we return NO_REGS.
533 If an intermediate register is needed, we return its class. */
534 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
535 return rclass;
536
537 /* No intermediate register is needed, but we have a special reload
538 pattern, which we assume for now needs a scratch register. */
539 return scratch_reload_class (icode);
540 }
541
542 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
543 three operands, verify that operand 2 is an output operand, and return
544 its register class.
545 ??? We'd like to be able to handle any pattern with at least 2 operands,
546 for zero or more scratch registers, but that needs more infrastructure. */
547 enum reg_class
548 scratch_reload_class (enum insn_code icode)
549 {
550 const char *scratch_constraint;
551 char scratch_letter;
552 enum reg_class rclass;
553
554 gcc_assert (insn_data[(int) icode].n_operands == 3);
555 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
556 gcc_assert (*scratch_constraint == '=');
557 scratch_constraint++;
558 if (*scratch_constraint == '&')
559 scratch_constraint++;
560 scratch_letter = *scratch_constraint;
561 if (scratch_letter == 'r')
562 return GENERAL_REGS;
563 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
564 scratch_constraint);
565 gcc_assert (rclass != NO_REGS);
566 return rclass;
567 }
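/* A sketch (hypothetical pattern, not taken from any real back end) of
   the kind of secondary reload pattern this parses:

     (define_expand "reload_insi"
       [(parallel [(match_operand:SI 0 "register_operand" "=d")
                   (match_operand:SI 1 "memory_operand"   "m")
                   (match_operand:SI 2 "register_operand" "=&r")])]
       ...)

   Operand 2 is the scratch; from its "=&r" constraint this function
   would return GENERAL_REGS.  */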
568 \f
569 #ifdef SECONDARY_MEMORY_NEEDED
570
571 /* Return a memory location that will be used to copy X in mode MODE.
572 If we haven't already made a location for this mode in this insn,
573 call find_reloads_address on the location being returned. */
574
575 rtx
576 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
577 int opnum, enum reload_type type)
578 {
579 rtx loc;
580 int mem_valid;
581
582 /* By default, if MODE is narrower than a word, widen it to a word.
583 This is required because most machines that require these memory
584 locations do not support short loads and stores from all registers
585 (e.g., FP registers). */
586
587 #ifdef SECONDARY_MEMORY_NEEDED_MODE
588 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
589 #else
590 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
591 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
592 #endif
593
594 /* If we already have made a MEM for this operand in MODE, return it. */
595 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
596 return secondary_memlocs_elim[(int) mode][opnum];
597
598 /* If this is the first time we've tried to get a MEM for this mode,
599 allocate a new one. `something_changed' in reload will get set
600 by noticing that the frame size has changed. */
601
602 if (secondary_memlocs[(int) mode] == 0)
603 {
604 #ifdef SECONDARY_MEMORY_NEEDED_RTX
605 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
606 #else
607 secondary_memlocs[(int) mode]
608 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
609 #endif
610 }
611
612 /* Get a version of the address doing any eliminations needed. If that
613 didn't give us a new MEM, make a new one if it isn't valid. */
614
615 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
616 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
617 MEM_ADDR_SPACE (loc));
618
619 if (! mem_valid && loc == secondary_memlocs[(int) mode])
620 loc = copy_rtx (loc);
621
622 /* The only time the call below will do anything is if the stack
623 offset is too large. In that case IND_LEVELS doesn't matter, so we
624 can just pass a zero. Adjust the type to be the address of the
625 corresponding object. If the address was valid, save the eliminated
626 address. If it wasn't valid, we need to make a reload each time, so
627 don't save it. */
628
629 if (! mem_valid)
630 {
631 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
632 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
633 : RELOAD_OTHER);
634
635 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
636 opnum, type, 0, 0);
637 }
638
639 secondary_memlocs_elim[(int) mode][opnum] = loc;
640 if (secondary_memlocs_elim_used <= (int)mode)
641 secondary_memlocs_elim_used = (int)mode + 1;
642 return loc;
643 }
644
645 /* Clear any secondary memory locations we've made. */
646
647 void
648 clear_secondary_mem (void)
649 {
650 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
651 }
652 #endif /* SECONDARY_MEMORY_NEEDED */
653 \f
654
655 /* Find the largest class which has at least one register valid in
656 mode INNER, and which for every such register, that register number
657 plus N is also valid in OUTER (if in range) and is cheap to move
658 into REGNO. Such a class must exist. */
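/* For example (a hypothetical situation, not from the original comment):
   for (subreg:SI (reg:DI R) 4) on a 32-bit target, where R is a hard
   register, N is 1, INNER is DImode and OUTER is SImode; we want the
   largest class in which every register valid for DImode also has its
   successor valid for SImode, and which is cheap to move into the class
   of DEST_REGNO.  */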
659
660 static enum reg_class
661 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
662 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
663 unsigned int dest_regno ATTRIBUTE_UNUSED)
664 {
665 int best_cost = -1;
666 int rclass;
667 int regno;
668 enum reg_class best_class = NO_REGS;
669 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
670 unsigned int best_size = 0;
671 int cost;
672
673 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
674 {
675 int bad = 0;
676 int good = 0;
677 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
678 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
679 {
680 if (HARD_REGNO_MODE_OK (regno, inner))
681 {
682 good = 1;
683 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
684 || ! HARD_REGNO_MODE_OK (regno + n, outer))
685 bad = 1;
686 }
687 }
688
689 if (bad || !good)
690 continue;
691 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
692
693 if ((reg_class_size[rclass] > best_size
694 && (best_cost < 0 || best_cost >= cost))
695 || best_cost > cost)
696 {
697 best_class = (enum reg_class) rclass;
698 best_size = reg_class_size[rclass];
699 best_cost = register_move_cost (outer, (enum reg_class) rclass,
700 dest_class);
701 }
702 }
703
704 gcc_assert (best_size != 0);
705
706 return best_class;
707 }
708
709 /* We are trying to reload a subreg of something that is not a register.
710 Find the largest class which has at least one register valid in
711 mode MODE. OUTER is the mode of the subreg, and DEST_CLASS is the class in
712 which we would eventually like to obtain the object. */
713
714 static enum reg_class
715 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
716 enum machine_mode mode ATTRIBUTE_UNUSED,
717 enum reg_class dest_class ATTRIBUTE_UNUSED)
718 {
719 int best_cost = -1;
720 int rclass;
721 int regno;
722 enum reg_class best_class = NO_REGS;
723 unsigned int best_size = 0;
724 int cost;
725
726 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
727 {
728 int bad = 0;
729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
730 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
731 && !HARD_REGNO_MODE_OK (regno, mode))
732 bad = 1;
733
734 if (bad)
735 continue;
736
737 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
738
739 if ((reg_class_size[rclass] > best_size
740 && (best_cost < 0 || best_cost >= cost))
741 || best_cost > cost)
742 {
743 best_class = (enum reg_class) rclass;
744 best_size = reg_class_size[rclass];
745 best_cost = register_move_cost (outer, (enum reg_class) rclass,
746 dest_class);
747 }
748 }
749
750 gcc_assert (best_size != 0);
751
752 #ifdef LIMIT_RELOAD_CLASS
753 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
754 #endif
755 return best_class;
756 }
757 \f
758 /* Return the number of a previously made reload that can be combined with
759 a new one, or n_reloads if none of the existing reloads can be used.
760 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
761 push_reload, they determine the kind of the new reload that we try to
762 combine. P_IN points to the corresponding value of IN, which can be
763 modified by this function.
764 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
765
766 static int
767 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
768 enum reload_type type, int opnum, int dont_share)
769 {
770 rtx in = *p_in;
771 int i;
772 /* We can't merge two reloads if the output of either one is
773 earlyclobbered. */
774
775 if (earlyclobber_operand_p (out))
776 return n_reloads;
777
778 /* We can use an existing reload if the class is right
779 and at least one of IN and OUT is a match
780 and the other is at worst neutral.
781 (A zero compared against anything is neutral.)
782
783 For targets with small register classes, don't use existing reloads
784 unless they are for the same thing since that can cause us to need
785 more reload registers than we otherwise would. */
786
787 for (i = 0; i < n_reloads; i++)
788 if ((reg_class_subset_p (rclass, rld[i].rclass)
789 || reg_class_subset_p (rld[i].rclass, rclass))
790 /* If the existing reload has a register, it must fit our class. */
791 && (rld[i].reg_rtx == 0
792 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
793 true_regnum (rld[i].reg_rtx)))
794 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
795 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
796 || (out != 0 && MATCHES (rld[i].out, out)
797 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
798 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
799 && (small_register_class_p (rclass)
800 || targetm.small_register_classes_for_mode_p (VOIDmode))
801 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
802 return i;
803
804 /* Reloading a plain reg for input can match a reload to postincrement
805 that reg, since the postincrement's value is the right value.
806 Likewise, it can match a preincrement reload, since we regard
807 the preincrementation as happening before any ref in this insn
808 to that register. */
809 for (i = 0; i < n_reloads; i++)
810 if ((reg_class_subset_p (rclass, rld[i].rclass)
811 || reg_class_subset_p (rld[i].rclass, rclass))
812 /* If the existing reload has a register, it must fit our
813 class. */
814 && (rld[i].reg_rtx == 0
815 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
816 true_regnum (rld[i].reg_rtx)))
817 && out == 0 && rld[i].out == 0 && rld[i].in != 0
818 && ((REG_P (in)
819 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
820 && MATCHES (XEXP (rld[i].in, 0), in))
821 || (REG_P (rld[i].in)
822 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
823 && MATCHES (XEXP (in, 0), rld[i].in)))
824 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
825 && (small_register_class_p (rclass)
826 || targetm.small_register_classes_for_mode_p (VOIDmode))
827 && MERGABLE_RELOADS (type, rld[i].when_needed,
828 opnum, rld[i].opnum))
829 {
830 /* Make sure reload_in ultimately has the increment,
831 not the plain register. */
832 if (REG_P (in))
833 *p_in = rld[i].in;
834 return i;
835 }
836 return n_reloads;
837 }
838
839 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
840 expression. MODE is the mode that X will be used in. OUTPUT is true if
841 the function is invoked for the output part of an enclosing reload. */
842
843 static bool
844 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
845 {
846 rtx inner;
847
848 /* Only SUBREGs are problematical. */
849 if (GET_CODE (x) != SUBREG)
850 return false;
851
852 inner = SUBREG_REG (x);
853
854 /* If INNER is a constant or PLUS, then INNER will need reloading. */
855 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
856 return true;
857
858 /* If INNER is not a hard register, then INNER will not need reloading. */
859 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
860 return false;
861
862 /* If INNER is not ok for MODE, then INNER will need reloading. */
863 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
864 return true;
865
866 /* If this is for an output, and the outer part is a word or smaller,
867 INNER is larger than a word and the number of registers in INNER is
868 not the same as the number of words in INNER, then INNER will need
869 reloading (with an in-out reload). */
870 return (output
871 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
872 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
873 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
874 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
875 }
876
877 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
878 requiring an extra reload register. The caller has already found that
879 IN contains some reference to REGNO, so check that we can produce the
880 new value in a single step. E.g. if we have
881 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
882 instruction that adds one to a register, this should succeed.
883 However, if we have something like
884 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
885 needs to be loaded into a register first, we need a separate reload
886 register.
887 Such PLUS reloads are generated by find_reloads_address_part.
888 The out-of-range PLUS expressions are usually introduced in the instruction
889 patterns by register elimination and substituting pseudos without a home
890 by their function-invariant equivalences. */
891 static int
892 can_reload_into (rtx in, int regno, enum machine_mode mode)
893 {
894 rtx dst, test_insn;
895 int r = 0;
896 struct recog_data save_recog_data;
897
898 /* For matching constraints, we often get notional input reloads where
899 we want to use the original register as the reload register. I.e.
900 technically this is a non-optional input-output reload, but IN is
901 already a valid register, and has been chosen as the reload register.
902 Speed this up, since it trivially works. */
903 if (REG_P (in))
904 return 1;
905
906 /* To test MEMs properly, we'd have to take into account all the reloads
907 that are already scheduled, which can become quite complicated.
908 And since we've already handled address reloads for this MEM, it
909 should always succeed anyway. */
910 if (MEM_P (in))
911 return 1;
912
913 /* If we can make a simple SET insn that does the job, everything should
914 be fine. */
915 dst = gen_rtx_REG (mode, regno);
916 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
917 save_recog_data = recog_data;
918 if (recog_memoized (test_insn) >= 0)
919 {
920 extract_insn (test_insn);
921 r = constrain_operands (1);
922 }
923 recog_data = save_recog_data;
924 return r;
925 }
926
927 /* Record one reload that needs to be performed.
928 IN is an rtx saying where the data are to be found before this instruction.
929 OUT says where they must be stored after the instruction.
930 (IN is zero for data not read, and OUT is zero for data not written.)
931 INLOC and OUTLOC point to the places in the instructions where
932 IN and OUT were found.
933 If IN and OUT are both nonzero, it means the same register must be used
934 to reload both IN and OUT.
935
936 RCLASS is a register class required for the reloaded data.
937 INMODE is the machine mode that the instruction requires
938 for the reg that replaces IN and OUTMODE is likewise for OUT.
939
940 If IN is zero, then OUT's location and mode should be passed as
941 INLOC and INMODE.
942
943 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
944
945 OPTIONAL nonzero means this reload does not need to be performed:
946 it can be discarded if that is more convenient.
947
948 OPNUM and TYPE say what the purpose of this reload is.
949
950 The return value is the reload-number for this reload.
951
952 If both IN and OUT are nonzero, in some rare cases we might
953 want to make two separate reloads. (Actually we never do this now.)
954 Therefore, the reload-number for OUT is stored in
955 output_reloadnum when we return; the return value applies to IN.
956 Usually (presently always), when IN and OUT are nonzero,
957 the two reload-numbers are equal, but the caller should be careful to
958 distinguish them. */
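/* An illustrative call (the operand number and class are hypothetical):
   reloading input operand 2 of the current insn into GENERAL_REGS might
   look like

     push_reload (recog_data.operand[2], NULL_RTX,
                  recog_data.operand_loc[2], (rtx *) 0,
                  GENERAL_REGS, GET_MODE (recog_data.operand[2]),
                  VOIDmode, 0, 0, 2, RELOAD_FOR_INPUT);

   IN and INLOC describe where the value currently lives, OUT and OUTLOC
   are zero because nothing is stored back, and the return value indexes
   the rld[] array.  */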
959
960 int
961 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
962 enum reg_class rclass, enum machine_mode inmode,
963 enum machine_mode outmode, int strict_low, int optional,
964 int opnum, enum reload_type type)
965 {
966 int i;
967 int dont_share = 0;
968 int dont_remove_subreg = 0;
969 #ifdef LIMIT_RELOAD_CLASS
970 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
971 #endif
972 int secondary_in_reload = -1, secondary_out_reload = -1;
973 enum insn_code secondary_in_icode = CODE_FOR_nothing;
974 enum insn_code secondary_out_icode = CODE_FOR_nothing;
975 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
976 subreg_in_class = NO_REGS;
977
978 /* INMODE and/or OUTMODE could be VOIDmode if no mode
979 has been specified for the operand. In that case,
980 use the operand's mode as the mode to reload. */
981 if (inmode == VOIDmode && in != 0)
982 inmode = GET_MODE (in);
983 if (outmode == VOIDmode && out != 0)
984 outmode = GET_MODE (out);
985
986 /* If find_reloads and friends have so far failed to replace a pseudo
987 with its reg_equiv_constant, something went wrong
988 beforehand.
989 Note that it can't simply be done here if we missed it earlier
990 since the constant might need to be pushed into the literal pool
991 and the resulting memref would probably need further
992 reloading. */
993 if (in != 0 && REG_P (in))
994 {
995 int regno = REGNO (in);
996
997 gcc_assert (regno < FIRST_PSEUDO_REGISTER
998 || reg_renumber[regno] >= 0
999 || reg_equiv_constant (regno) == NULL_RTX);
1000 }
1001
1002 /* reg_equiv_constant only contains constants which are obviously
1003 not appropriate as a destination. So if we would need to replace
1004 the destination pseudo with a constant, we are in real
1005 trouble. */
1006 if (out != 0 && REG_P (out))
1007 {
1008 int regno = REGNO (out);
1009
1010 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1011 || reg_renumber[regno] >= 0
1012 || reg_equiv_constant (regno) == NULL_RTX);
1013 }
1014
1015 /* If we have a read-write operand with an address side-effect,
1016 change either IN or OUT so the side-effect happens only once. */
1017 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1018 switch (GET_CODE (XEXP (in, 0)))
1019 {
1020 case POST_INC: case POST_DEC: case POST_MODIFY:
1021 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1022 break;
1023
1024 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1025 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1026 break;
1027
1028 default:
1029 break;
1030 }
1031
1032 /* If we are reloading a (SUBREG constant ...), really reload just the
1033 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1034 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1035 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1036 register is a pseudo, also reload the inside expression.
1037 For machines that extend byte loads, do this for any SUBREG of a pseudo
1038 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1039 M2 is an integral mode that gets extended when loaded.
1040 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1041 where either M1 is not valid for R or M2 is wider than a word but we
1042 only need one register to store an M2-sized quantity in R.
1043 (However, if OUT is nonzero, we need to reload the reg *and*
1044 the subreg, so do nothing here, and let the following statement handle it.)
1045
1046 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1047 we can't handle it here because CONST_INT does not indicate a mode.
1048
1049 Similarly, we must reload the inside expression if we have a
1050 STRICT_LOW_PART (presumably, in == out in this case).
1051
1052 Also reload the inner expression if it does not require a secondary
1053 reload but the SUBREG does.
1054
1055 Finally, reload the inner expression if it is a register that is in
1056 the class whose registers cannot be referenced in a different size
1057 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1058 cannot reload just the inside since we might end up with the wrong
1059 register class. But if it is inside a STRICT_LOW_PART, we have
1060 no choice, so we hope we do get the right register class there. */
1061
1062 if (in != 0 && GET_CODE (in) == SUBREG
1063 && (subreg_lowpart_p (in) || strict_low)
1064 #ifdef CANNOT_CHANGE_MODE_CLASS
1065 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1066 #endif
1067 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1068 && (CONSTANT_P (SUBREG_REG (in))
1069 || GET_CODE (SUBREG_REG (in)) == PLUS
1070 || strict_low
1071 || (((REG_P (SUBREG_REG (in))
1072 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1073 || MEM_P (SUBREG_REG (in)))
1074 && ((GET_MODE_PRECISION (inmode)
1075 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1076 #ifdef LOAD_EXTEND_OP
1077 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1078 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1079 <= UNITS_PER_WORD)
1080 && (GET_MODE_PRECISION (inmode)
1081 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1082 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1083 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1084 #endif
1085 #ifdef WORD_REGISTER_OPERATIONS
1086 || ((GET_MODE_PRECISION (inmode)
1087 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1088 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1089 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1090 / UNITS_PER_WORD)))
1091 #endif
1092 ))
1093 || (REG_P (SUBREG_REG (in))
1094 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1095 /* The case where out is nonzero
1096 is handled differently in the following statement. */
1097 && (out == 0 || subreg_lowpart_p (in))
1098 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1099 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1100 > UNITS_PER_WORD)
1101 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1102 / UNITS_PER_WORD)
1103 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1104 [GET_MODE (SUBREG_REG (in))]))
1105 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1106 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1107 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1108 SUBREG_REG (in))
1109 == NO_REGS))
1110 #ifdef CANNOT_CHANGE_MODE_CLASS
1111 || (REG_P (SUBREG_REG (in))
1112 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1113 && REG_CANNOT_CHANGE_MODE_P
1114 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1115 #endif
1116 ))
1117 {
1118 #ifdef LIMIT_RELOAD_CLASS
1119 in_subreg_loc = inloc;
1120 #endif
1121 inloc = &SUBREG_REG (in);
1122 in = *inloc;
1123 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1124 if (MEM_P (in))
1125 /* This is supposed to happen only for paradoxical subregs made by
1126 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1127 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1128 #endif
1129 inmode = GET_MODE (in);
1130 }
1131
1132 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1133 where M1 is not valid for R if it was not handled by the code above.
1134
1135 Similar issue for (SUBREG constant ...) if it was not handled by the
1136 code above. This can happen if SUBREG_BYTE != 0.
1137
1138 However, we must reload the inner reg *as well as* the subreg in
1139 that case. */
1140
1141 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1142 {
1143 if (REG_P (SUBREG_REG (in)))
1144 subreg_in_class
1145 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1146 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1147 GET_MODE (SUBREG_REG (in)),
1148 SUBREG_BYTE (in),
1149 GET_MODE (in)),
1150 REGNO (SUBREG_REG (in)));
1151 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1152 subreg_in_class = find_valid_class_1 (inmode,
1153 GET_MODE (SUBREG_REG (in)),
1154 rclass);
1155
1156 /* This relies on the fact that emit_reload_insns outputs the
1157 instructions for input reloads of type RELOAD_OTHER in the same
1158 order as the reloads. Thus if the outer reload is also of type
1159 RELOAD_OTHER, we are guaranteed that this inner reload will be
1160 output before the outer reload. */
1161 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1162 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1163 dont_remove_subreg = 1;
1164 }
1165
1166 /* Similarly for paradoxical and problematical SUBREGs on the output.
1167 Note that there is no reason we need worry about the previous value
1168 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1169 entitled to clobber it all (except in the case of a word mode subreg
1170 or of a STRICT_LOW_PART; in the latter case the constraint should
1171 label it input-output.) */
1172 if (out != 0 && GET_CODE (out) == SUBREG
1173 && (subreg_lowpart_p (out) || strict_low)
1174 #ifdef CANNOT_CHANGE_MODE_CLASS
1175 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1176 #endif
1177 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1178 && (CONSTANT_P (SUBREG_REG (out))
1179 || strict_low
1180 || (((REG_P (SUBREG_REG (out))
1181 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1182 || MEM_P (SUBREG_REG (out)))
1183 && ((GET_MODE_PRECISION (outmode)
1184 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1185 #ifdef WORD_REGISTER_OPERATIONS
1186 || ((GET_MODE_PRECISION (outmode)
1187 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1188 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1189 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1190 / UNITS_PER_WORD)))
1191 #endif
1192 ))
1193 || (REG_P (SUBREG_REG (out))
1194 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1195 /* The case of a word mode subreg
1196 is handled differently in the following statement. */
1197 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1198 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1199 > UNITS_PER_WORD))
1200 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1201 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1202 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1203 SUBREG_REG (out))
1204 == NO_REGS))
1205 #ifdef CANNOT_CHANGE_MODE_CLASS
1206 || (REG_P (SUBREG_REG (out))
1207 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1208 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1209 GET_MODE (SUBREG_REG (out)),
1210 outmode))
1211 #endif
1212 ))
1213 {
1214 #ifdef LIMIT_RELOAD_CLASS
1215 out_subreg_loc = outloc;
1216 #endif
1217 outloc = &SUBREG_REG (out);
1218 out = *outloc;
1219 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1220 gcc_assert (!MEM_P (out)
1221 || GET_MODE_SIZE (GET_MODE (out))
1222 <= GET_MODE_SIZE (outmode));
1223 #endif
1224 outmode = GET_MODE (out);
1225 }
1226
1227 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1228 where either M1 is not valid for R or M2 is wider than a word but we
1229 only need one register to store an M2-sized quantity in R.
1230
1231 However, we must reload the inner reg *as well as* the subreg in
1232 that case and the inner reg is an in-out reload. */
1233
1234 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1235 {
1236 enum reg_class in_out_class
1237 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1238 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1239 GET_MODE (SUBREG_REG (out)),
1240 SUBREG_BYTE (out),
1241 GET_MODE (out)),
1242 REGNO (SUBREG_REG (out)));
1243
1244 /* This relies on the fact that emit_reload_insns outputs the
1245 instructions for output reloads of type RELOAD_OTHER in reverse
1246 order of the reloads. Thus if the outer reload is also of type
1247 RELOAD_OTHER, we are guaranteed that this inner reload will be
1248 output after the outer reload. */
1249 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1250 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1251 0, 0, opnum, RELOAD_OTHER);
1252 dont_remove_subreg = 1;
1253 }
1254
1255 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1256 if (in != 0 && out != 0 && MEM_P (out)
1257 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1258 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1259 dont_share = 1;
1260
1261 /* If IN is a SUBREG of a hard register, make a new REG. This
1262 simplifies some of the cases below. */
1263
1264 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1265 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1266 && ! dont_remove_subreg)
1267 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1268
1269 /* Similarly for OUT. */
1270 if (out != 0 && GET_CODE (out) == SUBREG
1271 && REG_P (SUBREG_REG (out))
1272 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1273 && ! dont_remove_subreg)
1274 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1275
1276 /* Narrow down the class of register wanted if that is
1277 desirable on this machine for efficiency. */
1278 {
1279 reg_class_t preferred_class = rclass;
1280
1281 if (in != 0)
1282 preferred_class = targetm.preferred_reload_class (in, rclass);
1283
1284 /* Output reloads may need analogous treatment, different in detail. */
1285 if (out != 0)
1286 preferred_class
1287 = targetm.preferred_output_reload_class (out, preferred_class);
1288
1289 /* Discard what the target said if we cannot do it. */
1290 if (preferred_class != NO_REGS
1291 || (optional && type == RELOAD_FOR_OUTPUT))
1292 rclass = (enum reg_class) preferred_class;
1293 }
1294
1295 /* Make sure we use a class that can handle the actual pseudo
1296 inside any subreg. For example, on the 386, QImode regs
1297 can appear within SImode subregs. Although GENERAL_REGS
1298 can handle SImode, QImode needs a smaller class. */
1299 #ifdef LIMIT_RELOAD_CLASS
1300 if (in_subreg_loc)
1301 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1302 else if (in != 0 && GET_CODE (in) == SUBREG)
1303 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1304
1305 if (out_subreg_loc)
1306 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1307 if (out != 0 && GET_CODE (out) == SUBREG)
1308 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1309 #endif
1310
1311 /* Verify that this class is at least possible for the mode that
1312 is specified. */
1313 if (this_insn_is_asm)
1314 {
1315 enum machine_mode mode;
1316 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1317 mode = inmode;
1318 else
1319 mode = outmode;
1320 if (mode == VOIDmode)
1321 {
1322 error_for_asm (this_insn, "cannot reload integer constant "
1323 "operand in %<asm%>");
1324 mode = word_mode;
1325 if (in != 0)
1326 inmode = word_mode;
1327 if (out != 0)
1328 outmode = word_mode;
1329 }
1330 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1331 if (HARD_REGNO_MODE_OK (i, mode)
1332 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1333 break;
1334 if (i == FIRST_PSEUDO_REGISTER)
1335 {
1336 error_for_asm (this_insn, "impossible register constraint "
1337 "in %<asm%>");
1338 /* Avoid further trouble with this insn. */
1339 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1340 /* We used to continue here setting class to ALL_REGS, but it triggers
1341 a sanity check on i386 for:
1342 void foo(long double d)
1343 {
1344 asm("" :: "a" (d));
1345 }
1346 Returning zero here ought to be safe as we take care in
1347 find_reloads to not process the reloads when the instruction was
1348 replaced by USE. */
1349
1350 return 0;
1351 }
1352 }
1353
1354 /* Optional output reloads are always OK even if we have no register class,
1355 since the function of these reloads is only to have spill_reg_store etc.
1356 set, so that the storing insn can be deleted later. */
1357 gcc_assert (rclass != NO_REGS
1358 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1359
1360 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1361
1362 if (i == n_reloads)
1363 {
1364 /* See if we need a secondary reload register to move between CLASS
1365 and IN or CLASS and OUT. Get the icode and push any required reloads
1366 needed for each of them if so. */
1367
1368 if (in != 0)
1369 secondary_in_reload
1370 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1371 &secondary_in_icode, NULL);
1372 if (out != 0 && GET_CODE (out) != SCRATCH)
1373 secondary_out_reload
1374 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1375 type, &secondary_out_icode, NULL);
1376
1377 /* We found no existing reload suitable for re-use.
1378 So add an additional reload. */
1379
1380 #ifdef SECONDARY_MEMORY_NEEDED
1381 if (subreg_in_class == NO_REGS
1382 && in != 0
1383 && (REG_P (in)
1384 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1385 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1386 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1387 /* If a memory location is needed for the copy, make one. */
1388 if (subreg_in_class != NO_REGS
1389 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1390 get_secondary_mem (in, inmode, opnum, type);
1391 #endif
1392
1393 i = n_reloads;
1394 rld[i].in = in;
1395 rld[i].out = out;
1396 rld[i].rclass = rclass;
1397 rld[i].inmode = inmode;
1398 rld[i].outmode = outmode;
1399 rld[i].reg_rtx = 0;
1400 rld[i].optional = optional;
1401 rld[i].inc = 0;
1402 rld[i].nocombine = 0;
1403 rld[i].in_reg = inloc ? *inloc : 0;
1404 rld[i].out_reg = outloc ? *outloc : 0;
1405 rld[i].opnum = opnum;
1406 rld[i].when_needed = type;
1407 rld[i].secondary_in_reload = secondary_in_reload;
1408 rld[i].secondary_out_reload = secondary_out_reload;
1409 rld[i].secondary_in_icode = secondary_in_icode;
1410 rld[i].secondary_out_icode = secondary_out_icode;
1411 rld[i].secondary_p = 0;
1412
1413 n_reloads++;
1414
1415 #ifdef SECONDARY_MEMORY_NEEDED
1416 if (out != 0
1417 && (REG_P (out)
1418 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1419 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1420 && SECONDARY_MEMORY_NEEDED (rclass,
1421 REGNO_REG_CLASS (reg_or_subregno (out)),
1422 outmode))
1423 get_secondary_mem (out, outmode, opnum, type);
1424 #endif
1425 }
1426 else
1427 {
1428 /* We are reusing an existing reload,
1429 but we may have additional information for it.
1430 For example, we may now have both IN and OUT
1431 while the old one may have just one of them. */
1432
1433 /* The modes can be different. If they are, we want to reload in
1434 the larger mode, so that the value is valid for both modes. */
1435 if (inmode != VOIDmode
1436 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1437 rld[i].inmode = inmode;
1438 if (outmode != VOIDmode
1439 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1440 rld[i].outmode = outmode;
1441 if (in != 0)
1442 {
1443 rtx in_reg = inloc ? *inloc : 0;
1444 /* If we merge reloads for two distinct rtl expressions that
1445 are identical in content, there might be duplicate address
1446 reloads. Remove the extra set now, so that if we later find
1447 that we can inherit this reload, we can get rid of the
1448 address reloads altogether.
1449
1450 Do not do this if both reloads are optional since the result
1451 would be an optional reload which could potentially leave
1452 unresolved address replacements.
1453
1454 It is not sufficient to call transfer_replacements since
1455 choose_reload_regs will remove the replacements for address
1456 reloads of inherited reloads which results in the same
1457 problem. */
1458 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1459 && ! (rld[i].optional && optional))
1460 {
1461 /* We must keep the address reload with the lower operand
1462 number alive. */
1463 if (opnum > rld[i].opnum)
1464 {
1465 remove_address_replacements (in);
1466 in = rld[i].in;
1467 in_reg = rld[i].in_reg;
1468 }
1469 else
1470 remove_address_replacements (rld[i].in);
1471 }
1472 /* When emitting reloads we don't look only at the in- and
1473 outmode, but also directly at the operands (in and out).
1474 So we can't simply overwrite them with whatever we have found
1475 for this (to-be-merged) reload, we have to "merge" that too.
1476 Reusing another reload already verified that we deal with the
1477 same operands, just possibly in different modes. So we
1478 overwrite the operands only when the new mode is larger.
1479 See also PR33613. */
1480 if (!rld[i].in
1481 || GET_MODE_SIZE (GET_MODE (in))
1482 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1483 rld[i].in = in;
1484 if (!rld[i].in_reg
1485 || (in_reg
1486 && GET_MODE_SIZE (GET_MODE (in_reg))
1487 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1488 rld[i].in_reg = in_reg;
1489 }
1490 if (out != 0)
1491 {
1492 if (!rld[i].out
1493 || (out
1494 && GET_MODE_SIZE (GET_MODE (out))
1495 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1496 rld[i].out = out;
1497 if (outloc
1498 && (!rld[i].out_reg
1499 || GET_MODE_SIZE (GET_MODE (*outloc))
1500 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1501 rld[i].out_reg = *outloc;
1502 }
1503 if (reg_class_subset_p (rclass, rld[i].rclass))
1504 rld[i].rclass = rclass;
1505 rld[i].optional &= optional;
1506 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1507 opnum, rld[i].opnum))
1508 rld[i].when_needed = RELOAD_OTHER;
1509 rld[i].opnum = MIN (rld[i].opnum, opnum);
1510 }
1511
1512 /* If the ostensible rtx being reloaded differs from the rtx found
1513 in the location to substitute, this reload is not safe to combine
1514 because we cannot reliably tell whether it appears in the insn. */
1515
1516 if (in != 0 && in != *inloc)
1517 rld[i].nocombine = 1;
1518
1519 #if 0
1520 /* This was replaced by changes in find_reloads_address_1 and the new
1521 function inc_for_reload, which go with a new meaning of reload_inc. */
1522
1523 /* If this is an IN/OUT reload in an insn that sets the CC,
1524 it must be for an autoincrement. It doesn't work to store
1525 the incremented value after the insn because that would clobber the CC.
1526 So we must load the value reloaded from,
1527 increment it, store it back, then decrement it again. */
1528 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1529 {
1530 out = 0;
1531 rld[i].out = 0;
1532 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1533 /* If we did not find a nonzero amount-to-increment-by,
1534 that contradicts the belief that IN is being incremented
1535 in an address in this insn. */
1536 gcc_assert (rld[i].inc != 0);
1537 }
1538 #endif
1539
1540 /* If we will replace IN and OUT with the reload-reg,
1541 record where they are located so that substitution need
1542 not do a tree walk. */
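/* Each record below simply notes the reload number, the address of the
rtx to overwrite, and the mode to use, so later substitution is a walk
over this flat array rather than over the insn. */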
1543
1544 if (replace_reloads)
1545 {
1546 if (inloc != 0)
1547 {
1548 struct replacement *r = &replacements[n_replacements++];
1549 r->what = i;
1550 r->where = inloc;
1551 r->mode = inmode;
1552 }
1553 if (outloc != 0 && outloc != inloc)
1554 {
1555 struct replacement *r = &replacements[n_replacements++];
1556 r->what = i;
1557 r->where = outloc;
1558 r->mode = outmode;
1559 }
1560 }
1561
1562 /* If this reload is just being introduced and it has both
1563 an incoming quantity and an outgoing quantity that are
1564 supposed to be made to match, see if either one of the two
1565 can serve as the place to reload into.
1566
1567 If one of them is acceptable, set rld[i].reg_rtx
1568 to that one. */
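/* A hypothetical illustration: for
(set (reg:SI 0) (plus:SI (reg:SI 100) (const_int 1)))
where the constraints force operands 0 and 1 to match, hard reg 0 can
serve as the reload register for pseudo 100, so the input reload and
the output end up in the same place. */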
1569
1570 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1571 {
1572 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1573 inmode, outmode,
1574 rld[i].rclass, i,
1575 earlyclobber_operand_p (out));
1576
1577 /* If the outgoing register already contains the same value
1578 as the incoming one, we can dispense with loading it.
1579 The easiest way to tell the caller that is to give a phony
1580 value for the incoming operand (same as outgoing one). */
1581 if (rld[i].reg_rtx == out
1582 && (REG_P (in) || CONSTANT_P (in))
1583 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1584 static_reload_reg_p, i, inmode))
1585 rld[i].in = out;
1586 }
1587
1588 /* If this is an input reload and the operand contains a register that
1589 dies in this insn and is used nowhere else, see if it is the right class
1590 to be used for this reload. Use it if so. (This occurs most commonly
1591 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1592 this if it is also an output reload that mentions the register unless
1593 the output is a SUBREG that clobbers an entire register.
1594
1595 Note that the operand might be one of the spill regs, if it is a
1596 pseudo reg and we are in a block where spilling has not taken place.
1597 But if there is no spilling in this block, that is OK.
1598 An explicitly used hard reg cannot be a spill reg. */
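/* Illustrative sketch of the case handled below: if this insn has a
REG_DEAD note for hard reg 5, reg 5 is in RCLASS, is wide enough for
both INMODE and OUTMODE, and is referenced nowhere in the pattern
except by the operand being reloaded, then gen_rtx_REG (rel_mode, 5)
can act as the reload register without using a spill register. */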
1599
1600 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1601 {
1602 rtx note;
1603 int regno;
1604 enum machine_mode rel_mode = inmode;
1605
1606 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1607 rel_mode = outmode;
1608
1609 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1610 if (REG_NOTE_KIND (note) == REG_DEAD
1611 && REG_P (XEXP (note, 0))
1612 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1613 && reg_mentioned_p (XEXP (note, 0), in)
1614 /* Check that a former pseudo is valid; see find_dummy_reload. */
1615 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1616 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1617 ORIGINAL_REGNO (XEXP (note, 0)))
1618 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1619 && ! refers_to_regno_for_reload_p (regno,
1620 end_hard_regno (rel_mode,
1621 regno),
1622 PATTERN (this_insn), inloc)
1623 /* If this is also an output reload, IN cannot be used as
1624 the reload register if it is set in this insn unless IN
1625 is also OUT. */
1626 && (out == 0 || in == out
1627 || ! hard_reg_set_here_p (regno,
1628 end_hard_regno (rel_mode, regno),
1629 PATTERN (this_insn)))
1630 /* ??? Why is this code so different from the previous?
1631 Is there any simple coherent way to describe the two together?
1632 What's going on here? */
1633 && (in != out
1634 || (GET_CODE (in) == SUBREG
1635 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1636 / UNITS_PER_WORD)
1637 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1638 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1639 /* Make sure the operand fits in the reg that dies. */
1640 && (GET_MODE_SIZE (rel_mode)
1641 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1642 && HARD_REGNO_MODE_OK (regno, inmode)
1643 && HARD_REGNO_MODE_OK (regno, outmode))
1644 {
1645 unsigned int offs;
1646 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1647 hard_regno_nregs[regno][outmode]);
1648
1649 for (offs = 0; offs < nregs; offs++)
1650 if (fixed_regs[regno + offs]
1651 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1652 regno + offs))
1653 break;
1654
1655 if (offs == nregs
1656 && (! (refers_to_regno_for_reload_p
1657 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1658 || can_reload_into (in, regno, inmode)))
1659 {
1660 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1661 break;
1662 }
1663 }
1664 }
1665
1666 if (out)
1667 output_reloadnum = i;
1668
1669 return i;
1670 }
1671
1672 /* Record an additional place we must replace a value
1673 for which we have already recorded a reload.
1674 RELOADNUM is the value returned by push_reload
1675 when the reload was recorded.
1676 This is used in insn patterns that use match_dup. */
1677
1678 static void
1679 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1680 {
1681 if (replace_reloads)
1682 {
1683 struct replacement *r = &replacements[n_replacements++];
1684 r->what = reloadnum;
1685 r->where = loc;
1686 r->mode = mode;
1687 }
1688 }
1689
1690 /* Duplicate any replacement we have recorded to apply at
1691 location ORIG_LOC to also be performed at DUP_LOC.
1692 This is used in insn patterns that use match_dup. */
1693
1694 static void
1695 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1696 {
1697 int i, n = n_replacements;
1698
1699 for (i = 0; i < n; i++)
1700 {
1701 struct replacement *r = &replacements[i];
1702 if (r->where == orig_loc)
1703 push_replacement (dup_loc, r->what, r->mode);
1704 }
1705 }
1706 \f
1707 /* Transfer all replacements that used to be in reload FROM to be in
1708 reload TO. */
1709
1710 void
1711 transfer_replacements (int to, int from)
1712 {
1713 int i;
1714
1715 for (i = 0; i < n_replacements; i++)
1716 if (replacements[i].what == from)
1717 replacements[i].what = to;
1718 }
1719 \f
1720 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1721 or a subpart of it. If we have any replacements registered for IN_RTX,
1722 cancel the reloads that were supposed to load them.
1723 Return nonzero if we canceled any reloads. */
1724 int
1725 remove_address_replacements (rtx in_rtx)
1726 {
1727 int i, j;
1728 char reload_flags[MAX_RELOADS];
1729 int something_changed = 0;
1730
1731 memset (reload_flags, 0, sizeof reload_flags);
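/* Bit 0 of a reload's flag records that one of its replacements lies
inside IN_RTX; bit 1 records a replacement elsewhere. Only reloads
whose every replacement lies inside IN_RTX (flag value 1) are
cancelled below. */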
1732 for (i = 0, j = 0; i < n_replacements; i++)
1733 {
1734 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1735 reload_flags[replacements[i].what] |= 1;
1736 else
1737 {
1738 replacements[j++] = replacements[i];
1739 reload_flags[replacements[i].what] |= 2;
1740 }
1741 }
1742 /* Note that the following store must be done before the recursive calls. */
1743 n_replacements = j;
1744
1745 for (i = n_reloads - 1; i >= 0; i--)
1746 {
1747 if (reload_flags[i] == 1)
1748 {
1749 deallocate_reload_reg (i);
1750 remove_address_replacements (rld[i].in);
1751 rld[i].in = 0;
1752 something_changed = 1;
1753 }
1754 }
1755 return something_changed;
1756 }
1757 \f
1758 /* If there is only one output reload, and it is not for an earlyclobber
1759 operand, try to combine it with a (logically unrelated) input reload
1760 to reduce the number of reload registers needed.
1761
1762 This is safe if the input reload does not appear in
1763 the value being output-reloaded, because this implies
1764 it is not needed any more once the original insn completes.
1765
1766 If that doesn't work, see if we can use any of the registers that
1767 die in this insn as a reload register. We can if it is of the right
1768 class and does not appear in the value being output-reloaded. */
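/* An illustrative (hypothetical) candidate: in
(set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
where both pseudos live on the stack, the register that reloads
pseudo 101 on input can also receive the result to be stored back
for pseudo 100, because 101 does not occur in the output operand. */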
1769
1770 static void
1771 combine_reloads (void)
1772 {
1773 int i, regno;
1774 int output_reload = -1;
1775 int secondary_out = -1;
1776 rtx note;
1777
1778 /* Find the output reload; return unless there is exactly one
1779 and that one is mandatory. */
1780
1781 for (i = 0; i < n_reloads; i++)
1782 if (rld[i].out != 0)
1783 {
1784 if (output_reload >= 0)
1785 return;
1786 output_reload = i;
1787 }
1788
1789 if (output_reload < 0 || rld[output_reload].optional)
1790 return;
1791
1792 /* An input-output reload isn't combinable. */
1793
1794 if (rld[output_reload].in != 0)
1795 return;
1796
1797 /* If this reload is for an earlyclobber operand, we can't do anything. */
1798 if (earlyclobber_operand_p (rld[output_reload].out))
1799 return;
1800
1801 /* If there is a reload for part of the address of this operand, we would
1802 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1803 its life to the point where doing this combine would not lower the
1804 number of spill registers needed. */
1805 for (i = 0; i < n_reloads; i++)
1806 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1807 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1808 && rld[i].opnum == rld[output_reload].opnum)
1809 return;
1810
1811 /* Check each input reload; can we combine it? */
1812
1813 for (i = 0; i < n_reloads; i++)
1814 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1815 /* Life span of this reload must not extend past main insn. */
1816 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1817 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1818 && rld[i].when_needed != RELOAD_OTHER
1819 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1820 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1821 [(int) rld[output_reload].outmode])
1822 && rld[i].inc == 0
1823 && rld[i].reg_rtx == 0
1824 #ifdef SECONDARY_MEMORY_NEEDED
1825 /* Don't combine two reloads with different secondary
1826 memory locations. */
1827 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1828 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1829 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1830 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1831 #endif
1832 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1833 ? (rld[i].rclass == rld[output_reload].rclass)
1834 : (reg_class_subset_p (rld[i].rclass,
1835 rld[output_reload].rclass)
1836 || reg_class_subset_p (rld[output_reload].rclass,
1837 rld[i].rclass)))
1838 && (MATCHES (rld[i].in, rld[output_reload].out)
1839 /* Args reversed because the first arg seems to be
1840 the one that we imagine being modified
1841 while the second is the one that might be affected. */
1842 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1843 rld[i].in)
1844 /* However, if the input is a register that appears inside
1845 the output, then we also can't share.
1846 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1847 If the same reload reg is used for both reg 69 and the
1848 result to be stored in memory, then that result
1849 will clobber the address of the memory ref. */
1850 && ! (REG_P (rld[i].in)
1851 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1852 rld[output_reload].out))))
1853 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1854 rld[i].when_needed != RELOAD_FOR_INPUT)
1855 && (reg_class_size[(int) rld[i].rclass]
1856 || targetm.small_register_classes_for_mode_p (VOIDmode))
1857 /* We will allow making things slightly worse by combining an
1858 input and an output, but no worse than that. */
1859 && (rld[i].when_needed == RELOAD_FOR_INPUT
1860 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1861 {
1862 int j;
1863
1864 /* We have found a reload to combine with! */
1865 rld[i].out = rld[output_reload].out;
1866 rld[i].out_reg = rld[output_reload].out_reg;
1867 rld[i].outmode = rld[output_reload].outmode;
1868 /* Mark the old output reload as inoperative. */
1869 rld[output_reload].out = 0;
1870 /* The combined reload is needed for the entire insn. */
1871 rld[i].when_needed = RELOAD_OTHER;
1872 /* If the output reload had a secondary reload, copy it. */
1873 if (rld[output_reload].secondary_out_reload != -1)
1874 {
1875 rld[i].secondary_out_reload
1876 = rld[output_reload].secondary_out_reload;
1877 rld[i].secondary_out_icode
1878 = rld[output_reload].secondary_out_icode;
1879 }
1880
1881 #ifdef SECONDARY_MEMORY_NEEDED
1882 /* Copy any secondary MEM. */
1883 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1884 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1885 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1886 #endif
1887 /* If required, minimize the register class. */
1888 if (reg_class_subset_p (rld[output_reload].rclass,
1889 rld[i].rclass))
1890 rld[i].rclass = rld[output_reload].rclass;
1891
1892 /* Transfer all replacements from the old reload to the combined. */
1893 for (j = 0; j < n_replacements; j++)
1894 if (replacements[j].what == output_reload)
1895 replacements[j].what = i;
1896
1897 return;
1898 }
1899
1900 /* If this insn has only one operand that is modified or written (assumed
1901 to be the first), it must be the one corresponding to this reload. It
1902 is safe to use anything that dies in this insn for that output provided
1903 that it does not occur in the output (we already know it isn't an
1904 earlyclobber). If this is an asm insn, give up. */
1905
1906 if (INSN_CODE (this_insn) == -1)
1907 return;
1908
1909 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1910 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1911 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1912 return;
1913
1914 /* See if some hard register that dies in this insn and is not used in
1915 the output is the right class. Only works if the register we pick
1916 up can fully hold our output reload. */
1917 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1918 if (REG_NOTE_KIND (note) == REG_DEAD
1919 && REG_P (XEXP (note, 0))
1920 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1921 rld[output_reload].out)
1922 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1923 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1924 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1925 regno)
1926 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1927 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1928 /* Ensure that a secondary or tertiary reload for this output
1929 won't want this register. */
1930 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1931 || (!(TEST_HARD_REG_BIT
1932 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1933 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1934 || !(TEST_HARD_REG_BIT
1935 (reg_class_contents[(int) rld[secondary_out].rclass],
1936 regno)))))
1937 && !fixed_regs[regno]
1938 /* Check that a former pseudo is valid; see find_dummy_reload. */
1939 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1940 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1941 ORIGINAL_REGNO (XEXP (note, 0)))
1942 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1943 {
1944 rld[output_reload].reg_rtx
1945 = gen_rtx_REG (rld[output_reload].outmode, regno);
1946 return;
1947 }
1948 }
1949 \f
1950 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1951 See if one of IN and OUT is a register that may be used;
1952 this is desirable since a spill-register won't be needed.
1953 If so, return the register rtx that proves acceptable.
1954
1955 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1956 RCLASS is the register class required for the reload.
1957
1958 If FOR_REAL is >= 0, it is the number of the reload,
1959 and in some cases when it can be discovered that OUT doesn't need
1960 to be computed, clear out rld[FOR_REAL].out.
1961
1962 If FOR_REAL is -1, this should not be done, because this call
1963 is just to see if a register can be found, not to find and install it.
1964
1965 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1966 puts an additional constraint on being able to use IN for OUT since
1967 IN must not appear elsewhere in the insn (it is assumed that IN itself
1968 is safe from the earlyclobber). */
1969
1970 static rtx
1971 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1972 enum machine_mode inmode, enum machine_mode outmode,
1973 reg_class_t rclass, int for_real, int earlyclobber)
1974 {
1975 rtx in = real_in;
1976 rtx out = real_out;
1977 int in_offset = 0;
1978 int out_offset = 0;
1979 rtx value = 0;
1980
1981 /* If operands exceed a word, we can't use either of them
1982 unless they have the same size. */
1983 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1984 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1985 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1986 return 0;
1987
1988 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1989 respectively refers to a hard register. */
1990
1991 /* Find the inside of any subregs. */
1992 while (GET_CODE (out) == SUBREG)
1993 {
1994 if (REG_P (SUBREG_REG (out))
1995 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1996 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1997 GET_MODE (SUBREG_REG (out)),
1998 SUBREG_BYTE (out),
1999 GET_MODE (out));
2000 out = SUBREG_REG (out);
2001 }
2002 while (GET_CODE (in) == SUBREG)
2003 {
2004 if (REG_P (SUBREG_REG (in))
2005 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2006 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2007 GET_MODE (SUBREG_REG (in)),
2008 SUBREG_BYTE (in),
2009 GET_MODE (in));
2010 in = SUBREG_REG (in);
2011 }
2012
2013 /* Narrow down the reg class, the same way push_reload will;
2014 otherwise we might find a dummy now, but push_reload won't. */
2015 {
2016 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2017 if (preferred_class != NO_REGS)
2018 rclass = (enum reg_class) preferred_class;
2019 }
2020
2021 /* See if OUT will do. */
2022 if (REG_P (out)
2023 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2024 {
2025 unsigned int regno = REGNO (out) + out_offset;
2026 unsigned int nwords = hard_regno_nregs[regno][outmode];
2027 rtx saved_rtx;
2028
2029 /* When we consider whether the insn uses OUT,
2030 ignore references within IN. They don't prevent us
2031 from copying IN into OUT, because those refs would
2032 move into the insn that reloads IN.
2033
2034 However, we only ignore IN in its role as this reload.
2035 If the insn uses IN elsewhere and it contains OUT,
2036 that counts. We can't be sure it's the "same" operand
2037 so it might not go through this reload.
2038
2039 We also need to avoid using OUT if it, or part of it, is a
2040 fixed register. Modifying such registers, even transiently,
2041 may have undefined effects on the machine, such as modifying
2042 the stack pointer. */
2043 saved_rtx = *inloc;
2044 *inloc = const0_rtx;
2045
2046 if (regno < FIRST_PSEUDO_REGISTER
2047 && HARD_REGNO_MODE_OK (regno, outmode)
2048 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2049 PATTERN (this_insn), outloc))
2050 {
2051 unsigned int i;
2052
2053 for (i = 0; i < nwords; i++)
2054 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2055 regno + i)
2056 || fixed_regs[regno + i])
2057 break;
2058
2059 if (i == nwords)
2060 {
2061 if (REG_P (real_out))
2062 value = real_out;
2063 else
2064 value = gen_rtx_REG (outmode, regno);
2065 }
2066 }
2067
2068 *inloc = saved_rtx;
2069 }
2070
2071 /* Consider using IN if OUT was not acceptable
2072 or if OUT dies in this insn (like the quotient in a divmod insn).
2073 We can't use IN unless it dies in this insn,
2074 which means we must know accurately which hard regs are live.
2075 Also, the result can't go in IN if IN is used within OUT,
2076 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2077 if (hard_regs_live_known
2078 && REG_P (in)
2079 && REGNO (in) < FIRST_PSEUDO_REGISTER
2080 && (value == 0
2081 || find_reg_note (this_insn, REG_UNUSED, real_out))
2082 && find_reg_note (this_insn, REG_DEAD, real_in)
2083 && !fixed_regs[REGNO (in)]
2084 && HARD_REGNO_MODE_OK (REGNO (in),
2085 /* The only case where out and real_out might
2086 have different modes is where real_out
2087 is a subreg, and in that case, out
2088 has a real mode. */
2089 (GET_MODE (out) != VOIDmode
2090 ? GET_MODE (out) : outmode))
2091 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2092 /* However only do this if we can be sure that this input
2093 operand doesn't correspond with an uninitialized pseudo.
2094 global can assign some hardreg to it that is the same as
2095 the one assigned to a different, also live pseudo (as it
2096 can ignore the conflict). We must never introduce writes
2097 to such hardregs, as they would clobber the other live
2098 pseudo. See PR 20973. */
2099 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2100 ORIGINAL_REGNO (in))
2101 /* Similarly, only do this if we can be sure that the death
2102 note is still valid. global can assign some hardreg to
2103 the pseudo referenced in the note and simultaneously a
2104 subword of this hardreg to a different, also live pseudo,
2105 because only another subword of the hardreg is actually
2106 used in the insn. This cannot happen if the pseudo has
2107 been assigned exactly one hardreg. See PR 33732. */
2108 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2109 {
2110 unsigned int regno = REGNO (in) + in_offset;
2111 unsigned int nwords = hard_regno_nregs[regno][inmode];
2112
2113 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2114 && ! hard_reg_set_here_p (regno, regno + nwords,
2115 PATTERN (this_insn))
2116 && (! earlyclobber
2117 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2118 PATTERN (this_insn), inloc)))
2119 {
2120 unsigned int i;
2121
2122 for (i = 0; i < nwords; i++)
2123 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2124 regno + i))
2125 break;
2126
2127 if (i == nwords)
2128 {
2129 /* If we were going to use OUT as the reload reg
2130 and changed our mind, it means OUT is a dummy that
2131 dies here. So don't bother copying value to it. */
2132 if (for_real >= 0 && value == real_out)
2133 rld[for_real].out = 0;
2134 if (REG_P (real_in))
2135 value = real_in;
2136 else
2137 value = gen_rtx_REG (inmode, regno);
2138 }
2139 }
2140 }
2141
2142 return value;
2143 }
2144 \f
2145 /* This page contains subroutines used mainly for determining
2146 whether the IN or an OUT of a reload can serve as the
2147 reload register. */
2148
2149 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2150
2151 int
2152 earlyclobber_operand_p (rtx x)
2153 {
2154 int i;
2155
2156 for (i = 0; i < n_earlyclobbers; i++)
2157 if (reload_earlyclobbers[i] == x)
2158 return 1;
2159
2160 return 0;
2161 }
2162
2163 /* Return 1 if expression X alters a hard reg in the range
2164 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2165 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2166 X should be the body of an instruction. */
2167
2168 static int
2169 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2170 {
2171 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2172 {
2173 rtx op0 = SET_DEST (x);
2174
2175 while (GET_CODE (op0) == SUBREG)
2176 op0 = SUBREG_REG (op0);
2177 if (REG_P (op0))
2178 {
2179 unsigned int r = REGNO (op0);
2180
2181 /* See if this reg overlaps range under consideration. */
2182 if (r < end_regno
2183 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2184 return 1;
2185 }
2186 }
2187 else if (GET_CODE (x) == PARALLEL)
2188 {
2189 int i = XVECLEN (x, 0) - 1;
2190
2191 for (; i >= 0; i--)
2192 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2193 return 1;
2194 }
2195
2196 return 0;
2197 }
2198
2199 /* Return 1 if ADDR is a valid memory address for mode MODE
2200 in address space AS, and check that each pseudo reg has the
2201 proper kind of hard reg. */
2202
2203 int
2204 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2205 rtx addr, addr_space_t as)
2206 {
2207 #ifdef GO_IF_LEGITIMATE_ADDRESS
2208 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2209 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2210 return 0;
2211
2212 win:
2213 return 1;
2214 #else
2215 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2216 #endif
2217 }
2218 \f
2219 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2220 if they are the same hard reg, and has special hacks for
2221 autoincrement and autodecrement.
2222 This is specifically intended for find_reloads to use
2223 in determining whether two operands match.
2224 X is the operand whose number is the lower of the two.
2225
2226 The value is 2 if Y contains a pre-increment that matches
2227 a non-incrementing address in X. */
2228
2229 /* ??? To be completely correct, we should arrange to pass
2230 for X the output operand and for Y the input operand.
2231 For now, we assume that the output operand has the lower number
2232 because that is natural in (SET output (... input ...)). */
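/* For illustration: on a typical !WORDS_BIG_ENDIAN target,
(reg:SI 1) and (subreg:SI (reg:DI 1) 0) are considered to match, and
comparing (mem:SI (reg:SI 2)) with (mem:SI (pre_inc:SI (reg:SI 2)))
yields 2, telling the caller that Y pre-increments an address that X
uses unincremented. */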
2233
2234 int
2235 operands_match_p (rtx x, rtx y)
2236 {
2237 int i;
2238 RTX_CODE code = GET_CODE (x);
2239 const char *fmt;
2240 int success_2;
2241
2242 if (x == y)
2243 return 1;
2244 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2245 && (REG_P (y) || (GET_CODE (y) == SUBREG
2246 && REG_P (SUBREG_REG (y)))))
2247 {
2248 int j;
2249
2250 if (code == SUBREG)
2251 {
2252 i = REGNO (SUBREG_REG (x));
2253 if (i >= FIRST_PSEUDO_REGISTER)
2254 goto slow;
2255 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2256 GET_MODE (SUBREG_REG (x)),
2257 SUBREG_BYTE (x),
2258 GET_MODE (x));
2259 }
2260 else
2261 i = REGNO (x);
2262
2263 if (GET_CODE (y) == SUBREG)
2264 {
2265 j = REGNO (SUBREG_REG (y));
2266 if (j >= FIRST_PSEUDO_REGISTER)
2267 goto slow;
2268 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2269 GET_MODE (SUBREG_REG (y)),
2270 SUBREG_BYTE (y),
2271 GET_MODE (y));
2272 }
2273 else
2274 j = REGNO (y);
2275
2276 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2277 multiple hard register group of scalar integer registers, so that
2278 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2279 register. */
2280 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2281 && SCALAR_INT_MODE_P (GET_MODE (x))
2282 && i < FIRST_PSEUDO_REGISTER)
2283 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2284 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2285 && SCALAR_INT_MODE_P (GET_MODE (y))
2286 && j < FIRST_PSEUDO_REGISTER)
2287 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2288
2289 return i == j;
2290 }
2291 /* If two operands must match, because they are really a single
2292 operand of an assembler insn, then two postincrements are invalid
2293 because the assembler insn would increment only once.
2294 On the other hand, a postincrement matches ordinary indexing
2295 if the postincrement is the output operand. */
2296 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2297 return operands_match_p (XEXP (x, 0), y);
2298 /* Two preincrements are invalid
2299 because the assembler insn would increment only once.
2300 On the other hand, a preincrement matches ordinary indexing
2301 if the preincrement is the input operand.
2302 In this case, return 2, since some callers need to do special
2303 things when this happens. */
2304 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2305 || GET_CODE (y) == PRE_MODIFY)
2306 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2307
2308 slow:
2309
2310 /* Now we have disposed of all the cases in which different rtx codes
2311 can match. */
2312 if (code != GET_CODE (y))
2313 return 0;
2314
2315 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2316 if (GET_MODE (x) != GET_MODE (y))
2317 return 0;
2318
2319 /* MEMs referring to different address space are not equivalent. */
2320 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2321 return 0;
2322
2323 switch (code)
2324 {
2325 CASE_CONST_UNIQUE:
2326 return 0;
2327
2328 case LABEL_REF:
2329 return XEXP (x, 0) == XEXP (y, 0);
2330 case SYMBOL_REF:
2331 return XSTR (x, 0) == XSTR (y, 0);
2332
2333 default:
2334 break;
2335 }
2336
2337 /* Compare the elements. If any pair of corresponding elements
2338 fails to match, return 0 for the whole thing. */
2339
2340 success_2 = 0;
2341 fmt = GET_RTX_FORMAT (code);
2342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2343 {
2344 int val, j;
2345 switch (fmt[i])
2346 {
2347 case 'w':
2348 if (XWINT (x, i) != XWINT (y, i))
2349 return 0;
2350 break;
2351
2352 case 'i':
2353 if (XINT (x, i) != XINT (y, i))
2354 return 0;
2355 break;
2356
2357 case 'e':
2358 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2359 if (val == 0)
2360 return 0;
2361 /* If any subexpression returns 2,
2362 we should return 2 if we are successful. */
2363 if (val == 2)
2364 success_2 = 1;
2365 break;
2366
2367 case '0':
2368 break;
2369
2370 case 'E':
2371 if (XVECLEN (x, i) != XVECLEN (y, i))
2372 return 0;
2373 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2374 {
2375 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2376 if (val == 0)
2377 return 0;
2378 if (val == 2)
2379 success_2 = 1;
2380 }
2381 break;
2382
2383 /* It is believed that rtx's at this level will never
2384 contain anything but integers and other rtx's,
2385 except for within LABEL_REFs and SYMBOL_REFs. */
2386 default:
2387 gcc_unreachable ();
2388 }
2389 }
2390 return 1 + success_2;
2391 }
2392 \f
2393 /* Describe the range of registers or memory referenced by X.
2394 If X is a register, set REG_FLAG and put the first register
2395 number into START and the last plus one into END.
2396 If X is a memory reference, put a base address into BASE
2397 and a range of integer offsets into START and END.
2398 If X is pushing on the stack, we can assume it causes no trouble,
2399 so we set the SAFE field. */
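/* Illustrative example, with fp standing for the frame pointer:
decompose ((mem:SI (plus:SI (reg:SI fp) (const_int -8)))) gives
base = (reg:SI fp), start = -8 and end = -4 (start plus the 4-byte
access size), with reg_flag and safe both clear. */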
2400
2401 static struct decomposition
2402 decompose (rtx x)
2403 {
2404 struct decomposition val;
2405 int all_const = 0;
2406
2407 memset (&val, 0, sizeof (val));
2408
2409 switch (GET_CODE (x))
2410 {
2411 case MEM:
2412 {
2413 rtx base = NULL_RTX, offset = 0;
2414 rtx addr = XEXP (x, 0);
2415
2416 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2417 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2418 {
2419 val.base = XEXP (addr, 0);
2420 val.start = -GET_MODE_SIZE (GET_MODE (x));
2421 val.end = GET_MODE_SIZE (GET_MODE (x));
2422 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2423 return val;
2424 }
2425
2426 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2427 {
2428 if (GET_CODE (XEXP (addr, 1)) == PLUS
2429 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2430 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2431 {
2432 val.base = XEXP (addr, 0);
2433 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2434 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2435 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2436 return val;
2437 }
2438 }
2439
2440 if (GET_CODE (addr) == CONST)
2441 {
2442 addr = XEXP (addr, 0);
2443 all_const = 1;
2444 }
2445 if (GET_CODE (addr) == PLUS)
2446 {
2447 if (CONSTANT_P (XEXP (addr, 0)))
2448 {
2449 base = XEXP (addr, 1);
2450 offset = XEXP (addr, 0);
2451 }
2452 else if (CONSTANT_P (XEXP (addr, 1)))
2453 {
2454 base = XEXP (addr, 0);
2455 offset = XEXP (addr, 1);
2456 }
2457 }
2458
2459 if (offset == 0)
2460 {
2461 base = addr;
2462 offset = const0_rtx;
2463 }
2464 if (GET_CODE (offset) == CONST)
2465 offset = XEXP (offset, 0);
2466 if (GET_CODE (offset) == PLUS)
2467 {
2468 if (CONST_INT_P (XEXP (offset, 0)))
2469 {
2470 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2471 offset = XEXP (offset, 0);
2472 }
2473 else if (CONST_INT_P (XEXP (offset, 1)))
2474 {
2475 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2476 offset = XEXP (offset, 1);
2477 }
2478 else
2479 {
2480 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2481 offset = const0_rtx;
2482 }
2483 }
2484 else if (!CONST_INT_P (offset))
2485 {
2486 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2487 offset = const0_rtx;
2488 }
2489
2490 if (all_const && GET_CODE (base) == PLUS)
2491 base = gen_rtx_CONST (GET_MODE (base), base);
2492
2493 gcc_assert (CONST_INT_P (offset));
2494
2495 val.start = INTVAL (offset);
2496 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2497 val.base = base;
2498 }
2499 break;
2500
2501 case REG:
2502 val.reg_flag = 1;
2503 val.start = true_regnum (x);
2504 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2505 {
2506 /* A pseudo with no hard reg. */
2507 val.start = REGNO (x);
2508 val.end = val.start + 1;
2509 }
2510 else
2511 /* A hard reg. */
2512 val.end = end_hard_regno (GET_MODE (x), val.start);
2513 break;
2514
2515 case SUBREG:
2516 if (!REG_P (SUBREG_REG (x)))
2517 /* This could be more precise, but it's good enough. */
2518 return decompose (SUBREG_REG (x));
2519 val.reg_flag = 1;
2520 val.start = true_regnum (x);
2521 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2522 return decompose (SUBREG_REG (x));
2523 else
2524 /* A hard reg. */
2525 val.end = val.start + subreg_nregs (x);
2526 break;
2527
2528 case SCRATCH:
2529 /* This hasn't been assigned yet, so it can't conflict yet. */
2530 val.safe = 1;
2531 break;
2532
2533 default:
2534 gcc_assert (CONSTANT_P (x));
2535 val.safe = 1;
2536 break;
2537 }
2538 return val;
2539 }
2540
2541 /* Return 1 if altering Y will not modify the value of X.
2542 Y is also described by YDATA, which should be decompose (Y). */
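/* For instance (illustrative): two 4-byte stack slots at addresses
(plus fp -8) and (plus fp -16) decompose to the offset ranges
[-8, -4) and [-16, -12) from the same base, so neither store can
disturb the other and immune_p returns 1. */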
2543
2544 static int
2545 immune_p (rtx x, rtx y, struct decomposition ydata)
2546 {
2547 struct decomposition xdata;
2548
2549 if (ydata.reg_flag)
2550 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2551 if (ydata.safe)
2552 return 1;
2553
2554 gcc_assert (MEM_P (y));
2555 /* If Y is memory and X is not, Y can't affect X. */
2556 if (!MEM_P (x))
2557 return 1;
2558
2559 xdata = decompose (x);
2560
2561 if (! rtx_equal_p (xdata.base, ydata.base))
2562 {
2563 /* If bases are distinct symbolic constants, there is no overlap. */
2564 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2565 return 1;
2566 /* Constants and stack slots never overlap. */
2567 if (CONSTANT_P (xdata.base)
2568 && (ydata.base == frame_pointer_rtx
2569 || ydata.base == hard_frame_pointer_rtx
2570 || ydata.base == stack_pointer_rtx))
2571 return 1;
2572 if (CONSTANT_P (ydata.base)
2573 && (xdata.base == frame_pointer_rtx
2574 || xdata.base == hard_frame_pointer_rtx
2575 || xdata.base == stack_pointer_rtx))
2576 return 1;
2577 /* If either base is variable, we don't know anything. */
2578 return 0;
2579 }
2580
2581 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2582 }
2583
2584 /* Similar, but calls decompose. */
2585
2586 int
2587 safe_from_earlyclobber (rtx op, rtx clobber)
2588 {
2589 struct decomposition early_data;
2590
2591 early_data = decompose (clobber);
2592 return immune_p (op, clobber, early_data);
2593 }
2594 \f
2595 /* Main entry point of this file: search the body of INSN
2596 for values that need reloading and record them with push_reload.
2597 REPLACE nonzero means record also where the values occur
2598 so that subst_reloads can be used.
2599
2600 IND_LEVELS says how many levels of indirection are supported by this
2601 machine; a value of zero means that a memory reference is not a valid
2602 memory address.
2603
2604 LIVE_KNOWN says we have valid information about which hard
2605 regs are live at each point in the program; this is true when
2606 we are called from global_alloc but false when stupid register
2607 allocation has been done.
2608
2609 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2610 which is nonnegative if the reg has been commandeered for reloading into.
2611 It is copied into STATIC_RELOAD_REG_P and referenced from there
2612 by various subroutines.
2613
2614 Return TRUE if some operands need to be changed, because of swapping
2615 commutative operands, reg_equiv_address substitution, or whatever. */
2616
2617 int
2618 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2619 short *reload_reg_p)
2620 {
2621 int insn_code_number;
2622 int i, j;
2623 int noperands;
2624 /* These start out as the constraints for the insn
2625 and they are chewed up as we consider alternatives. */
2626 const char *constraints[MAX_RECOG_OPERANDS];
2627 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2628 a register. */
2629 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2630 char pref_or_nothing[MAX_RECOG_OPERANDS];
2631 /* Nonzero for a MEM operand whose entire address needs a reload.
2632 May be -1 to indicate the entire address may or may not need a reload. */
2633 int address_reloaded[MAX_RECOG_OPERANDS];
2634 /* Nonzero for an address operand that needs to be completely reloaded.
2635 May be -1 to indicate the entire operand may or may not need a reload. */
2636 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2637 /* Value of enum reload_type to use for operand. */
2638 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2639 /* Value of enum reload_type to use within address of operand. */
2640 enum reload_type address_type[MAX_RECOG_OPERANDS];
2641 /* Save the usage of each operand. */
2642 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2643 int no_input_reloads = 0, no_output_reloads = 0;
2644 int n_alternatives;
2645 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2646 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2647 char this_alternative_win[MAX_RECOG_OPERANDS];
2648 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2649 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2650 int this_alternative_matches[MAX_RECOG_OPERANDS];
2651 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2652 int this_alternative_number;
2653 int goal_alternative_number = 0;
2654 int operand_reloadnum[MAX_RECOG_OPERANDS];
2655 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2656 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2657 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2658 char goal_alternative_win[MAX_RECOG_OPERANDS];
2659 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2660 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2661 int goal_alternative_swapped;
2662 int best;
2663 int commutative;
2664 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2665 rtx substed_operand[MAX_RECOG_OPERANDS];
2666 rtx body = PATTERN (insn);
2667 rtx set = single_set (insn);
2668 int goal_earlyclobber = 0, this_earlyclobber;
2669 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2670 int retval = 0;
2671
2672 this_insn = insn;
2673 n_reloads = 0;
2674 n_replacements = 0;
2675 n_earlyclobbers = 0;
2676 replace_reloads = replace;
2677 hard_regs_live_known = live_known;
2678 static_reload_reg_p = reload_reg_p;
2679
2680 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2681 neither are insns that SET cc0. Insns that use CC0 are not allowed
2682 to have any input reloads. */
2683 if (JUMP_P (insn) || CALL_P (insn))
2684 no_output_reloads = 1;
2685
2686 #ifdef HAVE_cc0
2687 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2688 no_input_reloads = 1;
2689 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2690 no_output_reloads = 1;
2691 #endif
2692
2693 #ifdef SECONDARY_MEMORY_NEEDED
2694 /* The eliminated forms of any secondary memory locations are per-insn, so
2695 clear them out here. */
2696
2697 if (secondary_memlocs_elim_used)
2698 {
2699 memset (secondary_memlocs_elim, 0,
2700 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2701 secondary_memlocs_elim_used = 0;
2702 }
2703 #endif
2704
2705 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2706 is cheap to move between them. If it is not, there may not be an insn
2707 to do the copy, so we may need a reload. */
2708 if (GET_CODE (body) == SET
2709 && REG_P (SET_DEST (body))
2710 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2711 && REG_P (SET_SRC (body))
2712 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2713 && register_move_cost (GET_MODE (SET_SRC (body)),
2714 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2715 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2716 return 0;
2717
2718 extract_insn (insn);
2719
2720 noperands = reload_n_operands = recog_data.n_operands;
2721 n_alternatives = recog_data.n_alternatives;
2722
2723 /* Just return "no reloads" if insn has no operands with constraints. */
2724 if (noperands == 0 || n_alternatives == 0)
2725 return 0;
2726
2727 insn_code_number = INSN_CODE (insn);
2728 this_insn_is_asm = insn_code_number < 0;
2729
2730 memcpy (operand_mode, recog_data.operand_mode,
2731 noperands * sizeof (enum machine_mode));
2732 memcpy (constraints, recog_data.constraints,
2733 noperands * sizeof (const char *));
2734
2735 commutative = -1;
2736
2737 /* If we will need to know, later, whether some pair of operands
2738 are the same, we must compare them now and save the result.
2739 Reloading the base and index registers will clobber them
2740 and afterward they will fail to match. */
2741
2742 for (i = 0; i < noperands; i++)
2743 {
2744 const char *p;
2745 int c;
2746 char *end;
2747
2748 substed_operand[i] = recog_data.operand[i];
2749 p = constraints[i];
2750
2751 modified[i] = RELOAD_READ;
2752
2753 /* Scan this operand's constraint to see if it is an output operand,
2754 an in-out operand, is commutative, or should match another. */
2755
2756 while ((c = *p))
2757 {
2758 p += CONSTRAINT_LEN (c, p);
2759 switch (c)
2760 {
2761 case '=':
2762 modified[i] = RELOAD_WRITE;
2763 break;
2764 case '+':
2765 modified[i] = RELOAD_READ_WRITE;
2766 break;
2767 case '%':
2768 {
2769 /* The last operand should not be marked commutative. */
2770 gcc_assert (i != noperands - 1);
2771
2772 /* We currently only support one commutative pair of
2773 operands. Some existing asm code currently uses more
2774 than one pair. Previously, that would usually work,
2775 but sometimes it would crash the compiler. We
2776 continue supporting that case as well as we can by
2777 silently ignoring all but the first pair. In the
2778 future we may handle it correctly. */
2779 if (commutative < 0)
2780 commutative = i;
2781 else
2782 gcc_assert (this_insn_is_asm);
2783 }
2784 break;
2785 /* Use of ISDIGIT is tempting here, but it may get expensive because
2786 of locale support we don't want. */
2787 case '0': case '1': case '2': case '3': case '4':
2788 case '5': case '6': case '7': case '8': case '9':
2789 {
2790 c = strtoul (p - 1, &end, 10);
2791 p = end;
2792
2793 operands_match[c][i]
2794 = operands_match_p (recog_data.operand[c],
2795 recog_data.operand[i]);
2796
2797 /* An operand may not match itself. */
2798 gcc_assert (c != i);
2799
2800 /* If C can be commuted with C+1, and C might need to match I,
2801 then C+1 might also need to match I. */
2802 if (commutative >= 0)
2803 {
2804 if (c == commutative || c == commutative + 1)
2805 {
2806 int other = c + (c == commutative ? 1 : -1);
2807 operands_match[other][i]
2808 = operands_match_p (recog_data.operand[other],
2809 recog_data.operand[i]);
2810 }
2811 if (i == commutative || i == commutative + 1)
2812 {
2813 int other = i + (i == commutative ? 1 : -1);
2814 operands_match[c][other]
2815 = operands_match_p (recog_data.operand[c],
2816 recog_data.operand[other]);
2817 }
2818 /* Note that C is supposed to be less than I.
2819 No need to consider altering both C and I because in
2820 that case we would alter one into the other. */
2821 }
2822 }
2823 }
2824 }
2825 }
2826
2827 /* Examine each operand that is a memory reference or memory address
2828 and reload parts of the addresses into index registers.
2829 Also here any references to pseudo regs that didn't get hard regs
2830 but are equivalent to constants get replaced in the insn itself
2831 with those constants. Nobody will ever see them again.
2832
2833 Finally, set up the preferred classes of each operand. */
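/* As a hypothetical example, a memory operand such as
(mem:SI (plus:SI (reg:SI 117) (const_int 12))) whose pseudo 117 did
not get a hard register has reloads pushed for its address here, and
a pseudo operand known to equal (const_int 0) is rewritten as that
constant in the insn itself. */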
2834
2835 for (i = 0; i < noperands; i++)
2836 {
2837 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2838
2839 address_reloaded[i] = 0;
2840 address_operand_reloaded[i] = 0;
2841 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2842 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2843 : RELOAD_OTHER);
2844 address_type[i]
2845 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2846 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2847 : RELOAD_OTHER);
2848
2849 if (*constraints[i] == 0)
2850 /* Ignore things like match_operator operands. */
2851 ;
2852 else if (constraints[i][0] == 'p'
2853 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2854 {
2855 address_operand_reloaded[i]
2856 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2857 recog_data.operand[i],
2858 recog_data.operand_loc[i],
2859 i, operand_type[i], ind_levels, insn);
2860
2861 /* If we now have a simple operand where we used to have a
2862 PLUS or MULT, re-recognize and try again. */
2863 if ((OBJECT_P (*recog_data.operand_loc[i])
2864 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2865 && (GET_CODE (recog_data.operand[i]) == MULT
2866 || GET_CODE (recog_data.operand[i]) == PLUS))
2867 {
2868 INSN_CODE (insn) = -1;
2869 retval = find_reloads (insn, replace, ind_levels, live_known,
2870 reload_reg_p);
2871 return retval;
2872 }
2873
2874 recog_data.operand[i] = *recog_data.operand_loc[i];
2875 substed_operand[i] = recog_data.operand[i];
2876
2877 /* Address operands are reloaded in their existing mode,
2878 no matter what is specified in the machine description. */
2879 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2880
2881 /* If the address is a single CONST_INT, pick the address mode
2882 instead; otherwise we will later not know in which mode
2883 the reload should be performed. */
2884 if (operand_mode[i] == VOIDmode)
2885 operand_mode[i] = Pmode;
2886
2887 }
2888 else if (code == MEM)
2889 {
2890 address_reloaded[i]
2891 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2892 recog_data.operand_loc[i],
2893 XEXP (recog_data.operand[i], 0),
2894 &XEXP (recog_data.operand[i], 0),
2895 i, address_type[i], ind_levels, insn);
2896 recog_data.operand[i] = *recog_data.operand_loc[i];
2897 substed_operand[i] = recog_data.operand[i];
2898 }
2899 else if (code == SUBREG)
2900 {
2901 rtx reg = SUBREG_REG (recog_data.operand[i]);
2902 rtx op
2903 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2904 ind_levels,
2905 set != 0
2906 && &SET_DEST (set) == recog_data.operand_loc[i],
2907 insn,
2908 &address_reloaded[i]);
2909
2910 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2911 that didn't get a hard register, emit a USE with a REG_EQUAL
2912 note in front so that we might inherit a previous, possibly
2913 wider reload. */
2914
2915 if (replace
2916 && MEM_P (op)
2917 && REG_P (reg)
2918 && (GET_MODE_SIZE (GET_MODE (reg))
2919 >= GET_MODE_SIZE (GET_MODE (op)))
2920 && reg_equiv_constant (REGNO (reg)) == 0)
2921 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2922 insn),
2923 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2924
2925 substed_operand[i] = recog_data.operand[i] = op;
2926 }
2927 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2928 /* We can get a PLUS as an "operand" as a result of register
2929 elimination. See eliminate_regs and gen_reload. We handle
2930 a unary operator by reloading the operand. */
2931 substed_operand[i] = recog_data.operand[i]
2932 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2933 ind_levels, 0, insn,
2934 &address_reloaded[i]);
2935 else if (code == REG)
2936 {
2937 /* This is equivalent to calling find_reloads_toplev.
2938 The code is duplicated for speed.
2939 When we find a pseudo always equivalent to a constant,
2940 we replace it by the constant. We must be sure, however,
2941 that we don't try to replace it in the insn in which it
2942 is being set. */
2943 int regno = REGNO (recog_data.operand[i]);
2944 if (reg_equiv_constant (regno) != 0
2945 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2946 {
2947 /* Record the existing mode so that the check whether constants are
2948 allowed will work when operand_mode isn't specified. */
2949
2950 if (operand_mode[i] == VOIDmode)
2951 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2952
2953 substed_operand[i] = recog_data.operand[i]
2954 = reg_equiv_constant (regno);
2955 }
2956 if (reg_equiv_memory_loc (regno) != 0
2957 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2958 /* We need not give a valid is_set_dest argument since the case
2959 of a constant equivalence was checked above. */
2960 substed_operand[i] = recog_data.operand[i]
2961 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2962 ind_levels, 0, insn,
2963 &address_reloaded[i]);
2964 }
2965 /* If the operand is still a register (we didn't replace it with an
2966 equivalent), get the preferred class to reload it into. */
2967 code = GET_CODE (recog_data.operand[i]);
2968 preferred_class[i]
2969 = ((code == REG && REGNO (recog_data.operand[i])
2970 >= FIRST_PSEUDO_REGISTER)
2971 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2972 : NO_REGS);
2973 pref_or_nothing[i]
2974 = (code == REG
2975 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2976 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2977 }
2978
2979 /* If this is simply a copy from operand 1 to operand 0, merge the
2980 preferred classes for the operands. */
2981 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2982 && recog_data.operand[1] == SET_SRC (set))
2983 {
2984 preferred_class[0] = preferred_class[1]
2985 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2986 pref_or_nothing[0] |= pref_or_nothing[1];
2987 pref_or_nothing[1] |= pref_or_nothing[0];
2988 }
2989
2990 /* Now see what we need for pseudo-regs that didn't get hard regs
2991 or got the wrong kind of hard reg. For this, we must consider
2992 all the operands together against the register constraints. */
2993
2994 best = MAX_RECOG_OPERANDS * 2 + 600;
2995
2996 goal_alternative_swapped = 0;
2997
2998 /* The constraints are made of several alternatives.
2999 Each operand's constraint looks like foo,bar,... with commas
3000 separating the alternatives. The first alternatives for all
3001 operands go together, the second alternatives go together, etc.
3002
3003 First loop over alternatives. */
3004
3005 for (this_alternative_number = 0;
3006 this_alternative_number < n_alternatives;
3007 this_alternative_number++)
3008 {
3009 int swapped;
3010
3011 if (!recog_data.alternative_enabled_p[this_alternative_number])
3012 {
3013 int i;
3014
3015 for (i = 0; i < recog_data.n_operands; i++)
3016 constraints[i] = skip_alternative (constraints[i]);
3017
3018 continue;
3019 }
3020
3021 /* If insn is commutative (it's safe to exchange a certain pair
3022 of operands) then we need to try each alternative twice, the
3023 second time matching those two operands as if we had
3024 exchanged them. To do this, really exchange them in
3025 operands. */
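/* E.g. (hypothetically), if operand 1 already sits in the register
class demanded of operand 2 and vice versa, the swapped pass can
match the alternative without pushing any reload. */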
3026 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3027 {
3028 /* Loop over operands for one constraint alternative. */
3029 /* LOSERS counts those that don't fit this alternative
3030 and would require loading. */
3031 int losers = 0;
3032 /* BAD is set to 1 if some operand can't fit this alternative
3033 even after reloading. */
3034 int bad = 0;
3035 /* REJECT is a count of how undesirable this alternative says it is
3036 if any reloading is required. If the alternative matches exactly
3037 then REJECT is ignored, but otherwise it gets this much
3038 counted against it in addition to the reloading needed. Each
3039 ? counts three times here since we want the disparagement caused by
3040 a bad register class to only count 1/3 as much. */
3041 int reject = 0;
3042
3043 if (swapped)
3044 {
3045 enum reg_class tclass;
3046 int t;
3047
3048 recog_data.operand[commutative] = substed_operand[commutative + 1];
3049 recog_data.operand[commutative + 1] = substed_operand[commutative];
3050 /* Swap the duplicates too. */
3051 for (i = 0; i < recog_data.n_dups; i++)
3052 if (recog_data.dup_num[i] == commutative
3053 || recog_data.dup_num[i] == commutative + 1)
3054 *recog_data.dup_loc[i]
3055 = recog_data.operand[(int) recog_data.dup_num[i]];
3056
3057 tclass = preferred_class[commutative];
3058 preferred_class[commutative] = preferred_class[commutative + 1];
3059 preferred_class[commutative + 1] = tclass;
3060
3061 t = pref_or_nothing[commutative];
3062 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3063 pref_or_nothing[commutative + 1] = t;
3064
3065 t = address_reloaded[commutative];
3066 address_reloaded[commutative] = address_reloaded[commutative + 1];
3067 address_reloaded[commutative + 1] = t;
3068 }
3069
3070 this_earlyclobber = 0;
3071
3072 for (i = 0; i < noperands; i++)
3073 {
3074 const char *p = constraints[i];
3075 char *end;
3076 int len;
3077 int win = 0;
3078 int did_match = 0;
3079 /* 0 => this operand can be reloaded somehow for this alternative. */
3080 int badop = 1;
3081 /* 0 => this operand can be reloaded if the alternative allows regs. */
3082 int winreg = 0;
3083 int c;
3084 int m;
3085 rtx operand = recog_data.operand[i];
3086 int offset = 0;
3087 /* Nonzero means this is a MEM that must be reloaded into a reg
3088 regardless of what the constraint says. */
3089 int force_reload = 0;
3090 int offmemok = 0;
3091 /* Nonzero if a constant forced into memory would be OK for this
3092 operand. */
3093 int constmemok = 0;
3094 int earlyclobber = 0;
3095
3096 /* If the predicate accepts a unary operator, it means that
3097 we need to reload the operand, but do not do this for
3098 match_operator and friends. */
3099 if (UNARY_P (operand) && *p != 0)
3100 operand = XEXP (operand, 0);
3101
3102 /* If the operand is a SUBREG, extract
3103 the REG or MEM (or maybe even a constant) within.
3104 (Constants can occur as a result of reg_equiv_constant.) */
3105
3106 while (GET_CODE (operand) == SUBREG)
3107 {
3108 /* Offset only matters when operand is a REG and
3109 it is a hard reg. This is because it is passed
3110 to reg_fits_class_p if it is a REG, and that
3111 function returns 0 for all pseudos. */
3112 if (REG_P (SUBREG_REG (operand))
3113 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3114 {
3115 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3116 GET_MODE (SUBREG_REG (operand)),
3117 SUBREG_BYTE (operand),
3118 GET_MODE (operand)) < 0)
3119 force_reload = 1;
3120 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3121 GET_MODE (SUBREG_REG (operand)),
3122 SUBREG_BYTE (operand),
3123 GET_MODE (operand));
3124 }
3125 operand = SUBREG_REG (operand);
3126 /* Force reload if this is a constant or PLUS or if there may
3127 be a problem accessing OPERAND in the outer mode. */
3128 if (CONSTANT_P (operand)
3129 || GET_CODE (operand) == PLUS
3130 /* We must force a reload of paradoxical SUBREGs
3131 of a MEM because the alignment of the inner value
3132 may not be enough to do the outer reference. On
3133 big-endian machines, it may also reference outside
3134 the object.
3135
3136 On machines that extend byte operations and we have a
3137 SUBREG where both the inner and outer modes are no wider
3138 than a word and the inner mode is narrower, is integral,
3139 and gets extended when loaded from memory, combine.c has
3140 made assumptions about how the machine behaves when such a
3141 register is accessed. If the data is, in fact, in memory we
3142 must always load using the size assumed to be in the
3143 register and let the insn do the different-sized
3144 accesses.
3145
3146 This is doubly true if WORD_REGISTER_OPERATIONS. In
3147 this case eliminate_regs has left non-paradoxical
3148 subregs for push_reload to see. Make sure it does
3149 by forcing the reload.
3150
3151 ??? When is it right at this stage to have a subreg
3152 of a mem that is _not_ to be handled specially? IMO
3153 those should have been reduced to just a mem. */
3154 || ((MEM_P (operand)
3155 || (REG_P (operand)
3156 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3157 #ifndef WORD_REGISTER_OPERATIONS
3158 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3159 < BIGGEST_ALIGNMENT)
3160 && (GET_MODE_SIZE (operand_mode[i])
3161 > GET_MODE_SIZE (GET_MODE (operand))))
3162 || BYTES_BIG_ENDIAN
3163 #ifdef LOAD_EXTEND_OP
3164 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3165 && (GET_MODE_SIZE (GET_MODE (operand))
3166 <= UNITS_PER_WORD)
3167 && (GET_MODE_SIZE (operand_mode[i])
3168 > GET_MODE_SIZE (GET_MODE (operand)))
3169 && INTEGRAL_MODE_P (GET_MODE (operand))
3170 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3171 #endif
3172 )
3173 #endif
3174 )
3175 )
3176 force_reload = 1;
3177 }
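/* For example, a paradoxical (subreg:SI (mem:QI ...) 0) is typically
   forced into a reload register by the test above, since the QImode
   memory reference may not be aligned enough for an SImode access.  */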
3178
3179 this_alternative[i] = NO_REGS;
3180 this_alternative_win[i] = 0;
3181 this_alternative_match_win[i] = 0;
3182 this_alternative_offmemok[i] = 0;
3183 this_alternative_earlyclobber[i] = 0;
3184 this_alternative_matches[i] = -1;
3185
3186 /* An empty constraint or empty alternative
3187 allows anything which matched the pattern. */
3188 if (*p == 0 || *p == ',')
3189 win = 1, badop = 0;
3190
3191 /* Scan this alternative's specs for this operand;
3192 set WIN if the operand fits any letter in this alternative.
3193 Otherwise, clear BADOP if this operand could
3194 fit some letter after reloads,
3195 or set WINREG if this operand could fit after reloads
3196 provided the constraint allows some registers. */
3197
3198 do
3199 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3200 {
3201 case '\0':
3202 len = 0;
3203 break;
3204 case ',':
3205 c = '\0';
3206 break;
3207
3208 case '=': case '+': case '*':
3209 break;
3210
3211 case '%':
3212 /* We only support one commutative marker, the first
3213 one. We already set commutative above. */
3214 break;
3215
3216 case '?':
3217 reject += 6;
3218 break;
3219
3220 case '!':
3221 reject = 600;
3222 break;
3223
3224 case '#':
3225 /* Ignore rest of this alternative as far as
3226 reloading is concerned. */
3227 do
3228 p++;
3229 while (*p && *p != ',');
3230 len = 0;
3231 break;
3232
3233 case '0': case '1': case '2': case '3': case '4':
3234 case '5': case '6': case '7': case '8': case '9':
3235 m = strtoul (p, &end, 10);
3236 p = end;
3237 len = 0;
3238
3239 this_alternative_matches[i] = m;
3240 /* We are supposed to match a previous operand.
3241 If we do, we win if that one did.
3242 If we do not, count both of the operands as losers.
3243 (This is too conservative, since most of the time
3244 only a single reload insn will be needed to make
3245 the two operands win. As a result, this alternative
3246 may be rejected when it is actually desirable.) */
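/* As a concrete illustration of the index juggling below: with
   COMMUTATIVE == 0 and the swapped order being tried, a "0" constraint
   on operand 2 (M == 0, I == 2) consults operands_match[1][2]
   instead of operands_match[0][2].  */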
3247 if ((swapped && (m != commutative || i != commutative + 1))
3248 /* If we are matching as if two operands were swapped,
3249 also pretend that operands_match had been computed
3250 with swapped.
3251 But if I is the second of those and M is the first,
3252 don't exchange them, because operands_match is valid
3253 only on one side of its diagonal. */
3254 ? (operands_match
3255 [(m == commutative || m == commutative + 1)
3256 ? 2 * commutative + 1 - m : m]
3257 [(i == commutative || i == commutative + 1)
3258 ? 2 * commutative + 1 - i : i])
3259 : operands_match[m][i])
3260 {
3261 /* If we are matching a non-offsettable address where an
3262 offsettable address was expected, then we must reject
3263 this combination, because we can't reload it. */
3264 if (this_alternative_offmemok[m]
3265 && MEM_P (recog_data.operand[m])
3266 && this_alternative[m] == NO_REGS
3267 && ! this_alternative_win[m])
3268 bad = 1;
3269
3270 did_match = this_alternative_win[m];
3271 }
3272 else
3273 {
3274 /* Operands don't match. */
3275 rtx value;
3276 int loc1, loc2;
3277 /* Retroactively mark the operand we had to match
3278 as a loser, if it wasn't already. */
3279 if (this_alternative_win[m])
3280 losers++;
3281 this_alternative_win[m] = 0;
3282 if (this_alternative[m] == NO_REGS)
3283 bad = 1;
3284 /* But count the pair only once in the total badness of
3285 this alternative, if the pair can be a dummy reload.
3286 The pointers in operand_loc are not swapped; swap
3287 them by hand if necessary. */
3288 if (swapped && i == commutative)
3289 loc1 = commutative + 1;
3290 else if (swapped && i == commutative + 1)
3291 loc1 = commutative;
3292 else
3293 loc1 = i;
3294 if (swapped && m == commutative)
3295 loc2 = commutative + 1;
3296 else if (swapped && m == commutative + 1)
3297 loc2 = commutative;
3298 else
3299 loc2 = m;
3300 value
3301 = find_dummy_reload (recog_data.operand[i],
3302 recog_data.operand[m],
3303 recog_data.operand_loc[loc1],
3304 recog_data.operand_loc[loc2],
3305 operand_mode[i], operand_mode[m],
3306 this_alternative[m], -1,
3307 this_alternative_earlyclobber[m]);
3308
3309 if (value != 0)
3310 losers--;
3311 }
3312 /* This can be fixed with reloads if the operand
3313 we are supposed to match can be fixed with reloads. */
3314 badop = 0;
3315 this_alternative[i] = this_alternative[m];
3316
3317 /* If we have to reload this operand and some previous
3318 operand also had to match the same thing as this
3319 operand, we don't know how to do that. So reject this
3320 alternative. */
3321 if (! did_match || force_reload)
3322 for (j = 0; j < i; j++)
3323 if (this_alternative_matches[j]
3324 == this_alternative_matches[i])
3325 badop = 1;
3326 break;
3327
3328 case 'p':
3329 /* All necessary reloads for an address_operand
3330 were handled in find_reloads_address. */
3331 this_alternative[i]
3332 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3333 ADDRESS, SCRATCH);
3334 win = 1;
3335 badop = 0;
3336 break;
3337
3338 case TARGET_MEM_CONSTRAINT:
3339 if (force_reload)
3340 break;
3341 if (MEM_P (operand)
3342 || (REG_P (operand)
3343 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3344 && reg_renumber[REGNO (operand)] < 0))
3345 win = 1;
3346 if (CONST_POOL_OK_P (operand_mode[i], operand))
3347 badop = 0;
3348 constmemok = 1;
3349 break;
3350
3351 case '<':
3352 if (MEM_P (operand)
3353 && ! address_reloaded[i]
3354 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3355 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3356 win = 1;
3357 break;
3358
3359 case '>':
3360 if (MEM_P (operand)
3361 && ! address_reloaded[i]
3362 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3363 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3364 win = 1;
3365 break;
3366
3367 /* Memory operand whose address is not offsettable. */
3368 case 'V':
3369 if (force_reload)
3370 break;
3371 if (MEM_P (operand)
3372 && ! (ind_levels ? offsettable_memref_p (operand)
3373 : offsettable_nonstrict_memref_p (operand))
3374 /* Certain mem addresses will become offsettable
3375 after they themselves are reloaded. This is important;
3376 we don't want our own handling of unoffsettables
3377 to override the handling of reg_equiv_address. */
3378 && !(REG_P (XEXP (operand, 0))
3379 && (ind_levels == 0
3380 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3381 win = 1;
3382 break;
3383
3384 /* Memory operand whose address is offsettable. */
3385 case 'o':
3386 if (force_reload)
3387 break;
3388 if ((MEM_P (operand)
3389 /* If IND_LEVELS, find_reloads_address won't reload a
3390 pseudo that didn't get a hard reg, so we have to
3391 reject that case. */
3392 && ((ind_levels ? offsettable_memref_p (operand)
3393 : offsettable_nonstrict_memref_p (operand))
3394 /* A reloaded address is offsettable because it is now
3395 just a simple register indirect. */
3396 || address_reloaded[i] == 1))
3397 || (REG_P (operand)
3398 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3399 && reg_renumber[REGNO (operand)] < 0
3400 /* If reg_equiv_address is nonzero, we will be
3401 loading it into a register; hence it will be
3402 offsettable, but we cannot say that reg_equiv_mem
3403 is offsettable without checking. */
3404 && ((reg_equiv_mem (REGNO (operand)) != 0
3405 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3406 || (reg_equiv_address (REGNO (operand)) != 0))))
3407 win = 1;
3408 if (CONST_POOL_OK_P (operand_mode[i], operand)
3409 || MEM_P (operand))
3410 badop = 0;
3411 constmemok = 1;
3412 offmemok = 1;
3413 break;
3414
3415 case '&':
3416 /* Output operand that is stored before the need for the
3417 input operands (and their index registers) is over. */
3418 earlyclobber = 1, this_earlyclobber = 1;
3419 break;
3420
3421 case 'E':
3422 case 'F':
3423 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3424 || (GET_CODE (operand) == CONST_VECTOR
3425 && (GET_MODE_CLASS (GET_MODE (operand))
3426 == MODE_VECTOR_FLOAT)))
3427 win = 1;
3428 break;
3429
3430 case 'G':
3431 case 'H':
3432 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3433 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3434 win = 1;
3435 break;
3436
3437 case 's':
3438 if (CONST_SCALAR_INT_P (operand))
3439 break;
3440 case 'i':
3441 if (CONSTANT_P (operand)
3442 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3443 win = 1;
3444 break;
3445
3446 case 'n':
3447 if (CONST_SCALAR_INT_P (operand))
3448 win = 1;
3449 break;
3450
3451 case 'I':
3452 case 'J':
3453 case 'K':
3454 case 'L':
3455 case 'M':
3456 case 'N':
3457 case 'O':
3458 case 'P':
3459 if (CONST_INT_P (operand)
3460 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3461 win = 1;
3462 break;
3463
3464 case 'X':
3465 force_reload = 0;
3466 win = 1;
3467 break;
3468
3469 case 'g':
3470 if (! force_reload
3471 /* A PLUS is never a valid operand, but reload can make
3472 it from a register when eliminating registers. */
3473 && GET_CODE (operand) != PLUS
3474 /* A SCRATCH is not a valid operand. */
3475 && GET_CODE (operand) != SCRATCH
3476 && (! CONSTANT_P (operand)
3477 || ! flag_pic
3478 || LEGITIMATE_PIC_OPERAND_P (operand))
3479 && (GENERAL_REGS == ALL_REGS
3480 || !REG_P (operand)
3481 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3482 && reg_renumber[REGNO (operand)] < 0)))
3483 win = 1;
3484 /* Drop through into 'r' case. */
3485
3486 case 'r':
3487 this_alternative[i]
3488 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3489 goto reg;
3490
3491 default:
3492 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3493 {
3494 #ifdef EXTRA_CONSTRAINT_STR
3495 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3496 {
3497 if (force_reload)
3498 break;
3499 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3500 win = 1;
3501 /* If the address was already reloaded,
3502 we win as well. */
3503 else if (MEM_P (operand)
3504 && address_reloaded[i] == 1)
3505 win = 1;
3506 /* Likewise if the address will be reloaded because
3507 reg_equiv_address is nonzero. For reg_equiv_mem
3508 we have to check. */
3509 else if (REG_P (operand)
3510 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3511 && reg_renumber[REGNO (operand)] < 0
3512 && ((reg_equiv_mem (REGNO (operand)) != 0
3513 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3514 || (reg_equiv_address (REGNO (operand)) != 0)))
3515 win = 1;
3516
3517 /* If we didn't already win, we can reload
3518 constants via force_const_mem, and other
3519 MEMs by reloading the address like for 'o'. */
3520 if (CONST_POOL_OK_P (operand_mode[i], operand)
3521 || MEM_P (operand))
3522 badop = 0;
3523 constmemok = 1;
3524 offmemok = 1;
3525 break;
3526 }
3527 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3528 {
3529 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3530 win = 1;
3531
3532 /* If we didn't already win, we can reload
3533 the address into a base register. */
3534 this_alternative[i]
3535 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3536 ADDRESS, SCRATCH);
3537 badop = 0;
3538 break;
3539 }
3540
3541 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3542 win = 1;
3543 #endif
3544 break;
3545 }
3546
3547 this_alternative[i]
3548 = (reg_class_subunion
3549 [this_alternative[i]]
3550 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3551 reg:
3552 if (GET_MODE (operand) == BLKmode)
3553 break;
3554 winreg = 1;
3555 if (REG_P (operand)
3556 && reg_fits_class_p (operand, this_alternative[i],
3557 offset, GET_MODE (recog_data.operand[i])))
3558 win = 1;
3559 break;
3560 }
3561 while ((p += len), c);
3562
3563 if (swapped == (commutative >= 0 ? 1 : 0))
3564 constraints[i] = p;
3565
3566 /* If this operand could be handled with a reg,
3567 and some reg is allowed, then this operand can be handled. */
3568 if (winreg && this_alternative[i] != NO_REGS
3569 && (win || !class_only_fixed_regs[this_alternative[i]]))
3570 badop = 0;
3571
3572 /* Record which operands fit this alternative. */
3573 this_alternative_earlyclobber[i] = earlyclobber;
3574 if (win && ! force_reload)
3575 this_alternative_win[i] = 1;
3576 else if (did_match && ! force_reload)
3577 this_alternative_match_win[i] = 1;
3578 else
3579 {
3580 int const_to_mem = 0;
3581
3582 this_alternative_offmemok[i] = offmemok;
3583 losers++;
3584 if (badop)
3585 bad = 1;
3586 /* Alternative loses if it has no regs for a reg operand. */
3587 if (REG_P (operand)
3588 && this_alternative[i] == NO_REGS
3589 && this_alternative_matches[i] < 0)
3590 bad = 1;
3591
3592 /* If this is a constant that is reloaded into the desired
3593 class by copying it to memory first, count that as another
3594 reload. This is consistent with other code and is
3595 required to avoid choosing another alternative when
3596 the constant is moved into memory by this function on
3597 an early reload pass. Note that the test here is
3598 precisely the same as in the code below that calls
3599 force_const_mem. */
3600 if (CONST_POOL_OK_P (operand_mode[i], operand)
3601 && ((targetm.preferred_reload_class (operand,
3602 this_alternative[i])
3603 == NO_REGS)
3604 || no_input_reloads))
3605 {
3606 const_to_mem = 1;
3607 if (this_alternative[i] != NO_REGS)
3608 losers++;
3609 }
3610
3611 /* Alternative loses if it requires a type of reload not
3612 permitted for this insn. We can always reload SCRATCH
3613 and objects with a REG_UNUSED note. */
3614 if (GET_CODE (operand) != SCRATCH
3615 && modified[i] != RELOAD_READ && no_output_reloads
3616 && ! find_reg_note (insn, REG_UNUSED, operand))
3617 bad = 1;
3618 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3619 && ! const_to_mem)
3620 bad = 1;
3621
3622 /* If we can't reload this value at all, reject this
3623 alternative. Note that we could also lose due to
3624 LIMIT_RELOAD_CLASS, but we don't check that
3625 here. */
3626
3627 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3628 {
3629 if (targetm.preferred_reload_class (operand,
3630 this_alternative[i])
3631 == NO_REGS)
3632 reject = 600;
3633
3634 if (operand_type[i] == RELOAD_FOR_OUTPUT
3635 && (targetm.preferred_output_reload_class (operand,
3636 this_alternative[i])
3637 == NO_REGS))
3638 reject = 600;
3639 }
3640
3641 /* We prefer to reload pseudos over reloading other things,
3642 since such reloads may be able to be eliminated later.
3643 If we are reloading a SCRATCH, we won't be generating any
3644 insns, just using a register, so it is also preferred.
3645 So bump REJECT in other cases. Don't do this in the
3646 case where we are forcing a constant into memory and
3647 it will then win, since we don't want a different
3648 alternative to match in that case. */
3649 if (! (REG_P (operand)
3650 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3651 && GET_CODE (operand) != SCRATCH
3652 && ! (const_to_mem && constmemok))
3653 reject += 2;
3654
3655 /* Input reloads can be inherited more often than output
3656 reloads can be removed, so penalize output reloads. */
3657 if (operand_type[i] != RELOAD_FOR_INPUT
3658 && GET_CODE (operand) != SCRATCH)
3659 reject++;
3660 }
3661
3662 /* If this operand is a pseudo register that didn't get
3663 a hard reg and this alternative accepts some
3664 register, see if the class that we want is a subset
3665 of the preferred class for this register. If not,
3666 but it intersects that class, use the preferred class
3667 instead. If it does not intersect the preferred
3668 class, show that usage of this alternative should be
3669 discouraged; it will be discouraged more still if the
3670 register is `preferred or nothing'. We do this
3671 because it increases the chance of reusing our spill
3672 register in a later insn and avoiding a pair of
3673 memory stores and loads.
3674
3675 Don't bother with this if this alternative will
3676 accept this operand.
3677
3678 Don't do this for a multiword operand, since it is
3679 only a small win and has the risk of requiring more
3680 spill registers, which could cause a large loss.
3681
3682 Don't do this if the preferred class has only one
3683 register because we might otherwise exhaust the
3684 class. */
3685
3686 if (! win && ! did_match
3687 && this_alternative[i] != NO_REGS
3688 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3689 && reg_class_size [(int) preferred_class[i]] > 0
3690 && ! small_register_class_p (preferred_class[i]))
3691 {
3692 if (! reg_class_subset_p (this_alternative[i],
3693 preferred_class[i]))
3694 {
3695 /* Since we don't have a way of forming the intersection,
3696 we just do something special if the preferred class
3697 is a subset of the class we have; that's the most
3698 common case anyway. */
3699 if (reg_class_subset_p (preferred_class[i],
3700 this_alternative[i]))
3701 this_alternative[i] = preferred_class[i];
3702 else
3703 reject += (2 + 2 * pref_or_nothing[i]);
3704 }
3705 }
3706 }
3707
3708 /* Now see if any output operands that are marked "earlyclobber"
3709 in this alternative conflict with any input operands
3710 or any memory addresses. */
3711
3712 for (i = 0; i < noperands; i++)
3713 if (this_alternative_earlyclobber[i]
3714 && (this_alternative_win[i] || this_alternative_match_win[i]))
3715 {
3716 struct decomposition early_data;
3717
3718 early_data = decompose (recog_data.operand[i]);
3719
3720 gcc_assert (modified[i] != RELOAD_READ);
3721
3722 if (this_alternative[i] == NO_REGS)
3723 {
3724 this_alternative_earlyclobber[i] = 0;
3725 gcc_assert (this_insn_is_asm);
3726 error_for_asm (this_insn,
3727 "%<&%> constraint used with no register class");
3728 }
3729
3730 for (j = 0; j < noperands; j++)
3731 /* Is this an input operand or a memory ref? */
3732 if ((MEM_P (recog_data.operand[j])
3733 || modified[j] != RELOAD_WRITE)
3734 && j != i
3735 /* Ignore things like match_operator operands. */
3736 && !recog_data.is_operator[j]
3737 /* Don't count an input operand that is constrained to match
3738 the early clobber operand. */
3739 && ! (this_alternative_matches[j] == i
3740 && rtx_equal_p (recog_data.operand[i],
3741 recog_data.operand[j]))
3742 /* Is it altered by storing the earlyclobber operand? */
3743 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3744 early_data))
3745 {
3746 /* If the output is in a non-empty few-regs class,
3747 it's costly to reload it, so reload the input instead. */
3748 if (small_register_class_p (this_alternative[i])
3749 && (REG_P (recog_data.operand[j])
3750 || GET_CODE (recog_data.operand[j]) == SUBREG))
3751 {
3752 losers++;
3753 this_alternative_win[j] = 0;
3754 this_alternative_match_win[j] = 0;
3755 }
3756 else
3757 break;
3758 }
3759 /* If an earlyclobber operand conflicts with something,
3760 it must be reloaded, so request this and count the cost. */
3761 if (j != noperands)
3762 {
3763 losers++;
3764 this_alternative_win[i] = 0;
3765 this_alternative_match_win[j] = 0;
3766 for (j = 0; j < noperands; j++)
3767 if (this_alternative_matches[j] == i
3768 && this_alternative_match_win[j])
3769 {
3770 this_alternative_win[j] = 0;
3771 this_alternative_match_win[j] = 0;
3772 losers++;
3773 }
3774 }
3775 }
3776
3777 /* If one alternative accepts all the operands with no reload required,
3778 choose that alternative; don't consider the remaining ones. */
3779 if (losers == 0)
3780 {
3781 /* Unswap these so that they are never swapped at `finish'. */
3782 if (swapped)
3783 {
3784 recog_data.operand[commutative] = substed_operand[commutative];
3785 recog_data.operand[commutative + 1]
3786 = substed_operand[commutative + 1];
3787 }
3788 for (i = 0; i < noperands; i++)
3789 {
3790 goal_alternative_win[i] = this_alternative_win[i];
3791 goal_alternative_match_win[i] = this_alternative_match_win[i];
3792 goal_alternative[i] = this_alternative[i];
3793 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3794 goal_alternative_matches[i] = this_alternative_matches[i];
3795 goal_alternative_earlyclobber[i]
3796 = this_alternative_earlyclobber[i];
3797 }
3798 goal_alternative_number = this_alternative_number;
3799 goal_alternative_swapped = swapped;
3800 goal_earlyclobber = this_earlyclobber;
3801 goto finish;
3802 }
3803
3804 /* REJECT, set by the ! and ? constraint characters and when a register
3805 would be reloaded into a non-preferred class, discourages the use of
3806 this alternative for a reload goal. REJECT is incremented by six
3807 for each ? and two for each non-preferred class. */
3808 losers = losers * 6 + reject;
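/* As a worked example of this scoring: an alternative that needs one
   reload but carries a single '?' (LOSERS == 1, REJECT == 6) scores 12,
   tying with an alternative that needs two reloads and has no '?'.  */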
3809
3810 /* If this alternative can be made to work by reloading,
3811 and it needs less reloading than the others checked so far,
3812 record it as the chosen goal for reloading. */
3813 if (! bad)
3814 {
3815 if (best > losers)
3816 {
3817 for (i = 0; i < noperands; i++)
3818 {
3819 goal_alternative[i] = this_alternative[i];
3820 goal_alternative_win[i] = this_alternative_win[i];
3821 goal_alternative_match_win[i]
3822 = this_alternative_match_win[i];
3823 goal_alternative_offmemok[i]
3824 = this_alternative_offmemok[i];
3825 goal_alternative_matches[i] = this_alternative_matches[i];
3826 goal_alternative_earlyclobber[i]
3827 = this_alternative_earlyclobber[i];
3828 }
3829 goal_alternative_swapped = swapped;
3830 best = losers;
3831 goal_alternative_number = this_alternative_number;
3832 goal_earlyclobber = this_earlyclobber;
3833 }
3834 }
3835
3836 if (swapped)
3837 {
3838 enum reg_class tclass;
3839 int t;
3840
3841 /* If the commutative operands have been swapped, swap
3842 them back in order to check the next alternative. */
3843 recog_data.operand[commutative] = substed_operand[commutative];
3844 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3845 /* Unswap the duplicates too. */
3846 for (i = 0; i < recog_data.n_dups; i++)
3847 if (recog_data.dup_num[i] == commutative
3848 || recog_data.dup_num[i] == commutative + 1)
3849 *recog_data.dup_loc[i]
3850 = recog_data.operand[(int) recog_data.dup_num[i]];
3851
3852 /* Unswap the operand related information as well. */
3853 tclass = preferred_class[commutative];
3854 preferred_class[commutative] = preferred_class[commutative + 1];
3855 preferred_class[commutative + 1] = tclass;
3856
3857 t = pref_or_nothing[commutative];
3858 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3859 pref_or_nothing[commutative + 1] = t;
3860
3861 t = address_reloaded[commutative];
3862 address_reloaded[commutative] = address_reloaded[commutative + 1];
3863 address_reloaded[commutative + 1] = t;
3864 }
3865 }
3866 }
3867
3868 /* The operands don't meet the constraints.
3869 goal_alternative describes the alternative
3870 that we could reach by reloading the fewest operands.
3871 Reload so as to fit it. */
3872
3873 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3874 {
3875 /* No alternative works with reloads?? */
3876 if (insn_code_number >= 0)
3877 fatal_insn ("unable to generate reloads for:", insn);
3878 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3879 /* Avoid further trouble with this insn. */
3880 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3881 n_reloads = 0;
3882 return 0;
3883 }
3884
3885 /* Jump to `finish' from above if all operands are valid already.
3886 In that case, goal_alternative_win is all 1. */
3887 finish:
3888
3889 /* Right now, for any pair of operands I and J that are required to match,
3890 with I < J,
3891 goal_alternative_matches[J] is I.
3892 Set up goal_alternative_matched as the inverse function:
3893 goal_alternative_matched[I] = J. */
3894
3895 for (i = 0; i < noperands; i++)
3896 goal_alternative_matched[i] = -1;
3897
3898 for (i = 0; i < noperands; i++)
3899 if (! goal_alternative_win[i]
3900 && goal_alternative_matches[i] >= 0)
3901 goal_alternative_matched[goal_alternative_matches[i]] = i;
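/* For example, if operand 2 had the constraint "0" and still needs a
   reload, goal_alternative_matches[2] is 0 and the loop above records
   goal_alternative_matched[0] = 2.  */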
3902
3903 for (i = 0; i < noperands; i++)
3904 goal_alternative_win[i] |= goal_alternative_match_win[i];
3905
3906 /* If the best alternative is with operands 1 and 2 swapped,
3907 consider them swapped before reporting the reloads. Update the
3908 operand numbers of any reloads already pushed. */
3909
3910 if (goal_alternative_swapped)
3911 {
3912 rtx tem;
3913
3914 tem = substed_operand[commutative];
3915 substed_operand[commutative] = substed_operand[commutative + 1];
3916 substed_operand[commutative + 1] = tem;
3917 tem = recog_data.operand[commutative];
3918 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3919 recog_data.operand[commutative + 1] = tem;
3920 tem = *recog_data.operand_loc[commutative];
3921 *recog_data.operand_loc[commutative]
3922 = *recog_data.operand_loc[commutative + 1];
3923 *recog_data.operand_loc[commutative + 1] = tem;
3924
3925 for (i = 0; i < n_reloads; i++)
3926 {
3927 if (rld[i].opnum == commutative)
3928 rld[i].opnum = commutative + 1;
3929 else if (rld[i].opnum == commutative + 1)
3930 rld[i].opnum = commutative;
3931 }
3932 }
3933
3934 for (i = 0; i < noperands; i++)
3935 {
3936 operand_reloadnum[i] = -1;
3937
3938 /* If this is an earlyclobber operand, we need to widen the scope.
3939 The reload must remain valid from the start of the insn being
3940 reloaded until after the operand is stored into its destination.
3941 We approximate this with RELOAD_OTHER even though we know that we
3942 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3943
3944 One special case that is worth checking is when we have an
3945 output that is earlyclobber but isn't used past the insn (typically
3946 a SCRATCH). In this case, we need only keep the reload live
3947 through the insn itself, but not for any of our input or output
3948 reloads.
3949 But we must not accidentally narrow the scope of an existing
3950 RELOAD_OTHER reload - leave these alone.
3951
3952 In any case, anything needed to address this operand can remain
3953 however it was previously categorized. */
3954
3955 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3956 operand_type[i]
3957 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3958 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3959 }
3960
3961 /* Any constants that aren't allowed and can't be reloaded
3962 into registers are here changed into memory references. */
3963 for (i = 0; i < noperands; i++)
3964 if (! goal_alternative_win[i])
3965 {
3966 rtx op = recog_data.operand[i];
3967 rtx subreg = NULL_RTX;
3968 rtx plus = NULL_RTX;
3969 enum machine_mode mode = operand_mode[i];
3970
3971 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3972 push_reload so we have to let them pass here. */
3973 if (GET_CODE (op) == SUBREG)
3974 {
3975 subreg = op;
3976 op = SUBREG_REG (op);
3977 mode = GET_MODE (op);
3978 }
3979
3980 if (GET_CODE (op) == PLUS)
3981 {
3982 plus = op;
3983 op = XEXP (op, 1);
3984 }
3985
3986 if (CONST_POOL_OK_P (mode, op)
3987 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3988 == NO_REGS)
3989 || no_input_reloads))
3990 {
3991 int this_address_reloaded;
3992 rtx tem = force_const_mem (mode, op);
3993
3994 /* If we stripped a SUBREG or a PLUS above, add it back. */
3995 if (plus != NULL_RTX)
3996 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3997
3998 if (subreg != NULL_RTX)
3999 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
4000
4001 this_address_reloaded = 0;
4002 substed_operand[i] = recog_data.operand[i]
4003 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
4004 0, insn, &this_address_reloaded);
4005
4006 /* If the alternative accepts constant pool refs directly,
4007 there will be no reload needed at all. */
4008 if (plus == NULL_RTX
4009 && subreg == NULL_RTX
4010 && alternative_allows_const_pool_ref (this_address_reloaded == 0
4011 ? substed_operand[i]
4012 : NULL,
4013 recog_data.constraints[i],
4014 goal_alternative_number))
4015 goal_alternative_win[i] = 1;
4016 }
4017 }
4018
4019 /* Record the values of the earlyclobber operands for the caller. */
4020 if (goal_earlyclobber)
4021 for (i = 0; i < noperands; i++)
4022 if (goal_alternative_earlyclobber[i])
4023 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4024
4025 /* Now record reloads for all the operands that need them. */
4026 for (i = 0; i < noperands; i++)
4027 if (! goal_alternative_win[i])
4028 {
4029 /* Operands that match previous ones have already been handled. */
4030 if (goal_alternative_matches[i] >= 0)
4031 ;
4032 /* Handle an operand with a nonoffsettable address
4033 appearing where an offsettable address will do
4034 by reloading the address into a base register.
4035
4036 ??? We can also do this when the operand is a register and
4037 reg_equiv_mem is not offsettable, but this is a bit tricky,
4038 so we don't bother with it. It may not be worth doing. */
4039 else if (goal_alternative_matched[i] == -1
4040 && goal_alternative_offmemok[i]
4041 && MEM_P (recog_data.operand[i]))
4042 {
4043 /* If the address to be reloaded is a VOIDmode constant,
4044 use the default address mode as the mode of the reload register,
4045 as would have been done by find_reloads_address. */
4046 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4047 enum machine_mode address_mode;
4048
4049 address_mode = get_address_mode (recog_data.operand[i]);
4050 operand_reloadnum[i]
4051 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4052 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4053 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4054 address_mode,
4055 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4056 rld[operand_reloadnum[i]].inc
4057 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4058
4059 /* If this operand is an output, we will have made any
4060 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4061 now we are treating part of the operand as an input, so
4062 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4063
4064 if (modified[i] == RELOAD_WRITE)
4065 {
4066 for (j = 0; j < n_reloads; j++)
4067 {
4068 if (rld[j].opnum == i)
4069 {
4070 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4071 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4072 else if (rld[j].when_needed
4073 == RELOAD_FOR_OUTADDR_ADDRESS)
4074 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4075 }
4076 }
4077 }
4078 }
4079 else if (goal_alternative_matched[i] == -1)
4080 {
4081 operand_reloadnum[i]
4082 = push_reload ((modified[i] != RELOAD_WRITE
4083 ? recog_data.operand[i] : 0),
4084 (modified[i] != RELOAD_READ
4085 ? recog_data.operand[i] : 0),
4086 (modified[i] != RELOAD_WRITE
4087 ? recog_data.operand_loc[i] : 0),
4088 (modified[i] != RELOAD_READ
4089 ? recog_data.operand_loc[i] : 0),
4090 (enum reg_class) goal_alternative[i],
4091 (modified[i] == RELOAD_WRITE
4092 ? VOIDmode : operand_mode[i]),
4093 (modified[i] == RELOAD_READ
4094 ? VOIDmode : operand_mode[i]),
4095 (insn_code_number < 0 ? 0
4096 : insn_data[insn_code_number].operand[i].strict_low),
4097 0, i, operand_type[i]);
4098 }
4099 /* In a matching pair of operands, one must be input only
4100 and the other must be output only.
4101 Pass the input operand as IN and the other as OUT. */
4102 else if (modified[i] == RELOAD_READ
4103 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4104 {
4105 operand_reloadnum[i]
4106 = push_reload (recog_data.operand[i],
4107 recog_data.operand[goal_alternative_matched[i]],
4108 recog_data.operand_loc[i],
4109 recog_data.operand_loc[goal_alternative_matched[i]],
4110 (enum reg_class) goal_alternative[i],
4111 operand_mode[i],
4112 operand_mode[goal_alternative_matched[i]],
4113 0, 0, i, RELOAD_OTHER);
4114 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4115 }
4116 else if (modified[i] == RELOAD_WRITE
4117 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4118 {
4119 operand_reloadnum[goal_alternative_matched[i]]
4120 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4121 recog_data.operand[i],
4122 recog_data.operand_loc[goal_alternative_matched[i]],
4123 recog_data.operand_loc[i],
4124 (enum reg_class) goal_alternative[i],
4125 operand_mode[goal_alternative_matched[i]],
4126 operand_mode[i],
4127 0, 0, i, RELOAD_OTHER);
4128 operand_reloadnum[i] = output_reloadnum;
4129 }
4130 else
4131 {
4132 gcc_assert (insn_code_number < 0);
4133 error_for_asm (insn, "inconsistent operand constraints "
4134 "in an %<asm%>");
4135 /* Avoid further trouble with this insn. */
4136 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4137 n_reloads = 0;
4138 return 0;
4139 }
4140 }
4141 else if (goal_alternative_matched[i] < 0
4142 && goal_alternative_matches[i] < 0
4143 && address_operand_reloaded[i] != 1
4144 && optimize)
4145 {
4146 /* For each non-matching operand that's a MEM or a pseudo-register
4147 that didn't get a hard register, make an optional reload.
4148 This may get done even if the insn needs no reloads otherwise. */
4149
4150 rtx operand = recog_data.operand[i];
4151
4152 while (GET_CODE (operand) == SUBREG)
4153 operand = SUBREG_REG (operand);
4154 if ((MEM_P (operand)
4155 || (REG_P (operand)
4156 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4157 /* If this is only for an output, the optional reload would not
4158 actually cause us to use a register now, just note that
4159 something is stored here. */
4160 && (goal_alternative[i] != NO_REGS
4161 || modified[i] == RELOAD_WRITE)
4162 && ! no_input_reloads
4163 /* An optional output reload might make it possible to delete INSN later.
4164 We mustn't make in-out reloads on insns that are not permitted
4165 to have output reloads.
4166 If this is an asm, we can't delete it; we must not even call
4167 push_reload for an optional output reload in this case,
4168 because we can't be sure that the constraint allows a register,
4169 and push_reload verifies the constraints for asms. */
4170 && (modified[i] == RELOAD_READ
4171 || (! no_output_reloads && ! this_insn_is_asm)))
4172 operand_reloadnum[i]
4173 = push_reload ((modified[i] != RELOAD_WRITE
4174 ? recog_data.operand[i] : 0),
4175 (modified[i] != RELOAD_READ
4176 ? recog_data.operand[i] : 0),
4177 (modified[i] != RELOAD_WRITE
4178 ? recog_data.operand_loc[i] : 0),
4179 (modified[i] != RELOAD_READ
4180 ? recog_data.operand_loc[i] : 0),
4181 (enum reg_class) goal_alternative[i],
4182 (modified[i] == RELOAD_WRITE
4183 ? VOIDmode : operand_mode[i]),
4184 (modified[i] == RELOAD_READ
4185 ? VOIDmode : operand_mode[i]),
4186 (insn_code_number < 0 ? 0
4187 : insn_data[insn_code_number].operand[i].strict_low),
4188 1, i, operand_type[i]);
4189 /* If a memory reference remains (either as a MEM or a pseudo that
4190 did not get a hard register), yet we can't make an optional
4191 reload, check if this is actually a pseudo register reference;
4192 we then need to emit a USE and/or a CLOBBER so that reload
4193 inheritance will do the right thing. */
4194 else if (replace
4195 && (MEM_P (operand)
4196 || (REG_P (operand)
4197 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4198 && reg_renumber [REGNO (operand)] < 0)))
4199 {
4200 operand = *recog_data.operand_loc[i];
4201
4202 while (GET_CODE (operand) == SUBREG)
4203 operand = SUBREG_REG (operand);
4204 if (REG_P (operand))
4205 {
4206 if (modified[i] != RELOAD_WRITE)
4207 /* We mark the USE with QImode so that we recognize
4208 it as one that can be safely deleted at the end
4209 of reload. */
4210 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4211 insn), QImode);
4212 if (modified[i] != RELOAD_READ)
4213 emit_insn_after (gen_clobber (operand), insn);
4214 }
4215 }
4216 }
4217 else if (goal_alternative_matches[i] >= 0
4218 && goal_alternative_win[goal_alternative_matches[i]]
4219 && modified[i] == RELOAD_READ
4220 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4221 && ! no_input_reloads && ! no_output_reloads
4222 && optimize)
4223 {
4224 /* Similarly, make an optional reload for a pair of matching
4225 objects that are in MEM or a pseudo that didn't get a hard reg. */
4226
4227 rtx operand = recog_data.operand[i];
4228
4229 while (GET_CODE (operand) == SUBREG)
4230 operand = SUBREG_REG (operand);
4231 if ((MEM_P (operand)
4232 || (REG_P (operand)
4233 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4234 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4235 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4236 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4237 recog_data.operand[i],
4238 recog_data.operand_loc[goal_alternative_matches[i]],
4239 recog_data.operand_loc[i],
4240 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4241 operand_mode[goal_alternative_matches[i]],
4242 operand_mode[i],
4243 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4244 }
4245
4246 /* Perform whatever substitutions on the operands we are supposed
4247 to make due to commutativity or replacement of registers
4248 with equivalent constants or memory slots. */
4249
4250 for (i = 0; i < noperands; i++)
4251 {
4252 /* We only do this on the last pass through reload, because it is
4253 possible for some data (like reg_equiv_address) to be changed during
4254 later passes. Moreover, we lose the opportunity to get a useful
4255 reload_{in,out}_reg when we do these replacements. */
4256
4257 if (replace)
4258 {
4259 rtx substitution = substed_operand[i];
4260
4261 *recog_data.operand_loc[i] = substitution;
4262
4263 /* If we're replacing an operand with a LABEL_REF, we need to
4264 make sure that there's a REG_LABEL_OPERAND note attached to
4265 this instruction. */
4266 if (GET_CODE (substitution) == LABEL_REF
4267 && !find_reg_note (insn, REG_LABEL_OPERAND,
4268 XEXP (substitution, 0))
4269 /* For a JUMP_P, if it was a branch target it must have
4270 already been recorded as such. */
4271 && (!JUMP_P (insn)
4272 || !label_is_jump_target_p (XEXP (substitution, 0),
4273 insn)))
4274 {
4275 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4276 if (LABEL_P (XEXP (substitution, 0)))
4277 ++LABEL_NUSES (XEXP (substitution, 0));
4278 }
4279
4280 }
4281 else
4282 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4283 }
4284
4285 /* If this insn pattern contains any MATCH_DUP's, make sure that
4286 they will be substituted if the operands they match are substituted.
4287 Also do now any substitutions we already did on the operands.
4288
4289 Don't do this if we aren't making replacements because we might be
4290 propagating things allocated by frame pointer elimination into places
4291 it doesn't expect. */
4292
4293 if (insn_code_number >= 0 && replace)
4294 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4295 {
4296 int opno = recog_data.dup_num[i];
4297 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4298 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4299 }
4300
4301 #if 0
4302 /* This loses because reloading of prior insns can invalidate the equivalence
4303 (or at least find_equiv_reg isn't smart enough to find it any more),
4304 causing this insn to need more reload regs than it needed before.
4305 It may be too late to make the reload regs available.
4306 Now this optimization is done safely in choose_reload_regs. */
4307
4308 /* For each reload of a reg into some other class of reg,
4309 search for an existing equivalent reg (same value now) in the right class.
4310 We can use it as long as we don't need to change its contents. */
4311 for (i = 0; i < n_reloads; i++)
4312 if (rld[i].reg_rtx == 0
4313 && rld[i].in != 0
4314 && REG_P (rld[i].in)
4315 && rld[i].out == 0)
4316 {
4317 rld[i].reg_rtx
4318 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4319 static_reload_reg_p, 0, rld[i].inmode);
4320 /* Prevent generation of insn to load the value
4321 because the one we found already has the value. */
4322 if (rld[i].reg_rtx)
4323 rld[i].in = rld[i].reg_rtx;
4324 }
4325 #endif
4326
4327 /* If we detected an error and replaced the asm instruction with a USE, forget
4328 about the reloads. */
4329 if (GET_CODE (PATTERN (insn)) == USE
4330 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4331 n_reloads = 0;
4332
4333 /* Perhaps an output reload can be combined with another
4334 to reduce needs by one. */
4335 if (!goal_earlyclobber)
4336 combine_reloads ();
4337
4338 /* If we have a pair of reloads for parts of an address, where they are
4339 reloading the same object, the operands themselves were not reloaded, and
4340 they are for two operands that are supposed to match, merge the reloads and
4341 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4342
4343 for (i = 0; i < n_reloads; i++)
4344 {
4345 int k;
4346
4347 for (j = i + 1; j < n_reloads; j++)
4348 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4352 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4353 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4354 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4355 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4356 && rtx_equal_p (rld[i].in, rld[j].in)
4357 && (operand_reloadnum[rld[i].opnum] < 0
4358 || rld[operand_reloadnum[rld[i].opnum]].optional)
4359 && (operand_reloadnum[rld[j].opnum] < 0
4360 || rld[operand_reloadnum[rld[j].opnum]].optional)
4361 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4362 || (goal_alternative_matches[rld[j].opnum]
4363 == rld[i].opnum)))
4364 {
4365 for (k = 0; k < n_replacements; k++)
4366 if (replacements[k].what == j)
4367 replacements[k].what = i;
4368
4369 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4370 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4371 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4372 else
4373 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4374 rld[j].in = 0;
4375 }
4376 }
4377
4378 /* Scan all the reloads and update their type.
4379 If a reload is for the address of an operand and we didn't reload
4380 that operand, change the type. Similarly, change the operand number
4381 of a reload when two operands match. If a reload is optional, treat it
4382 as though the operand isn't reloaded.
4383
4384 ??? This latter case is somewhat odd because if we do the optional
4385 reload, it means the object is hanging around. Thus we need only
4386 do the address reload if the optional reload was NOT done.
4387
4388 Change secondary reloads to be the address type of their operand, not
4389 the normal type.
4390
4391 If an operand's reload is now RELOAD_OTHER, change any
4392 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4393 RELOAD_FOR_OTHER_ADDRESS. */
4394
4395 for (i = 0; i < n_reloads; i++)
4396 {
4397 if (rld[i].secondary_p
4398 && rld[i].when_needed == operand_type[rld[i].opnum])
4399 rld[i].when_needed = address_type[rld[i].opnum];
4400
4401 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4402 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4403 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4405 && (operand_reloadnum[rld[i].opnum] < 0
4406 || rld[operand_reloadnum[rld[i].opnum]].optional))
4407 {
4408 /* If we have a secondary reload to go along with this reload,
4409 change its type to RELOAD_FOR_OPADDR_ADDR. */
4410
4411 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4412 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4413 && rld[i].secondary_in_reload != -1)
4414 {
4415 int secondary_in_reload = rld[i].secondary_in_reload;
4416
4417 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4418
4419 /* If there's a tertiary reload we have to change it also. */
4420 if (secondary_in_reload > 0
4421 && rld[secondary_in_reload].secondary_in_reload != -1)
4422 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4423 = RELOAD_FOR_OPADDR_ADDR;
4424 }
4425
4426 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4427 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4428 && rld[i].secondary_out_reload != -1)
4429 {
4430 int secondary_out_reload = rld[i].secondary_out_reload;
4431
4432 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4433
4434 /* If there's a tertiary reload we have to change it also. */
4435 if (secondary_out_reload
4436 && rld[secondary_out_reload].secondary_out_reload != -1)
4437 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4438 = RELOAD_FOR_OPADDR_ADDR;
4439 }
4440
4441 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4442 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4443 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4444 else
4445 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4446 }
4447
4448 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4449 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4450 && operand_reloadnum[rld[i].opnum] >= 0
4451 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4452 == RELOAD_OTHER))
4453 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4454
4455 if (goal_alternative_matches[rld[i].opnum] >= 0)
4456 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4457 }
4458
4459 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4460 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4461 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4462
4463 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4464 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4465 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4466 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4467 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4468 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4469 This is complicated by the fact that a single operand can have more
4470 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4471 choose_reload_regs without affecting code quality, and cases that
4472 actually fail are extremely rare, so it turns out to be better to fix
4473 the problem here by not generating cases that choose_reload_regs will
4474 fail for. */
4475 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4476 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4477 a single operand.
4478 We can reduce the register pressure by exploiting that a
4479 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4480 does not conflict with any of them, if it is only used for the first of
4481 the RELOAD_FOR_X_ADDRESS reloads. */
4482 {
4483 int first_op_addr_num = -2;
4484 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4485 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4486 int need_change = 0;
4487 /* We use first_op_addr_num and the contents of the above arrays
4488 first as flags - -2 means no instance encountered, -1 means exactly
4489 one instance encountered.
4490 If more than one instance has been encountered, we store the reload
4491 number of the first reload of the kind in question; reload numbers
4492 are known to be non-negative. */
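/* For example, with two RELOAD_FOR_INPUT_ADDRESS reloads for operand K,
   the later one (seen first, since we scan downwards) only bumps the flag
   from -2 to -1; the earlier one then trips the >= 0 test, so its reload
   number - the first of that kind - ends up in first_inpaddr_num[K].  */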
4493 for (i = 0; i < noperands; i++)
4494 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4495 for (i = n_reloads - 1; i >= 0; i--)
4496 {
4497 switch (rld[i].when_needed)
4498 {
4499 case RELOAD_FOR_OPERAND_ADDRESS:
4500 if (++first_op_addr_num >= 0)
4501 {
4502 first_op_addr_num = i;
4503 need_change = 1;
4504 }
4505 break;
4506 case RELOAD_FOR_INPUT_ADDRESS:
4507 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4508 {
4509 first_inpaddr_num[rld[i].opnum] = i;
4510 need_change = 1;
4511 }
4512 break;
4513 case RELOAD_FOR_OUTPUT_ADDRESS:
4514 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4515 {
4516 first_outpaddr_num[rld[i].opnum] = i;
4517 need_change = 1;
4518 }
4519 break;
4520 default:
4521 break;
4522 }
4523 }
4524
4525 if (need_change)
4526 {
4527 for (i = 0; i < n_reloads; i++)
4528 {
4529 int first_num;
4530 enum reload_type type;
4531
4532 switch (rld[i].when_needed)
4533 {
4534 case RELOAD_FOR_OPADDR_ADDR:
4535 first_num = first_op_addr_num;
4536 type = RELOAD_FOR_OPERAND_ADDRESS;
4537 break;
4538 case RELOAD_FOR_INPADDR_ADDRESS:
4539 first_num = first_inpaddr_num[rld[i].opnum];
4540 type = RELOAD_FOR_INPUT_ADDRESS;
4541 break;
4542 case RELOAD_FOR_OUTADDR_ADDRESS:
4543 first_num = first_outpaddr_num[rld[i].opnum];
4544 type = RELOAD_FOR_OUTPUT_ADDRESS;
4545 break;
4546 default:
4547 continue;
4548 }
4549 if (first_num < 0)
4550 continue;
4551 else if (i > first_num)
4552 rld[i].when_needed = type;
4553 else
4554 {
4555 /* Check if the only TYPE reload that uses reload I is
4556 reload FIRST_NUM. */
4557 for (j = n_reloads - 1; j > first_num; j--)
4558 {
4559 if (rld[j].when_needed == type
4560 && (rld[i].secondary_p
4561 ? rld[j].secondary_in_reload == i
4562 : reg_mentioned_p (rld[i].in, rld[j].in)))
4563 {
4564 rld[i].when_needed = type;
4565 break;
4566 }
4567 }
4568 }
4569 }
4570 }
4571 }
4572
4573 /* See if we have any reloads that are now allowed to be merged
4574 because we've changed when the reload is needed to
4575 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4576 check for the most common cases. */
4577
4578 for (i = 0; i < n_reloads; i++)
4579 if (rld[i].in != 0 && rld[i].out == 0
4580 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4581 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4582 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4583 for (j = 0; j < n_reloads; j++)
4584 if (i != j && rld[j].in != 0 && rld[j].out == 0
4585 && rld[j].when_needed == rld[i].when_needed
4586 && MATCHES (rld[i].in, rld[j].in)
4587 && rld[i].rclass == rld[j].rclass
4588 && !rld[i].nocombine && !rld[j].nocombine
4589 && rld[i].reg_rtx == rld[j].reg_rtx)
4590 {
4591 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4592 transfer_replacements (i, j);
4593 rld[j].in = 0;
4594 }
4595
4596 #ifdef HAVE_cc0
4597 /* If we made any reloads for addresses, see if they violate a
4598 "no input reloads" requirement for this insn. But loads that we
4599 do after the insn (such as for output addresses) are fine. */
4600 if (no_input_reloads)
4601 for (i = 0; i < n_reloads; i++)
4602 gcc_assert (rld[i].in == 0
4603 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4604 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4605 #endif
4606
4607 /* Compute reload_mode and reload_nregs. */
4608 for (i = 0; i < n_reloads; i++)
4609 {
4610 rld[i].mode
4611 = (rld[i].inmode == VOIDmode
4612 || (GET_MODE_SIZE (rld[i].outmode)
4613 > GET_MODE_SIZE (rld[i].inmode)))
4614 ? rld[i].outmode : rld[i].inmode;
4615
4616 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4617 }
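/* For example, a reload with inmode SImode and outmode DImode is given
   mode DImode; on a typical 32-bit target that makes nregs 2 for a
   general-register class.  */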
4618
4619 /* Special case a simple move with an input reload and a
4620 destination of a hard reg: if the hard reg is OK, use it. */
4621 for (i = 0; i < n_reloads; i++)
4622 if (rld[i].when_needed == RELOAD_FOR_INPUT
4623 && GET_CODE (PATTERN (insn)) == SET
4624 && REG_P (SET_DEST (PATTERN (insn)))
4625 && (SET_SRC (PATTERN (insn)) == rld[i].in
4626 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4627 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4628 {
4629 rtx dest = SET_DEST (PATTERN (insn));
4630 unsigned int regno = REGNO (dest);
4631
4632 if (regno < FIRST_PSEUDO_REGISTER
4633 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4634 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4635 {
4636 int nr = hard_regno_nregs[regno][rld[i].mode];
4637 int ok = 1, nri;
4638
4639 for (nri = 1; nri < nr; nri ++)
4640 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4641 ok = 0;
4642
4643 if (ok)
4644 rld[i].reg_rtx = dest;
4645 }
4646 }
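/* For example, the case above turns (set (reg:SI 3) (reg:SI 900)), where
   pseudo 900 needed an input reload and hard register 3 belongs to the
   reload's class, into one that uses register 3 itself as the reload
   register, so no separate spill register is needed.  */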
4647
4648 return retval;
4649 }
4650
4651 /* Return true if alternative number ALTNUM in constraint-string
4652 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4653 MEM gives the reference if it didn't need any reloads, otherwise it
4654 is null. */
4655
4656 static bool
4657 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4658 const char *constraint, int altnum)
4659 {
4660 int c;
4661
4662 /* Skip alternatives before the one requested. */
4663 while (altnum > 0)
4664 {
4665 while (*constraint++ != ',')
4666 ;
4667 altnum--;
4668 }
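/* For example, with CONSTRAINT "r,m,o" and ALTNUM 2, the loop above skips
   past "r," and "m,", leaving the scan below to start at the "o"
   alternative.  */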
4669 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4670 If one of them is present, this alternative accepts the result of
4671 passing a constant-pool reference through find_reloads_toplev.
4672
4673 The same is true of extra memory constraints if the address
4674 was reloaded into a register. However, the target may elect
4675 to disallow the original constant address, forcing it to be
4676 reloaded into a register instead. */
4677 for (; (c = *constraint) && c != ',' && c != '#';
4678 constraint += CONSTRAINT_LEN (c, constraint))
4679 {
4680 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4681 return true;
4682 #ifdef EXTRA_CONSTRAINT_STR
4683 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4684 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4685 return true;
4686 #endif
4687 }
4688 return false;
4689 }
4690 \f
4691 /* Scan X for memory references and scan the addresses for reloading.
4692 Also check for references to "constant" regs that we want to eliminate
4693 and replace them with the values they stand for.
4694 We may alter X destructively if it contains a reference to such.
4695 If X is just a constant reg, we return the equivalent value
4696 instead of X.
4697
4698 IND_LEVELS says how many levels of indirect addressing this machine
4699 supports.
4700
4701 OPNUM and TYPE identify the purpose of the reload.
4702
4703 IS_SET_DEST is true if X is the destination of a SET, which is not
4704 appropriate to be replaced by a constant.
4705
4706 INSN, if nonzero, is the insn in which we do the reload. It is used
4707 to determine if we may generate output reloads, and where to put USEs
4708 for pseudos that we have to replace with stack slots.
4709
4710 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4711 result of find_reloads_address. */
4712
4713 static rtx
4714 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4715 int ind_levels, int is_set_dest, rtx insn,
4716 int *address_reloaded)
4717 {
4718 RTX_CODE code = GET_CODE (x);
4719
4720 const char *fmt = GET_RTX_FORMAT (code);
4721 int i;
4722 int copied;
4723
4724 if (code == REG)
4725 {
4726 /* This code is duplicated for speed in find_reloads. */
4727 int regno = REGNO (x);
4728 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4729 x = reg_equiv_constant (regno);
4730 #if 0
4731 /* This creates (subreg (mem...)) which would cause an unnecessary
4732 reload of the mem. */
4733 else if (reg_equiv_mem (regno) != 0)
4734 x = reg_equiv_mem (regno);
4735 #endif
4736 else if (reg_equiv_memory_loc (regno)
4737 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4738 {
4739 rtx mem = make_memloc (x, regno);
4740 if (reg_equiv_address (regno)
4741 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4742 {
4743 /* If this is not a toplevel operand, find_reloads doesn't see
4744 this substitution. We have to emit a USE of the pseudo so
4745 that delete_output_reload can see it. */
4746 if (replace_reloads && recog_data.operand[opnum] != x)
4747 /* We mark the USE with QImode so that we recognize it
4748 as one that can be safely deleted at the end of
4749 reload. */
4750 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4751 QImode);
4752 x = mem;
4753 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4754 opnum, type, ind_levels, insn);
4755 if (!rtx_equal_p (x, mem))
4756 push_reg_equiv_alt_mem (regno, x);
4757 if (address_reloaded)
4758 *address_reloaded = i;
4759 }
4760 }
4761 return x;
4762 }
4763 if (code == MEM)
4764 {
4765 rtx tem = x;
4766
4767 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4768 opnum, type, ind_levels, insn);
4769 if (address_reloaded)
4770 *address_reloaded = i;
4771
4772 return tem;
4773 }
4774
4775 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4776 {
4777 /* Check for SUBREG containing a REG that's equivalent to a
4778 constant. If the constant has a known value, truncate it
4779 right now. Similarly if we are extracting a single-word of a
4780 multi-word constant. If the constant is symbolic, allow it
4781 to be substituted normally. push_reload will strip the
4782 subreg later. The constant must not be VOIDmode, because we
4783 will lose the mode of the register (this should never happen
4784 because one of the cases above should handle it). */
4785
4786 int regno = REGNO (SUBREG_REG (x));
4787 rtx tem;
4788
4789 if (regno >= FIRST_PSEUDO_REGISTER
4790 && reg_renumber[regno] < 0
4791 && reg_equiv_constant (regno) != 0)
4792 {
4793 tem =
4794 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4795 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4796 gcc_assert (tem);
4797 if (CONSTANT_P (tem)
4798 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4799 {
4800 tem = force_const_mem (GET_MODE (x), tem);
4801 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4802 &XEXP (tem, 0), opnum, type,
4803 ind_levels, insn);
4804 if (address_reloaded)
4805 *address_reloaded = i;
4806 }
4807 return tem;
4808 }
4809
4810 /* If the subreg contains a reg that will be converted to a mem,
4811 attempt to convert the whole subreg to a (narrower or wider)
4812 memory reference instead. If this succeeds, we're done --
4813 otherwise fall through to check whether the inner reg still
4814 needs address reloads anyway. */
4815
4816 if (regno >= FIRST_PSEUDO_REGISTER
4817 && reg_equiv_memory_loc (regno) != 0)
4818 {
4819 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4820 insn, address_reloaded);
4821 if (tem)
4822 return tem;
4823 }
4824 }
4825
4826 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4827 {
4828 if (fmt[i] == 'e')
4829 {
4830 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4831 ind_levels, is_set_dest, insn,
4832 address_reloaded);
4833 	  /* If we have replaced a reg with its equivalent memory loc -
4834 that can still be handled here e.g. if it's in a paradoxical
4835 subreg - we must make the change in a copy, rather than using
4836 a destructive change. This way, find_reloads can still elect
4837 not to do the change. */
4838 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4839 {
4840 x = shallow_copy_rtx (x);
4841 copied = 1;
4842 }
4843 XEXP (x, i) = new_part;
4844 }
4845 }
4846 return x;
4847 }
4848
4849 /* Return a mem ref for the memory equivalent of reg REGNO.
4850 This mem ref is not shared with anything. */
4851
4852 static rtx
4853 make_memloc (rtx ad, int regno)
4854 {
4855 /* We must rerun eliminate_regs, in case the elimination
4856 offsets have changed. */
4857 rtx tem
4858 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4859 0);
4860
4861 /* If TEM might contain a pseudo, we must copy it to avoid
4862 modifying it when we do the substitution for the reload. */
4863 if (rtx_varies_p (tem, 0))
4864 tem = copy_rtx (tem);
4865
4866 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4867 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4868
4869 /* Copy the result if it's still the same as the equivalence, to avoid
4870 modifying it when we do the substitution for the reload. */
4871 if (tem == reg_equiv_memory_loc (regno))
4872 tem = copy_rtx (tem);
4873 return tem;
4874 }
4875
4876 /* Returns true if AD could be turned into a valid memory reference
4877 to mode MODE in address space AS by reloading the part pointed to
4878 by PART into a register. */
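/* The check works by temporarily substituting a fresh pseudo REG for
   *PART; in the non-strict sense used by memory_address_addr_space_p,
   any REG is acceptable as a base or index register, so this
   approximates the effect of reloading that part into a register.  */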
4879
4880 static int
4881 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4882 addr_space_t as, rtx *part)
4883 {
4884 int retv;
4885 rtx tem = *part;
4886 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4887
4888 *part = reg;
4889 retv = memory_address_addr_space_p (mode, ad, as);
4890 *part = tem;
4891
4892 return retv;
4893 }
4894
4895 /* Record all reloads needed for handling memory address AD
4896 which appears in *LOC in a memory reference to mode MODE
4897 which itself is found in location *MEMREFLOC.
4898 Note that we take shortcuts assuming that no multi-reg machine mode
4899 occurs as part of an address.
4900
4901 OPNUM and TYPE specify the purpose of this reload.
4902
4903 IND_LEVELS says how many levels of indirect addressing this machine
4904 supports.
4905
4906 INSN, if nonzero, is the insn in which we do the reload. It is used
4907 to determine if we may generate output reloads, and where to put USEs
4908 for pseudos that we have to replace with stack slots.
4909
4910 Value is one if this address is reloaded or replaced as a whole; it is
4911 zero if the top level of this address was not reloaded or replaced, and
4912 it is -1 if it may or may not have been reloaded or replaced.
4913
4914 Note that there is no verification that the address will be valid after
4915 this routine does its work. Instead, we rely on the fact that the address
4916 was valid when reload started. So we need only undo things that reload
4917 could have broken. These are wrong register types, pseudos not allocated
4918 to a hard register, and frame pointer elimination. */
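/* Illustrative sketch (target-dependent; the constants are made up):
   after frame pointer elimination a stack slot address may end up as
   (plus (reg fp) (const_int 32760)), which some machines cannot encode.
   If reg+reg addresses are allowed (double_reg_address_ok), the code
   below reloads just the displacement into an index register, giving
   (plus (reg fp) (reg index)); otherwise the whole sum is reloaded into
   a single base register.  */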
4919
4920 static int
4921 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4922 rtx *loc, int opnum, enum reload_type type,
4923 int ind_levels, rtx insn)
4924 {
4925   addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4926 : ADDR_SPACE_GENERIC;
4927 int regno;
4928 int removed_and = 0;
4929 int op_index;
4930 rtx tem;
4931
4932 /* If the address is a register, see if it is a legitimate address and
4933 reload if not. We first handle the cases where we need not reload
4934 or where we must reload in a non-standard way. */
4935
4936 if (REG_P (ad))
4937 {
4938 regno = REGNO (ad);
4939
4940 if (reg_equiv_constant (regno) != 0)
4941 {
4942 find_reloads_address_part (reg_equiv_constant (regno), loc,
4943 base_reg_class (mode, as, MEM, SCRATCH),
4944 GET_MODE (ad), opnum, type, ind_levels);
4945 return 1;
4946 }
4947
4948 tem = reg_equiv_memory_loc (regno);
4949 if (tem != 0)
4950 {
4951 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4952 {
4953 tem = make_memloc (ad, regno);
4954 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4955 XEXP (tem, 0),
4956 MEM_ADDR_SPACE (tem)))
4957 {
4958 rtx orig = tem;
4959
4960 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4961 &XEXP (tem, 0), opnum,
4962 ADDR_TYPE (type), ind_levels, insn);
4963 if (!rtx_equal_p (tem, orig))
4964 push_reg_equiv_alt_mem (regno, tem);
4965 }
4966 /* We can avoid a reload if the register's equivalent memory
4967 expression is valid as an indirect memory address.
4968 But not all addresses are valid in a mem used as an indirect
4969 address: only reg or reg+constant. */
4970
4971 if (ind_levels > 0
4972 && strict_memory_address_addr_space_p (mode, tem, as)
4973 && (REG_P (XEXP (tem, 0))
4974 || (GET_CODE (XEXP (tem, 0)) == PLUS
4975 && REG_P (XEXP (XEXP (tem, 0), 0))
4976 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4977 {
4978 /* TEM is not the same as what we'll be replacing the
4979 		 pseudo with after reload; put a USE in front of INSN
4980 in the final reload pass. */
4981 if (replace_reloads
4982 && num_not_at_initial_offset
4983 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4984 {
4985 *loc = tem;
4986 /* We mark the USE with QImode so that we
4987 recognize it as one that can be safely
4988 deleted at the end of reload. */
4989 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4990 insn), QImode);
4991
4992 /* This doesn't really count as replacing the address
4993 as a whole, since it is still a memory access. */
4994 }
4995 return 0;
4996 }
4997 ad = tem;
4998 }
4999 }
5000
5001 /* The only remaining case where we can avoid a reload is if this is a
5002 hard register that is valid as a base register and which is not the
5003 subject of a CLOBBER in this insn. */
5004
5005 else if (regno < FIRST_PSEUDO_REGISTER
5006 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5007 && ! regno_clobbered_p (regno, this_insn, mode, 0))
5008 return 0;
5009
5010 /* If we do not have one of the cases above, we must do the reload. */
5011 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5012 base_reg_class (mode, as, MEM, SCRATCH),
5013 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5014 return 1;
5015 }
5016
5017 if (strict_memory_address_addr_space_p (mode, ad, as))
5018 {
5019 /* The address appears valid, so reloads are not needed.
5020 But the address may contain an eliminable register.
5021 This can happen because a machine with indirect addressing
5022 may consider a pseudo register by itself a valid address even when
5023 it has failed to get a hard reg.
5024 So do a tree-walk to find and eliminate all such regs. */
5025
5026 /* But first quickly dispose of a common case. */
5027 if (GET_CODE (ad) == PLUS
5028 && CONST_INT_P (XEXP (ad, 1))
5029 && REG_P (XEXP (ad, 0))
5030 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5031 return 0;
5032
5033 subst_reg_equivs_changed = 0;
5034 *loc = subst_reg_equivs (ad, insn);
5035
5036 if (! subst_reg_equivs_changed)
5037 return 0;
5038
5039 /* Check result for validity after substitution. */
5040 if (strict_memory_address_addr_space_p (mode, ad, as))
5041 return 0;
5042 }
5043
5044 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5045 do
5046 {
5047 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5048 {
5049 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5050 ind_levels, win);
5051 }
5052 break;
5053 win:
5054 *memrefloc = copy_rtx (*memrefloc);
5055 XEXP (*memrefloc, 0) = ad;
5056 move_replacements (&ad, &XEXP (*memrefloc, 0));
5057 return -1;
5058 }
5059 while (0);
5060 #endif
5061
5062 /* The address is not valid. We have to figure out why. First see if
5063 we have an outer AND and remove it if so. Then analyze what's inside. */
5064
5065 if (GET_CODE (ad) == AND)
5066 {
5067 removed_and = 1;
5068 loc = &XEXP (ad, 0);
5069 ad = *loc;
5070 }
5071
5072 /* One possibility for why the address is invalid is that it is itself
5073 a MEM. This can happen when the frame pointer is being eliminated, a
5074 pseudo is not allocated to a hard register, and the offset between the
5075 frame and stack pointers is not its initial value. In that case the
5076 pseudo will have been replaced by a MEM referring to the
5077 stack pointer. */
5078 if (MEM_P (ad))
5079 {
5080 /* First ensure that the address in this MEM is valid. Then, unless
5081 indirect addresses are valid, reload the MEM into a register. */
5082 tem = ad;
5083 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5084 opnum, ADDR_TYPE (type),
5085 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5086
5087 /* If tem was changed, then we must create a new memory reference to
5088 hold it and store it back into memrefloc. */
5089 if (tem != ad && memrefloc)
5090 {
5091 *memrefloc = copy_rtx (*memrefloc);
5092 copy_replacements (tem, XEXP (*memrefloc, 0));
5093 loc = &XEXP (*memrefloc, 0);
5094 if (removed_and)
5095 loc = &XEXP (*loc, 0);
5096 }
5097
5098       /* Check cases similar to the indirect-address cases above, except
5099 that we can allow pseudos and a MEM since they should have been
5100 taken care of above. */
5101
5102 if (ind_levels == 0
5103 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5104 || MEM_P (XEXP (tem, 0))
5105 || ! (REG_P (XEXP (tem, 0))
5106 || (GET_CODE (XEXP (tem, 0)) == PLUS
5107 && REG_P (XEXP (XEXP (tem, 0), 0))
5108 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5109 {
5110 /* Must use TEM here, not AD, since it is the one that will
5111 have any subexpressions reloaded, if needed. */
5112 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5113 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5114 VOIDmode, 0,
5115 0, opnum, type);
5116 return ! removed_and;
5117 }
5118 else
5119 return 0;
5120 }
5121
5122   /* If we have the address of a stack slot but it's not valid because the
5123 displacement is too large, compute the sum in a register.
5124 Handle all base registers here, not just fp/ap/sp, because on some
5125 targets (namely SH) we can also get too large displacements from
5126 big-endian corrections. */
5127 else if (GET_CODE (ad) == PLUS
5128 && REG_P (XEXP (ad, 0))
5129 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5130 && CONST_INT_P (XEXP (ad, 1))
5131 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5132 CONST_INT)
5133 /* Similarly, if we were to reload the base register and the
5134 mem+offset address is still invalid, then we want to reload
5135 the whole address, not just the base register. */
5136 || ! maybe_memory_address_addr_space_p
5137 (mode, ad, as, &(XEXP (ad, 0)))))
5138
5139 {
5140 /* Unshare the MEM rtx so we can safely alter it. */
5141 if (memrefloc)
5142 {
5143 *memrefloc = copy_rtx (*memrefloc);
5144 loc = &XEXP (*memrefloc, 0);
5145 if (removed_and)
5146 loc = &XEXP (*loc, 0);
5147 }
5148
5149 if (double_reg_address_ok
5150 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5151 PLUS, CONST_INT))
5152 {
5153 /* Unshare the sum as well. */
5154 *loc = ad = copy_rtx (ad);
5155
5156 /* Reload the displacement into an index reg.
5157 We assume the frame pointer or arg pointer is a base reg. */
5158 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5159 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5160 type, ind_levels);
5161 return 0;
5162 }
5163 else
5164 {
5165 /* If the sum of two regs is not necessarily valid,
5166 reload the sum into a base reg.
5167 That will at least work. */
5168 find_reloads_address_part (ad, loc,
5169 base_reg_class (mode, as, MEM, SCRATCH),
5170 GET_MODE (ad), opnum, type, ind_levels);
5171 }
5172 return ! removed_and;
5173 }
5174
5175 /* If we have an indexed stack slot, there are three possible reasons why
5176 it might be invalid: The index might need to be reloaded, the address
5177 might have been made by frame pointer elimination and hence have a
5178 constant out of range, or both reasons might apply.
5179
5180 We can easily check for an index needing reload, but even if that is the
5181 case, we might also have an invalid constant. To avoid making the
5182 conservative assumption and requiring two reloads, we see if this address
5183 is valid when not interpreted strictly. If it is, the only problem is
5184 that the index needs a reload and find_reloads_address_1 will take care
5185 of it.
5186
5187 Handle all base registers here, not just fp/ap/sp, because on some
5188 targets (namely SPARC) we can also get invalid addresses from preventive
5189 subreg big-endian corrections made by find_reloads_toplev. We
5190 can also get expressions involving LO_SUM (rather than PLUS) from
5191 find_reloads_subreg_address.
5192
5193 If we decide to do something, it must be that `double_reg_address_ok'
5194 is true. We generate a reload of the base register + constant and
5195 rework the sum so that the reload register will be added to the index.
5196 This is safe because we know the address isn't shared.
5197
5198 We check for the base register as both the first and second operand of
5199 the innermost PLUS and/or LO_SUM. */
5200
5201 for (op_index = 0; op_index < 2; ++op_index)
5202 {
5203 rtx operand, addend;
5204 enum rtx_code inner_code;
5205
5206 if (GET_CODE (ad) != PLUS)
5207 continue;
5208
5209 inner_code = GET_CODE (XEXP (ad, 0));
5210 if (!(GET_CODE (ad) == PLUS
5211 && CONST_INT_P (XEXP (ad, 1))
5212 && (inner_code == PLUS || inner_code == LO_SUM)))
5213 continue;
5214
5215 operand = XEXP (XEXP (ad, 0), op_index);
5216 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5217 continue;
5218
5219 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5220
5221 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5222 GET_CODE (addend))
5223 || operand == frame_pointer_rtx
5224 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5225 || operand == hard_frame_pointer_rtx
5226 #endif
5227 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5228 || operand == arg_pointer_rtx
5229 #endif
5230 || operand == stack_pointer_rtx)
5231 && ! maybe_memory_address_addr_space_p
5232 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5233 {
5234 rtx offset_reg;
5235 enum reg_class cls;
5236
5237 offset_reg = plus_constant (GET_MODE (ad), operand,
5238 INTVAL (XEXP (ad, 1)));
5239
5240 /* Form the adjusted address. */
5241 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5242 ad = gen_rtx_PLUS (GET_MODE (ad),
5243 op_index == 0 ? offset_reg : addend,
5244 op_index == 0 ? addend : offset_reg);
5245 else
5246 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5247 op_index == 0 ? offset_reg : addend,
5248 op_index == 0 ? addend : offset_reg);
5249 *loc = ad;
5250
5251 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5252 find_reloads_address_part (XEXP (ad, op_index),
5253 &XEXP (ad, op_index), cls,
5254 GET_MODE (ad), opnum, type, ind_levels);
5255 find_reloads_address_1 (mode, as,
5256 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5257 GET_CODE (XEXP (ad, op_index)),
5258 &XEXP (ad, 1 - op_index), opnum,
5259 type, 0, insn);
5260
5261 return 0;
5262 }
5263 }
5264
5265 /* See if address becomes valid when an eliminable register
5266 in a sum is replaced. */
5267
5268 tem = ad;
5269 if (GET_CODE (ad) == PLUS)
5270 tem = subst_indexed_address (ad);
5271 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5272 {
5273 /* Ok, we win that way. Replace any additional eliminable
5274 registers. */
5275
5276 subst_reg_equivs_changed = 0;
5277 tem = subst_reg_equivs (tem, insn);
5278
5279 /* Make sure that didn't make the address invalid again. */
5280
5281 if (! subst_reg_equivs_changed
5282 || strict_memory_address_addr_space_p (mode, tem, as))
5283 {
5284 *loc = tem;
5285 return 0;
5286 }
5287 }
5288
5289 /* If constants aren't valid addresses, reload the constant address
5290 into a register. */
5291 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5292 {
5293 enum machine_mode address_mode = GET_MODE (ad);
5294 if (address_mode == VOIDmode)
5295 address_mode = targetm.addr_space.address_mode (as);
5296
5297 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5298 Unshare it so we can safely alter it. */
5299 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5300 && CONSTANT_POOL_ADDRESS_P (ad))
5301 {
5302 *memrefloc = copy_rtx (*memrefloc);
5303 loc = &XEXP (*memrefloc, 0);
5304 if (removed_and)
5305 loc = &XEXP (*loc, 0);
5306 }
5307
5308 find_reloads_address_part (ad, loc,
5309 base_reg_class (mode, as, MEM, SCRATCH),
5310 address_mode, opnum, type, ind_levels);
5311 return ! removed_and;
5312 }
5313
5314 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5315 opnum, type, ind_levels, insn);
5316 }
5317 \f
5318 /* Find all pseudo regs appearing in AD
5319 that are eliminable in favor of equivalent values
5320 and do not have hard regs; replace them by their equivalents.
5321 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5322 front of it for pseudos that we have to replace with stack slots. */
5323
5324 static rtx
5325 subst_reg_equivs (rtx ad, rtx insn)
5326 {
5327 RTX_CODE code = GET_CODE (ad);
5328 int i;
5329 const char *fmt;
5330
5331 switch (code)
5332 {
5333 case HIGH:
5334 case CONST:
5335 CASE_CONST_ANY:
5336 case SYMBOL_REF:
5337 case LABEL_REF:
5338 case PC:
5339 case CC0:
5340 return ad;
5341
5342 case REG:
5343 {
5344 int regno = REGNO (ad);
5345
5346 if (reg_equiv_constant (regno) != 0)
5347 {
5348 subst_reg_equivs_changed = 1;
5349 return reg_equiv_constant (regno);
5350 }
5351 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5352 {
5353 rtx mem = make_memloc (ad, regno);
5354 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5355 {
5356 subst_reg_equivs_changed = 1;
5357 /* We mark the USE with QImode so that we recognize it
5358 as one that can be safely deleted at the end of
5359 reload. */
5360 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5361 QImode);
5362 return mem;
5363 }
5364 }
5365 }
5366 return ad;
5367
5368 case PLUS:
5369 /* Quickly dispose of a common case. */
5370 if (XEXP (ad, 0) == frame_pointer_rtx
5371 && CONST_INT_P (XEXP (ad, 1)))
5372 return ad;
5373 break;
5374
5375 default:
5376 break;
5377 }
5378
5379 fmt = GET_RTX_FORMAT (code);
5380 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5381 if (fmt[i] == 'e')
5382 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5383 return ad;
5384 }
5385 \f
5386 /* Compute the sum of X and Y, making canonicalizations assumed in an
5387 address, namely: sum constant integers, surround the sum of two
5388 constants with a CONST, put the constant as the second operand, and
5389 group the constant on the outermost sum.
5390
5391 This routine assumes both inputs are already in canonical form. */
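/* Illustrative examples (REG R stands for an arbitrary non-constant
   term): form_sum (Pmode, (plus (reg R) (const_int 4)), (const_int 8))
   gives (plus (reg R) (const_int 12)), and summing (symbol_ref "x") with
   (const_int 4) gives the encapsulated constant
   (const (plus (symbol_ref "x") (const_int 4))).  */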
5392
5393 rtx
5394 form_sum (enum machine_mode mode, rtx x, rtx y)
5395 {
5396 rtx tem;
5397
5398 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5399 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5400
5401 if (CONST_INT_P (x))
5402 return plus_constant (mode, y, INTVAL (x));
5403 else if (CONST_INT_P (y))
5404 return plus_constant (mode, x, INTVAL (y));
5405 else if (CONSTANT_P (x))
5406 tem = x, x = y, y = tem;
5407
5408 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5409 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5410
5411 /* Note that if the operands of Y are specified in the opposite
5412 order in the recursive calls below, infinite recursion will occur. */
5413 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5414 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5415
5416 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5417 constant will have been placed second. */
5418 if (CONSTANT_P (x) && CONSTANT_P (y))
5419 {
5420 if (GET_CODE (x) == CONST)
5421 x = XEXP (x, 0);
5422 if (GET_CODE (y) == CONST)
5423 y = XEXP (y, 0);
5424
5425 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5426 }
5427
5428 return gen_rtx_PLUS (mode, x, y);
5429 }
5430 \f
5431 /* If ADDR is a sum containing a pseudo register that should be
5432 replaced with a constant (from reg_equiv_constant),
5433 return the result of doing so, and also apply the associative
5434 law so that the result is more likely to be a valid address.
5435 (But it is not guaranteed to be one.)
5436
5437 Note that at most one register is replaced, even if more are
5438 replaceable. Also, we try to put the result into a canonical form
5439 so it is more likely to be a valid address.
5440
5441 In all other cases, return ADDR. */
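/* Illustrative sketch (hypothetical pseudo P and base register BASE):
   if P got no hard register and reg_equiv_constant (P) is (const_int 16),
   then (plus (plus (reg BASE) (reg P)) (const_int 4)) is rewritten via
   form_sum into (plus (reg BASE) (const_int 20)).  */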
5442
5443 static rtx
5444 subst_indexed_address (rtx addr)
5445 {
5446 rtx op0 = 0, op1 = 0, op2 = 0;
5447 rtx tem;
5448 int regno;
5449
5450 if (GET_CODE (addr) == PLUS)
5451 {
5452 /* Try to find a register to replace. */
5453 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5454 if (REG_P (op0)
5455 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5456 && reg_renumber[regno] < 0
5457 && reg_equiv_constant (regno) != 0)
5458 op0 = reg_equiv_constant (regno);
5459 else if (REG_P (op1)
5460 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5461 && reg_renumber[regno] < 0
5462 && reg_equiv_constant (regno) != 0)
5463 op1 = reg_equiv_constant (regno);
5464 else if (GET_CODE (op0) == PLUS
5465 && (tem = subst_indexed_address (op0)) != op0)
5466 op0 = tem;
5467 else if (GET_CODE (op1) == PLUS
5468 && (tem = subst_indexed_address (op1)) != op1)
5469 op1 = tem;
5470 else
5471 return addr;
5472
5473 /* Pick out up to three things to add. */
5474 if (GET_CODE (op1) == PLUS)
5475 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5476 else if (GET_CODE (op0) == PLUS)
5477 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5478
5479 /* Compute the sum. */
5480 if (op2 != 0)
5481 op1 = form_sum (GET_MODE (addr), op1, op2);
5482 if (op1 != 0)
5483 op0 = form_sum (GET_MODE (addr), op0, op1);
5484
5485 return op0;
5486 }
5487 return addr;
5488 }
5489 \f
5490 /* Update the REG_INC notes for an insn. It updates all REG_INC
5491    notes for the instruction which refer to REGNO so that they refer
5492 to the reload number.
5493
5494 INSN is the insn for which any REG_INC notes need updating.
5495
5496 REGNO is the register number which has been reloaded.
5497
5498 RELOADNUM is the reload number. */
5499
5500 static void
5501 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5502 int reloadnum ATTRIBUTE_UNUSED)
5503 {
5504 #ifdef AUTO_INC_DEC
5505 rtx link;
5506
5507 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5508 if (REG_NOTE_KIND (link) == REG_INC
5509 && (int) REGNO (XEXP (link, 0)) == regno)
5510 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5511 #endif
5512 }
5513 \f
5514 /* Record the pseudo registers we must reload into hard registers in a
5515 subexpression of a would-be memory address, X referring to a value
5516 in mode MODE. (This function is not called if the address we find
5517 is strictly valid.)
5518
5519 CONTEXT = 1 means we are considering regs as index regs,
5520 = 0 means we are considering them as base regs.
5521 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5522 or an autoinc code.
5523 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5524 is the code of the index part of the address. Otherwise, pass SCRATCH
5525 for this argument.
5526 OPNUM and TYPE specify the purpose of any reloads made.
5527
5528 IND_LEVELS says how many levels of indirect addressing are
5529 supported at this point in the address.
5530
5531 INSN, if nonzero, is the insn in which we do the reload. It is used
5532 to determine if we may generate output reloads.
5533
5534 We return nonzero if X, as a whole, is reloaded or replaced. */
5535
5536 /* Note that we take shortcuts assuming that no multi-reg machine mode
5537 occurs as part of an address.
5538 Also, this is not fully machine-customizable; it works for machines
5539 such as VAXen and 68000's and 32000's, but other possible machines
5540 could have addressing modes that this does not handle right.
5541 If you add push_reload calls here, you need to make sure gen_reload
5542 handles those cases gracefully. */
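/* Rough illustration of the classification done for a PLUS below: in
   (plus (mult (reg X) (const_int 4)) (reg B)), the MULT operand is
   processed as an index (CONTEXT 1) and (reg B) as a base register
   (CONTEXT 0), so whichever of them is a pseudo without an acceptable
   hard register gets reloaded into the corresponding register class.  */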
5543
5544 static int
5545 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5546 rtx x, int context,
5547 enum rtx_code outer_code, enum rtx_code index_code,
5548 rtx *loc, int opnum, enum reload_type type,
5549 int ind_levels, rtx insn)
5550 {
5551 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5552 ((CONTEXT) == 0 \
5553 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5554 : REGNO_OK_FOR_INDEX_P (REGNO))
5555
5556 enum reg_class context_reg_class;
5557 RTX_CODE code = GET_CODE (x);
5558
5559 if (context == 1)
5560 context_reg_class = INDEX_REG_CLASS;
5561 else
5562 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5563
5564 switch (code)
5565 {
5566 case PLUS:
5567 {
5568 rtx orig_op0 = XEXP (x, 0);
5569 rtx orig_op1 = XEXP (x, 1);
5570 RTX_CODE code0 = GET_CODE (orig_op0);
5571 RTX_CODE code1 = GET_CODE (orig_op1);
5572 rtx op0 = orig_op0;
5573 rtx op1 = orig_op1;
5574
5575 if (GET_CODE (op0) == SUBREG)
5576 {
5577 op0 = SUBREG_REG (op0);
5578 code0 = GET_CODE (op0);
5579 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5580 op0 = gen_rtx_REG (word_mode,
5581 (REGNO (op0) +
5582 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5583 GET_MODE (SUBREG_REG (orig_op0)),
5584 SUBREG_BYTE (orig_op0),
5585 GET_MODE (orig_op0))));
5586 }
5587
5588 if (GET_CODE (op1) == SUBREG)
5589 {
5590 op1 = SUBREG_REG (op1);
5591 code1 = GET_CODE (op1);
5592 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5593 /* ??? Why is this given op1's mode and above for
5594 ??? op0 SUBREGs we use word_mode? */
5595 op1 = gen_rtx_REG (GET_MODE (op1),
5596 (REGNO (op1) +
5597 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5598 GET_MODE (SUBREG_REG (orig_op1)),
5599 SUBREG_BYTE (orig_op1),
5600 GET_MODE (orig_op1))));
5601 }
5602 	/* A PLUS in the index register position may be created only as a result
5603 	   of register rematerialization for an expression like &localvar*4.  Reload it.
5604 It may be possible to combine the displacement on the outer level,
5605 but it is probably not worthwhile to do so. */
5606 if (context == 1)
5607 {
5608 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5609 opnum, ADDR_TYPE (type), ind_levels, insn);
5610 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5611 context_reg_class,
5612 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5613 return 1;
5614 }
5615
5616 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5617 || code0 == ZERO_EXTEND || code1 == MEM)
5618 {
5619 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5620 &XEXP (x, 0), opnum, type, ind_levels,
5621 insn);
5622 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5623 &XEXP (x, 1), opnum, type, ind_levels,
5624 insn);
5625 }
5626
5627 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5628 || code1 == ZERO_EXTEND || code0 == MEM)
5629 {
5630 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5631 &XEXP (x, 0), opnum, type, ind_levels,
5632 insn);
5633 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5634 &XEXP (x, 1), opnum, type, ind_levels,
5635 insn);
5636 }
5637
5638 else if (code0 == CONST_INT || code0 == CONST
5639 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5640 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5641 &XEXP (x, 1), opnum, type, ind_levels,
5642 insn);
5643
5644 else if (code1 == CONST_INT || code1 == CONST
5645 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5646 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5647 &XEXP (x, 0), opnum, type, ind_levels,
5648 insn);
5649
5650 else if (code0 == REG && code1 == REG)
5651 {
5652 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5653 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5654 return 0;
5655 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5656 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5657 return 0;
5658 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5659 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5660 &XEXP (x, 1), opnum, type, ind_levels,
5661 insn);
5662 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5663 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5664 &XEXP (x, 0), opnum, type, ind_levels,
5665 insn);
5666 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5667 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5668 &XEXP (x, 0), opnum, type, ind_levels,
5669 insn);
5670 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5671 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5672 &XEXP (x, 1), opnum, type, ind_levels,
5673 insn);
5674 else
5675 {
5676 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5677 &XEXP (x, 0), opnum, type, ind_levels,
5678 insn);
5679 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5680 &XEXP (x, 1), opnum, type, ind_levels,
5681 insn);
5682 }
5683 }
5684
5685 else if (code0 == REG)
5686 {
5687 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5688 &XEXP (x, 0), opnum, type, ind_levels,
5689 insn);
5690 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5691 &XEXP (x, 1), opnum, type, ind_levels,
5692 insn);
5693 }
5694
5695 else if (code1 == REG)
5696 {
5697 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5698 &XEXP (x, 1), opnum, type, ind_levels,
5699 insn);
5700 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5701 &XEXP (x, 0), opnum, type, ind_levels,
5702 insn);
5703 }
5704 }
5705
5706 return 0;
5707
5708 case POST_MODIFY:
5709 case PRE_MODIFY:
5710 {
5711 rtx op0 = XEXP (x, 0);
5712 rtx op1 = XEXP (x, 1);
5713 enum rtx_code index_code;
5714 int regno;
5715 int reloadnum;
5716
5717 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5718 return 0;
5719
5720 /* Currently, we only support {PRE,POST}_MODIFY constructs
5721 where a base register is {inc,dec}remented by the contents
5722 of another register or by a constant value. Thus, these
5723 operands must match. */
5724 gcc_assert (op0 == XEXP (op1, 0));
5725
5726 /* Require index register (or constant). Let's just handle the
5727 register case in the meantime... If the target allows
5728 auto-modify by a constant then we could try replacing a pseudo
5729 register with its equivalent constant where applicable.
5730
5731 We also handle the case where the register was eliminated
5732 resulting in a PLUS subexpression.
5733
5734 If we later decide to reload the whole PRE_MODIFY or
5735 POST_MODIFY, inc_for_reload might clobber the reload register
5736 before reading the index. The index register might therefore
5737 need to live longer than a TYPE reload normally would, so be
5738 conservative and class it as RELOAD_OTHER. */
5739 if ((REG_P (XEXP (op1, 1))
5740 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5741 || GET_CODE (XEXP (op1, 1)) == PLUS)
5742 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5743 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5744 ind_levels, insn);
5745
5746 gcc_assert (REG_P (XEXP (op1, 0)));
5747
5748 regno = REGNO (XEXP (op1, 0));
5749 index_code = GET_CODE (XEXP (op1, 1));
5750
5751 /* A register that is incremented cannot be constant! */
5752 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5753 || reg_equiv_constant (regno) == 0);
5754
5755 /* Handle a register that is equivalent to a memory location
5756 which cannot be addressed directly. */
5757 if (reg_equiv_memory_loc (regno) != 0
5758 && (reg_equiv_address (regno) != 0
5759 || num_not_at_initial_offset))
5760 {
5761 rtx tem = make_memloc (XEXP (x, 0), regno);
5762
5763 if (reg_equiv_address (regno)
5764 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5765 {
5766 rtx orig = tem;
5767
5768 /* First reload the memory location's address.
5769 We can't use ADDR_TYPE (type) here, because we need to
5770 write back the value after reading it, hence we actually
5771 need two registers. */
5772 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5773 &XEXP (tem, 0), opnum,
5774 RELOAD_OTHER,
5775 ind_levels, insn);
5776
5777 if (!rtx_equal_p (tem, orig))
5778 push_reg_equiv_alt_mem (regno, tem);
5779
5780 /* Then reload the memory location into a base
5781 register. */
5782 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5783 &XEXP (op1, 0),
5784 base_reg_class (mode, as,
5785 code, index_code),
5786 GET_MODE (x), GET_MODE (x), 0,
5787 0, opnum, RELOAD_OTHER);
5788
5789 update_auto_inc_notes (this_insn, regno, reloadnum);
5790 return 0;
5791 }
5792 }
5793
5794 if (reg_renumber[regno] >= 0)
5795 regno = reg_renumber[regno];
5796
5797 /* We require a base register here... */
5798 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5799 {
5800 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5801 &XEXP (op1, 0), &XEXP (x, 0),
5802 base_reg_class (mode, as,
5803 code, index_code),
5804 GET_MODE (x), GET_MODE (x), 0, 0,
5805 opnum, RELOAD_OTHER);
5806
5807 update_auto_inc_notes (this_insn, regno, reloadnum);
5808 return 0;
5809 }
5810 }
5811 return 0;
5812
5813 case POST_INC:
5814 case POST_DEC:
5815 case PRE_INC:
5816 case PRE_DEC:
5817 if (REG_P (XEXP (x, 0)))
5818 {
5819 int regno = REGNO (XEXP (x, 0));
5820 int value = 0;
5821 rtx x_orig = x;
5822
5823 /* A register that is incremented cannot be constant! */
5824 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5825 || reg_equiv_constant (regno) == 0);
5826
5827 /* Handle a register that is equivalent to a memory location
5828 which cannot be addressed directly. */
5829 if (reg_equiv_memory_loc (regno) != 0
5830 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5831 {
5832 rtx tem = make_memloc (XEXP (x, 0), regno);
5833 if (reg_equiv_address (regno)
5834 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5835 {
5836 rtx orig = tem;
5837
5838 /* First reload the memory location's address.
5839 We can't use ADDR_TYPE (type) here, because we need to
5840 write back the value after reading it, hence we actually
5841 need two registers. */
5842 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5843 &XEXP (tem, 0), opnum, type,
5844 ind_levels, insn);
5845 if (!rtx_equal_p (tem, orig))
5846 push_reg_equiv_alt_mem (regno, tem);
5847 /* Put this inside a new increment-expression. */
5848 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5849 /* Proceed to reload that, as if it contained a register. */
5850 }
5851 }
5852
5853 	  /* If we have a hard register that is ok in this incdec context,
5854 	     don't make a reload.  If the register isn't nice enough for
5855 	     autoincdec, we can reload it.  But if an autoincrement of a
5856 	     register that we verified above as acceptable is nevertheless
5857 	     not "valid" in this outer context, it must be that no
5858 	     autoincrement is "valid" at all.  If one was generated anyway,
5859 	     this must be a special context where one is allowed
5860 	     (for example, a "push" instruction).
5861 	     We can't improve this address, so leave it alone.  */
5862
5863 /* Otherwise, reload the autoincrement into a suitable hard reg
5864 and record how much to increment by. */
5865
5866 if (reg_renumber[regno] >= 0)
5867 regno = reg_renumber[regno];
5868 if (regno >= FIRST_PSEUDO_REGISTER
5869 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5870 index_code))
5871 {
5872 int reloadnum;
5873
5874 	      /* If we can output the register afterwards, do so; this
5875 saves the extra update.
5876 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5877 CALL_INSN - and it does not set CC0.
5878 But don't do this if we cannot directly address the
5879 memory location, since this will make it harder to
5880 reuse address reloads, and increases register pressure.
5881 Also don't do this if we can probably update x directly. */
5882 rtx equiv = (MEM_P (XEXP (x, 0))
5883 ? XEXP (x, 0)
5884 : reg_equiv_mem (regno));
5885 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5886 if (insn && NONJUMP_INSN_P (insn) && equiv
5887 && memory_operand (equiv, GET_MODE (equiv))
5888 #ifdef HAVE_cc0
5889 && ! sets_cc0_p (PATTERN (insn))
5890 #endif
5891 && ! (icode != CODE_FOR_nothing
5892 && insn_operand_matches (icode, 0, equiv)
5893 && insn_operand_matches (icode, 1, equiv)))
5894 {
5895 /* We use the original pseudo for loc, so that
5896 emit_reload_insns() knows which pseudo this
5897 reload refers to and updates the pseudo rtx, not
5898 its equivalent memory location, as well as the
5899 corresponding entry in reg_last_reload_reg. */
5900 loc = &XEXP (x_orig, 0);
5901 x = XEXP (x, 0);
5902 reloadnum
5903 = push_reload (x, x, loc, loc,
5904 context_reg_class,
5905 GET_MODE (x), GET_MODE (x), 0, 0,
5906 opnum, RELOAD_OTHER);
5907 }
5908 else
5909 {
5910 reloadnum
5911 = push_reload (x, x, loc, (rtx*) 0,
5912 context_reg_class,
5913 GET_MODE (x), GET_MODE (x), 0, 0,
5914 opnum, type);
5915 rld[reloadnum].inc
5916 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5917
5918 value = 1;
5919 }
5920
5921 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5922 reloadnum);
5923 }
5924 return value;
5925 }
5926 return 0;
5927
5928 case TRUNCATE:
5929 case SIGN_EXTEND:
5930 case ZERO_EXTEND:
5931 /* Look for parts to reload in the inner expression and reload them
5932 too, in addition to this operation. Reloading all inner parts in
5933 addition to this one shouldn't be necessary, but at this point,
5934 we don't know if we can possibly omit any part that *can* be
5935 reloaded. Targets that are better off reloading just either part
5936 (or perhaps even a different part of an outer expression), should
5937 define LEGITIMIZE_RELOAD_ADDRESS. */
5938 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5939 context, code, SCRATCH, &XEXP (x, 0), opnum,
5940 type, ind_levels, insn);
5941 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5942 context_reg_class,
5943 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5944 return 1;
5945
5946 case MEM:
5947 /* This is probably the result of a substitution, by eliminate_regs, of
5948 an equivalent address for a pseudo that was not allocated to a hard
5949 register. Verify that the specified address is valid and reload it
5950 into a register.
5951
5952 Since we know we are going to reload this item, don't decrement for
5953 the indirection level.
5954
5955 Note that this is actually conservative: it would be slightly more
5956 efficient to use the value of SPILL_INDIRECT_LEVELS from
5957 reload1.c here. */
5958
5959 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5960 opnum, ADDR_TYPE (type), ind_levels, insn);
5961 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5962 context_reg_class,
5963 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5964 return 1;
5965
5966 case REG:
5967 {
5968 int regno = REGNO (x);
5969
5970 if (reg_equiv_constant (regno) != 0)
5971 {
5972 find_reloads_address_part (reg_equiv_constant (regno), loc,
5973 context_reg_class,
5974 GET_MODE (x), opnum, type, ind_levels);
5975 return 1;
5976 }
5977
5978 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5979 that feeds this insn. */
5980 if (reg_equiv_mem (regno) != 0)
5981 {
5982 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5983 context_reg_class,
5984 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5985 return 1;
5986 }
5987 #endif
5988
5989 if (reg_equiv_memory_loc (regno)
5990 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5991 {
5992 rtx tem = make_memloc (x, regno);
5993 if (reg_equiv_address (regno) != 0
5994 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5995 {
5996 x = tem;
5997 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5998 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5999 ind_levels, insn);
6000 if (!rtx_equal_p (x, tem))
6001 push_reg_equiv_alt_mem (regno, x);
6002 }
6003 }
6004
6005 if (reg_renumber[regno] >= 0)
6006 regno = reg_renumber[regno];
6007
6008 if (regno >= FIRST_PSEUDO_REGISTER
6009 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6010 index_code))
6011 {
6012 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6013 context_reg_class,
6014 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6015 return 1;
6016 }
6017
6018 /* If a register appearing in an address is the subject of a CLOBBER
6019 in this insn, reload it into some other register to be safe.
6020 The CLOBBER is supposed to make the register unavailable
6021 from before this insn to after it. */
6022 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6023 {
6024 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6025 context_reg_class,
6026 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6027 return 1;
6028 }
6029 }
6030 return 0;
6031
6032 case SUBREG:
6033 if (REG_P (SUBREG_REG (x)))
6034 {
6035 /* If this is a SUBREG of a hard register and the resulting register
6036 is of the wrong class, reload the whole SUBREG. This avoids
6037 needless copies if SUBREG_REG is multi-word. */
6038 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6039 {
6040 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6041
6042 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6043 index_code))
6044 {
6045 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6046 context_reg_class,
6047 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6048 return 1;
6049 }
6050 }
6051 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6052 is larger than the class size, then reload the whole SUBREG. */
6053 else
6054 {
6055 enum reg_class rclass = context_reg_class;
6056 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6057 > reg_class_size[(int) rclass])
6058 {
6059 /* If the inner register will be replaced by a memory
6060 reference, we can do this only if we can replace the
6061 whole subreg by a (narrower) memory reference. If
6062 this is not possible, fall through and reload just
6063 the inner register (including address reloads). */
6064 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6065 {
6066 rtx tem = find_reloads_subreg_address (x, opnum,
6067 ADDR_TYPE (type),
6068 ind_levels, insn,
6069 NULL);
6070 if (tem)
6071 {
6072 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6073 GET_MODE (tem), VOIDmode, 0, 0,
6074 opnum, type);
6075 return 1;
6076 }
6077 }
6078 else
6079 {
6080 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6081 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6082 return 1;
6083 }
6084 }
6085 }
6086 }
6087 break;
6088
6089 default:
6090 break;
6091 }
6092
6093 {
6094 const char *fmt = GET_RTX_FORMAT (code);
6095 int i;
6096
6097 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6098 {
6099 if (fmt[i] == 'e')
6100 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6101 we get here. */
6102 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6103 code, SCRATCH, &XEXP (x, i),
6104 opnum, type, ind_levels, insn);
6105 }
6106 }
6107
6108 #undef REG_OK_FOR_CONTEXT
6109 return 0;
6110 }
6111 \f
6112 /* X, which is found at *LOC, is a part of an address that needs to be
6113 reloaded into a register of class RCLASS. If X is a constant, or if
6114 X is a PLUS that contains a constant, check that the constant is a
6115 legitimate operand and that we are supposed to be able to load
6116 it into the register.
6117
6118 If not, force the constant into memory and reload the MEM instead.
6119
6120 MODE is the mode to use, in case X is an integer constant.
6121
6122 OPNUM and TYPE describe the purpose of any reloads made.
6123
6124 IND_LEVELS says how many levels of indirect addressing this machine
6125 supports. */
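/* Illustrative sketch: if X is a constant that the target cannot load
   directly into RCLASS (for instance a CONST_DOUBLE rejected by
   targetm.legitimate_constant_p), force_const_mem places it in the
   constant pool and the resulting (mem (symbol_ref ...)) is reloaded
   instead; the constant term of a PLUS is handled the same way.  */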
6126
6127 static void
6128 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6129 enum machine_mode mode, int opnum,
6130 enum reload_type type, int ind_levels)
6131 {
6132 if (CONSTANT_P (x)
6133 && (!targetm.legitimate_constant_p (mode, x)
6134 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6135 {
6136 x = force_const_mem (mode, x);
6137 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6138 opnum, type, ind_levels, 0);
6139 }
6140
6141 else if (GET_CODE (x) == PLUS
6142 && CONSTANT_P (XEXP (x, 1))
6143 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6144 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6145 == NO_REGS))
6146 {
6147 rtx tem;
6148
6149 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6150 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6151 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6152 opnum, type, ind_levels, 0);
6153 }
6154
6155 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6156 mode, VOIDmode, 0, 0, opnum, type);
6157 }
6158 \f
6159 /* X, a subreg of a pseudo, is a part of an address that needs to be
6160    reloaded, and the pseudo is equivalent to a memory location.
6161
6162 Attempt to replace the whole subreg by a (possibly narrower or wider)
6163 memory reference. If this is possible, return this new memory
6164 reference, and push all required address reloads. Otherwise,
6165 return NULL.
6166
6167 OPNUM and TYPE identify the purpose of the reload.
6168
6169 IND_LEVELS says how many levels of indirect addressing are
6170 supported at this point in the address.
6171
6172 INSN, if nonzero, is the insn in which we do the reload. It is used
6173 to determine where to put USEs for pseudos that we have to replace with
6174 stack slots. */
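/* Hypothetical example: if pseudo P is equivalent to the stack slot
   (mem:SI (plus (reg fp) (const_int -16))), then (subreg:HI (reg:SI P) 2)
   can be replaced by (mem:HI (plus (reg fp) (const_int -14))), and only
   the address of that narrower MEM may still need reloading.  */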
6175
6176 static rtx
6177 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6178 int ind_levels, rtx insn, int *address_reloaded)
6179 {
6180 enum machine_mode outer_mode = GET_MODE (x);
6181 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6182 int regno = REGNO (SUBREG_REG (x));
6183 int reloaded = 0;
6184 rtx tem, orig;
6185 int offset;
6186
6187 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6188
6189 /* We cannot replace the subreg with a modified memory reference if:
6190
6191 - we have a paradoxical subreg that implicitly acts as a zero or
6192 sign extension operation due to LOAD_EXTEND_OP;
6193
6194 - we have a subreg that is implicitly supposed to act on the full
6195 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6196
6197 - the address of the equivalent memory location is mode-dependent; or
6198
6199 - we have a paradoxical subreg and the resulting memory is not
6200 sufficiently aligned to allow access in the wider mode.
6201
6202 In addition, we choose not to perform the replacement for *any*
6203 paradoxical subreg, even if it were possible in principle. This
6204 is to avoid generating wider memory references than necessary.
6205
6206 This corresponds to how previous versions of reload used to handle
6207 paradoxical subregs where no address reload was required. */
6208
6209 if (paradoxical_subreg_p (x))
6210 return NULL;
6211
6212 #ifdef WORD_REGISTER_OPERATIONS
6213 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6214 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6215 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6216 return NULL;
6217 #endif
6218
6219 /* Since we don't attempt to handle paradoxical subregs, we can just
6220 call into simplify_subreg, which will handle all remaining checks
6221 for us. */
6222 orig = make_memloc (SUBREG_REG (x), regno);
6223 offset = SUBREG_BYTE (x);
6224 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6225 if (!tem || !MEM_P (tem))
6226 return NULL;
6227
6228 /* Now push all required address reloads, if any. */
6229 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6230 XEXP (tem, 0), &XEXP (tem, 0),
6231 opnum, type, ind_levels, insn);
6232 /* ??? Do we need to handle nonzero offsets somehow? */
6233 if (!offset && !rtx_equal_p (tem, orig))
6234 push_reg_equiv_alt_mem (regno, tem);
6235
6236 /* For some processors an address may be valid in the original mode but
6237 not in a smaller mode. For example, ARM accepts a scaled index register
6238 in SImode but not in HImode. Note that this is only a problem if the
6239 address in reg_equiv_mem is already invalid in the new mode; other
6240 cases would be fixed by find_reloads_address as usual.
6241
6242 ??? We attempt to handle such cases here by doing an additional reload
6243 of the full address after the usual processing by find_reloads_address.
6244 Note that this may not work in the general case, but it seems to cover
6245 the cases where this situation currently occurs. A more general fix
6246 might be to reload the *value* instead of the address, but this would
6247 not be expected by the callers of this routine as-is.
6248
6249    If find_reloads_address already completely replaced the address, there
6250 is nothing further to do. */
6251 if (reloaded == 0
6252 && reg_equiv_mem (regno) != 0
6253 && !strict_memory_address_addr_space_p
6254 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6255 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6256 {
6257 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6258 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6259 MEM, SCRATCH),
6260 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6261 reloaded = 1;
6262 }
6263
6264 /* If this is not a toplevel operand, find_reloads doesn't see this
6265 substitution. We have to emit a USE of the pseudo so that
6266 delete_output_reload can see it. */
6267 if (replace_reloads && recog_data.operand[opnum] != x)
6268 /* We mark the USE with QImode so that we recognize it as one that
6269 can be safely deleted at the end of reload. */
6270 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6271 QImode);
6272
6273 if (address_reloaded)
6274 *address_reloaded = reloaded;
6275
6276 return tem;
6277 }
6278 \f
6279 /* Substitute into the current INSN the registers into which we have reloaded
6280 the things that need reloading. The array `replacements'
6281 contains the locations of all pointers that must be changed
6282    and says what to replace them with.  */
6285
6286 void
6287 subst_reloads (rtx insn)
6288 {
6289 int i;
6290
6291 for (i = 0; i < n_replacements; i++)
6292 {
6293 struct replacement *r = &replacements[i];
6294 rtx reloadreg = rld[r->what].reg_rtx;
6295 if (reloadreg)
6296 {
6297 #ifdef DEBUG_RELOAD
6298 /* This checking takes a very long time on some platforms
6299 causing the gcc.c-torture/compile/limits-fnargs.c test
6300 to time out during testing. See PR 31850.
6301
6302 Internal consistency test. Check that we don't modify
6303 anything in the equivalence arrays. Whenever something from
6304 those arrays needs to be reloaded, it must be unshared before
6305 being substituted into; the equivalence must not be modified.
6306 Otherwise, if the equivalence is used after that, it will
6307 have been modified, and the thing substituted (probably a
6308 register) is likely overwritten and not a usable equivalence. */
6309 int check_regno;
6310
6311 for (check_regno = 0; check_regno < max_regno; check_regno++)
6312 {
6313 #define CHECK_MODF(ARRAY) \
6314 gcc_assert (!reg_equivs[check_regno].ARRAY \
6315 || !loc_mentioned_in_p (r->where, \
6316 				      reg_equivs[check_regno].ARRAY))
6317
6318 CHECK_MODF (equiv_constant);
6319 CHECK_MODF (equiv_memory_loc);
6320 CHECK_MODF (equiv_address);
6321 CHECK_MODF (equiv_mem);
6322 #undef CHECK_MODF
6323 }
6324 #endif /* DEBUG_RELOAD */
6325
6326 /* If we're replacing a LABEL_REF with a register, there must
6327 	     already be an indication (to e.g. flow) of which label this
6328 register refers to. */
6329 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6330 || !JUMP_P (insn)
6331 || find_reg_note (insn,
6332 REG_LABEL_OPERAND,
6333 XEXP (*r->where, 0))
6334 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6335
6336 /* Encapsulate RELOADREG so its machine mode matches what
6337 used to be there. Note that gen_lowpart_common will
6338 do the wrong thing if RELOADREG is multi-word. RELOADREG
6339 will always be a REG here. */
6340 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6341 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6342
6343 *r->where = reloadreg;
6344 }
6345 /* If reload got no reg and isn't optional, something's wrong. */
6346 else
6347 gcc_assert (rld[r->what].optional);
6348 }
6349 }
6350 \f
6351 /* Make a copy of any replacements being done into X and move those
6352 copies to locations in Y, a copy of X. */
6353
6354 void
6355 copy_replacements (rtx x, rtx y)
6356 {
6357 copy_replacements_1 (&x, &y, n_replacements);
6358 }
6359
6360 static void
6361 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6362 {
6363 int i, j;
6364 rtx x, y;
6365 struct replacement *r;
6366 enum rtx_code code;
6367 const char *fmt;
6368
6369 for (j = 0; j < orig_replacements; j++)
6370 if (replacements[j].where == px)
6371 {
6372 r = &replacements[n_replacements++];
6373 r->where = py;
6374 r->what = replacements[j].what;
6375 r->mode = replacements[j].mode;
6376 }
6377
6378 x = *px;
6379 y = *py;
6380 code = GET_CODE (x);
6381 fmt = GET_RTX_FORMAT (code);
6382
6383 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6384 {
6385 if (fmt[i] == 'e')
6386 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6387 else if (fmt[i] == 'E')
6388 for (j = XVECLEN (x, i); --j >= 0; )
6389 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6390 orig_replacements);
6391 }
6392 }
6393
6394 /* Change any replacements being done to *X to be done to *Y. */
6395
6396 void
6397 move_replacements (rtx *x, rtx *y)
6398 {
6399 int i;
6400
6401 for (i = 0; i < n_replacements; i++)
6402 if (replacements[i].where == x)
6403 replacements[i].where = y;
6404 }
6405 \f
6406 /* If LOC was scheduled to be replaced by something, return the replacement.
6407 Otherwise, return *LOC. */
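
/* Usage sketch (the operand rtx below is an arbitrary assumption): code
   that needs the post-reload form of an address can ask for it with

       rtx addr = find_replacement (&XEXP (mem_operand, 0));

   which yields the scheduled reload register, adjusted to the recorded
   mode, if one exists for that location, and the unchanged *LOC
   otherwise.  */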
6408
6409 rtx
6410 find_replacement (rtx *loc)
6411 {
6412 struct replacement *r;
6413
6414 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6415 {
6416 rtx reloadreg = rld[r->what].reg_rtx;
6417
6418 if (reloadreg && r->where == loc)
6419 {
6420 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6421 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6422
6423 return reloadreg;
6424 }
6425 else if (reloadreg && GET_CODE (*loc) == SUBREG
6426 && r->where == &SUBREG_REG (*loc))
6427 {
6428 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6429 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6430
6431 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6432 GET_MODE (SUBREG_REG (*loc)),
6433 SUBREG_BYTE (*loc));
6434 }
6435 }
6436
6437 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6438 what's inside and make a new rtl if so. */
6439 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6440 || GET_CODE (*loc) == MULT)
6441 {
6442 rtx x = find_replacement (&XEXP (*loc, 0));
6443 rtx y = find_replacement (&XEXP (*loc, 1));
6444
6445 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6446 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6447 }
6448
6449 return *loc;
6450 }
6451 \f
6452 /* Return nonzero if register in range [REGNO, ENDREGNO)
6453 appears either explicitly or implicitly in X
6454 other than being stored into (except for earlyclobber operands).
6455
6456 References contained within the substructure at LOC do not count.
6457 LOC may be zero, meaning don't ignore anything.
6458
6459 This is similar to refers_to_regno_p in rtlanal.c except that we
6460 look at equivalences for pseudos that didn't get hard registers. */
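
/* Illustrative call (the register numbers are arbitrary): to ask whether
   either of hard regs 4 or 5 is referenced anywhere in ADDR, other than
   being stored into, one would write

       if (refers_to_regno_for_reload_p (4, 6, addr, (rtx *) 0))
         ...

   The interval is half-open, so ENDREGNO itself is not included, and a
   null LOC means no substructure is ignored.  */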
6461
6462 static int
6463 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6464 rtx x, rtx *loc)
6465 {
6466 int i;
6467 unsigned int r;
6468 RTX_CODE code;
6469 const char *fmt;
6470
6471 if (x == 0)
6472 return 0;
6473
6474 repeat:
6475 code = GET_CODE (x);
6476
6477 switch (code)
6478 {
6479 case REG:
6480 r = REGNO (x);
6481
6482 /* If this is a pseudo, a hard register must not have been allocated.
6483 X must therefore either be a constant or be in memory. */
6484 if (r >= FIRST_PSEUDO_REGISTER)
6485 {
6486 if (reg_equiv_memory_loc (r))
6487 return refers_to_regno_for_reload_p (regno, endregno,
6488 reg_equiv_memory_loc (r),
6489 (rtx*) 0);
6490
6491 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6492 return 0;
6493 }
6494
6495 return (endregno > r
6496 && regno < r + (r < FIRST_PSEUDO_REGISTER
6497 ? hard_regno_nregs[r][GET_MODE (x)]
6498 : 1));
6499
6500 case SUBREG:
6501 /* If this is a SUBREG of a hard reg, we can see exactly which
6502 registers are being modified. Otherwise, handle normally. */
6503 if (REG_P (SUBREG_REG (x))
6504 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6505 {
6506 unsigned int inner_regno = subreg_regno (x);
6507 unsigned int inner_endregno
6508 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6509 ? subreg_nregs (x) : 1);
6510
6511 return endregno > inner_regno && regno < inner_endregno;
6512 }
6513 break;
6514
6515 case CLOBBER:
6516 case SET:
6517 if (&SET_DEST (x) != loc
6518 /* Note setting a SUBREG counts as referring to the REG it is in for
6519 a pseudo, but not for hard registers, since we can
6520 treat each word individually. */
6521 && ((GET_CODE (SET_DEST (x)) == SUBREG
6522 && loc != &SUBREG_REG (SET_DEST (x))
6523 && REG_P (SUBREG_REG (SET_DEST (x)))
6524 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6525 && refers_to_regno_for_reload_p (regno, endregno,
6526 SUBREG_REG (SET_DEST (x)),
6527 loc))
6528 /* If the output is an earlyclobber operand, this is
6529 a conflict. */
6530 || ((!REG_P (SET_DEST (x))
6531 || earlyclobber_operand_p (SET_DEST (x)))
6532 && refers_to_regno_for_reload_p (regno, endregno,
6533 SET_DEST (x), loc))))
6534 return 1;
6535
6536 if (code == CLOBBER || loc == &SET_SRC (x))
6537 return 0;
6538 x = SET_SRC (x);
6539 goto repeat;
6540
6541 default:
6542 break;
6543 }
6544
6545 /* X does not match, so try its subexpressions. */
6546
6547 fmt = GET_RTX_FORMAT (code);
6548 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6549 {
6550 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6551 {
6552 if (i == 0)
6553 {
6554 x = XEXP (x, 0);
6555 goto repeat;
6556 }
6557 else
6558 if (refers_to_regno_for_reload_p (regno, endregno,
6559 XEXP (x, i), loc))
6560 return 1;
6561 }
6562 else if (fmt[i] == 'E')
6563 {
6564 int j;
6565 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6566 if (loc != &XVECEXP (x, i, j)
6567 && refers_to_regno_for_reload_p (regno, endregno,
6568 XVECEXP (x, i, j), loc))
6569 return 1;
6570 }
6571 }
6572 return 0;
6573 }
6574
6575 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6576 we check if any register number in X conflicts with the relevant register
6577 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6578 contains a MEM (we don't bother checking for memory addresses that can't
6579 conflict because we expect this to be a rare case).
6580
6581 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6582 that we look at equivalences for pseudos that didn't get hard registers. */
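
/* Illustrative call (RELOADREG and IN are arbitrary placeholders): before
   reusing RELOADREG while IN is still needed, one can check

       if (reg_overlap_mentioned_for_reload_p (reloadreg, in))
         return 0;

   Unlike reg_overlap_mentioned_p, pseudos that did not get hard registers
   are followed through their memory or constant equivalences.  */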
6583
6584 int
6585 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6586 {
6587 int regno, endregno;
6588
6589 /* Overly conservative. */
6590 if (GET_CODE (x) == STRICT_LOW_PART
6591 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6592 x = XEXP (x, 0);
6593
6594 /* If either argument is a constant, then modifying X cannot affect IN. */
6595 if (CONSTANT_P (x) || CONSTANT_P (in))
6596 return 0;
6597 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6598 return refers_to_mem_for_reload_p (in);
6599 else if (GET_CODE (x) == SUBREG)
6600 {
6601 regno = REGNO (SUBREG_REG (x));
6602 if (regno < FIRST_PSEUDO_REGISTER)
6603 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6604 GET_MODE (SUBREG_REG (x)),
6605 SUBREG_BYTE (x),
6606 GET_MODE (x));
6607 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6608 ? subreg_nregs (x) : 1);
6609
6610 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6611 }
6612 else if (REG_P (x))
6613 {
6614 regno = REGNO (x);
6615
6616 /* If this is a pseudo, it must not have been assigned a hard register.
6617 Therefore, it must either be in memory or be a constant. */
6618
6619 if (regno >= FIRST_PSEUDO_REGISTER)
6620 {
6621 if (reg_equiv_memory_loc (regno))
6622 return refers_to_mem_for_reload_p (in);
6623 gcc_assert (reg_equiv_constant (regno));
6624 return 0;
6625 }
6626
6627 endregno = END_HARD_REGNO (x);
6628
6629 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6630 }
6631 else if (MEM_P (x))
6632 return refers_to_mem_for_reload_p (in);
6633 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6634 || GET_CODE (x) == CC0)
6635 return reg_mentioned_p (x, in);
6636 else
6637 {
6638 gcc_assert (GET_CODE (x) == PLUS);
6639
6640 /* We actually want to know if X is mentioned somewhere inside IN.
6641 We must not say that (plus (sp) (const_int 124)) is in
6642 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6643 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6644 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6645 while (MEM_P (in))
6646 in = XEXP (in, 0);
6647 if (REG_P (in))
6648 return 0;
6649 else if (GET_CODE (in) == PLUS)
6650 return (rtx_equal_p (x, in)
6651 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6652 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6653 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6654 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6655 }
6656
6657 gcc_unreachable ();
6658 }
6659
6660 /* Return nonzero if anything in X contains a MEM. Pseudo registers whose
6661 equivalence is a memory location count as MEMs here. */
6662
6663 static int
6664 refers_to_mem_for_reload_p (rtx x)
6665 {
6666 const char *fmt;
6667 int i;
6668
6669 if (MEM_P (x))
6670 return 1;
6671
6672 if (REG_P (x))
6673 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6674 && reg_equiv_memory_loc (REGNO (x)));
6675
6676 fmt = GET_RTX_FORMAT (GET_CODE (x));
6677 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6678 if (fmt[i] == 'e'
6679 && (MEM_P (XEXP (x, i))
6680 || refers_to_mem_for_reload_p (XEXP (x, i))))
6681 return 1;
6682
6683 return 0;
6684 }
6685 \f
6686 /* Check the insns before INSN to see if there is a suitable register
6687 containing the same value as GOAL.
6688 If OTHER is -1, look for a register in class RCLASS.
6689 Otherwise, just see if register number OTHER shares GOAL's value.
6690
6691 Return an rtx for the register found, or zero if none is found.
6692
6693 If RELOAD_REG_P is (short *)1,
6694 we reject any hard reg that appears in reload_reg_rtx
6695 because such a hard reg is also needed coming into this insn.
6696
6697 If RELOAD_REG_P is any other nonzero value,
6698 it is a vector indexed by hard reg number
6699 and we reject any hard reg whose element in the vector is nonnegative
6700 as well as any that appears in reload_reg_rtx.
6701
6702 If GOAL is zero, then GOALREG is a register number; we look
6703 for an equivalent for that register.
6704
6705 MODE is the machine mode of the value we want an equivalence for.
6706 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6707
6708 This function is used by jump.c as well as in the reload pass.
6709
6710 If GOAL is the sum of the stack pointer and a constant, we treat it
6711 as if it were a constant except that sp is required to be unchanging. */
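
/* Illustrative call (the class and mode are arbitrary choices): to look
   backwards from INSN for a general register that already holds GOAL,
   rejecting hard regs that appear in reload_reg_rtx, one could write

       rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                                   (short *) 1, 0, GET_MODE (goal));

   A zero result means no suitable equivalent register was found.  */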
6712
6713 rtx
6714 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6715 short *reload_reg_p, int goalreg, enum machine_mode mode)
6716 {
6717 rtx p = insn;
6718 rtx goaltry, valtry, value, where;
6719 rtx pat;
6720 int regno = -1;
6721 int valueno;
6722 int goal_mem = 0;
6723 int goal_const = 0;
6724 int goal_mem_addr_varies = 0;
6725 int need_stable_sp = 0;
6726 int nregs;
6727 int valuenregs;
6728 int num = 0;
6729
6730 if (goal == 0)
6731 regno = goalreg;
6732 else if (REG_P (goal))
6733 regno = REGNO (goal);
6734 else if (MEM_P (goal))
6735 {
6736 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6737 if (MEM_VOLATILE_P (goal))
6738 return 0;
6739 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6740 return 0;
6741 /* An address with side effects must be reexecuted. */
6742 switch (code)
6743 {
6744 case POST_INC:
6745 case PRE_INC:
6746 case POST_DEC:
6747 case PRE_DEC:
6748 case POST_MODIFY:
6749 case PRE_MODIFY:
6750 return 0;
6751 default:
6752 break;
6753 }
6754 goal_mem = 1;
6755 }
6756 else if (CONSTANT_P (goal))
6757 goal_const = 1;
6758 else if (GET_CODE (goal) == PLUS
6759 && XEXP (goal, 0) == stack_pointer_rtx
6760 && CONSTANT_P (XEXP (goal, 1)))
6761 goal_const = need_stable_sp = 1;
6762 else if (GET_CODE (goal) == PLUS
6763 && XEXP (goal, 0) == frame_pointer_rtx
6764 && CONSTANT_P (XEXP (goal, 1)))
6765 goal_const = 1;
6766 else
6767 return 0;
6768
6769 num = 0;
6770 /* Scan insns back from INSN, looking for one that copies
6771 a value into or out of GOAL.
6772 Stop and give up if we reach a label. */
6773
6774 while (1)
6775 {
6776 p = PREV_INSN (p);
6777 if (p && DEBUG_INSN_P (p))
6778 continue;
6779 num++;
6780 if (p == 0 || LABEL_P (p)
6781 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6782 return 0;
6783
6784 /* Don't reuse register contents from before a setjmp-type
6785 function call; on the second return (from the longjmp) it
6786 might have been clobbered by a later reuse. It doesn't
6787 seem worthwhile to check whether it really is reused, even
6788 if that information were readily available;
6789 just don't reuse it across the setjmp call. */
6790 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6791 return 0;
6792
6793 if (NONJUMP_INSN_P (p)
6794 /* If we don't want spill regs ... */
6795 && (! (reload_reg_p != 0
6796 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6797 /* ... then ignore insns introduced by reload; they aren't
6798 useful and can cause results in reload_as_needed to be
6799 different from what they were when calculating the need for
6800 spills. If we notice an input-reload insn here, we will
6801 reject it below, but it might hide a usable equivalent.
6802 That makes bad code. It may even fail: perhaps no reg was
6803 spilled for this insn because it was assumed we would find
6804 that equivalent. */
6805 || INSN_UID (p) < reload_first_uid))
6806 {
6807 rtx tem;
6808 pat = single_set (p);
6809
6810 /* First check for something that sets some reg equal to GOAL. */
6811 if (pat != 0
6812 && ((regno >= 0
6813 && true_regnum (SET_SRC (pat)) == regno
6814 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6815 ||
6816 (regno >= 0
6817 && true_regnum (SET_DEST (pat)) == regno
6818 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6819 ||
6820 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6821 /* When looking for stack pointer + const,
6822 make sure we don't use a stack adjust. */
6823 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6824 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6825 || (goal_mem
6826 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6827 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6828 || (goal_mem
6829 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6830 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6831 /* If we are looking for a constant,
6832 and something equivalent to that constant was copied
6833 into a reg, we can use that reg. */
6834 || (goal_const && REG_NOTES (p) != 0
6835 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6836 && ((rtx_equal_p (XEXP (tem, 0), goal)
6837 && (valueno
6838 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6839 || (REG_P (SET_DEST (pat))
6840 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6841 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6842 && CONST_INT_P (goal)
6843 && 0 != (goaltry
6844 = operand_subword (XEXP (tem, 0), 0, 0,
6845 VOIDmode))
6846 && rtx_equal_p (goal, goaltry)
6847 && (valtry
6848 = operand_subword (SET_DEST (pat), 0, 0,
6849 VOIDmode))
6850 && (valueno = true_regnum (valtry)) >= 0)))
6851 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6852 NULL_RTX))
6853 && REG_P (SET_DEST (pat))
6854 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6855 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6856 && CONST_INT_P (goal)
6857 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6858 VOIDmode))
6859 && rtx_equal_p (goal, goaltry)
6860 && (valtry
6861 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6862 && (valueno = true_regnum (valtry)) >= 0)))
6863 {
6864 if (other >= 0)
6865 {
6866 if (valueno != other)
6867 continue;
6868 }
6869 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6870 continue;
6871 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6872 mode, valueno))
6873 continue;
6874 value = valtry;
6875 where = p;
6876 break;
6877 }
6878 }
6879 }
6880
6881 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6882 (or copying VALUE into GOAL, if GOAL is also a register).
6883 Now verify that VALUE is really valid. */
6884
6885 /* VALUENO is the register number of VALUE; a hard register. */
6886
6887 /* Don't try to re-use something that is killed in this insn. We want
6888 to be able to trust REG_UNUSED notes. */
6889 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6890 return 0;
6891
6892 /* If we propose to get the value from the stack pointer or if GOAL is
6893 a MEM based on the stack pointer, we need a stable SP. */
6894 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6895 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6896 goal)))
6897 need_stable_sp = 1;
6898
6899 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6900 if (GET_MODE (value) != mode)
6901 return 0;
6902
6903 /* Reject VALUE if it was loaded from GOAL
6904 and is also a register that appears in the address of GOAL. */
6905
6906 if (goal_mem && value == SET_DEST (single_set (where))
6907 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6908 goal, (rtx*) 0))
6909 return 0;
6910
6911 /* Reject registers that overlap GOAL. */
6912
6913 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6914 nregs = hard_regno_nregs[regno][mode];
6915 else
6916 nregs = 1;
6917 valuenregs = hard_regno_nregs[valueno][mode];
6918
6919 if (!goal_mem && !goal_const
6920 && regno + nregs > valueno && regno < valueno + valuenregs)
6921 return 0;
6922
6923 /* Reject VALUE if it is one of the regs reserved for reloads.
6924 Reload1 knows how to reuse them anyway, and it would get
6925 confused if we allocated one without its knowledge.
6926 (Now that insns introduced by reload are ignored above,
6927 this case shouldn't happen, but I'm not positive.) */
6928
6929 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6930 {
6931 int i;
6932 for (i = 0; i < valuenregs; ++i)
6933 if (reload_reg_p[valueno + i] >= 0)
6934 return 0;
6935 }
6936
6937 /* Reject VALUE if it is a register being used for an input reload
6938 even if it is not one of those reserved. */
6939
6940 if (reload_reg_p != 0)
6941 {
6942 int i;
6943 for (i = 0; i < n_reloads; i++)
6944 if (rld[i].reg_rtx != 0 && rld[i].in)
6945 {
6946 int regno1 = REGNO (rld[i].reg_rtx);
6947 int nregs1 = hard_regno_nregs[regno1]
6948 [GET_MODE (rld[i].reg_rtx)];
6949 if (regno1 < valueno + valuenregs
6950 && regno1 + nregs1 > valueno)
6951 return 0;
6952 }
6953 }
6954
6955 if (goal_mem)
6956 /* We must treat frame pointer as varying here,
6957 since it can vary--in a nonlocal goto as generated by expand_goto. */
6958 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6959
6960 /* Now verify that the values of GOAL and VALUE remain unaltered
6961 until INSN is reached. */
6962
6963 p = insn;
6964 while (1)
6965 {
6966 p = PREV_INSN (p);
6967 if (p == where)
6968 return value;
6969
6970 /* Don't trust the conversion past a function call
6971 if either of the two is in a call-clobbered register, or memory. */
6972 if (CALL_P (p))
6973 {
6974 int i;
6975
6976 if (goal_mem || need_stable_sp)
6977 return 0;
6978
6979 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6980 for (i = 0; i < nregs; ++i)
6981 if (call_used_regs[regno + i]
6982 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6983 return 0;
6984
6985 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6986 for (i = 0; i < valuenregs; ++i)
6987 if (call_used_regs[valueno + i]
6988 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6989 return 0;
6990 }
6991
6992 if (INSN_P (p))
6993 {
6994 pat = PATTERN (p);
6995
6996 /* Watch out for unspec_volatile, and volatile asms. */
6997 if (volatile_insn_p (pat))
6998 return 0;
6999
7000 /* If this insn P stores in either GOAL or VALUE, return 0.
7001 If GOAL is a memory ref and this insn writes memory, return 0.
7002 If GOAL is a memory ref and its address is not constant,
7003 and this insn P changes a register used in GOAL, return 0. */
7004
7005 if (GET_CODE (pat) == COND_EXEC)
7006 pat = COND_EXEC_CODE (pat);
7007 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7008 {
7009 rtx dest = SET_DEST (pat);
7010 while (GET_CODE (dest) == SUBREG
7011 || GET_CODE (dest) == ZERO_EXTRACT
7012 || GET_CODE (dest) == STRICT_LOW_PART)
7013 dest = XEXP (dest, 0);
7014 if (REG_P (dest))
7015 {
7016 int xregno = REGNO (dest);
7017 int xnregs;
7018 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7019 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7020 else
7021 xnregs = 1;
7022 if (xregno < regno + nregs && xregno + xnregs > regno)
7023 return 0;
7024 if (xregno < valueno + valuenregs
7025 && xregno + xnregs > valueno)
7026 return 0;
7027 if (goal_mem_addr_varies
7028 && reg_overlap_mentioned_for_reload_p (dest, goal))
7029 return 0;
7030 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7031 return 0;
7032 }
7033 else if (goal_mem && MEM_P (dest)
7034 && ! push_operand (dest, GET_MODE (dest)))
7035 return 0;
7036 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7037 && reg_equiv_memory_loc (regno) != 0)
7038 return 0;
7039 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7040 return 0;
7041 }
7042 else if (GET_CODE (pat) == PARALLEL)
7043 {
7044 int i;
7045 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7046 {
7047 rtx v1 = XVECEXP (pat, 0, i);
7048 if (GET_CODE (v1) == COND_EXEC)
7049 v1 = COND_EXEC_CODE (v1);
7050 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7051 {
7052 rtx dest = SET_DEST (v1);
7053 while (GET_CODE (dest) == SUBREG
7054 || GET_CODE (dest) == ZERO_EXTRACT
7055 || GET_CODE (dest) == STRICT_LOW_PART)
7056 dest = XEXP (dest, 0);
7057 if (REG_P (dest))
7058 {
7059 int xregno = REGNO (dest);
7060 int xnregs;
7061 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7062 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7063 else
7064 xnregs = 1;
7065 if (xregno < regno + nregs
7066 && xregno + xnregs > regno)
7067 return 0;
7068 if (xregno < valueno + valuenregs
7069 && xregno + xnregs > valueno)
7070 return 0;
7071 if (goal_mem_addr_varies
7072 && reg_overlap_mentioned_for_reload_p (dest,
7073 goal))
7074 return 0;
7075 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7076 return 0;
7077 }
7078 else if (goal_mem && MEM_P (dest)
7079 && ! push_operand (dest, GET_MODE (dest)))
7080 return 0;
7081 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7082 && reg_equiv_memory_loc (regno) != 0)
7083 return 0;
7084 else if (need_stable_sp
7085 && push_operand (dest, GET_MODE (dest)))
7086 return 0;
7087 }
7088 }
7089 }
7090
7091 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7092 {
7093 rtx link;
7094
7095 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7096 link = XEXP (link, 1))
7097 {
7098 pat = XEXP (link, 0);
7099 if (GET_CODE (pat) == CLOBBER)
7100 {
7101 rtx dest = SET_DEST (pat);
7102
7103 if (REG_P (dest))
7104 {
7105 int xregno = REGNO (dest);
7106 int xnregs
7107 = hard_regno_nregs[xregno][GET_MODE (dest)];
7108
7109 if (xregno < regno + nregs
7110 && xregno + xnregs > regno)
7111 return 0;
7112 else if (xregno < valueno + valuenregs
7113 && xregno + xnregs > valueno)
7114 return 0;
7115 else if (goal_mem_addr_varies
7116 && reg_overlap_mentioned_for_reload_p (dest,
7117 goal))
7118 return 0;
7119 }
7120
7121 else if (goal_mem && MEM_P (dest)
7122 && ! push_operand (dest, GET_MODE (dest)))
7123 return 0;
7124 else if (need_stable_sp
7125 && push_operand (dest, GET_MODE (dest)))
7126 return 0;
7127 }
7128 }
7129 }
7130
7131 #ifdef AUTO_INC_DEC
7132 /* If this insn auto-increments or auto-decrements
7133 either regno or valueno, return 0 now.
7134 If GOAL is a memory ref and its address is not constant,
7135 and this insn P increments a register used in GOAL, return 0. */
7136 {
7137 rtx link;
7138
7139 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7140 if (REG_NOTE_KIND (link) == REG_INC
7141 && REG_P (XEXP (link, 0)))
7142 {
7143 int incno = REGNO (XEXP (link, 0));
7144 if (incno < regno + nregs && incno >= regno)
7145 return 0;
7146 if (incno < valueno + valuenregs && incno >= valueno)
7147 return 0;
7148 if (goal_mem_addr_varies
7149 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7150 goal))
7151 return 0;
7152 }
7153 }
7154 #endif
7155 }
7156 }
7157 }
7158 \f
7159 /* Find a place where INCED appears in an increment or decrement operator
7160 within X, and return the amount INCED is incremented or decremented by.
7161 The value is always positive. */
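
/* For example (assuming a 4-byte SImode), if INCED is (reg:SI 3) and X
   contains (mem:SI (post_inc:SI (reg:SI 3))), the function returns 4,
   the GET_MODE_SIZE of the memory reference; for {PRE,POST}_MODIFY forms
   it returns the absolute value of the constant addend instead.  */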
7162
7163 static int
7164 find_inc_amount (rtx x, rtx inced)
7165 {
7166 enum rtx_code code = GET_CODE (x);
7167 const char *fmt;
7168 int i;
7169
7170 if (code == MEM)
7171 {
7172 rtx addr = XEXP (x, 0);
7173 if ((GET_CODE (addr) == PRE_DEC
7174 || GET_CODE (addr) == POST_DEC
7175 || GET_CODE (addr) == PRE_INC
7176 || GET_CODE (addr) == POST_INC)
7177 && XEXP (addr, 0) == inced)
7178 return GET_MODE_SIZE (GET_MODE (x));
7179 else if ((GET_CODE (addr) == PRE_MODIFY
7180 || GET_CODE (addr) == POST_MODIFY)
7181 && GET_CODE (XEXP (addr, 1)) == PLUS
7182 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7183 && XEXP (addr, 0) == inced
7184 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7185 {
7186 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7187 return i < 0 ? -i : i;
7188 }
7189 }
7190
7191 fmt = GET_RTX_FORMAT (code);
7192 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7193 {
7194 if (fmt[i] == 'e')
7195 {
7196 int tem = find_inc_amount (XEXP (x, i), inced);
7197 if (tem != 0)
7198 return tem;
7199 }
7200 if (fmt[i] == 'E')
7201 {
7202 int j;
7203 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7204 {
7205 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7206 if (tem != 0)
7207 return tem;
7208 }
7209 }
7210 }
7211
7212 return 0;
7213 }
7214 \f
7215 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7216 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7217
7218 #ifdef AUTO_INC_DEC
7219 static int
7220 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7221 rtx insn)
7222 {
7223 rtx link;
7224
7225 gcc_assert (insn);
7226
7227 if (! INSN_P (insn))
7228 return 0;
7229
7230 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7231 if (REG_NOTE_KIND (link) == REG_INC)
7232 {
7233 unsigned int test = (int) REGNO (XEXP (link, 0));
7234 if (test >= regno && test < endregno)
7235 return 1;
7236 }
7237 return 0;
7238 }
7239 #else
7240
7241 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7242
7243 #endif
7244
7245 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7246 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7247 REG_INC. REGNO must refer to a hard register. */
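
/* Illustrative call (the register number and mode are arbitrary):

       if (regno_clobbered_p (3, insn, SImode, 0))
         ...

   is nonzero when INSN contains a CLOBBER whose destination register falls
   within the hard regs covering (reg:SI 3); passing SETS == 1 makes plain
   SETs count as well, and SETS == 2 also honors REG_INC notes.  */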
7248
7249 int
7250 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7251 int sets)
7252 {
7253 unsigned int nregs, endregno;
7254
7255 /* regno must be a hard register. */
7256 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7257
7258 nregs = hard_regno_nregs[regno][mode];
7259 endregno = regno + nregs;
7260
7261 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7262 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7263 && REG_P (XEXP (PATTERN (insn), 0)))
7264 {
7265 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7266
7267 return test >= regno && test < endregno;
7268 }
7269
7270 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7271 return 1;
7272
7273 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7274 {
7275 int i = XVECLEN (PATTERN (insn), 0) - 1;
7276
7277 for (; i >= 0; i--)
7278 {
7279 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7280 if ((GET_CODE (elt) == CLOBBER
7281 || (sets == 1 && GET_CODE (elt) == SET))
7282 && REG_P (XEXP (elt, 0)))
7283 {
7284 unsigned int test = REGNO (XEXP (elt, 0));
7285
7286 if (test >= regno && test < endregno)
7287 return 1;
7288 }
7289 if (sets == 2
7290 && reg_inc_found_and_valid_p (regno, endregno, elt))
7291 return 1;
7292 }
7293 }
7294
7295 return 0;
7296 }
7297
7298 /* Find the low part, with mode MODE, of the hard reg RELOADREG. */
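/* For example (assuming SImode needs one hard reg and DImode two), given
   RELOADREG == (reg:DI 10), asking for SImode yields (reg:SI 10) on a
   !REG_WORDS_BIG_ENDIAN target and (reg:SI 11) when REG_WORDS_BIG_ENDIAN,
   since the low-order word then lives in the higher-numbered register.  */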
7299 rtx
7300 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7301 {
7302 int regno;
7303
7304 if (GET_MODE (reloadreg) == mode)
7305 return reloadreg;
7306
7307 regno = REGNO (reloadreg);
7308
7309 if (REG_WORDS_BIG_ENDIAN)
7310 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7311 - (int) hard_regno_nregs[regno][mode];
7312
7313 return gen_rtx_REG (mode, regno);
7314 }
7315
7316 static const char *const reload_when_needed_name[] =
7317 {
7318 "RELOAD_FOR_INPUT",
7319 "RELOAD_FOR_OUTPUT",
7320 "RELOAD_FOR_INSN",
7321 "RELOAD_FOR_INPUT_ADDRESS",
7322 "RELOAD_FOR_INPADDR_ADDRESS",
7323 "RELOAD_FOR_OUTPUT_ADDRESS",
7324 "RELOAD_FOR_OUTADDR_ADDRESS",
7325 "RELOAD_FOR_OPERAND_ADDRESS",
7326 "RELOAD_FOR_OPADDR_ADDR",
7327 "RELOAD_OTHER",
7328 "RELOAD_FOR_OTHER_ADDRESS"
7329 };
7330
7331 /* These functions are used to print the variables set by 'find_reloads'. */
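
/* A typical use is from the debugger: after find_reloads has run for an
   insn, evaluating

       (gdb) call debug_reload ()

   dumps every entry of rld[] to stderr in the format produced below.  */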
7332
7333 DEBUG_FUNCTION void
7334 debug_reload_to_stream (FILE *f)
7335 {
7336 int r;
7337 const char *prefix;
7338
7339 if (! f)
7340 f = stderr;
7341 for (r = 0; r < n_reloads; r++)
7342 {
7343 fprintf (f, "Reload %d: ", r);
7344
7345 if (rld[r].in != 0)
7346 {
7347 fprintf (f, "reload_in (%s) = ",
7348 GET_MODE_NAME (rld[r].inmode));
7349 print_inline_rtx (f, rld[r].in, 24);
7350 fprintf (f, "\n\t");
7351 }
7352
7353 if (rld[r].out != 0)
7354 {
7355 fprintf (f, "reload_out (%s) = ",
7356 GET_MODE_NAME (rld[r].outmode));
7357 print_inline_rtx (f, rld[r].out, 24);
7358 fprintf (f, "\n\t");
7359 }
7360
7361 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7362
7363 fprintf (f, "%s (opnum = %d)",
7364 reload_when_needed_name[(int) rld[r].when_needed],
7365 rld[r].opnum);
7366
7367 if (rld[r].optional)
7368 fprintf (f, ", optional");
7369
7370 if (rld[r].nongroup)
7371 fprintf (f, ", nongroup");
7372
7373 if (rld[r].inc != 0)
7374 fprintf (f, ", inc by %d", rld[r].inc);
7375
7376 if (rld[r].nocombine)
7377 fprintf (f, ", can't combine");
7378
7379 if (rld[r].secondary_p)
7380 fprintf (f, ", secondary_reload_p");
7381
7382 if (rld[r].in_reg != 0)
7383 {
7384 fprintf (f, "\n\treload_in_reg: ");
7385 print_inline_rtx (f, rld[r].in_reg, 24);
7386 }
7387
7388 if (rld[r].out_reg != 0)
7389 {
7390 fprintf (f, "\n\treload_out_reg: ");
7391 print_inline_rtx (f, rld[r].out_reg, 24);
7392 }
7393
7394 if (rld[r].reg_rtx != 0)
7395 {
7396 fprintf (f, "\n\treload_reg_rtx: ");
7397 print_inline_rtx (f, rld[r].reg_rtx, 24);
7398 }
7399
7400 prefix = "\n\t";
7401 if (rld[r].secondary_in_reload != -1)
7402 {
7403 fprintf (f, "%ssecondary_in_reload = %d",
7404 prefix, rld[r].secondary_in_reload);
7405 prefix = ", ";
7406 }
7407
7408 if (rld[r].secondary_out_reload != -1)
7409 fprintf (f, "%ssecondary_out_reload = %d\n",
7410 prefix, rld[r].secondary_out_reload);
7411
7412 prefix = "\n\t";
7413 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7414 {
7415 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7416 insn_data[rld[r].secondary_in_icode].name);
7417 prefix = ", ";
7418 }
7419
7420 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7421 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7422 insn_data[rld[r].secondary_out_icode].name);
7423
7424 fprintf (f, "\n");
7425 }
7426 }
7427
7428 DEBUG_FUNCTION void
7429 debug_reload (void)
7430 {
7431 debug_reload_to_stream (stderr);
7432 }