1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
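/* A hedged sketch of the calling sequence described above, as it is
   conceptually driven from reload1.c.  The argument names are
   illustrative, and the "choose"/"emit" steps stand for logic that
   lives in reload1.c rather than for real helper functions:

       find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
       ... choose a hard reg for each rld[j] and set rld[j].reg_rtx ...
       ... emit input reload insns before INSN and output reload insns
           after it ...
       subst_reloads (insn);  */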
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "function.h"
108 #include "params.h"
109 #include "target.h"
110 #include "ira.h"
111
112 /* True if X is a constant that can be forced into the constant pool.
113 MODE is the mode of the operand, or VOIDmode if not known. */
114 #define CONST_POOL_OK_P(MODE, X) \
115 ((MODE) != VOIDmode \
116 && CONSTANT_P (X) \
117 && GET_CODE (X) != HIGH \
118 && !targetm.cannot_force_const_mem (MODE, X))
119
120 /* True if RCLASS is a non-empty register class that has too few registers
121 to be safely used as a reload target class. */
122
123 static inline bool
124 small_register_class_p (reg_class_t rclass)
125 {
126 return (reg_class_size [(int) rclass] == 1
127 || (reg_class_size [(int) rclass] >= 1
128 && targetm.class_likely_spilled_p (rclass)));
129 }
130
131 \f
132 /* All reloads of the current insn are recorded here. See reload.h for
133 comments. */
134 int n_reloads;
135 struct reload rld[MAX_RELOADS];
136
137 /* All the "earlyclobber" operands of the current insn
138 are recorded here. */
139 int n_earlyclobbers;
140 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
141
142 int reload_n_operands;
143
144 /* Replacing reloads.
145
146 If `replace_reloads' is nonzero, then as each reload is recorded
147 an entry is made for it in the table `replacements'.
148 Then later `subst_reloads' can look through that table and
149 perform all the replacements needed. */
150
151 /* Nonzero means record the places to replace. */
152 static int replace_reloads;
153
154 /* Each replacement is recorded with a structure like this. */
155 struct replacement
156 {
157 rtx *where; /* Location to store in */
158 int what; /* which reload this is for */
159 enum machine_mode mode; /* mode it must have */
160 };
161
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
163
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
166
167 /* Used to track what is modified by an operand. */
168 struct decomposition
169 {
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
175 };
176
177 #ifdef SECONDARY_MEMORY_NEEDED
178
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
181
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that we reload
185 each one separately. */
186
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
191
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx this_insn;
195
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
198
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
203
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
209
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
212
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can be different from the reload number for the input operand. */
215 static int output_reloadnum;
216
217 /* Compare two RTX's. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
222
223 /* Indicates if two reload purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
233
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
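/* Worked example for the two macros above (reload kinds chosen for
   illustration): two RELOAD_FOR_INPUT reloads for different operands
   satisfy MERGABLE_RELOADS, and MERGE_TO_OTHER yields zero, so the
   merged reload keeps its RELOAD_FOR_INPUT classification.  Merging a
   RELOAD_FOR_INPUT_ADDRESS reload with a RELOAD_OTHER reload is also
   allowed, but MERGE_TO_OTHER is then nonzero, so the result must be
   classified RELOAD_OTHER.  */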
241
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
250
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, reg_class_t, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
271 addr_space_t, rtx *);
272 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
273 int, enum reload_type, int, rtx);
274 static rtx subst_reg_equivs (rtx, rtx);
275 static rtx subst_indexed_address (rtx);
276 static void update_auto_inc_notes (rtx, int, int);
277 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
278 enum rtx_code, enum rtx_code, rtx *,
279 int, enum reload_type, int, rtx);
280 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
281 enum machine_mode, int,
282 enum reload_type, int);
283 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
284 int, rtx, int *);
285 static void copy_replacements_1 (rtx *, rtx *, int);
286 static int find_inc_amount (rtx, rtx);
287 static int refers_to_mem_for_reload_p (rtx);
288 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
289 rtx, rtx *);
290
291 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
292 list yet. */
293
294 static void
295 push_reg_equiv_alt_mem (int regno, rtx mem)
296 {
297 rtx it;
298
299 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
300 if (rtx_equal_p (XEXP (it, 0), mem))
301 return;
302
303 reg_equiv_alt_mem_list (regno)
304 = alloc_EXPR_LIST (REG_EQUIV, mem,
305 reg_equiv_alt_mem_list (regno));
306 }
307 \f
308 /* Determine if any secondary reloads are needed for loading (if IN_P is
309 nonzero) or storing (if IN_P is zero) X to or from a reload register of
310 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
311 are needed, push them.
312
313 Return the reload number of the secondary reload we made, or -1 if
314 we didn't need one. *PICODE is set to the insn_code to use if we do
315 need a secondary reload. */
316
317 static int
318 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
319 enum reg_class reload_class,
320 enum machine_mode reload_mode, enum reload_type type,
321 enum insn_code *picode, secondary_reload_info *prev_sri)
322 {
323 enum reg_class rclass = NO_REGS;
324 enum reg_class scratch_class;
325 enum machine_mode mode = reload_mode;
326 enum insn_code icode = CODE_FOR_nothing;
327 enum insn_code t_icode = CODE_FOR_nothing;
328 enum reload_type secondary_type;
329 int s_reload, t_reload = -1;
330 const char *scratch_constraint;
331 char letter;
332 secondary_reload_info sri;
333
334 if (type == RELOAD_FOR_INPUT_ADDRESS
335 || type == RELOAD_FOR_OUTPUT_ADDRESS
336 || type == RELOAD_FOR_INPADDR_ADDRESS
337 || type == RELOAD_FOR_OUTADDR_ADDRESS)
338 secondary_type = type;
339 else
340 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
341
342 *picode = CODE_FOR_nothing;
343
344 /* If X is a paradoxical SUBREG, use the inner value to determine both the
345 mode and object being reloaded. */
346 if (paradoxical_subreg_p (x))
347 {
348 x = SUBREG_REG (x);
349 reload_mode = GET_MODE (x);
350 }
351
352 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
353 is still a pseudo-register by now, it *must* have an equivalent MEM
354 but we don't want to assume that), use that equivalent when seeing if
355 a secondary reload is needed since whether or not a reload is needed
356 might be sensitive to the form of the MEM. */
357
358 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
359 && reg_equiv_mem (REGNO (x)))
360 x = reg_equiv_mem (REGNO (x));
361
362 sri.icode = CODE_FOR_nothing;
363 sri.prev_sri = prev_sri;
364 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
365 reload_mode, &sri);
366 icode = (enum insn_code) sri.icode;
367
368 /* If we don't need any secondary registers, done. */
369 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
371
372 if (rclass != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
374 reload_mode, type, &t_icode, &sri);
375
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
378
379 if (icode != CODE_FOR_nothing)
380 {
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
385
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
390
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (rclass == NO_REGS);
396
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
405 scratch_constraint));
406
407 rclass = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
409 }
410
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
416
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from RELOAD_CLASS. If you have such a case,
419 you cannot use secondary reloads; you must work around the problem some
420 other way.
421
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
424
425 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
427
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
432 || reg_class_subset_p (rld[s_reload].rclass, rclass))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (small_register_class_p (rclass)
440 || targetm.small_register_classes_for_mode_p (VOIDmode))
441 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
442 opnum, rld[s_reload].opnum))
443 {
444 if (in_p)
445 rld[s_reload].inmode = mode;
446 if (! in_p)
447 rld[s_reload].outmode = mode;
448
449 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
450 rld[s_reload].rclass = rclass;
451
452 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
453 rld[s_reload].optional &= optional;
454 rld[s_reload].secondary_p = 1;
455 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
456 opnum, rld[s_reload].opnum))
457 rld[s_reload].when_needed = RELOAD_OTHER;
458
459 break;
460 }
461
462 if (s_reload == n_reloads)
463 {
464 #ifdef SECONDARY_MEMORY_NEEDED
465 /* If we need a memory location to copy between the two reload regs,
466 set it up now. Note that we do the input case before making
467 the reload and the output case after. This is due to the
468 way reloads are output. */
469
470 if (in_p && icode == CODE_FOR_nothing
471 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
472 {
473 get_secondary_mem (x, reload_mode, opnum, type);
474
475 /* We may have just added new reloads. Make sure we add
476 the new reload at the end. */
477 s_reload = n_reloads;
478 }
479 #endif
480
481 /* We need to make a new secondary reload for this register class. */
482 rld[s_reload].in = rld[s_reload].out = 0;
483 rld[s_reload].rclass = rclass;
484
485 rld[s_reload].inmode = in_p ? mode : VOIDmode;
486 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
487 rld[s_reload].reg_rtx = 0;
488 rld[s_reload].optional = optional;
489 rld[s_reload].inc = 0;
490 /* Maybe we could combine these, but it seems too tricky. */
491 rld[s_reload].nocombine = 1;
492 rld[s_reload].in_reg = 0;
493 rld[s_reload].out_reg = 0;
494 rld[s_reload].opnum = opnum;
495 rld[s_reload].when_needed = secondary_type;
496 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
497 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
498 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
499 rld[s_reload].secondary_out_icode
500 = ! in_p ? t_icode : CODE_FOR_nothing;
501 rld[s_reload].secondary_p = 1;
502
503 n_reloads++;
504
505 #ifdef SECONDARY_MEMORY_NEEDED
506 if (! in_p && icode == CODE_FOR_nothing
507 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
508 get_secondary_mem (x, mode, opnum, type);
509 #endif
510 }
511
512 *picode = icode;
513 return s_reload;
514 }
515
516 /* If a secondary reload is needed, return its class. If both an intermediate
517 register and a scratch register are needed, we return the class of the
518 intermediate register. */
519 reg_class_t
520 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
521 rtx x)
522 {
523 enum insn_code icode;
524 secondary_reload_info sri;
525
526 sri.icode = CODE_FOR_nothing;
527 sri.prev_sri = NULL;
528 rclass
529 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
530 icode = (enum insn_code) sri.icode;
531
532 /* If there are no secondary reloads at all, we return NO_REGS.
533 If an intermediate register is needed, we return its class. */
534 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
535 return rclass;
536
537 /* No intermediate register is needed, but we have a special reload
538 pattern, which we assume for now needs a scratch register. */
539 return scratch_reload_class (icode);
540 }
541
542 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
543 three operands, verify that operand 2 is an output operand, and return
544 its register class.
545 ??? We'd like to be able to handle any pattern with at least 2 operands,
546 for zero or more scratch registers, but that needs more infrastructure. */
547 enum reg_class
548 scratch_reload_class (enum insn_code icode)
549 {
550 const char *scratch_constraint;
551 char scratch_letter;
552 enum reg_class rclass;
553
554 gcc_assert (insn_data[(int) icode].n_operands == 3);
555 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
556 gcc_assert (*scratch_constraint == '=');
557 scratch_constraint++;
558 if (*scratch_constraint == '&')
559 scratch_constraint++;
560 scratch_letter = *scratch_constraint;
561 if (scratch_letter == 'r')
562 return GENERAL_REGS;
563 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
564 scratch_constraint);
565 gcc_assert (rclass != NO_REGS);
566 return rclass;
567 }
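/* Illustrative example for scratch_reload_class (constraint chosen for
   illustration only): a reload pattern whose scratch operand 2 uses the
   constraint "=&r" yields GENERAL_REGS, while a target-specific
   constraint letter is resolved through REG_CLASS_FROM_CONSTRAINT and
   must name a non-empty class.  */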
568 \f
569 #ifdef SECONDARY_MEMORY_NEEDED
570
571 /* Return a memory location that will be used to copy X in mode MODE.
572 If we haven't already made a location for this mode in this insn,
573 call find_reloads_address on the location being returned. */
574
575 rtx
576 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
577 int opnum, enum reload_type type)
578 {
579 rtx loc;
580 int mem_valid;
581
582 /* By default, if MODE is narrower than a word, widen it to a word.
583 This is required because most machines that require these memory
584 locations do not support short loads and stores from all registers
585 (e.g., FP registers). */
586
587 #ifdef SECONDARY_MEMORY_NEEDED_MODE
588 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
589 #else
590 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
591 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
592 #endif
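  /* Illustrative example (not target-specific guidance): with 32-bit
     words and no SECONDARY_MEMORY_NEEDED_MODE definition, a QImode or
     HImode copy would be staged through a word-sized (SImode) slot
     chosen by the mode_for_size call above.  */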
593
594 /* If we already have made a MEM for this operand in MODE, return it. */
595 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
596 return secondary_memlocs_elim[(int) mode][opnum];
597
598 /* If this is the first time we've tried to get a MEM for this mode,
599 allocate a new one. `something_changed' in reload will get set
600 by noticing that the frame size has changed. */
601
602 if (secondary_memlocs[(int) mode] == 0)
603 {
604 #ifdef SECONDARY_MEMORY_NEEDED_RTX
605 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
606 #else
607 secondary_memlocs[(int) mode]
608 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
609 #endif
610 }
611
612 /* Get a version of the address doing any eliminations needed. If that
613 didn't give us a new MEM, make a new one if it isn't valid. */
614
615 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
616 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
617 MEM_ADDR_SPACE (loc));
618
619 if (! mem_valid && loc == secondary_memlocs[(int) mode])
620 loc = copy_rtx (loc);
621
622 /* The only time the call below will do anything is if the stack
623 offset is too large. In that case IND_LEVELS doesn't matter, so we
624 can just pass a zero. Adjust the type to be the address of the
625 corresponding object. If the address was valid, save the eliminated
626 address. If it wasn't valid, we need to make a reload each time, so
627 don't save it. */
628
629 if (! mem_valid)
630 {
631 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
632 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
633 : RELOAD_OTHER);
634
635 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
636 opnum, type, 0, 0);
637 }
638
639 secondary_memlocs_elim[(int) mode][opnum] = loc;
640 if (secondary_memlocs_elim_used <= (int)mode)
641 secondary_memlocs_elim_used = (int)mode + 1;
642 return loc;
643 }
644
645 /* Clear any secondary memory locations we've made. */
646
647 void
648 clear_secondary_mem (void)
649 {
650 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
651 }
652 #endif /* SECONDARY_MEMORY_NEEDED */
653 \f
654
655 /* Find the largest class which has at least one register valid in
656 mode INNER, and for which, for every such register, that register number
657 plus N is also valid in OUTER (if in range) and is cheap to move
658 into DEST_REGNO. Such a class must exist. */
659
660 static enum reg_class
661 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
662 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
663 unsigned int dest_regno ATTRIBUTE_UNUSED)
664 {
665 int best_cost = -1;
666 int rclass;
667 int regno;
668 enum reg_class best_class = NO_REGS;
669 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
670 unsigned int best_size = 0;
671 int cost;
672
673 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
674 {
675 int bad = 0;
676 int good = 0;
677 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
678 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
679 {
680 if (HARD_REGNO_MODE_OK (regno, inner))
681 {
682 good = 1;
683 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
684 && ! HARD_REGNO_MODE_OK (regno + n, outer))
685 bad = 1;
686 }
687 }
688
689 if (bad || !good)
690 continue;
691 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
692
693 if ((reg_class_size[rclass] > best_size
694 && (best_cost < 0 || best_cost >= cost))
695 || best_cost > cost)
696 {
697 best_class = (enum reg_class) rclass;
698 best_size = reg_class_size[rclass];
699 best_cost = register_move_cost (outer, (enum reg_class) rclass,
700 dest_class);
701 }
702 }
703
704 gcc_assert (best_size != 0);
705
706 return best_class;
707 }
708
709 /* We are trying to reload a subreg of something that is not a register.
710 Find the largest class which contains only registers valid in
711 mode MODE. OUTER is the mode of the subreg, and DEST_CLASS is the class in
712 which we would eventually like to obtain the object. */
713
714 static enum reg_class
715 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
716 enum machine_mode mode ATTRIBUTE_UNUSED,
717 enum reg_class dest_class ATTRIBUTE_UNUSED)
718 {
719 int best_cost = -1;
720 int rclass;
721 int regno;
722 enum reg_class best_class = NO_REGS;
723 unsigned int best_size = 0;
724 int cost;
725
726 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
727 {
728 int bad = 0;
729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
730 {
731 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
732 && !HARD_REGNO_MODE_OK (regno, mode))
733 bad = 1;
734 }
735
736 if (bad)
737 continue;
738
739 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
740
741 if ((reg_class_size[rclass] > best_size
742 && (best_cost < 0 || best_cost >= cost))
743 || best_cost > cost)
744 {
745 best_class = (enum reg_class) rclass;
746 best_size = reg_class_size[rclass];
747 best_cost = register_move_cost (outer, (enum reg_class) rclass,
748 dest_class);
749 }
750 }
751
752 gcc_assert (best_size != 0);
753
754 #ifdef LIMIT_RELOAD_CLASS
755 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
756 #endif
757 return best_class;
758 }
759 \f
760 /* Return the number of a previously made reload that can be combined with
761 a new one, or n_reloads if none of the existing reloads can be used.
762 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
763 push_reload; they determine the kind of the new reload that we try to
764 combine. P_IN points to the corresponding value of IN, which can be
765 modified by this function.
766 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
767
768 static int
769 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
770 enum reload_type type, int opnum, int dont_share)
771 {
772 rtx in = *p_in;
773 int i;
774 /* We can't merge two reloads if the output of either one is
775 earlyclobbered. */
776
777 if (earlyclobber_operand_p (out))
778 return n_reloads;
779
780 /* We can use an existing reload if the class is right
781 and at least one of IN and OUT is a match
782 and the other is at worst neutral.
783 (A zero compared against anything is neutral.)
784
785 For targets with small register classes, don't use existing reloads
786 unless they are for the same thing since that can cause us to need
787 more reload registers than we otherwise would. */
788
789 for (i = 0; i < n_reloads; i++)
790 if ((reg_class_subset_p (rclass, rld[i].rclass)
791 || reg_class_subset_p (rld[i].rclass, rclass))
792 /* If the existing reload has a register, it must fit our class. */
793 && (rld[i].reg_rtx == 0
794 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
795 true_regnum (rld[i].reg_rtx)))
796 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
797 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
798 || (out != 0 && MATCHES (rld[i].out, out)
799 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
800 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
801 && (small_register_class_p (rclass)
802 || targetm.small_register_classes_for_mode_p (VOIDmode))
803 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
804 return i;
805
806 /* Reloading a plain reg for input can match a reload to postincrement
807 that reg, since the postincrement's value is the right value.
808 Likewise, it can match a preincrement reload, since we regard
809 the preincrementation as happening before any ref in this insn
810 to that register. */
811 for (i = 0; i < n_reloads; i++)
812 if ((reg_class_subset_p (rclass, rld[i].rclass)
813 || reg_class_subset_p (rld[i].rclass, rclass))
814 /* If the existing reload has a register, it must fit our
815 class. */
816 && (rld[i].reg_rtx == 0
817 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
818 true_regnum (rld[i].reg_rtx)))
819 && out == 0 && rld[i].out == 0 && rld[i].in != 0
820 && ((REG_P (in)
821 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
822 && MATCHES (XEXP (rld[i].in, 0), in))
823 || (REG_P (rld[i].in)
824 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
825 && MATCHES (XEXP (in, 0), rld[i].in)))
826 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
827 && (small_register_class_p (rclass)
828 || targetm.small_register_classes_for_mode_p (VOIDmode))
829 && MERGABLE_RELOADS (type, rld[i].when_needed,
830 opnum, rld[i].opnum))
831 {
832 /* Make sure reload_in ultimately has the increment,
833 not the plain register. */
834 if (REG_P (in))
835 *p_in = rld[i].in;
836 return i;
837 }
838 return n_reloads;
839 }
840
841 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
842 expression. MODE is the mode that X will be used in. OUTPUT is true if
843 the function is invoked for the output part of an enclosing reload. */
844
845 static bool
846 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
847 {
848 rtx inner;
849
850 /* Only SUBREGs are problematical. */
851 if (GET_CODE (x) != SUBREG)
852 return false;
853
854 inner = SUBREG_REG (x);
855
856 /* If INNER is a constant or PLUS, then INNER will need reloading. */
857 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
858 return true;
859
860 /* If INNER is not a hard register, then INNER will not need reloading. */
861 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
862 return false;
863
864 /* If INNER is not ok for MODE, then INNER will need reloading. */
865 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
866 return true;
867
868 /* If this is for an output, and the outer part is a word or smaller,
869 INNER is larger than a word and the number of registers in INNER is
870 not the same as the number of words in INNER, then INNER will need
871 reloading (with an in-out reload). */
872 return (output
873 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
874 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
875 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
876 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
877 }
878
879 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
880 requiring an extra reload register. The caller has already found that
881 IN contains some reference to REGNO, so check that we can produce the
882 new value in a single step. E.g. if we have
883 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
884 instruction that adds one to a register, this should succeed.
885 However, if we have something like
886 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
887 needs to be loaded into a register first, we need a separate reload
888 register.
889 Such PLUS reloads are generated by find_reloads_address_part.
890 The out-of-range PLUS expressions are usually introduced in the instruction
891 patterns by register elimination and substituting pseudos without a home
892 by their function-invariant equivalences. */
893 static int
894 can_reload_into (rtx in, int regno, enum machine_mode mode)
895 {
896 rtx dst, test_insn;
897 int r = 0;
898 struct recog_data_d save_recog_data;
899
900 /* For matching constraints, we often get notional input reloads where
901 we want to use the original register as the reload register. I.e.
902 technically this is a non-optional input-output reload, but IN is
903 already a valid register, and has been chosen as the reload register.
904 Speed this up, since it trivially works. */
905 if (REG_P (in))
906 return 1;
907
908 /* To test MEMs properly, we'd have to take into account all the reloads
909 that are already scheduled, which can become quite complicated.
910 And since we've already handled address reloads for this MEM, it
911 should always succeed anyway. */
912 if (MEM_P (in))
913 return 1;
914
915 /* If we can make a simple SET insn that does the job, everything should
916 be fine. */
917 dst = gen_rtx_REG (mode, regno);
918 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
919 save_recog_data = recog_data;
920 if (recog_memoized (test_insn) >= 0)
921 {
922 extract_insn (test_insn);
923 r = constrain_operands (1);
924 }
925 recog_data = save_recog_data;
926 return r;
927 }
928
929 /* Record one reload that needs to be performed.
930 IN is an rtx saying where the data are to be found before this instruction.
931 OUT says where they must be stored after the instruction.
932 (IN is zero for data not read, and OUT is zero for data not written.)
933 INLOC and OUTLOC point to the places in the instructions where
934 IN and OUT were found.
935 If IN and OUT are both nonzero, it means the same register must be used
936 to reload both IN and OUT.
937
938 RCLASS is a register class required for the reloaded data.
939 INMODE is the machine mode that the instruction requires
940 for the reg that replaces IN and OUTMODE is likewise for OUT.
941
942 If IN is zero, then OUT's location and mode should be passed as
943 INLOC and INMODE.
944
945 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
946
947 OPTIONAL nonzero means this reload does not need to be performed:
948 it can be discarded if that is more convenient.
949
950 OPNUM and TYPE say what the purpose of this reload is.
951
952 The return value is the reload-number for this reload.
953
954 If both IN and OUT are nonzero, in some rare cases we might
955 want to make two separate reloads. (Actually we never do this now.)
956 Therefore, the reload-number for OUT is stored in
957 output_reloadnum when we return; the return value applies to IN.
958 Usually (presently always), when IN and OUT are nonzero,
959 the two reload-numbers are equal, but the caller should be careful to
960 distinguish them. */
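/* As an illustration of the interface documented above (the exact
   arguments are hypothetical, not lifted from a particular caller), an
   input-only reload of operand I might be pushed as

       push_reload (recog_data.operand[i], NULL_RTX,
                    &recog_data.operand[i], (rtx *) 0,
                    rclass, operand_mode[i], VOIDmode,
                    0, optional, i, RELOAD_FOR_INPUT);

   while an output-only reload passes the operand as OUT and, per the
   note above, still describes it through INLOC and INMODE when IN is
   zero.  */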
961
962 int
963 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
964 enum reg_class rclass, enum machine_mode inmode,
965 enum machine_mode outmode, int strict_low, int optional,
966 int opnum, enum reload_type type)
967 {
968 int i;
969 int dont_share = 0;
970 int dont_remove_subreg = 0;
971 #ifdef LIMIT_RELOAD_CLASS
972 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
973 #endif
974 int secondary_in_reload = -1, secondary_out_reload = -1;
975 enum insn_code secondary_in_icode = CODE_FOR_nothing;
976 enum insn_code secondary_out_icode = CODE_FOR_nothing;
977 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
978 subreg_in_class = NO_REGS;
979
980 /* INMODE and/or OUTMODE could be VOIDmode if no mode
981 has been specified for the operand. In that case,
982 use the operand's mode as the mode to reload. */
983 if (inmode == VOIDmode && in != 0)
984 inmode = GET_MODE (in);
985 if (outmode == VOIDmode && out != 0)
986 outmode = GET_MODE (out);
987
988 /* If find_reloads and friends have so far failed to replace a pseudo
989 with its reg_equiv_constant, something went wrong
990 beforehand.
991 Note that it can't simply be done here if we missed it earlier
992 since the constant might need to be pushed into the literal pool
993 and the resulting memref would probably need further
994 reloading. */
995 if (in != 0 && REG_P (in))
996 {
997 int regno = REGNO (in);
998
999 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 || reg_renumber[regno] >= 0
1001 || reg_equiv_constant (regno) == NULL_RTX);
1002 }
1003
1004 /* reg_equiv_constant only contains constants which are obviously
1005 not appropriate as a destination. So if we would need to replace
1006 the destination pseudo with a constant, we are in real
1007 trouble. */
1008 if (out != 0 && REG_P (out))
1009 {
1010 int regno = REGNO (out);
1011
1012 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1013 || reg_renumber[regno] >= 0
1014 || reg_equiv_constant (regno) == NULL_RTX);
1015 }
1016
1017 /* If we have a read-write operand with an address side-effect,
1018 change either IN or OUT so the side-effect happens only once. */
1019 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1020 switch (GET_CODE (XEXP (in, 0)))
1021 {
1022 case POST_INC: case POST_DEC: case POST_MODIFY:
1023 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1024 break;
1025
1026 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1027 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1028 break;
1029
1030 default:
1031 break;
1032 }
1033
1034 /* If we are reloading a (SUBREG constant ...), really reload just the
1035 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1036 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1037 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1038 register is a pseudo, also reload the inside expression.
1039 For machines that extend byte loads, do this for any SUBREG of a pseudo
1040 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1041 M2 is an integral mode that gets extended when loaded.
1042 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1043 where either M1 is not valid for R or M2 is wider than a word but we
1044 only need one register to store an M2-sized quantity in R.
1045 (However, if OUT is nonzero, we need to reload the reg *and*
1046 the subreg, so do nothing here, and let following statement handle it.)
1047
1048 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1049 we can't handle it here because CONST_INT does not indicate a mode.
1050
1051 Similarly, we must reload the inside expression if we have a
1052 STRICT_LOW_PART (presumably, in == out in this case).
1053
1054 Also reload the inner expression if it does not require a secondary
1055 reload but the SUBREG does.
1056
1057 Finally, reload the inner expression if it is a register that is in
1058 the class whose registers cannot be referenced in a different size
1059 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1060 cannot reload just the inside since we might end up with the wrong
1061 register class. But if it is inside a STRICT_LOW_PART, we have
1062 no choice, so we hope we do get the right register class there. */
1063
1064 if (in != 0 && GET_CODE (in) == SUBREG
1065 && (subreg_lowpart_p (in) || strict_low)
1066 #ifdef CANNOT_CHANGE_MODE_CLASS
1067 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1068 #endif
1069 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1070 && (CONSTANT_P (SUBREG_REG (in))
1071 || GET_CODE (SUBREG_REG (in)) == PLUS
1072 || strict_low
1073 || (((REG_P (SUBREG_REG (in))
1074 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1075 || MEM_P (SUBREG_REG (in)))
1076 && ((GET_MODE_PRECISION (inmode)
1077 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1078 #ifdef LOAD_EXTEND_OP
1079 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1080 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1081 <= UNITS_PER_WORD)
1082 && (GET_MODE_PRECISION (inmode)
1083 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1084 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1085 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1086 #endif
1087 #ifdef WORD_REGISTER_OPERATIONS
1088 || ((GET_MODE_PRECISION (inmode)
1089 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1090 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1091 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1092 / UNITS_PER_WORD)))
1093 #endif
1094 ))
1095 || (REG_P (SUBREG_REG (in))
1096 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1097 /* The case where out is nonzero
1098 is handled differently in the following statement. */
1099 && (out == 0 || subreg_lowpart_p (in))
1100 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1101 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1102 > UNITS_PER_WORD)
1103 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1104 / UNITS_PER_WORD)
1105 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1106 [GET_MODE (SUBREG_REG (in))]))
1107 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1108 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1109 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1110 SUBREG_REG (in))
1111 == NO_REGS))
1112 #ifdef CANNOT_CHANGE_MODE_CLASS
1113 || (REG_P (SUBREG_REG (in))
1114 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1115 && REG_CANNOT_CHANGE_MODE_P
1116 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1117 #endif
1118 ))
1119 {
1120 #ifdef LIMIT_RELOAD_CLASS
1121 in_subreg_loc = inloc;
1122 #endif
1123 inloc = &SUBREG_REG (in);
1124 in = *inloc;
1125 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1126 if (MEM_P (in))
1127 /* This is supposed to happen only for paradoxical subregs made by
1128 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1129 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1130 #endif
1131 inmode = GET_MODE (in);
1132 }
1133
1134 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1135 where M1 is not valid for R if it was not handled by the code above.
1136
1137 Similar issue for (SUBREG constant ...) if it was not handled by the
1138 code above. This can happen if SUBREG_BYTE != 0.
1139
1140 However, we must reload the inner reg *as well as* the subreg in
1141 that case. */
1142
1143 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1144 {
1145 if (REG_P (SUBREG_REG (in)))
1146 subreg_in_class
1147 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1148 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1149 GET_MODE (SUBREG_REG (in)),
1150 SUBREG_BYTE (in),
1151 GET_MODE (in)),
1152 REGNO (SUBREG_REG (in)));
1153 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1154 subreg_in_class = find_valid_class_1 (inmode,
1155 GET_MODE (SUBREG_REG (in)),
1156 rclass);
1157
1158 /* This relies on the fact that emit_reload_insns outputs the
1159 instructions for input reloads of type RELOAD_OTHER in the same
1160 order as the reloads. Thus if the outer reload is also of type
1161 RELOAD_OTHER, we are guaranteed that this inner reload will be
1162 output before the outer reload. */
1163 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1164 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1165 dont_remove_subreg = 1;
1166 }
1167
1168 /* Similarly for paradoxical and problematical SUBREGs on the output.
1169 Note that there is no reason we need worry about the previous value
1170 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1171 entitled to clobber it all (except in the case of a word mode subreg
1172 or of a STRICT_LOW_PART, in that latter case the constraint should
1173 label it input-output.) */
1174 if (out != 0 && GET_CODE (out) == SUBREG
1175 && (subreg_lowpart_p (out) || strict_low)
1176 #ifdef CANNOT_CHANGE_MODE_CLASS
1177 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1178 #endif
1179 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1180 && (CONSTANT_P (SUBREG_REG (out))
1181 || strict_low
1182 || (((REG_P (SUBREG_REG (out))
1183 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1184 || MEM_P (SUBREG_REG (out)))
1185 && ((GET_MODE_PRECISION (outmode)
1186 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1187 #ifdef WORD_REGISTER_OPERATIONS
1188 || ((GET_MODE_PRECISION (outmode)
1189 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1190 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1191 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1192 / UNITS_PER_WORD)))
1193 #endif
1194 ))
1195 || (REG_P (SUBREG_REG (out))
1196 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1197 /* The case of a word mode subreg
1198 is handled differently in the following statement. */
1199 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1200 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1201 > UNITS_PER_WORD))
1202 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1203 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1204 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1205 SUBREG_REG (out))
1206 == NO_REGS))
1207 #ifdef CANNOT_CHANGE_MODE_CLASS
1208 || (REG_P (SUBREG_REG (out))
1209 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1210 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1211 GET_MODE (SUBREG_REG (out)),
1212 outmode))
1213 #endif
1214 ))
1215 {
1216 #ifdef LIMIT_RELOAD_CLASS
1217 out_subreg_loc = outloc;
1218 #endif
1219 outloc = &SUBREG_REG (out);
1220 out = *outloc;
1221 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1222 gcc_assert (!MEM_P (out)
1223 || GET_MODE_SIZE (GET_MODE (out))
1224 <= GET_MODE_SIZE (outmode));
1225 #endif
1226 outmode = GET_MODE (out);
1227 }
1228
1229 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1230 where either M1 is not valid for R or M2 is wider than a word but we
1231 only need one register to store an M2-sized quantity in R.
1232
1233 However, we must reload the inner reg *as well as* the subreg in
1234 that case and the inner reg is an in-out reload. */
1235
1236 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1237 {
1238 enum reg_class in_out_class
1239 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1240 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1241 GET_MODE (SUBREG_REG (out)),
1242 SUBREG_BYTE (out),
1243 GET_MODE (out)),
1244 REGNO (SUBREG_REG (out)));
1245
1246 /* This relies on the fact that emit_reload_insns outputs the
1247 instructions for output reloads of type RELOAD_OTHER in reverse
1248 order of the reloads. Thus if the outer reload is also of type
1249 RELOAD_OTHER, we are guaranteed that this inner reload will be
1250 output after the outer reload. */
1251 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1252 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1253 0, 0, opnum, RELOAD_OTHER);
1254 dont_remove_subreg = 1;
1255 }
1256
1257 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1258 if (in != 0 && out != 0 && MEM_P (out)
1259 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1260 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1261 dont_share = 1;
1262
1263 /* If IN is a SUBREG of a hard register, make a new REG. This
1264 simplifies some of the cases below. */
1265
1266 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1267 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1268 && ! dont_remove_subreg)
1269 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1270
1271 /* Similarly for OUT. */
1272 if (out != 0 && GET_CODE (out) == SUBREG
1273 && REG_P (SUBREG_REG (out))
1274 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1275 && ! dont_remove_subreg)
1276 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1277
1278 /* Narrow down the class of register wanted if that is
1279 desirable on this machine for efficiency. */
1280 {
1281 reg_class_t preferred_class = rclass;
1282
1283 if (in != 0)
1284 preferred_class = targetm.preferred_reload_class (in, rclass);
1285
1286 /* Output reloads may need analogous treatment, different in detail. */
1287 if (out != 0)
1288 preferred_class
1289 = targetm.preferred_output_reload_class (out, preferred_class);
1290
1291 /* Discard what the target said if we cannot do it. */
1292 if (preferred_class != NO_REGS
1293 || (optional && type == RELOAD_FOR_OUTPUT))
1294 rclass = (enum reg_class) preferred_class;
1295 }
1296
1297 /* Make sure we use a class that can handle the actual pseudo
1298 inside any subreg. For example, on the 386, QImode regs
1299 can appear within SImode subregs. Although GENERAL_REGS
1300 can handle SImode, QImode needs a smaller class. */
1301 #ifdef LIMIT_RELOAD_CLASS
1302 if (in_subreg_loc)
1303 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1304 else if (in != 0 && GET_CODE (in) == SUBREG)
1305 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1306
1307 if (out_subreg_loc)
1308 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1309 if (out != 0 && GET_CODE (out) == SUBREG)
1310 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1311 #endif
1312
1313 /* Verify that this class is at least possible for the mode that
1314 is specified. */
1315 if (this_insn_is_asm)
1316 {
1317 enum machine_mode mode;
1318 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1319 mode = inmode;
1320 else
1321 mode = outmode;
1322 if (mode == VOIDmode)
1323 {
1324 error_for_asm (this_insn, "cannot reload integer constant "
1325 "operand in %<asm%>");
1326 mode = word_mode;
1327 if (in != 0)
1328 inmode = word_mode;
1329 if (out != 0)
1330 outmode = word_mode;
1331 }
1332 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1333 if (HARD_REGNO_MODE_OK (i, mode)
1334 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1335 break;
1336 if (i == FIRST_PSEUDO_REGISTER)
1337 {
1338 error_for_asm (this_insn, "impossible register constraint "
1339 "in %<asm%>");
1340 /* Avoid further trouble with this insn. */
1341 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1342 /* We used to continue here setting class to ALL_REGS, but it triggers
1343 sanity check on i386 for:
1344 void foo(long double d)
1345 {
1346 asm("" :: "a" (d));
1347 }
1348 Returning zero here ought to be safe as we take care in
1349 find_reloads to not process the reloads when instruction was
1350 replaced by USE. */
1351
1352 return 0;
1353 }
1354 }
1355
1356 /* Optional output reloads are always OK even if we have no register class,
1357 since the function of these reloads is only to have spill_reg_store etc.
1358 set, so that the storing insn can be deleted later. */
1359 gcc_assert (rclass != NO_REGS
1360 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1361
1362 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1363
1364 if (i == n_reloads)
1365 {
1366 /* See if we need a secondary reload register to move between CLASS
1367 and IN or CLASS and OUT. Get the icode and push any required reloads
1368 needed for each of them if so. */
1369
1370 if (in != 0)
1371 secondary_in_reload
1372 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1373 &secondary_in_icode, NULL);
1374 if (out != 0 && GET_CODE (out) != SCRATCH)
1375 secondary_out_reload
1376 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1377 type, &secondary_out_icode, NULL);
1378
1379 /* We found no existing reload suitable for re-use.
1380 So add an additional reload. */
1381
1382 #ifdef SECONDARY_MEMORY_NEEDED
1383 if (subreg_in_class == NO_REGS
1384 && in != 0
1385 && (REG_P (in)
1386 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1387 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1388 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1389 /* If a memory location is needed for the copy, make one. */
1390 if (subreg_in_class != NO_REGS
1391 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1392 get_secondary_mem (in, inmode, opnum, type);
1393 #endif
1394
1395 i = n_reloads;
1396 rld[i].in = in;
1397 rld[i].out = out;
1398 rld[i].rclass = rclass;
1399 rld[i].inmode = inmode;
1400 rld[i].outmode = outmode;
1401 rld[i].reg_rtx = 0;
1402 rld[i].optional = optional;
1403 rld[i].inc = 0;
1404 rld[i].nocombine = 0;
1405 rld[i].in_reg = inloc ? *inloc : 0;
1406 rld[i].out_reg = outloc ? *outloc : 0;
1407 rld[i].opnum = opnum;
1408 rld[i].when_needed = type;
1409 rld[i].secondary_in_reload = secondary_in_reload;
1410 rld[i].secondary_out_reload = secondary_out_reload;
1411 rld[i].secondary_in_icode = secondary_in_icode;
1412 rld[i].secondary_out_icode = secondary_out_icode;
1413 rld[i].secondary_p = 0;
1414
1415 n_reloads++;
1416
1417 #ifdef SECONDARY_MEMORY_NEEDED
1418 if (out != 0
1419 && (REG_P (out)
1420 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1421 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1422 && SECONDARY_MEMORY_NEEDED (rclass,
1423 REGNO_REG_CLASS (reg_or_subregno (out)),
1424 outmode))
1425 get_secondary_mem (out, outmode, opnum, type);
1426 #endif
1427 }
1428 else
1429 {
1430 /* We are reusing an existing reload,
1431 but we may have additional information for it.
1432 For example, we may now have both IN and OUT
1433 while the old one may have just one of them. */
1434
1435 /* The modes can be different. If they are, we want to reload in
1436 the larger mode, so that the value is valid for both modes. */
1437 if (inmode != VOIDmode
1438 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1439 rld[i].inmode = inmode;
1440 if (outmode != VOIDmode
1441 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1442 rld[i].outmode = outmode;
1443 if (in != 0)
1444 {
1445 rtx in_reg = inloc ? *inloc : 0;
1446 /* If we merge reloads for two distinct rtl expressions that
1447 are identical in content, there might be duplicate address
1448 reloads. Remove the extra set now, so that if we later find
1449 that we can inherit this reload, we can get rid of the
1450 address reloads altogether.
1451
1452 Do not do this if both reloads are optional since the result
1453 would be an optional reload which could potentially leave
1454 unresolved address replacements.
1455
1456 It is not sufficient to call transfer_replacements since
1457 choose_reload_regs will remove the replacements for address
1458 reloads of inherited reloads which results in the same
1459 problem. */
1460 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1461 && ! (rld[i].optional && optional))
1462 {
1463 /* We must keep the address reload with the lower operand
1464 number alive. */
1465 if (opnum > rld[i].opnum)
1466 {
1467 remove_address_replacements (in);
1468 in = rld[i].in;
1469 in_reg = rld[i].in_reg;
1470 }
1471 else
1472 remove_address_replacements (rld[i].in);
1473 }
1474 /* When emitting reloads we don't only look at the in- and
1475 outmode, but also directly at the operands (in and out).
1476 So we can't simply overwrite them with whatever we have found
1477 for this (to-be-merged) reload; we have to "merge" that too.
1478 Reusing another reload already verified that we deal with the
1479 same operands, just possibly in different modes. So we
1480 overwrite the operands only when the new mode is larger.
1481 See also PR33613. */
1482 if (!rld[i].in
1483 || GET_MODE_SIZE (GET_MODE (in))
1484 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1485 rld[i].in = in;
1486 if (!rld[i].in_reg
1487 || (in_reg
1488 && GET_MODE_SIZE (GET_MODE (in_reg))
1489 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1490 rld[i].in_reg = in_reg;
1491 }
1492 if (out != 0)
1493 {
1494 if (!rld[i].out
1495 || (out
1496 && GET_MODE_SIZE (GET_MODE (out))
1497 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1498 rld[i].out = out;
1499 if (outloc
1500 && (!rld[i].out_reg
1501 || GET_MODE_SIZE (GET_MODE (*outloc))
1502 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1503 rld[i].out_reg = *outloc;
1504 }
1505 if (reg_class_subset_p (rclass, rld[i].rclass))
1506 rld[i].rclass = rclass;
1507 rld[i].optional &= optional;
1508 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1509 opnum, rld[i].opnum))
1510 rld[i].when_needed = RELOAD_OTHER;
1511 rld[i].opnum = MIN (rld[i].opnum, opnum);
1512 }
1513
1514 /* If the ostensible rtx being reloaded differs from the rtx found
1515 in the location to substitute, this reload is not safe to combine
1516 because we cannot reliably tell whether it appears in the insn. */
1517
1518 if (in != 0 && in != *inloc)
1519 rld[i].nocombine = 1;
1520
1521 #if 0
1522 /* This was replaced by changes in find_reloads_address_1 and the new
1523 function inc_for_reload, which go with a new meaning of reload_inc. */
1524
1525 /* If this is an IN/OUT reload in an insn that sets the CC,
1526 it must be for an autoincrement. It doesn't work to store
1527 the incremented value after the insn because that would clobber the CC.
1528 So we must load the value reloaded from, increment it,
1529 store it back, then decrement it again. */
1530 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1531 {
1532 out = 0;
1533 rld[i].out = 0;
1534 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1535 /* If we did not find a nonzero amount-to-increment-by,
1536 that contradicts the belief that IN is being incremented
1537 in an address in this insn. */
1538 gcc_assert (rld[i].inc != 0);
1539 }
1540 #endif
1541
1542 /* If we will replace IN and OUT with the reload-reg,
1543 record where they are located so that substitution need
1544 not do a tree walk. */
1545
1546 if (replace_reloads)
1547 {
1548 if (inloc != 0)
1549 {
1550 struct replacement *r = &replacements[n_replacements++];
1551 r->what = i;
1552 r->where = inloc;
1553 r->mode = inmode;
1554 }
1555 if (outloc != 0 && outloc != inloc)
1556 {
1557 struct replacement *r = &replacements[n_replacements++];
1558 r->what = i;
1559 r->where = outloc;
1560 r->mode = outmode;
1561 }
1562 }
1563
1564 /* If this reload is just being introduced and it has both
1565 an incoming quantity and an outgoing quantity that are
1566 supposed to be made to match, see if either one of the two
1567 can serve as the place to reload into.
1568
1569 If one of them is acceptable, set rld[i].reg_rtx
1570 to that one. */
1571
1572 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1573 {
1574 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1575 inmode, outmode,
1576 rld[i].rclass, i,
1577 earlyclobber_operand_p (out));
1578
1579 /* If the outgoing register already contains the same value
1580 as the incoming one, we can dispense with loading it.
1581 The easiest way to tell the caller that is to give a phony
1582 value for the incoming operand (same as outgoing one). */
1583 if (rld[i].reg_rtx == out
1584 && (REG_P (in) || CONSTANT_P (in))
1585 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1586 static_reload_reg_p, i, inmode))
1587 rld[i].in = out;
1588 }
1589
1590 /* If this is an input reload and the operand contains a register that
1591 dies in this insn and is used nowhere else, see if it is the right class
1592 to be used for this reload. Use it if so. (This occurs most commonly
1593 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1594 this if it is also an output reload that mentions the register unless
1595 the output is a SUBREG that clobbers an entire register.
1596
1597 Note that the operand might be one of the spill regs, if it is a
1598 pseudo reg and we are in a block where spilling has not taken place.
1599 But if there is no spilling in this block, that is OK.
1600 An explicitly used hard reg cannot be a spill reg. */
1601
1602 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1603 {
1604 rtx note;
1605 int regno;
1606 enum machine_mode rel_mode = inmode;
1607
1608 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1609 rel_mode = outmode;
1610
1611 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1612 if (REG_NOTE_KIND (note) == REG_DEAD
1613 && REG_P (XEXP (note, 0))
1614 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1615 && reg_mentioned_p (XEXP (note, 0), in)
1616 /* Check that a former pseudo is valid; see find_dummy_reload. */
1617 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1618 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1619 ORIGINAL_REGNO (XEXP (note, 0)))
1620 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1621 && ! refers_to_regno_for_reload_p (regno,
1622 end_hard_regno (rel_mode,
1623 regno),
1624 PATTERN (this_insn), inloc)
1625 /* If this is also an output reload, IN cannot be used as
1626 the reload register if it is set in this insn unless IN
1627 is also OUT. */
1628 && (out == 0 || in == out
1629 || ! hard_reg_set_here_p (regno,
1630 end_hard_regno (rel_mode, regno),
1631 PATTERN (this_insn)))
1632 /* ??? Why is this code so different from the previous?
1633 Is there any simple coherent way to describe the two together?
1634 What's going on here? */
1635 && (in != out
1636 || (GET_CODE (in) == SUBREG
1637 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1638 / UNITS_PER_WORD)
1639 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1640 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1641 /* Make sure the operand fits in the reg that dies. */
1642 && (GET_MODE_SIZE (rel_mode)
1643 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1644 && HARD_REGNO_MODE_OK (regno, inmode)
1645 && HARD_REGNO_MODE_OK (regno, outmode))
1646 {
1647 unsigned int offs;
1648 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1649 hard_regno_nregs[regno][outmode]);
1650
1651 for (offs = 0; offs < nregs; offs++)
1652 if (fixed_regs[regno + offs]
1653 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1654 regno + offs))
1655 break;
1656
1657 if (offs == nregs
1658 && (! (refers_to_regno_for_reload_p
1659 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1660 || can_reload_into (in, regno, inmode)))
1661 {
1662 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1663 break;
1664 }
1665 }
1666 }
1667
1668 if (out)
1669 output_reloadnum = i;
1670
1671 return i;
1672 }
1673
1674 /* Record an additional place we must replace a value
1675 for which we have already recorded a reload.
1676 RELOADNUM is the value returned by push_reload
1677 when the reload was recorded.
1678 This is used in insn patterns that use match_dup. */
1679
1680 static void
1681 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1682 {
1683 if (replace_reloads)
1684 {
1685 struct replacement *r = &replacements[n_replacements++];
1686 r->what = reloadnum;
1687 r->where = loc;
1688 r->mode = mode;
1689 }
1690 }
1691
1692 /* Duplicate any replacement we have recorded to apply at
1693 location ORIG_LOC to also be performed at DUP_LOC.
1694 This is used in insn patterns that use match_dup. */
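/* Illustrative sketch (hypothetical pattern, not from the original
   sources): for an insn pattern such as
     (set (match_operand:SI 0 "register_operand" "")
          (plus:SI (match_dup 0) (match_operand:SI 1 "register_operand" "")))
   a replacement recorded for the location of operand 0 must also be
   performed at the location of the (match_dup 0); dup_replacements copies
   the recorded replacement to that duplicate location.  */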
1695
1696 static void
1697 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1698 {
1699 int i, n = n_replacements;
1700
1701 for (i = 0; i < n; i++)
1702 {
1703 struct replacement *r = &replacements[i];
1704 if (r->where == orig_loc)
1705 push_replacement (dup_loc, r->what, r->mode);
1706 }
1707 }
1708 \f
1709 /* Transfer all replacements that used to be in reload FROM to be in
1710 reload TO. */
1711
1712 void
1713 transfer_replacements (int to, int from)
1714 {
1715 int i;
1716
1717 for (i = 0; i < n_replacements; i++)
1718 if (replacements[i].what == from)
1719 replacements[i].what = to;
1720 }
1721 \f
1722 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1723 or a subpart of it. If we have any replacements registered for IN_RTX,
1724 cancel the reloads that were supposed to load them.
1725 Return nonzero if we canceled any reloads. */
1726 int
1727 remove_address_replacements (rtx in_rtx)
1728 {
1729 int i, j;
1730 char reload_flags[MAX_RELOADS];
1731 int something_changed = 0;
1732
1733 memset (reload_flags, 0, sizeof reload_flags);
1734 for (i = 0, j = 0; i < n_replacements; i++)
1735 {
1736 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1737 reload_flags[replacements[i].what] |= 1;
1738 else
1739 {
1740 replacements[j++] = replacements[i];
1741 reload_flags[replacements[i].what] |= 2;
1742 }
1743 }
1744 /* Note that the following store must be done before the recursive calls. */
1745 n_replacements = j;
1746
1747 for (i = n_reloads - 1; i >= 0; i--)
1748 {
1749 if (reload_flags[i] == 1)
1750 {
1751 deallocate_reload_reg (i);
1752 remove_address_replacements (rld[i].in);
1753 rld[i].in = 0;
1754 something_changed = 1;
1755 }
1756 }
1757 return something_changed;
1758 }
1759 \f
1760 /* If there is only one output reload, and it is not for an earlyclobber
1761 operand, try to combine it with a (logically unrelated) input reload
1762 to reduce the number of reload registers needed.
1763
1764 This is safe if the input reload does not appear in
1765 the value being output-reloaded, because this implies
1766 it is not needed any more once the original insn completes.
1767
1768 If that doesn't work, see if we can use any of the registers that
1769 die in this insn as a reload register. We can if it is of the right
1770 class and does not appear in the value being output-reloaded. */
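/* A hedged example (hypothetical three-address target, not from the
   original sources): for an insn like
     (set (reg:SI P1) (plus:SI (reg:SI P2) (const_int 4)))
   where pseudos P1 and P2 both failed to get hard registers, the input
   reload of P2 and the output reload of P1 can share one reload register:
   P2 does not appear in the value being stored into P1, so its reload
   register is free to receive the result once the insn has executed.  */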
1771
1772 static void
1773 combine_reloads (void)
1774 {
1775 int i, regno;
1776 int output_reload = -1;
1777 int secondary_out = -1;
1778 rtx note;
1779
1780 /* Find the output reload; return unless there is exactly one
1781 and that one is mandatory. */
1782
1783 for (i = 0; i < n_reloads; i++)
1784 if (rld[i].out != 0)
1785 {
1786 if (output_reload >= 0)
1787 return;
1788 output_reload = i;
1789 }
1790
1791 if (output_reload < 0 || rld[output_reload].optional)
1792 return;
1793
1794 /* An input-output reload isn't combinable. */
1795
1796 if (rld[output_reload].in != 0)
1797 return;
1798
1799 /* If this reload is for an earlyclobber operand, we can't do anything. */
1800 if (earlyclobber_operand_p (rld[output_reload].out))
1801 return;
1802
1803 /* If there is a reload for part of the address of this operand, we would
1804 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1805 its life to the point where doing this combine would not lower the
1806 number of spill registers needed. */
1807 for (i = 0; i < n_reloads; i++)
1808 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1809 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1810 && rld[i].opnum == rld[output_reload].opnum)
1811 return;
1812
1813 /* Check each input reload; can we combine it? */
1814
1815 for (i = 0; i < n_reloads; i++)
1816 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1817 /* Life span of this reload must not extend past main insn. */
1818 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1819 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1820 && rld[i].when_needed != RELOAD_OTHER
1821 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1822 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1823 [(int) rld[output_reload].outmode])
1824 && rld[i].inc == 0
1825 && rld[i].reg_rtx == 0
1826 #ifdef SECONDARY_MEMORY_NEEDED
1827 /* Don't combine two reloads with different secondary
1828 memory locations. */
1829 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1830 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1831 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1832 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1833 #endif
1834 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1835 ? (rld[i].rclass == rld[output_reload].rclass)
1836 : (reg_class_subset_p (rld[i].rclass,
1837 rld[output_reload].rclass)
1838 || reg_class_subset_p (rld[output_reload].rclass,
1839 rld[i].rclass)))
1840 && (MATCHES (rld[i].in, rld[output_reload].out)
1841 /* Args reversed because the first arg seems to be
1842 the one that we imagine being modified
1843 while the second is the one that might be affected. */
1844 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1845 rld[i].in)
1846 /* However, if the input is a register that appears inside
1847 the output, then we also can't share.
1848 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1849 If the same reload reg is used for both reg 69 and the
1850 result to be stored in memory, then that result
1851 will clobber the address of the memory ref. */
1852 && ! (REG_P (rld[i].in)
1853 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1854 rld[output_reload].out))))
1855 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1856 rld[i].when_needed != RELOAD_FOR_INPUT)
1857 && (reg_class_size[(int) rld[i].rclass]
1858 || targetm.small_register_classes_for_mode_p (VOIDmode))
1859 /* We will allow making things slightly worse by combining an
1860 input and an output, but no worse than that. */
1861 && (rld[i].when_needed == RELOAD_FOR_INPUT
1862 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1863 {
1864 int j;
1865
1866 /* We have found a reload to combine with! */
1867 rld[i].out = rld[output_reload].out;
1868 rld[i].out_reg = rld[output_reload].out_reg;
1869 rld[i].outmode = rld[output_reload].outmode;
1870 /* Mark the old output reload as inoperative. */
1871 rld[output_reload].out = 0;
1872 /* The combined reload is needed for the entire insn. */
1873 rld[i].when_needed = RELOAD_OTHER;
1874 /* If the output reload had a secondary reload, copy it. */
1875 if (rld[output_reload].secondary_out_reload != -1)
1876 {
1877 rld[i].secondary_out_reload
1878 = rld[output_reload].secondary_out_reload;
1879 rld[i].secondary_out_icode
1880 = rld[output_reload].secondary_out_icode;
1881 }
1882
1883 #ifdef SECONDARY_MEMORY_NEEDED
1884 /* Copy any secondary MEM. */
1885 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1886 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1887 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1888 #endif
1889 /* If required, minimize the register class. */
1890 if (reg_class_subset_p (rld[output_reload].rclass,
1891 rld[i].rclass))
1892 rld[i].rclass = rld[output_reload].rclass;
1893
1894 /* Transfer all replacements from the old reload to the combined. */
1895 for (j = 0; j < n_replacements; j++)
1896 if (replacements[j].what == output_reload)
1897 replacements[j].what = i;
1898
1899 return;
1900 }
1901
1902 /* If this insn has only one operand that is modified or written (assumed
1903 to be the first), it must be the one corresponding to this reload. It
1904 is safe to use anything that dies in this insn for that output provided
1905 that it does not occur in the output (we already know it isn't an
1906 earlyclobber). If this is an asm insn, give up. */
1907
1908 if (INSN_CODE (this_insn) == -1)
1909 return;
1910
1911 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1912 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1913 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1914 return;
1915
1916 /* See if some hard register that dies in this insn and is not used in
1917 the output is the right class. Only works if the register we pick
1918 up can fully hold our output reload. */
1919 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1920 if (REG_NOTE_KIND (note) == REG_DEAD
1921 && REG_P (XEXP (note, 0))
1922 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1923 rld[output_reload].out)
1924 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1925 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1926 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1927 regno)
1928 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1929 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1930 /* Ensure that a secondary or tertiary reload for this output
1931 won't want this register. */
1932 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1933 || (!(TEST_HARD_REG_BIT
1934 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1935 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1936 || !(TEST_HARD_REG_BIT
1937 (reg_class_contents[(int) rld[secondary_out].rclass],
1938 regno)))))
1939 && !fixed_regs[regno]
1940 /* Check that a former pseudo is valid; see find_dummy_reload. */
1941 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1942 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1943 ORIGINAL_REGNO (XEXP (note, 0)))
1944 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1945 {
1946 rld[output_reload].reg_rtx
1947 = gen_rtx_REG (rld[output_reload].outmode, regno);
1948 return;
1949 }
1950 }
1951 \f
1952 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1953 See if one of IN and OUT is a register that may be used;
1954 this is desirable since a spill-register won't be needed.
1955 If so, return the register rtx that proves acceptable.
1956
1957 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1958 RCLASS is the register class required for the reload.
1959
1960 If FOR_REAL is >= 0, it is the number of the reload,
1961 and in some cases when it can be discovered that OUT doesn't need
1962 to be computed, clear out rld[FOR_REAL].out.
1963
1964 If FOR_REAL is -1, this should not be done, because this call
1965 is just to see if a register can be found, not to find and install it.
1966
1967 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1968 puts an additional constraint on being able to use IN for OUT since
1969 IN must not appear elsewhere in the insn (it is assumed that IN itself
1970 is safe from the earlyclobber). */
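/* Illustrative sketch (hypothetical operands, not from the original
   sources): for an in-out reload with IN = (reg:SI P), a pseudo, and
   OUT = (reg:SI 3), a hard register that belongs to RCLASS, is not fixed,
   and is not referenced elsewhere in the insn, OUT itself is returned and
   can serve as the reload register, so no separate spill register is
   needed.  */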
1971
1972 static rtx
1973 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1974 enum machine_mode inmode, enum machine_mode outmode,
1975 reg_class_t rclass, int for_real, int earlyclobber)
1976 {
1977 rtx in = real_in;
1978 rtx out = real_out;
1979 int in_offset = 0;
1980 int out_offset = 0;
1981 rtx value = 0;
1982
1983 /* If operands exceed a word, we can't use either of them
1984 unless they have the same size. */
1985 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1986 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1987 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1988 return 0;
1989
1990 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1991 respectively refers to a hard register. */
1992
1993 /* Find the inside of any subregs. */
1994 while (GET_CODE (out) == SUBREG)
1995 {
1996 if (REG_P (SUBREG_REG (out))
1997 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1998 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1999 GET_MODE (SUBREG_REG (out)),
2000 SUBREG_BYTE (out),
2001 GET_MODE (out));
2002 out = SUBREG_REG (out);
2003 }
2004 while (GET_CODE (in) == SUBREG)
2005 {
2006 if (REG_P (SUBREG_REG (in))
2007 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2008 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2009 GET_MODE (SUBREG_REG (in)),
2010 SUBREG_BYTE (in),
2011 GET_MODE (in));
2012 in = SUBREG_REG (in);
2013 }
2014
2015 /* Narrow down the reg class, the same way push_reload will;
2016 otherwise we might find a dummy now, but push_reload won't. */
2017 {
2018 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2019 if (preferred_class != NO_REGS)
2020 rclass = (enum reg_class) preferred_class;
2021 }
2022
2023 /* See if OUT will do. */
2024 if (REG_P (out)
2025 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2026 {
2027 unsigned int regno = REGNO (out) + out_offset;
2028 unsigned int nwords = hard_regno_nregs[regno][outmode];
2029 rtx saved_rtx;
2030
2031 /* When we consider whether the insn uses OUT,
2032 ignore references within IN. They don't prevent us
2033 from copying IN into OUT, because those refs would
2034 move into the insn that reloads IN.
2035
2036 However, we only ignore IN in its role as this reload.
2037 If the insn uses IN elsewhere and it contains OUT,
2038 that counts. We can't be sure it's the "same" operand
2039 so it might not go through this reload.
2040
2041 We also need to avoid using OUT if it, or part of it, is a
2042 fixed register. Modifying such registers, even transiently,
2043 may have undefined effects on the machine, such as modifying
2044 the stack pointer. */
2045 saved_rtx = *inloc;
2046 *inloc = const0_rtx;
2047
2048 if (regno < FIRST_PSEUDO_REGISTER
2049 && HARD_REGNO_MODE_OK (regno, outmode)
2050 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2051 PATTERN (this_insn), outloc))
2052 {
2053 unsigned int i;
2054
2055 for (i = 0; i < nwords; i++)
2056 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2057 regno + i)
2058 || fixed_regs[regno + i])
2059 break;
2060
2061 if (i == nwords)
2062 {
2063 if (REG_P (real_out))
2064 value = real_out;
2065 else
2066 value = gen_rtx_REG (outmode, regno);
2067 }
2068 }
2069
2070 *inloc = saved_rtx;
2071 }
2072
2073 /* Consider using IN if OUT was not acceptable
2074 or if OUT dies in this insn (like the quotient in a divmod insn).
2075 We can't use IN unless it dies in this insn,
2076 which means we must know accurately which hard regs are live.
2077 Also, the result can't go in IN if IN is used within OUT,
2078 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2079 if (hard_regs_live_known
2080 && REG_P (in)
2081 && REGNO (in) < FIRST_PSEUDO_REGISTER
2082 && (value == 0
2083 || find_reg_note (this_insn, REG_UNUSED, real_out))
2084 && find_reg_note (this_insn, REG_DEAD, real_in)
2085 && !fixed_regs[REGNO (in)]
2086 && HARD_REGNO_MODE_OK (REGNO (in),
2087 /* The only case where out and real_out might
2088 have different modes is where real_out
2089 is a subreg, and in that case, out
2090 has a real mode. */
2091 (GET_MODE (out) != VOIDmode
2092 ? GET_MODE (out) : outmode))
2093 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2094 /* However only do this if we can be sure that this input
2095 operand doesn't correspond with an uninitialized pseudo.
2096 global can assign some hardreg to it that is the same as
2097 the one assigned to a different, also live pseudo (as it
2098 can ignore the conflict). We must never introduce writes
2099 to such hardregs, as they would clobber the other live
2100 pseudo. See PR 20973. */
2101 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2102 ORIGINAL_REGNO (in))
2103 /* Similarly, only do this if we can be sure that the death
2104 note is still valid. global can assign some hardreg to
2105 the pseudo referenced in the note and simultaneously a
2106 subword of this hardreg to a different, also live pseudo,
2107 because only another subword of the hardreg is actually
2108 used in the insn. This cannot happen if the pseudo has
2109 been assigned exactly one hardreg. See PR 33732. */
2110 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2111 {
2112 unsigned int regno = REGNO (in) + in_offset;
2113 unsigned int nwords = hard_regno_nregs[regno][inmode];
2114
2115 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2116 && ! hard_reg_set_here_p (regno, regno + nwords,
2117 PATTERN (this_insn))
2118 && (! earlyclobber
2119 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2120 PATTERN (this_insn), inloc)))
2121 {
2122 unsigned int i;
2123
2124 for (i = 0; i < nwords; i++)
2125 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2126 regno + i))
2127 break;
2128
2129 if (i == nwords)
2130 {
2131 /* If we were going to use OUT as the reload reg
2132 and changed our mind, it means OUT is a dummy that
2133 dies here. So don't bother copying the value to it. */
2134 if (for_real >= 0 && value == real_out)
2135 rld[for_real].out = 0;
2136 if (REG_P (real_in))
2137 value = real_in;
2138 else
2139 value = gen_rtx_REG (inmode, regno);
2140 }
2141 }
2142 }
2143
2144 return value;
2145 }
2146 \f
2147 /* This page contains subroutines used mainly for determining
2148 whether the IN or an OUT of a reload can serve as the
2149 reload register. */
2150
2151 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2152
2153 int
2154 earlyclobber_operand_p (rtx x)
2155 {
2156 int i;
2157
2158 for (i = 0; i < n_earlyclobbers; i++)
2159 if (reload_earlyclobbers[i] == x)
2160 return 1;
2161
2162 return 0;
2163 }
2164
2165 /* Return 1 if expression X alters a hard reg in the range
2166 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2167 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2168 X should be the body of an instruction. */
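/* Hedged example (assuming SImode occupies a single hard register; not
   from the original sources): with X = (set (reg:SI 4) (const_int 0)),
   hard_reg_set_here_p (4, 5, X) returns 1 because register 4 is altered,
   while hard_reg_set_here_p (5, 6, X) returns 0.  */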
2169
2170 static int
2171 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2172 {
2173 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2174 {
2175 rtx op0 = SET_DEST (x);
2176
2177 while (GET_CODE (op0) == SUBREG)
2178 op0 = SUBREG_REG (op0);
2179 if (REG_P (op0))
2180 {
2181 unsigned int r = REGNO (op0);
2182
2183 /* See if this reg overlaps range under consideration. */
2184 if (r < end_regno
2185 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2186 return 1;
2187 }
2188 }
2189 else if (GET_CODE (x) == PARALLEL)
2190 {
2191 int i = XVECLEN (x, 0) - 1;
2192
2193 for (; i >= 0; i--)
2194 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2195 return 1;
2196 }
2197
2198 return 0;
2199 }
2200
2201 /* Return 1 if ADDR is a valid memory address for mode MODE
2202 in address space AS, and check that each pseudo reg has the
2203 proper kind of hard reg. */
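/* A hedged note (summarizing general reload behavior, not taken from the
   original comments): this is the strict form of address checking, so a
   base or index register inside ADDR must be a hard register of the proper
   class, or a pseudo that has already been assigned such a hard register;
   the non-strict check used before reload also accepts not-yet-allocated
   pseudos.  */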
2204
2205 int
2206 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2207 rtx addr, addr_space_t as)
2208 {
2209 #ifdef GO_IF_LEGITIMATE_ADDRESS
2210 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2211 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2212 return 0;
2213
2214 win:
2215 return 1;
2216 #else
2217 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2218 #endif
2219 }
2220 \f
2221 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2222 if they are the same hard reg, and has special hacks for
2223 autoincrement and autodecrement.
2224 This is specifically intended for find_reloads to use
2225 in determining whether two operands match.
2226 X is the operand whose number is the lower of the two.
2227
2228 The value is 2 if Y contains a pre-increment that matches
2229 a non-incrementing address in X. */
2230
2231 /* ??? To be completely correct, we should arrange to pass
2232 for X the output operand and for Y the input operand.
2233 For now, we assume that the output operand has the lower number
2234 because that is natural in (SET output (... input ...)). */
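/* Hedged examples (not from the original sources): on a target where
   (subreg:SI (reg:DI 2) 0) resolves to hard register 2, operands_match_p
   returns 1 for (reg:SI 2) against that SUBREG; and it returns 2 for
   X = (mem:SI (reg:SI 5)) against Y = (mem:SI (pre_inc:SI (reg:SI 5))),
   since Y's pre-increment matches the non-incrementing address in X.  */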
2235
2236 int
2237 operands_match_p (rtx x, rtx y)
2238 {
2239 int i;
2240 RTX_CODE code = GET_CODE (x);
2241 const char *fmt;
2242 int success_2;
2243
2244 if (x == y)
2245 return 1;
2246 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2247 && (REG_P (y) || (GET_CODE (y) == SUBREG
2248 && REG_P (SUBREG_REG (y)))))
2249 {
2250 int j;
2251
2252 if (code == SUBREG)
2253 {
2254 i = REGNO (SUBREG_REG (x));
2255 if (i >= FIRST_PSEUDO_REGISTER)
2256 goto slow;
2257 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2258 GET_MODE (SUBREG_REG (x)),
2259 SUBREG_BYTE (x),
2260 GET_MODE (x));
2261 }
2262 else
2263 i = REGNO (x);
2264
2265 if (GET_CODE (y) == SUBREG)
2266 {
2267 j = REGNO (SUBREG_REG (y));
2268 if (j >= FIRST_PSEUDO_REGISTER)
2269 goto slow;
2270 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2271 GET_MODE (SUBREG_REG (y)),
2272 SUBREG_BYTE (y),
2273 GET_MODE (y));
2274 }
2275 else
2276 j = REGNO (y);
2277
2278 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2279 multiple hard register group of scalar integer registers, so that
2280 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2281 register. */
2282 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2283 && SCALAR_INT_MODE_P (GET_MODE (x))
2284 && i < FIRST_PSEUDO_REGISTER)
2285 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2286 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2287 && SCALAR_INT_MODE_P (GET_MODE (y))
2288 && j < FIRST_PSEUDO_REGISTER)
2289 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2290
2291 return i == j;
2292 }
2293 /* If two operands must match, because they are really a single
2294 operand of an assembler insn, then two postincrements are invalid
2295 because the assembler insn would increment only once.
2296 On the other hand, a postincrement matches ordinary indexing
2297 if the postincrement is the output operand. */
2298 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2299 return operands_match_p (XEXP (x, 0), y);
2300 /* Two preincrements are invalid
2301 because the assembler insn would increment only once.
2302 On the other hand, a preincrement matches ordinary indexing
2303 if the preincrement is the input operand.
2304 In this case, return 2, since some callers need to do special
2305 things when this happens. */
2306 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2307 || GET_CODE (y) == PRE_MODIFY)
2308 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2309
2310 slow:
2311
2312 /* Now we have disposed of all the cases in which different rtx codes
2313 can match. */
2314 if (code != GET_CODE (y))
2315 return 0;
2316
2317 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2318 if (GET_MODE (x) != GET_MODE (y))
2319 return 0;
2320
2321 /* MEMs referring to different address spaces are not equivalent. */
2322 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2323 return 0;
2324
2325 switch (code)
2326 {
2327 CASE_CONST_UNIQUE:
2328 return 0;
2329
2330 case LABEL_REF:
2331 return XEXP (x, 0) == XEXP (y, 0);
2332 case SYMBOL_REF:
2333 return XSTR (x, 0) == XSTR (y, 0);
2334
2335 default:
2336 break;
2337 }
2338
2339 /* Compare the elements. If any pair of corresponding elements
2340 fail to match, return 0 for the whole thing.
2341
2342 success_2 = 0;
2343 fmt = GET_RTX_FORMAT (code);
2344 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2345 {
2346 int val, j;
2347 switch (fmt[i])
2348 {
2349 case 'w':
2350 if (XWINT (x, i) != XWINT (y, i))
2351 return 0;
2352 break;
2353
2354 case 'i':
2355 if (XINT (x, i) != XINT (y, i))
2356 return 0;
2357 break;
2358
2359 case 'e':
2360 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2361 if (val == 0)
2362 return 0;
2363 /* If any subexpression returns 2,
2364 we should return 2 if we are successful. */
2365 if (val == 2)
2366 success_2 = 1;
2367 break;
2368
2369 case '0':
2370 break;
2371
2372 case 'E':
2373 if (XVECLEN (x, i) != XVECLEN (y, i))
2374 return 0;
2375 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2376 {
2377 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2378 if (val == 0)
2379 return 0;
2380 if (val == 2)
2381 success_2 = 1;
2382 }
2383 break;
2384
2385 /* It is believed that rtx's at this level will never
2386 contain anything but integers and other rtx's,
2387 except for within LABEL_REFs and SYMBOL_REFs. */
2388 default:
2389 gcc_unreachable ();
2390 }
2391 }
2392 return 1 + success_2;
2393 }
2394 \f
2395 /* Describe the range of registers or memory referenced by X.
2396 If X is a register, set REG_FLAG and put the first register
2397 number into START and the last plus one into END.
2398 If X is a memory reference, put a base address into BASE
2399 and a range of integer offsets into START and END.
2400 If X is pushing on the stack, we can assume it causes no trouble,
2401 so we set the SAFE field. */
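/* A hedged example (assuming SImode is 4 bytes; not from the original
   sources): decompose of (mem:SI (plus:SI (reg:SI R) (const_int 8))) gives
   base = (reg:SI R), start = 8 and end = 12, while decompose of (reg:SI P),
   where P is a pseudo with no hard register, sets reg_flag with
   start = P and end = P + 1.  */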
2402
2403 static struct decomposition
2404 decompose (rtx x)
2405 {
2406 struct decomposition val;
2407 int all_const = 0;
2408
2409 memset (&val, 0, sizeof (val));
2410
2411 switch (GET_CODE (x))
2412 {
2413 case MEM:
2414 {
2415 rtx base = NULL_RTX, offset = 0;
2416 rtx addr = XEXP (x, 0);
2417
2418 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2419 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2420 {
2421 val.base = XEXP (addr, 0);
2422 val.start = -GET_MODE_SIZE (GET_MODE (x));
2423 val.end = GET_MODE_SIZE (GET_MODE (x));
2424 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2425 return val;
2426 }
2427
2428 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2429 {
2430 if (GET_CODE (XEXP (addr, 1)) == PLUS
2431 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2432 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2433 {
2434 val.base = XEXP (addr, 0);
2435 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2436 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2437 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2438 return val;
2439 }
2440 }
2441
2442 if (GET_CODE (addr) == CONST)
2443 {
2444 addr = XEXP (addr, 0);
2445 all_const = 1;
2446 }
2447 if (GET_CODE (addr) == PLUS)
2448 {
2449 if (CONSTANT_P (XEXP (addr, 0)))
2450 {
2451 base = XEXP (addr, 1);
2452 offset = XEXP (addr, 0);
2453 }
2454 else if (CONSTANT_P (XEXP (addr, 1)))
2455 {
2456 base = XEXP (addr, 0);
2457 offset = XEXP (addr, 1);
2458 }
2459 }
2460
2461 if (offset == 0)
2462 {
2463 base = addr;
2464 offset = const0_rtx;
2465 }
2466 if (GET_CODE (offset) == CONST)
2467 offset = XEXP (offset, 0);
2468 if (GET_CODE (offset) == PLUS)
2469 {
2470 if (CONST_INT_P (XEXP (offset, 0)))
2471 {
2472 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2473 offset = XEXP (offset, 0);
2474 }
2475 else if (CONST_INT_P (XEXP (offset, 1)))
2476 {
2477 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2478 offset = XEXP (offset, 1);
2479 }
2480 else
2481 {
2482 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2483 offset = const0_rtx;
2484 }
2485 }
2486 else if (!CONST_INT_P (offset))
2487 {
2488 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2489 offset = const0_rtx;
2490 }
2491
2492 if (all_const && GET_CODE (base) == PLUS)
2493 base = gen_rtx_CONST (GET_MODE (base), base);
2494
2495 gcc_assert (CONST_INT_P (offset));
2496
2497 val.start = INTVAL (offset);
2498 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2499 val.base = base;
2500 }
2501 break;
2502
2503 case REG:
2504 val.reg_flag = 1;
2505 val.start = true_regnum (x);
2506 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2507 {
2508 /* A pseudo with no hard reg. */
2509 val.start = REGNO (x);
2510 val.end = val.start + 1;
2511 }
2512 else
2513 /* A hard reg. */
2514 val.end = end_hard_regno (GET_MODE (x), val.start);
2515 break;
2516
2517 case SUBREG:
2518 if (!REG_P (SUBREG_REG (x)))
2519 /* This could be more precise, but it's good enough. */
2520 return decompose (SUBREG_REG (x));
2521 val.reg_flag = 1;
2522 val.start = true_regnum (x);
2523 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2524 return decompose (SUBREG_REG (x));
2525 else
2526 /* A hard reg. */
2527 val.end = val.start + subreg_nregs (x);
2528 break;
2529
2530 case SCRATCH:
2531 /* This hasn't been assigned yet, so it can't conflict yet. */
2532 val.safe = 1;
2533 break;
2534
2535 default:
2536 gcc_assert (CONSTANT_P (x));
2537 val.safe = 1;
2538 break;
2539 }
2540 return val;
2541 }
2542
2543 /* Return 1 if altering Y will not modify the value of X.
2544 Y is also described by YDATA, which should be decompose (Y). */
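/* For illustration (hedged, not from the original sources): two stack-slot
   references such as (mem:SI (plus:SI (reg fp) (const_int 8))) and
   (mem:SI (plus:SI (reg fp) (const_int 16))) share the same base but have
   disjoint [start, end) ranges, so immune_p reports that altering either
   one leaves the other unchanged.  */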
2545
2546 static int
2547 immune_p (rtx x, rtx y, struct decomposition ydata)
2548 {
2549 struct decomposition xdata;
2550
2551 if (ydata.reg_flag)
2552 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2553 if (ydata.safe)
2554 return 1;
2555
2556 gcc_assert (MEM_P (y));
2557 /* If Y is memory and X is not, Y can't affect X. */
2558 if (!MEM_P (x))
2559 return 1;
2560
2561 xdata = decompose (x);
2562
2563 if (! rtx_equal_p (xdata.base, ydata.base))
2564 {
2565 /* If bases are distinct symbolic constants, there is no overlap. */
2566 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2567 return 1;
2568 /* Constants and stack slots never overlap. */
2569 if (CONSTANT_P (xdata.base)
2570 && (ydata.base == frame_pointer_rtx
2571 || ydata.base == hard_frame_pointer_rtx
2572 || ydata.base == stack_pointer_rtx))
2573 return 1;
2574 if (CONSTANT_P (ydata.base)
2575 && (xdata.base == frame_pointer_rtx
2576 || xdata.base == hard_frame_pointer_rtx
2577 || xdata.base == stack_pointer_rtx))
2578 return 1;
2579 /* If either base is variable, we don't know anything. */
2580 return 0;
2581 }
2582
2583 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2584 }
2585
2586 /* Similar, but calls decompose. */
2587
2588 int
2589 safe_from_earlyclobber (rtx op, rtx clobber)
2590 {
2591 struct decomposition early_data;
2592
2593 early_data = decompose (clobber);
2594 return immune_p (op, clobber, early_data);
2595 }
2596 \f
2597 /* Main entry point of this file: search the body of INSN
2598 for values that need reloading and record them with push_reload.
2599 REPLACE nonzero means record also where the values occur
2600 so that subst_reloads can be used.
2601
2602 IND_LEVELS says how many levels of indirection are supported by this
2603 machine; a value of zero means that a memory reference is not a valid
2604 memory address.
2605
2606 LIVE_KNOWN says we have valid information about which hard
2607 regs are live at each point in the program; this is true when
2608 we are called from global_alloc but false when stupid register
2609 allocation has been done.
2610
2611 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2612 which is nonnegative if the reg has been commandeered for reloading into.
2613 It is copied into STATIC_RELOAD_REG_P and referenced from there
2614 by various subroutines.
2615
2616 Return TRUE if some operands need to be changed, because of swapping
2617 commutative operands, reg_equiv_address substitution, or whatever. */
2618
2619 int
2620 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2621 short *reload_reg_p)
2622 {
2623 int insn_code_number;
2624 int i, j;
2625 int noperands;
2626 /* These start out as the constraints for the insn
2627 and they are chewed up as we consider alternatives. */
2628 const char *constraints[MAX_RECOG_OPERANDS];
2629 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2630 a register. */
2631 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2632 char pref_or_nothing[MAX_RECOG_OPERANDS];
2633 /* Nonzero for a MEM operand whose entire address needs a reload.
2634 May be -1 to indicate the entire address may or may not need a reload. */
2635 int address_reloaded[MAX_RECOG_OPERANDS];
2636 /* Nonzero for an address operand that needs to be completely reloaded.
2637 May be -1 to indicate the entire operand may or may not need a reload. */
2638 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2639 /* Value of enum reload_type to use for operand. */
2640 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2641 /* Value of enum reload_type to use within address of operand. */
2642 enum reload_type address_type[MAX_RECOG_OPERANDS];
2643 /* Save the usage of each operand. */
2644 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2645 int no_input_reloads = 0, no_output_reloads = 0;
2646 int n_alternatives;
2647 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2648 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2649 char this_alternative_win[MAX_RECOG_OPERANDS];
2650 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2651 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2652 int this_alternative_matches[MAX_RECOG_OPERANDS];
2653 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2654 int this_alternative_number;
2655 int goal_alternative_number = 0;
2656 int operand_reloadnum[MAX_RECOG_OPERANDS];
2657 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2658 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2659 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2660 char goal_alternative_win[MAX_RECOG_OPERANDS];
2661 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2662 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2663 int goal_alternative_swapped;
2664 int best;
2665 int commutative;
2666 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2667 rtx substed_operand[MAX_RECOG_OPERANDS];
2668 rtx body = PATTERN (insn);
2669 rtx set = single_set (insn);
2670 int goal_earlyclobber = 0, this_earlyclobber;
2671 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2672 int retval = 0;
2673
2674 this_insn = insn;
2675 n_reloads = 0;
2676 n_replacements = 0;
2677 n_earlyclobbers = 0;
2678 replace_reloads = replace;
2679 hard_regs_live_known = live_known;
2680 static_reload_reg_p = reload_reg_p;
2681
2682 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2683 neither are insns that SET cc0. Insns that use CC0 are not allowed
2684 to have any input reloads. */
2685 if (JUMP_P (insn) || CALL_P (insn))
2686 no_output_reloads = 1;
2687
2688 #ifdef HAVE_cc0
2689 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2690 no_input_reloads = 1;
2691 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2692 no_output_reloads = 1;
2693 #endif
2694
2695 #ifdef SECONDARY_MEMORY_NEEDED
2696 /* The eliminated forms of any secondary memory locations are per-insn, so
2697 clear them out here. */
2698
2699 if (secondary_memlocs_elim_used)
2700 {
2701 memset (secondary_memlocs_elim, 0,
2702 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2703 secondary_memlocs_elim_used = 0;
2704 }
2705 #endif
2706
2707 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2708 is cheap to move between them. If it is not, there may not be an insn
2709 to do the copy, so we may need a reload. */
2710 if (GET_CODE (body) == SET
2711 && REG_P (SET_DEST (body))
2712 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2713 && REG_P (SET_SRC (body))
2714 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2715 && register_move_cost (GET_MODE (SET_SRC (body)),
2716 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2717 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2718 return 0;
2719
2720 extract_insn (insn);
2721
2722 noperands = reload_n_operands = recog_data.n_operands;
2723 n_alternatives = recog_data.n_alternatives;
2724
2725 /* Just return "no reloads" if insn has no operands with constraints. */
2726 if (noperands == 0 || n_alternatives == 0)
2727 return 0;
2728
2729 insn_code_number = INSN_CODE (insn);
2730 this_insn_is_asm = insn_code_number < 0;
2731
2732 memcpy (operand_mode, recog_data.operand_mode,
2733 noperands * sizeof (enum machine_mode));
2734 memcpy (constraints, recog_data.constraints,
2735 noperands * sizeof (const char *));
2736
2737 commutative = -1;
2738
2739 /* If we will need to know, later, whether some pair of operands
2740 are the same, we must compare them now and save the result.
2741 Reloading the base and index registers will clobber them
2742 and afterward they will fail to match. */
2743
2744 for (i = 0; i < noperands; i++)
2745 {
2746 const char *p;
2747 int c;
2748 char *end;
2749
2750 substed_operand[i] = recog_data.operand[i];
2751 p = constraints[i];
2752
2753 modified[i] = RELOAD_READ;
2754
2755 /* Scan this operand's constraint to see if it is an output operand,
2756 an in-out operand, is commutative, or should match another. */
2757
2758 while ((c = *p))
2759 {
2760 p += CONSTRAINT_LEN (c, p);
2761 switch (c)
2762 {
2763 case '=':
2764 modified[i] = RELOAD_WRITE;
2765 break;
2766 case '+':
2767 modified[i] = RELOAD_READ_WRITE;
2768 break;
2769 case '%':
2770 {
2771 /* The last operand should not be marked commutative. */
2772 gcc_assert (i != noperands - 1);
2773
2774 /* We currently only support one commutative pair of
2775 operands. Some existing asm code currently uses more
2776 than one pair. Previously, that would usually work,
2777 but sometimes it would crash the compiler. We
2778 continue supporting that case as well as we can by
2779 silently ignoring all but the first pair. In the
2780 future we may handle it correctly. */
2781 if (commutative < 0)
2782 commutative = i;
2783 else
2784 gcc_assert (this_insn_is_asm);
2785 }
2786 break;
2787 /* Use of ISDIGIT is tempting here, but it may get expensive because
2788 of locale support we don't want. */
2789 case '0': case '1': case '2': case '3': case '4':
2790 case '5': case '6': case '7': case '8': case '9':
2791 {
2792 c = strtoul (p - 1, &end, 10);
2793 p = end;
2794
2795 operands_match[c][i]
2796 = operands_match_p (recog_data.operand[c],
2797 recog_data.operand[i]);
2798
2799 /* An operand may not match itself. */
2800 gcc_assert (c != i);
2801
2802 /* If C can be commuted with C+1, and C might need to match I,
2803 then C+1 might also need to match I. */
2804 if (commutative >= 0)
2805 {
2806 if (c == commutative || c == commutative + 1)
2807 {
2808 int other = c + (c == commutative ? 1 : -1);
2809 operands_match[other][i]
2810 = operands_match_p (recog_data.operand[other],
2811 recog_data.operand[i]);
2812 }
2813 if (i == commutative || i == commutative + 1)
2814 {
2815 int other = i + (i == commutative ? 1 : -1);
2816 operands_match[c][other]
2817 = operands_match_p (recog_data.operand[c],
2818 recog_data.operand[other]);
2819 }
2820 /* Note that C is supposed to be less than I.
2821 No need to consider altering both C and I because in
2822 that case we would alter one into the other. */
2823 }
2824 }
2825 }
2826 }
2827 }
2828
2829 /* Examine each operand that is a memory reference or memory address
2830 and reload parts of the addresses into index registers.
2831 Also here any references to pseudo regs that didn't get hard regs
2832 but are equivalent to constants get replaced in the insn itself
2833 with those constants. Nobody will ever see them again.
2834
2835 Finally, set up the preferred classes of each operand. */
2836
2837 for (i = 0; i < noperands; i++)
2838 {
2839 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2840
2841 address_reloaded[i] = 0;
2842 address_operand_reloaded[i] = 0;
2843 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2844 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2845 : RELOAD_OTHER);
2846 address_type[i]
2847 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2848 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2849 : RELOAD_OTHER);
2850
2851 if (*constraints[i] == 0)
2852 /* Ignore things like match_operator operands. */
2853 ;
2854 else if (constraints[i][0] == 'p'
2855 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2856 {
2857 address_operand_reloaded[i]
2858 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2859 recog_data.operand[i],
2860 recog_data.operand_loc[i],
2861 i, operand_type[i], ind_levels, insn);
2862
2863 /* If we now have a simple operand where we used to have a
2864 PLUS or MULT, re-recognize and try again. */
2865 if ((OBJECT_P (*recog_data.operand_loc[i])
2866 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2867 && (GET_CODE (recog_data.operand[i]) == MULT
2868 || GET_CODE (recog_data.operand[i]) == PLUS))
2869 {
2870 INSN_CODE (insn) = -1;
2871 retval = find_reloads (insn, replace, ind_levels, live_known,
2872 reload_reg_p);
2873 return retval;
2874 }
2875
2876 recog_data.operand[i] = *recog_data.operand_loc[i];
2877 substed_operand[i] = recog_data.operand[i];
2878
2879 /* Address operands are reloaded in their existing mode,
2880 no matter what is specified in the machine description. */
2881 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2882
2883 /* If the address is a single CONST_INT, pick the address mode
2884 instead; otherwise we will later not know in which mode
2885 the reload should be performed. */
2886 if (operand_mode[i] == VOIDmode)
2887 operand_mode[i] = Pmode;
2888
2889 }
2890 else if (code == MEM)
2891 {
2892 address_reloaded[i]
2893 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2894 recog_data.operand_loc[i],
2895 XEXP (recog_data.operand[i], 0),
2896 &XEXP (recog_data.operand[i], 0),
2897 i, address_type[i], ind_levels, insn);
2898 recog_data.operand[i] = *recog_data.operand_loc[i];
2899 substed_operand[i] = recog_data.operand[i];
2900 }
2901 else if (code == SUBREG)
2902 {
2903 rtx reg = SUBREG_REG (recog_data.operand[i]);
2904 rtx op
2905 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2906 ind_levels,
2907 set != 0
2908 && &SET_DEST (set) == recog_data.operand_loc[i],
2909 insn,
2910 &address_reloaded[i]);
2911
2912 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2913 that didn't get a hard register, emit a USE with a REG_EQUAL
2914 note in front so that we might inherit a previous, possibly
2915 wider reload. */
2916
2917 if (replace
2918 && MEM_P (op)
2919 && REG_P (reg)
2920 && (GET_MODE_SIZE (GET_MODE (reg))
2921 >= GET_MODE_SIZE (GET_MODE (op)))
2922 && reg_equiv_constant (REGNO (reg)) == 0)
2923 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2924 insn),
2925 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2926
2927 substed_operand[i] = recog_data.operand[i] = op;
2928 }
2929 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2930 /* We can get a PLUS as an "operand" as a result of register
2931 elimination. See eliminate_regs and gen_reload. We handle
2932 a unary operator by reloading the operand. */
2933 substed_operand[i] = recog_data.operand[i]
2934 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2935 ind_levels, 0, insn,
2936 &address_reloaded[i]);
2937 else if (code == REG)
2938 {
2939 /* This is equivalent to calling find_reloads_toplev.
2940 The code is duplicated for speed.
2941 When we find a pseudo always equivalent to a constant,
2942 we replace it by the constant. We must be sure, however,
2943 that we don't try to replace it in the insn in which it
2944 is being set. */
2945 int regno = REGNO (recog_data.operand[i]);
2946 if (reg_equiv_constant (regno) != 0
2947 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2948 {
2949 /* Record the existing mode so that the check if constants are
2950 allowed will work when operand_mode isn't specified. */
2951
2952 if (operand_mode[i] == VOIDmode)
2953 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2954
2955 substed_operand[i] = recog_data.operand[i]
2956 = reg_equiv_constant (regno);
2957 }
2958 if (reg_equiv_memory_loc (regno) != 0
2959 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2960 /* We need not give a valid is_set_dest argument since the case
2961 of a constant equivalence was checked above. */
2962 substed_operand[i] = recog_data.operand[i]
2963 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2964 ind_levels, 0, insn,
2965 &address_reloaded[i]);
2966 }
2967 /* If the operand is still a register (we didn't replace it with an
2968 equivalent), get the preferred class to reload it into. */
2969 code = GET_CODE (recog_data.operand[i]);
2970 preferred_class[i]
2971 = ((code == REG && REGNO (recog_data.operand[i])
2972 >= FIRST_PSEUDO_REGISTER)
2973 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2974 : NO_REGS);
2975 pref_or_nothing[i]
2976 = (code == REG
2977 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2978 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2979 }
2980
2981 /* If this is simply a copy from operand 1 to operand 0, merge the
2982 preferred classes for the operands. */
2983 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2984 && recog_data.operand[1] == SET_SRC (set))
2985 {
2986 preferred_class[0] = preferred_class[1]
2987 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2988 pref_or_nothing[0] |= pref_or_nothing[1];
2989 pref_or_nothing[1] |= pref_or_nothing[0];
2990 }
2991
2992 /* Now see what we need for pseudo-regs that didn't get hard regs
2993 or got the wrong kind of hard reg. For this, we must consider
2994 all the operands together against the register constraints. */
2995
2996 best = MAX_RECOG_OPERANDS * 2 + 600;
2997
2998 goal_alternative_swapped = 0;
2999
3000 /* The constraints are made of several alternatives.
3001 Each operand's constraint looks like foo,bar,... with commas
3002 separating the alternatives. The first alternatives for all
3003 operands go together, the second alternatives go together, etc.
3004
3005 First loop over alternatives. */
3006
3007 for (this_alternative_number = 0;
3008 this_alternative_number < n_alternatives;
3009 this_alternative_number++)
3010 {
3011 int swapped;
3012
3013 if (!recog_data.alternative_enabled_p[this_alternative_number])
3014 {
3015 int i;
3016
3017 for (i = 0; i < recog_data.n_operands; i++)
3018 constraints[i] = skip_alternative (constraints[i]);
3019
3020 continue;
3021 }
3022
3023 /* If insn is commutative (it's safe to exchange a certain pair
3024 of operands) then we need to try each alternative twice, the
3025 second time matching those two operands as if we had
3026 exchanged them. To do this, really exchange them in
3027 operands. */
3028 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3029 {
3030 /* Loop over operands for one constraint alternative. */
3031 /* LOSERS counts those that don't fit this alternative
3032 and would require loading. */
3033 int losers = 0;
3034 /* BAD is set to 1 if some operand can't fit this alternative
3035 even after reloading. */
3036 int bad = 0;
3037 /* REJECT is a count of how undesirable this alternative says it is
3038 if any reloading is required. If the alternative matches exactly
3039 then REJECT is ignored, but otherwise it gets this much
3040 counted against it in addition to the reloading needed. Each
3041 ? counts three times here since we want the disparaging caused by
3042 a bad register class to only count 1/3 as much. */
3043 int reject = 0;
3044
3045 if (swapped)
3046 {
3047 enum reg_class tclass;
3048 int t;
3049
3050 recog_data.operand[commutative] = substed_operand[commutative + 1];
3051 recog_data.operand[commutative + 1] = substed_operand[commutative];
3052 /* Swap the duplicates too. */
3053 for (i = 0; i < recog_data.n_dups; i++)
3054 if (recog_data.dup_num[i] == commutative
3055 || recog_data.dup_num[i] == commutative + 1)
3056 *recog_data.dup_loc[i]
3057 = recog_data.operand[(int) recog_data.dup_num[i]];
3058
3059 tclass = preferred_class[commutative];
3060 preferred_class[commutative] = preferred_class[commutative + 1];
3061 preferred_class[commutative + 1] = tclass;
3062
3063 t = pref_or_nothing[commutative];
3064 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3065 pref_or_nothing[commutative + 1] = t;
3066
3067 t = address_reloaded[commutative];
3068 address_reloaded[commutative] = address_reloaded[commutative + 1];
3069 address_reloaded[commutative + 1] = t;
3070 }
3071
3072 this_earlyclobber = 0;
3073
3074 for (i = 0; i < noperands; i++)
3075 {
3076 const char *p = constraints[i];
3077 char *end;
3078 int len;
3079 int win = 0;
3080 int did_match = 0;
3081 /* 0 => this operand can be reloaded somehow for this alternative. */
3082 int badop = 1;
3083 /* 0 => this operand can be reloaded if the alternative allows regs. */
3084 int winreg = 0;
3085 int c;
3086 int m;
3087 rtx operand = recog_data.operand[i];
3088 int offset = 0;
3089 /* Nonzero means this is a MEM that must be reloaded into a reg
3090 regardless of what the constraint says. */
3091 int force_reload = 0;
3092 int offmemok = 0;
3093 /* Nonzero if a constant forced into memory would be OK for this
3094 operand. */
3095 int constmemok = 0;
3096 int earlyclobber = 0;
3097
3098 /* If the predicate accepts a unary operator, it means that
3099 we need to reload the operand, but do not do this for
3100 match_operator and friends. */
3101 if (UNARY_P (operand) && *p != 0)
3102 operand = XEXP (operand, 0);
3103
3104 /* If the operand is a SUBREG, extract
3105 the REG or MEM (or maybe even a constant) within.
3106 (Constants can occur as a result of reg_equiv_constant.) */
3107
3108 while (GET_CODE (operand) == SUBREG)
3109 {
3110 /* Offset only matters when operand is a REG and
3111 it is a hard reg. This is because it is passed
3112 to reg_fits_class_p only if it is a REG, and that
3113 function returns 0 for all pseudos. */
3114 if (REG_P (SUBREG_REG (operand))
3115 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3116 {
3117 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3118 GET_MODE (SUBREG_REG (operand)),
3119 SUBREG_BYTE (operand),
3120 GET_MODE (operand)) < 0)
3121 force_reload = 1;
3122 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3123 GET_MODE (SUBREG_REG (operand)),
3124 SUBREG_BYTE (operand),
3125 GET_MODE (operand));
3126 }
3127 operand = SUBREG_REG (operand);
3128 /* Force reload if this is a constant or PLUS or if there may
3129 be a problem accessing OPERAND in the outer mode. */
3130 if (CONSTANT_P (operand)
3131 || GET_CODE (operand) == PLUS
3132 /* We must force a reload of paradoxical SUBREGs
3133 of a MEM because the alignment of the inner value
3134 may not be enough to do the outer reference. On
3135 big-endian machines, it may also reference outside
3136 the object.
3137
3138 On machines that extend byte loads, when we have a
3139 SUBREG where both the inner and outer modes are no wider
3140 than a word and the inner mode is narrower, integral,
3141 and gets extended when loaded from memory, combine.c has
3142 made assumptions about the behavior of the machine in such
3143 register accesses. If the data is, in fact, in memory we
3144 must always load using the size assumed to be in the
3145 register and let the insn do the different-sized
3146 accesses.
3147
3148 This is doubly true if WORD_REGISTER_OPERATIONS. In
3149 this case eliminate_regs has left non-paradoxical
3150 subregs for push_reload to see. Make sure it does
3151 by forcing the reload.
3152
3153 ??? When is it right at this stage to have a subreg
3154 of a mem that is _not_ to be handled specially? IMO
3155 those should have been reduced to just a mem. */
3156 || ((MEM_P (operand)
3157 || (REG_P (operand)
3158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3159 #ifndef WORD_REGISTER_OPERATIONS
3160 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3161 < BIGGEST_ALIGNMENT)
3162 && (GET_MODE_SIZE (operand_mode[i])
3163 > GET_MODE_SIZE (GET_MODE (operand))))
3164 || BYTES_BIG_ENDIAN
3165 #ifdef LOAD_EXTEND_OP
3166 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3167 && (GET_MODE_SIZE (GET_MODE (operand))
3168 <= UNITS_PER_WORD)
3169 && (GET_MODE_SIZE (operand_mode[i])
3170 > GET_MODE_SIZE (GET_MODE (operand)))
3171 && INTEGRAL_MODE_P (GET_MODE (operand))
3172 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3173 #endif
3174 )
3175 #endif
3176 )
3177 )
3178 force_reload = 1;
3179 }
3180
3181 this_alternative[i] = NO_REGS;
3182 this_alternative_win[i] = 0;
3183 this_alternative_match_win[i] = 0;
3184 this_alternative_offmemok[i] = 0;
3185 this_alternative_earlyclobber[i] = 0;
3186 this_alternative_matches[i] = -1;
3187
3188 /* An empty constraint or empty alternative
3189 allows anything which matched the pattern. */
3190 if (*p == 0 || *p == ',')
3191 win = 1, badop = 0;
3192
3193 /* Scan this alternative's specs for this operand;
3194 set WIN if the operand fits any letter in this alternative.
3195 Otherwise, clear BADOP if this operand could
3196 fit some letter after reloads,
3197 or set WINREG if this operand could fit after reloads
3198 provided the constraint allows some registers. */
3199
3200 do
3201 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3202 {
3203 case '\0':
3204 len = 0;
3205 break;
3206 case ',':
3207 c = '\0';
3208 break;
3209
3210 case '=': case '+': case '*':
3211 break;
3212
3213 case '%':
3214 /* We only support one commutative marker, the first
3215 one. We already set commutative above. */
3216 break;
3217
3218 case '?':
3219 reject += 6;
3220 break;
3221
3222 case '!':
3223 reject = 600;
3224 break;
3225
3226 case '#':
3227 /* Ignore rest of this alternative as far as
3228 reloading is concerned. */
3229 do
3230 p++;
3231 while (*p && *p != ',');
3232 len = 0;
3233 break;
3234
3235 case '0': case '1': case '2': case '3': case '4':
3236 case '5': case '6': case '7': case '8': case '9':
3237 m = strtoul (p, &end, 10);
3238 p = end;
3239 len = 0;
3240
3241 this_alternative_matches[i] = m;
3242 /* We are supposed to match a previous operand.
3243 If we do, we win if that one did.
3244 If we do not, count both of the operands as losers.
3245 (This is too conservative, since most of the time
3246 only a single reload insn will be needed to make
3247 the two operands win. As a result, this alternative
3248 may be rejected when it is actually desirable.) */
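/* A sketch of the index juggling below: operands_match is filled in
   on only one side of its diagonal, so while pretending the
   commutative pair is swapped we remap each of the two commutative
   operand numbers onto the other one (2 * commutative + 1 - x maps
   commutative to commutative + 1 and vice versa) before indexing
   the table.  */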
3249 if ((swapped && (m != commutative || i != commutative + 1))
3250 /* If we are matching as if two operands were swapped,
3251 also pretend that operands_match had been computed
3252 with the operands swapped.
3253 But if I is the second of those and M is the first,
3254 don't exchange them, because operands_match is valid
3255 only on one side of its diagonal. */
3256 ? (operands_match
3257 [(m == commutative || m == commutative + 1)
3258 ? 2 * commutative + 1 - m : m]
3259 [(i == commutative || i == commutative + 1)
3260 ? 2 * commutative + 1 - i : i])
3261 : operands_match[m][i])
3262 {
3263 /* If we are matching a non-offsettable address where an
3264 offsettable address was expected, then we must reject
3265 this combination, because we can't reload it. */
3266 if (this_alternative_offmemok[m]
3267 && MEM_P (recog_data.operand[m])
3268 && this_alternative[m] == NO_REGS
3269 && ! this_alternative_win[m])
3270 bad = 1;
3271
3272 did_match = this_alternative_win[m];
3273 }
3274 else
3275 {
3276 /* Operands don't match. */
3277 rtx value;
3278 int loc1, loc2;
3279 /* Retroactively mark the operand we had to match
3280 as a loser, if it wasn't already. */
3281 if (this_alternative_win[m])
3282 losers++;
3283 this_alternative_win[m] = 0;
3284 if (this_alternative[m] == NO_REGS)
3285 bad = 1;
3286 /* But count the pair only once in the total badness of
3287 this alternative, if the pair can be a dummy reload.
3288 The pointers in operand_loc are not swapped; swap
3289 them by hand if necessary. */
3290 if (swapped && i == commutative)
3291 loc1 = commutative + 1;
3292 else if (swapped && i == commutative + 1)
3293 loc1 = commutative;
3294 else
3295 loc1 = i;
3296 if (swapped && m == commutative)
3297 loc2 = commutative + 1;
3298 else if (swapped && m == commutative + 1)
3299 loc2 = commutative;
3300 else
3301 loc2 = m;
3302 value
3303 = find_dummy_reload (recog_data.operand[i],
3304 recog_data.operand[m],
3305 recog_data.operand_loc[loc1],
3306 recog_data.operand_loc[loc2],
3307 operand_mode[i], operand_mode[m],
3308 this_alternative[m], -1,
3309 this_alternative_earlyclobber[m]);
3310
3311 if (value != 0)
3312 losers--;
3313 }
3314 /* This can be fixed with reloads if the operand
3315 we are supposed to match can be fixed with reloads. */
3316 badop = 0;
3317 this_alternative[i] = this_alternative[m];
3318
3319 /* If we have to reload this operand and some previous
3320 operand also had to match the same thing as this
3321 operand, we don't know how to do that. So reject this
3322 alternative. */
3323 if (! did_match || force_reload)
3324 for (j = 0; j < i; j++)
3325 if (this_alternative_matches[j]
3326 == this_alternative_matches[i])
3327 {
3328 badop = 1;
3329 break;
3330 }
3331 break;
3332
3333 case 'p':
3334 /* All necessary reloads for an address_operand
3335 were handled in find_reloads_address. */
3336 this_alternative[i]
3337 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3338 ADDRESS, SCRATCH);
3339 win = 1;
3340 badop = 0;
3341 break;
3342
3343 case TARGET_MEM_CONSTRAINT:
3344 if (force_reload)
3345 break;
3346 if (MEM_P (operand)
3347 || (REG_P (operand)
3348 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3349 && reg_renumber[REGNO (operand)] < 0))
3350 win = 1;
3351 if (CONST_POOL_OK_P (operand_mode[i], operand))
3352 badop = 0;
3353 constmemok = 1;
3354 break;
3355
3356 case '<':
3357 if (MEM_P (operand)
3358 && ! address_reloaded[i]
3359 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3360 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3361 win = 1;
3362 break;
3363
3364 case '>':
3365 if (MEM_P (operand)
3366 && ! address_reloaded[i]
3367 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3368 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3369 win = 1;
3370 break;
3371
3372 /* Memory operand whose address is not offsettable. */
3373 case 'V':
3374 if (force_reload)
3375 break;
3376 if (MEM_P (operand)
3377 && ! (ind_levels ? offsettable_memref_p (operand)
3378 : offsettable_nonstrict_memref_p (operand))
3379 /* Certain mem addresses will become offsettable
3380 after they themselves are reloaded. This is important;
3381 we don't want our own handling of unoffsettables
3382 to override the handling of reg_equiv_address. */
3383 && !(REG_P (XEXP (operand, 0))
3384 && (ind_levels == 0
3385 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3386 win = 1;
3387 break;
3388
3389 /* Memory operand whose address is offsettable. */
3390 case 'o':
3391 if (force_reload)
3392 break;
3393 if ((MEM_P (operand)
3394 /* If IND_LEVELS, find_reloads_address won't reload a
3395 pseudo that didn't get a hard reg, so we have to
3396 reject that case. */
3397 && ((ind_levels ? offsettable_memref_p (operand)
3398 : offsettable_nonstrict_memref_p (operand))
3399 /* A reloaded address is offsettable because it is now
3400 just a simple register indirect. */
3401 || address_reloaded[i] == 1))
3402 || (REG_P (operand)
3403 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3404 && reg_renumber[REGNO (operand)] < 0
3405 /* If reg_equiv_address is nonzero, we will be
3406 loading it into a register; hence it will be
3407 offsettable, but we cannot say that reg_equiv_mem
3408 is offsettable without checking. */
3409 && ((reg_equiv_mem (REGNO (operand)) != 0
3410 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3411 || (reg_equiv_address (REGNO (operand)) != 0))))
3412 win = 1;
3413 if (CONST_POOL_OK_P (operand_mode[i], operand)
3414 || MEM_P (operand))
3415 badop = 0;
3416 constmemok = 1;
3417 offmemok = 1;
3418 break;
3419
3420 case '&':
3421 /* Output operand that is stored before the need for the
3422 input operands (and their index registers) is over. */
3423 earlyclobber = 1, this_earlyclobber = 1;
3424 break;
3425
3426 case 'E':
3427 case 'F':
3428 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3429 || (GET_CODE (operand) == CONST_VECTOR
3430 && (GET_MODE_CLASS (GET_MODE (operand))
3431 == MODE_VECTOR_FLOAT)))
3432 win = 1;
3433 break;
3434
3435 case 'G':
3436 case 'H':
3437 if (CONST_DOUBLE_AS_FLOAT_P (operand)
3438 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3439 win = 1;
3440 break;
3441
3442 case 's':
3443 if (CONST_SCALAR_INT_P (operand))
3444 break;
3445 case 'i':
3446 if (CONSTANT_P (operand)
3447 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3448 win = 1;
3449 break;
3450
3451 case 'n':
3452 if (CONST_SCALAR_INT_P (operand))
3453 win = 1;
3454 break;
3455
3456 case 'I':
3457 case 'J':
3458 case 'K':
3459 case 'L':
3460 case 'M':
3461 case 'N':
3462 case 'O':
3463 case 'P':
3464 if (CONST_INT_P (operand)
3465 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3466 win = 1;
3467 break;
3468
3469 case 'X':
3470 force_reload = 0;
3471 win = 1;
3472 break;
3473
3474 case 'g':
3475 if (! force_reload
3476 /* A PLUS is never a valid operand, but reload can make
3477 it from a register when eliminating registers. */
3478 && GET_CODE (operand) != PLUS
3479 /* A SCRATCH is not a valid operand. */
3480 && GET_CODE (operand) != SCRATCH
3481 && (! CONSTANT_P (operand)
3482 || ! flag_pic
3483 || LEGITIMATE_PIC_OPERAND_P (operand))
3484 && (GENERAL_REGS == ALL_REGS
3485 || !REG_P (operand)
3486 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3487 && reg_renumber[REGNO (operand)] < 0)))
3488 win = 1;
3489 /* Drop through into 'r' case. */
3490
3491 case 'r':
3492 this_alternative[i]
3493 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3494 goto reg;
3495
3496 default:
3497 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3498 {
3499 #ifdef EXTRA_CONSTRAINT_STR
3500 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3501 {
3502 if (force_reload)
3503 break;
3504 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3505 win = 1;
3506 /* If the address was already reloaded,
3507 we win as well. */
3508 else if (MEM_P (operand)
3509 && address_reloaded[i] == 1)
3510 win = 1;
3511 /* Likewise if the address will be reloaded because
3512 reg_equiv_address is nonzero. For reg_equiv_mem
3513 we have to check. */
3514 else if (REG_P (operand)
3515 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3516 && reg_renumber[REGNO (operand)] < 0
3517 && ((reg_equiv_mem (REGNO (operand)) != 0
3518 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3519 || (reg_equiv_address (REGNO (operand)) != 0)))
3520 win = 1;
3521
3522 /* If we didn't already win, we can reload
3523 constants via force_const_mem, and other
3524 MEMs by reloading the address like for 'o'. */
3525 if (CONST_POOL_OK_P (operand_mode[i], operand)
3526 || MEM_P (operand))
3527 badop = 0;
3528 constmemok = 1;
3529 offmemok = 1;
3530 break;
3531 }
3532 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3533 {
3534 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3535 win = 1;
3536
3537 /* If we didn't already win, we can reload
3538 the address into a base register. */
3539 this_alternative[i]
3540 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3541 ADDRESS, SCRATCH);
3542 badop = 0;
3543 break;
3544 }
3545
3546 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3547 win = 1;
3548 #endif
3549 break;
3550 }
3551
3552 this_alternative[i]
3553 = (reg_class_subunion
3554 [this_alternative[i]]
3555 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3556 reg:
3557 if (GET_MODE (operand) == BLKmode)
3558 break;
3559 winreg = 1;
3560 if (REG_P (operand)
3561 && reg_fits_class_p (operand, this_alternative[i],
3562 offset, GET_MODE (recog_data.operand[i])))
3563 win = 1;
3564 break;
3565 }
3566 while ((p += len), c);
3567
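/* Advance the saved constraint pointer only on the final pass over
   this operand (the second pass when the insn is commutative, the
   only pass otherwise), so that the next alternative's scan starts
   just past the ',' consumed above.  */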
3568 if (swapped == (commutative >= 0 ? 1 : 0))
3569 constraints[i] = p;
3570
3571 /* If this operand could be handled with a reg,
3572 and some reg is allowed, then this operand can be handled. */
3573 if (winreg && this_alternative[i] != NO_REGS
3574 && (win || !class_only_fixed_regs[this_alternative[i]]))
3575 badop = 0;
3576
3577 /* Record which operands fit this alternative. */
3578 this_alternative_earlyclobber[i] = earlyclobber;
3579 if (win && ! force_reload)
3580 this_alternative_win[i] = 1;
3581 else if (did_match && ! force_reload)
3582 this_alternative_match_win[i] = 1;
3583 else
3584 {
3585 int const_to_mem = 0;
3586
3587 this_alternative_offmemok[i] = offmemok;
3588 losers++;
3589 if (badop)
3590 bad = 1;
3591 /* Alternative loses if it has no regs for a reg operand. */
3592 if (REG_P (operand)
3593 && this_alternative[i] == NO_REGS
3594 && this_alternative_matches[i] < 0)
3595 bad = 1;
3596
3597 /* If this is a constant that is reloaded into the desired
3598 class by copying it to memory first, count that as another
3599 reload. This is consistent with other code and is
3600 required to avoid choosing another alternative when
3601 the constant is moved into memory by this function on
3602 an early reload pass. Note that the test here is
3603 precisely the same as in the code below that calls
3604 force_const_mem. */
3605 if (CONST_POOL_OK_P (operand_mode[i], operand)
3606 && ((targetm.preferred_reload_class (operand,
3607 this_alternative[i])
3608 == NO_REGS)
3609 || no_input_reloads))
3610 {
3611 const_to_mem = 1;
3612 if (this_alternative[i] != NO_REGS)
3613 losers++;
3614 }
3615
3616 /* Alternative loses if it requires a type of reload not
3617 permitted for this insn. We can always reload SCRATCH
3618 and objects with a REG_UNUSED note. */
3619 if (GET_CODE (operand) != SCRATCH
3620 && modified[i] != RELOAD_READ && no_output_reloads
3621 && ! find_reg_note (insn, REG_UNUSED, operand))
3622 bad = 1;
3623 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3624 && ! const_to_mem)
3625 bad = 1;
3626
3627 /* If we can't reload this value at all, reject this
3628 alternative. Note that we could also lose due to
3629 LIMIT_RELOAD_CLASS, but we don't check that
3630 here. */
3631
3632 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3633 {
3634 if (targetm.preferred_reload_class (operand,
3635 this_alternative[i])
3636 == NO_REGS)
3637 reject = 600;
3638
3639 if (operand_type[i] == RELOAD_FOR_OUTPUT
3640 && (targetm.preferred_output_reload_class (operand,
3641 this_alternative[i])
3642 == NO_REGS))
3643 reject = 600;
3644 }
3645
3646 /* We prefer to reload pseudos over reloading other things,
3647 since such reloads may be eliminated later.
3648 If we are reloading a SCRATCH, we won't be generating any
3649 insns, just using a register, so it is also preferred.
3650 So bump REJECT in other cases. Don't do this when we are
3651 forcing a constant into memory and it will then win,
3652 since we don't want a different alternative to match
3653 in that case. */
3654 if (! (REG_P (operand)
3655 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3656 && GET_CODE (operand) != SCRATCH
3657 && ! (const_to_mem && constmemok))
3658 reject += 2;
3659
3660 /* Input reloads can be inherited more often than output
3661 reloads can be removed, so penalize output reloads. */
3662 if (operand_type[i] != RELOAD_FOR_INPUT
3663 && GET_CODE (operand) != SCRATCH)
3664 reject++;
3665 }
3666
3667 /* If this operand is a pseudo register that didn't get
3668 a hard reg and this alternative accepts some
3669 register, see if the class that we want is a subset
3670 of the preferred class for this register. If not,
3671 but it intersects that class, use the preferred class
3672 instead. If it does not intersect the preferred
3673 class, show that usage of this alternative should be
3674 discouraged; it will be discouraged more still if the
3675 register is `preferred or nothing'. We do this
3676 because it increases the chance of reusing our spill
3677 register in a later insn and avoiding a pair of
3678 memory stores and loads.
3679
3680 Don't bother with this if this alternative will
3681 accept this operand.
3682
3683 Don't do this for a multiword operand, since it is
3684 only a small win and has the risk of requiring more
3685 spill registers, which could cause a large loss.
3686
3687 Don't do this if the preferred class has only one
3688 register because we might otherwise exhaust the
3689 class. */
3690
3691 if (! win && ! did_match
3692 && this_alternative[i] != NO_REGS
3693 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3694 && reg_class_size [(int) preferred_class[i]] > 0
3695 && ! small_register_class_p (preferred_class[i]))
3696 {
3697 if (! reg_class_subset_p (this_alternative[i],
3698 preferred_class[i]))
3699 {
3700 /* Since we don't have a way of forming the intersection,
3701 we just do something special if the preferred class
3702 is a subset of the class we have; that's the most
3703 common case anyway. */
3704 if (reg_class_subset_p (preferred_class[i],
3705 this_alternative[i]))
3706 this_alternative[i] = preferred_class[i];
3707 else
3708 reject += (2 + 2 * pref_or_nothing[i]);
3709 }
3710 }
3711 }
3712
3713 /* Now see if any output operands that are marked "earlyclobber"
3714 in this alternative conflict with any input operands
3715 or any memory addresses. */
3716
3717 for (i = 0; i < noperands; i++)
3718 if (this_alternative_earlyclobber[i]
3719 && (this_alternative_win[i] || this_alternative_match_win[i]))
3720 {
3721 struct decomposition early_data;
3722
3723 early_data = decompose (recog_data.operand[i]);
3724
3725 gcc_assert (modified[i] != RELOAD_READ);
3726
3727 if (this_alternative[i] == NO_REGS)
3728 {
3729 this_alternative_earlyclobber[i] = 0;
3730 gcc_assert (this_insn_is_asm);
3731 error_for_asm (this_insn,
3732 "%<&%> constraint used with no register class");
3733 }
3734
3735 for (j = 0; j < noperands; j++)
3736 /* Is this an input operand or a memory ref? */
3737 if ((MEM_P (recog_data.operand[j])
3738 || modified[j] != RELOAD_WRITE)
3739 && j != i
3740 /* Ignore things like match_operator operands. */
3741 && !recog_data.is_operator[j]
3742 /* Don't count an input operand that is constrained to match
3743 the early clobber operand. */
3744 && ! (this_alternative_matches[j] == i
3745 && rtx_equal_p (recog_data.operand[i],
3746 recog_data.operand[j]))
3747 /* Is it altered by storing the earlyclobber operand? */
3748 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3749 early_data))
3750 {
3751 /* If the output is in a small (few-register) class,
3752 it's costly to reload it, so reload the input instead. */
3753 if (small_register_class_p (this_alternative[i])
3754 && (REG_P (recog_data.operand[j])
3755 || GET_CODE (recog_data.operand[j]) == SUBREG))
3756 {
3757 losers++;
3758 this_alternative_win[j] = 0;
3759 this_alternative_match_win[j] = 0;
3760 }
3761 else
3762 break;
3763 }
3764 /* If an earlyclobber operand conflicts with something,
3765 it must be reloaded, so request this and count the cost. */
3766 if (j != noperands)
3767 {
3768 losers++;
3769 this_alternative_win[i] = 0;
3770 this_alternative_match_win[j] = 0;
3771 for (j = 0; j < noperands; j++)
3772 if (this_alternative_matches[j] == i
3773 && this_alternative_match_win[j])
3774 {
3775 this_alternative_win[j] = 0;
3776 this_alternative_match_win[j] = 0;
3777 losers++;
3778 }
3779 }
3780 }
3781
3782 /* If one alternative accepts all the operands with no reload
3783 required, choose it and don't consider the remaining ones. */
3784 if (losers == 0)
3785 {
3786 /* Unswap these so that they are never swapped at `finish'. */
3787 if (swapped)
3788 {
3789 recog_data.operand[commutative] = substed_operand[commutative];
3790 recog_data.operand[commutative + 1]
3791 = substed_operand[commutative + 1];
3792 }
3793 for (i = 0; i < noperands; i++)
3794 {
3795 goal_alternative_win[i] = this_alternative_win[i];
3796 goal_alternative_match_win[i] = this_alternative_match_win[i];
3797 goal_alternative[i] = this_alternative[i];
3798 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3799 goal_alternative_matches[i] = this_alternative_matches[i];
3800 goal_alternative_earlyclobber[i]
3801 = this_alternative_earlyclobber[i];
3802 }
3803 goal_alternative_number = this_alternative_number;
3804 goal_alternative_swapped = swapped;
3805 goal_earlyclobber = this_earlyclobber;
3806 goto finish;
3807 }
3808
3809 /* REJECT, set by the ! and ? constraint characters and when a register
3810 would be reloaded into a non-preferred class, discourages the use of
3811 this alternative for a reload goal. REJECT is incremented by six
3812 for each ? and two for each non-preferred class. */
3813 losers = losers * 6 + reject;
3814
3815 /* If this alternative can be made to work by reloading,
3816 and it needs less reloading than the others checked so far,
3817 record it as the chosen goal for reloading. */
3818 if (! bad)
3819 {
3820 if (best > losers)
3821 {
3822 for (i = 0; i < noperands; i++)
3823 {
3824 goal_alternative[i] = this_alternative[i];
3825 goal_alternative_win[i] = this_alternative_win[i];
3826 goal_alternative_match_win[i]
3827 = this_alternative_match_win[i];
3828 goal_alternative_offmemok[i]
3829 = this_alternative_offmemok[i];
3830 goal_alternative_matches[i] = this_alternative_matches[i];
3831 goal_alternative_earlyclobber[i]
3832 = this_alternative_earlyclobber[i];
3833 }
3834 goal_alternative_swapped = swapped;
3835 best = losers;
3836 goal_alternative_number = this_alternative_number;
3837 goal_earlyclobber = this_earlyclobber;
3838 }
3839 }
3840
3841 if (swapped)
3842 {
3843 enum reg_class tclass;
3844 int t;
3845
3846 /* If the commutative operands have been swapped, swap
3847 them back in order to check the next alternative. */
3848 recog_data.operand[commutative] = substed_operand[commutative];
3849 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3850 /* Unswap the duplicates too. */
3851 for (i = 0; i < recog_data.n_dups; i++)
3852 if (recog_data.dup_num[i] == commutative
3853 || recog_data.dup_num[i] == commutative + 1)
3854 *recog_data.dup_loc[i]
3855 = recog_data.operand[(int) recog_data.dup_num[i]];
3856
3857 /* Unswap the operand related information as well. */
3858 tclass = preferred_class[commutative];
3859 preferred_class[commutative] = preferred_class[commutative + 1];
3860 preferred_class[commutative + 1] = tclass;
3861
3862 t = pref_or_nothing[commutative];
3863 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3864 pref_or_nothing[commutative + 1] = t;
3865
3866 t = address_reloaded[commutative];
3867 address_reloaded[commutative] = address_reloaded[commutative + 1];
3868 address_reloaded[commutative + 1] = t;
3869 }
3870 }
3871 }
3872
3873 /* The operands don't meet the constraints.
3874 goal_alternative describes the alternative
3875 that we could reach by reloading the fewest operands.
3876 Reload so as to fit it. */
3877
3878 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3879 {
3880 /* No alternative works with reloads?? */
3881 if (insn_code_number >= 0)
3882 fatal_insn ("unable to generate reloads for:", insn);
3883 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3884 /* Avoid further trouble with this insn. */
3885 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3886 n_reloads = 0;
3887 return 0;
3888 }
3889
3890 /* Jump to `finish' from above if all operands are valid already.
3891 In that case, goal_alternative_win is all 1. */
3892 finish:
3893
3894 /* Right now, for any pair of operands I and J that are required to match,
3895 with I < J,
3896 goal_alternative_matches[J] is I.
3897 Set up goal_alternative_matched as the inverse function:
3898 goal_alternative_matched[I] = J. */
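/* For example, if operand 2 had to match operand 0, then
   goal_alternative_matches[2] is 0, and when operand 2 did not
   already win we record goal_alternative_matched[0] = 2.  */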
3899
3900 for (i = 0; i < noperands; i++)
3901 goal_alternative_matched[i] = -1;
3902
3903 for (i = 0; i < noperands; i++)
3904 if (! goal_alternative_win[i]
3905 && goal_alternative_matches[i] >= 0)
3906 goal_alternative_matched[goal_alternative_matches[i]] = i;
3907
3908 for (i = 0; i < noperands; i++)
3909 goal_alternative_win[i] |= goal_alternative_match_win[i];
3910
3911 /* If the best alternative has the commutative pair of operands
3912 swapped, consider them swapped before reporting the reloads. Update
3913 the operand numbers of any reloads already pushed. */
3914
3915 if (goal_alternative_swapped)
3916 {
3917 rtx tem;
3918
3919 tem = substed_operand[commutative];
3920 substed_operand[commutative] = substed_operand[commutative + 1];
3921 substed_operand[commutative + 1] = tem;
3922 tem = recog_data.operand[commutative];
3923 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3924 recog_data.operand[commutative + 1] = tem;
3925 tem = *recog_data.operand_loc[commutative];
3926 *recog_data.operand_loc[commutative]
3927 = *recog_data.operand_loc[commutative + 1];
3928 *recog_data.operand_loc[commutative + 1] = tem;
3929
3930 for (i = 0; i < n_reloads; i++)
3931 {
3932 if (rld[i].opnum == commutative)
3933 rld[i].opnum = commutative + 1;
3934 else if (rld[i].opnum == commutative + 1)
3935 rld[i].opnum = commutative;
3936 }
3937 }
3938
3939 for (i = 0; i < noperands; i++)
3940 {
3941 operand_reloadnum[i] = -1;
3942
3943 /* If this is an earlyclobber operand, we need to widen the scope.
3944 The reload must remain valid from the start of the insn being
3945 reloaded until after the operand is stored into its destination.
3946 We approximate this with RELOAD_OTHER even though we know that we
3947 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3948
3949 One special case that is worth checking is when we have an
3950 output that is earlyclobber but isn't used past the insn (typically
3951 a SCRATCH). In this case, we need only have the reload live
3952 through the insn itself, but not for any of our input or output
3953 reloads.
3954 But we must not accidentally narrow the scope of an existing
3955 RELOAD_OTHER reload - leave these alone.
3956
3957 In any case, anything needed to address this operand can keep
3958 its previous categorization. */
3959
3960 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3961 operand_type[i]
3962 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3963 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3964 }
3965
3966 /* Any constants that aren't allowed and can't be reloaded
3967 into registers are here changed into memory references. */
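/* For instance, a floating-point constant whose alternative's class
   refuses to load it directly is typically rewritten here as a
   constant-pool reference (a MEM whose address is a SYMBOL_REF
   produced by force_const_mem), and that address is then reloaded
   like any other memory address.  */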
3968 for (i = 0; i < noperands; i++)
3969 if (! goal_alternative_win[i])
3970 {
3971 rtx op = recog_data.operand[i];
3972 rtx subreg = NULL_RTX;
3973 rtx plus = NULL_RTX;
3974 enum machine_mode mode = operand_mode[i];
3975
3976 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3977 push_reload so we have to let them pass here. */
3978 if (GET_CODE (op) == SUBREG)
3979 {
3980 subreg = op;
3981 op = SUBREG_REG (op);
3982 mode = GET_MODE (op);
3983 }
3984
3985 if (GET_CODE (op) == PLUS)
3986 {
3987 plus = op;
3988 op = XEXP (op, 1);
3989 }
3990
3991 if (CONST_POOL_OK_P (mode, op)
3992 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3993 == NO_REGS)
3994 || no_input_reloads))
3995 {
3996 int this_address_reloaded;
3997 rtx tem = force_const_mem (mode, op);
3998
3999 /* If we stripped a SUBREG or a PLUS above add it back. */
4000 if (plus != NULL_RTX)
4001 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
4002
4003 if (subreg != NULL_RTX)
4004 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
4005
4006 this_address_reloaded = 0;
4007 substed_operand[i] = recog_data.operand[i]
4008 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
4009 0, insn, &this_address_reloaded);
4010
4011 /* If the alternative accepts constant pool refs directly
4012 there will be no reload needed at all. */
4013 if (plus == NULL_RTX
4014 && subreg == NULL_RTX
4015 && alternative_allows_const_pool_ref (this_address_reloaded == 0
4016 ? substed_operand[i]
4017 : NULL,
4018 recog_data.constraints[i],
4019 goal_alternative_number))
4020 goal_alternative_win[i] = 1;
4021 }
4022 }
4023
4024 /* Record the values of the earlyclobber operands for the caller. */
4025 if (goal_earlyclobber)
4026 for (i = 0; i < noperands; i++)
4027 if (goal_alternative_earlyclobber[i])
4028 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
4029
4030 /* Now record reloads for all the operands that need them. */
4031 for (i = 0; i < noperands; i++)
4032 if (! goal_alternative_win[i])
4033 {
4034 /* Operands that match previous ones have already been handled. */
4035 if (goal_alternative_matches[i] >= 0)
4036 ;
4037 /* Handle an operand with a nonoffsettable address
4038 appearing where an offsettable address will do
4039 by reloading the address into a base register.
4040
4041 ??? We can also do this when the operand is a register and
4042 reg_equiv_mem is not offsettable, but this is a bit tricky,
4043 so we don't bother with it. It may not be worth doing. */
4044 else if (goal_alternative_matched[i] == -1
4045 && goal_alternative_offmemok[i]
4046 && MEM_P (recog_data.operand[i]))
4047 {
4048 /* If the address to be reloaded is a VOIDmode constant,
4049 use the default address mode as the mode of the reload register,
4050 as would have been done by find_reloads_address. */
4051 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4052 enum machine_mode address_mode;
4053
4054 address_mode = get_address_mode (recog_data.operand[i]);
4055 operand_reloadnum[i]
4056 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4057 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4058 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4059 address_mode,
4060 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4061 rld[operand_reloadnum[i]].inc
4062 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4063
4064 /* If this operand is an output, we will have made any
4065 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4066 now we are treating part of the operand as an input, so
4067 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4068
4069 if (modified[i] == RELOAD_WRITE)
4070 {
4071 for (j = 0; j < n_reloads; j++)
4072 {
4073 if (rld[j].opnum == i)
4074 {
4075 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4076 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4077 else if (rld[j].when_needed
4078 == RELOAD_FOR_OUTADDR_ADDRESS)
4079 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4080 }
4081 }
4082 }
4083 }
4084 else if (goal_alternative_matched[i] == -1)
4085 {
4086 operand_reloadnum[i]
4087 = push_reload ((modified[i] != RELOAD_WRITE
4088 ? recog_data.operand[i] : 0),
4089 (modified[i] != RELOAD_READ
4090 ? recog_data.operand[i] : 0),
4091 (modified[i] != RELOAD_WRITE
4092 ? recog_data.operand_loc[i] : 0),
4093 (modified[i] != RELOAD_READ
4094 ? recog_data.operand_loc[i] : 0),
4095 (enum reg_class) goal_alternative[i],
4096 (modified[i] == RELOAD_WRITE
4097 ? VOIDmode : operand_mode[i]),
4098 (modified[i] == RELOAD_READ
4099 ? VOIDmode : operand_mode[i]),
4100 (insn_code_number < 0 ? 0
4101 : insn_data[insn_code_number].operand[i].strict_low),
4102 0, i, operand_type[i]);
4103 }
4104 /* In a matching pair of operands, one must be input only
4105 and the other must be output only.
4106 Pass the input operand as IN and the other as OUT. */
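/* For instance, in a two-address add where operand 1 must match
   operand 0, a single in-out RELOAD_OTHER reload is pushed with the
   read operand as IN and the written operand as OUT.  */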
4107 else if (modified[i] == RELOAD_READ
4108 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4109 {
4110 operand_reloadnum[i]
4111 = push_reload (recog_data.operand[i],
4112 recog_data.operand[goal_alternative_matched[i]],
4113 recog_data.operand_loc[i],
4114 recog_data.operand_loc[goal_alternative_matched[i]],
4115 (enum reg_class) goal_alternative[i],
4116 operand_mode[i],
4117 operand_mode[goal_alternative_matched[i]],
4118 0, 0, i, RELOAD_OTHER);
4119 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4120 }
4121 else if (modified[i] == RELOAD_WRITE
4122 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4123 {
4124 operand_reloadnum[goal_alternative_matched[i]]
4125 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4126 recog_data.operand[i],
4127 recog_data.operand_loc[goal_alternative_matched[i]],
4128 recog_data.operand_loc[i],
4129 (enum reg_class) goal_alternative[i],
4130 operand_mode[goal_alternative_matched[i]],
4131 operand_mode[i],
4132 0, 0, i, RELOAD_OTHER);
4133 operand_reloadnum[i] = output_reloadnum;
4134 }
4135 else
4136 {
4137 gcc_assert (insn_code_number < 0);
4138 error_for_asm (insn, "inconsistent operand constraints "
4139 "in an %<asm%>");
4140 /* Avoid further trouble with this insn. */
4141 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4142 n_reloads = 0;
4143 return 0;
4144 }
4145 }
4146 else if (goal_alternative_matched[i] < 0
4147 && goal_alternative_matches[i] < 0
4148 && address_operand_reloaded[i] != 1
4149 && optimize)
4150 {
4151 /* For each non-matching operand that's a MEM or a pseudo-register
4152 that didn't get a hard register, make an optional reload.
4153 This may get done even if the insn needs no reloads otherwise. */
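/* Such a reload is marked optional (the OPTIONAL argument to
   push_reload below is 1); roughly speaking it is honored only if a
   suitable register turns out to be free, and is simply dropped
   otherwise.  */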
4154
4155 rtx operand = recog_data.operand[i];
4156
4157 while (GET_CODE (operand) == SUBREG)
4158 operand = SUBREG_REG (operand);
4159 if ((MEM_P (operand)
4160 || (REG_P (operand)
4161 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4162 /* If this is only for an output, the optional reload would not
4163 actually cause us to use a register now; it just notes that
4164 something is stored here. */
4165 && (goal_alternative[i] != NO_REGS
4166 || modified[i] == RELOAD_WRITE)
4167 && ! no_input_reloads
4168 /* An optional output reload might allow us to delete INSN later.
4169 We mustn't make in-out reloads on insns that are not permitted
4170 to have output reloads.
4171 If this is an asm, we can't delete it; we must not even call
4172 push_reload for an optional output reload in this case,
4173 because we can't be sure that the constraint allows a register,
4174 and push_reload verifies the constraints for asms. */
4175 && (modified[i] == RELOAD_READ
4176 || (! no_output_reloads && ! this_insn_is_asm)))
4177 operand_reloadnum[i]
4178 = push_reload ((modified[i] != RELOAD_WRITE
4179 ? recog_data.operand[i] : 0),
4180 (modified[i] != RELOAD_READ
4181 ? recog_data.operand[i] : 0),
4182 (modified[i] != RELOAD_WRITE
4183 ? recog_data.operand_loc[i] : 0),
4184 (modified[i] != RELOAD_READ
4185 ? recog_data.operand_loc[i] : 0),
4186 (enum reg_class) goal_alternative[i],
4187 (modified[i] == RELOAD_WRITE
4188 ? VOIDmode : operand_mode[i]),
4189 (modified[i] == RELOAD_READ
4190 ? VOIDmode : operand_mode[i]),
4191 (insn_code_number < 0 ? 0
4192 : insn_data[insn_code_number].operand[i].strict_low),
4193 1, i, operand_type[i]);
4194 /* If a memory reference remains (either as a MEM or a pseudo that
4195 did not get a hard register), yet we can't make an optional
4196 reload, check if this is actually a pseudo register reference;
4197 we then need to emit a USE and/or a CLOBBER so that reload
4198 inheritance will do the right thing. */
4199 else if (replace
4200 && (MEM_P (operand)
4201 || (REG_P (operand)
4202 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4203 && reg_renumber [REGNO (operand)] < 0)))
4204 {
4205 operand = *recog_data.operand_loc[i];
4206
4207 while (GET_CODE (operand) == SUBREG)
4208 operand = SUBREG_REG (operand);
4209 if (REG_P (operand))
4210 {
4211 if (modified[i] != RELOAD_WRITE)
4212 /* We mark the USE with QImode so that we recognize
4213 it as one that can be safely deleted at the end
4214 of reload. */
4215 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4216 insn), QImode);
4217 if (modified[i] != RELOAD_READ)
4218 emit_insn_after (gen_clobber (operand), insn);
4219 }
4220 }
4221 }
4222 else if (goal_alternative_matches[i] >= 0
4223 && goal_alternative_win[goal_alternative_matches[i]]
4224 && modified[i] == RELOAD_READ
4225 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4226 && ! no_input_reloads && ! no_output_reloads
4227 && optimize)
4228 {
4229 /* Similarly, make an optional reload for a pair of matching
4230 objects that are in MEM or a pseudo that didn't get a hard reg. */
4231
4232 rtx operand = recog_data.operand[i];
4233
4234 while (GET_CODE (operand) == SUBREG)
4235 operand = SUBREG_REG (operand);
4236 if ((MEM_P (operand)
4237 || (REG_P (operand)
4238 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4239 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4240 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4241 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4242 recog_data.operand[i],
4243 recog_data.operand_loc[goal_alternative_matches[i]],
4244 recog_data.operand_loc[i],
4245 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4246 operand_mode[goal_alternative_matches[i]],
4247 operand_mode[i],
4248 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4249 }
4250
4251 /* Perform whatever substitutions on the operands we are supposed
4252 to make due to commutativity or replacement of registers
4253 with equivalent constants or memory slots. */
4254
4255 for (i = 0; i < noperands; i++)
4256 {
4257 /* We only do this on the last pass through reload, because it is
4258 possible for some data (like reg_equiv_address) to be changed during
4259 later passes. Moreover, we lose the opportunity to get a useful
4260 reload_{in,out}_reg when we do these replacements. */
4261
4262 if (replace)
4263 {
4264 rtx substitution = substed_operand[i];
4265
4266 *recog_data.operand_loc[i] = substitution;
4267
4268 /* If we're replacing an operand with a LABEL_REF, we need to
4269 make sure that there's a REG_LABEL_OPERAND note attached to
4270 this instruction. */
4271 if (GET_CODE (substitution) == LABEL_REF
4272 && !find_reg_note (insn, REG_LABEL_OPERAND,
4273 XEXP (substitution, 0))
4274 /* For a JUMP_P, if it was a branch target it must have
4275 already been recorded as such. */
4276 && (!JUMP_P (insn)
4277 || !label_is_jump_target_p (XEXP (substitution, 0),
4278 insn)))
4279 {
4280 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4281 if (LABEL_P (XEXP (substitution, 0)))
4282 ++LABEL_NUSES (XEXP (substitution, 0));
4283 }
4284
4285 }
4286 else
4287 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4288 }
4289
4290 /* If this insn pattern contains any MATCH_DUP's, make sure that
4291 they will be substituted if the operands they match are substituted.
4292 Also do now any substitutions we already did on the operands.
4293
4294 Don't do this if we aren't making replacements because we might be
4295 propagating things allocated by frame pointer elimination into places
4296 it doesn't expect. */
4297
4298 if (insn_code_number >= 0 && replace)
4299 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4300 {
4301 int opno = recog_data.dup_num[i];
4302 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4303 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4304 }
4305
4306 #if 0
4307 /* This loses because reloading of prior insns can invalidate the equivalence
4308 (or at least find_equiv_reg isn't smart enough to find it any more),
4309 causing this insn to need more reload regs than it needed before.
4310 It may be too late to make the reload regs available.
4311 Now this optimization is done safely in choose_reload_regs. */
4312
4313 /* For each reload of a reg into some other class of reg,
4314 search for an existing equivalent reg (same value now) in the right class.
4315 We can use it as long as we don't need to change its contents. */
4316 for (i = 0; i < n_reloads; i++)
4317 if (rld[i].reg_rtx == 0
4318 && rld[i].in != 0
4319 && REG_P (rld[i].in)
4320 && rld[i].out == 0)
4321 {
4322 rld[i].reg_rtx
4323 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4324 static_reload_reg_p, 0, rld[i].inmode);
4325 /* Prevent generation of insn to load the value
4326 because the one we found already has the value. */
4327 if (rld[i].reg_rtx)
4328 rld[i].in = rld[i].reg_rtx;
4329 }
4330 #endif
4331
4332 /* If we detected an error and replaced the asm instruction with a USE,
4333 forget about the reloads. */
4334 if (GET_CODE (PATTERN (insn)) == USE
4335 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4336 n_reloads = 0;
4337
4338 /* Perhaps an output reload can be combined with another
4339 to reduce needs by one. */
4340 if (!goal_earlyclobber)
4341 combine_reloads ();
4342
4343 /* If we have a pair of reloads for parts of an address, they are reloading
4344 the same object, the operands themselves were not reloaded, and they
4345 are for two operands that are supposed to match, merge the reloads and
4346 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4347
4348 for (i = 0; i < n_reloads; i++)
4349 {
4350 int k;
4351
4352 for (j = i + 1; j < n_reloads; j++)
4353 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4354 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4355 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4356 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4357 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4358 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4359 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4360 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4361 && rtx_equal_p (rld[i].in, rld[j].in)
4362 && (operand_reloadnum[rld[i].opnum] < 0
4363 || rld[operand_reloadnum[rld[i].opnum]].optional)
4364 && (operand_reloadnum[rld[j].opnum] < 0
4365 || rld[operand_reloadnum[rld[j].opnum]].optional)
4366 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4367 || (goal_alternative_matches[rld[j].opnum]
4368 == rld[i].opnum)))
4369 {
4370 for (k = 0; k < n_replacements; k++)
4371 if (replacements[k].what == j)
4372 replacements[k].what = i;
4373
4374 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4375 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4376 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4377 else
4378 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4379 rld[j].in = 0;
4380 }
4381 }
4382
4383 /* Scan all the reloads and update their type.
4384 If a reload is for the address of an operand and we didn't reload
4385 that operand, change the type. Similarly, change the operand number
4386 of a reload when two operands match. If a reload is optional, treat it
4387 as though the operand isn't reloaded.
4388
4389 ??? This latter case is somewhat odd because if we do the optional
4390 reload, it means the object is hanging around. Thus we need only
4391 do the address reload if the optional reload was NOT done.
4392
4393 Change secondary reloads to be the address type of their operand, not
4394 the normal type.
4395
4396 If an operand's reload is now RELOAD_OTHER, change any
4397 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4398 RELOAD_FOR_OTHER_ADDRESS. */
4399
4400 for (i = 0; i < n_reloads; i++)
4401 {
4402 if (rld[i].secondary_p
4403 && rld[i].when_needed == operand_type[rld[i].opnum])
4404 rld[i].when_needed = address_type[rld[i].opnum];
4405
4406 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4407 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4408 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4409 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4410 && (operand_reloadnum[rld[i].opnum] < 0
4411 || rld[operand_reloadnum[rld[i].opnum]].optional))
4412 {
4413 /* If we have a secondary reload to go along with this reload,
4414 change its type to RELOAD_FOR_OPADDR_ADDR. */
4415
4416 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4417 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4418 && rld[i].secondary_in_reload != -1)
4419 {
4420 int secondary_in_reload = rld[i].secondary_in_reload;
4421
4422 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4423
4424 /* If there's a tertiary reload we have to change it also. */
4425 if (secondary_in_reload > 0
4426 && rld[secondary_in_reload].secondary_in_reload != -1)
4427 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4428 = RELOAD_FOR_OPADDR_ADDR;
4429 }
4430
4431 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4432 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4433 && rld[i].secondary_out_reload != -1)
4434 {
4435 int secondary_out_reload = rld[i].secondary_out_reload;
4436
4437 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4438
4439 /* If there's a tertiary reload we have to change it also. */
4440 if (secondary_out_reload
4441 && rld[secondary_out_reload].secondary_out_reload != -1)
4442 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4443 = RELOAD_FOR_OPADDR_ADDR;
4444 }
4445
4446 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4447 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4448 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4449 else
4450 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4451 }
4452
4453 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4454 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4455 && operand_reloadnum[rld[i].opnum] >= 0
4456 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4457 == RELOAD_OTHER))
4458 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4459
4460 if (goal_alternative_matches[rld[i].opnum] >= 0)
4461 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4462 }
4463
4464 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4465 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4466 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4467
4468 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4469 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4470 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4471 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4472 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4473 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4474 This is complicated by the fact that a single operand can have more
4475 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4476 choose_reload_regs without affecting code quality, and cases that
4477 actually fail are extremely rare, so it turns out to be better to fix
4478 the problem here by not generating cases that choose_reload_regs will
4479 fail for. */
4480 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4481 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4482 a single operand.
4483 We can reduce the register pressure by exploiting that a
4484 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4485 does not conflict with any of them, if it is only used for the first of
4486 the RELOAD_FOR_X_ADDRESS reloads. */
4487 {
4488 int first_op_addr_num = -2;
4489 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4490 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4491 int need_change = 0;
4492 /* We use first_op_addr_num and the contents of the above arrays
4493 first as flags: -2 means no instance encountered, -1 means exactly
4494 one instance encountered.
4495 If more than one instance has been encountered, we store the reload
4496 number of the first reload of the kind in question; reload numbers
4497 are known to be non-negative. */
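/* The increment trick below: each counter starts at -2, so the first
   instance of a kind takes it to -1 and a second takes it to 0, at
   which point the counter is replaced by the current reload number
   and NEED_CHANGE is set; since the scan runs backwards, it ends up
   holding the number of the first reload of that kind.  */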
4498 for (i = 0; i < noperands; i++)
4499 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4500 for (i = n_reloads - 1; i >= 0; i--)
4501 {
4502 switch (rld[i].when_needed)
4503 {
4504 case RELOAD_FOR_OPERAND_ADDRESS:
4505 if (++first_op_addr_num >= 0)
4506 {
4507 first_op_addr_num = i;
4508 need_change = 1;
4509 }
4510 break;
4511 case RELOAD_FOR_INPUT_ADDRESS:
4512 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4513 {
4514 first_inpaddr_num[rld[i].opnum] = i;
4515 need_change = 1;
4516 }
4517 break;
4518 case RELOAD_FOR_OUTPUT_ADDRESS:
4519 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4520 {
4521 first_outpaddr_num[rld[i].opnum] = i;
4522 need_change = 1;
4523 }
4524 break;
4525 default:
4526 break;
4527 }
4528 }
4529
4530 if (need_change)
4531 {
4532 for (i = 0; i < n_reloads; i++)
4533 {
4534 int first_num;
4535 enum reload_type type;
4536
4537 switch (rld[i].when_needed)
4538 {
4539 case RELOAD_FOR_OPADDR_ADDR:
4540 first_num = first_op_addr_num;
4541 type = RELOAD_FOR_OPERAND_ADDRESS;
4542 break;
4543 case RELOAD_FOR_INPADDR_ADDRESS:
4544 first_num = first_inpaddr_num[rld[i].opnum];
4545 type = RELOAD_FOR_INPUT_ADDRESS;
4546 break;
4547 case RELOAD_FOR_OUTADDR_ADDRESS:
4548 first_num = first_outpaddr_num[rld[i].opnum];
4549 type = RELOAD_FOR_OUTPUT_ADDRESS;
4550 break;
4551 default:
4552 continue;
4553 }
4554 if (first_num < 0)
4555 continue;
4556 else if (i > first_num)
4557 rld[i].when_needed = type;
4558 else
4559 {
4560 /* Check if the only TYPE reload that uses reload I is
4561 reload FIRST_NUM. */
4562 for (j = n_reloads - 1; j > first_num; j--)
4563 {
4564 if (rld[j].when_needed == type
4565 && (rld[i].secondary_p
4566 ? rld[j].secondary_in_reload == i
4567 : reg_mentioned_p (rld[i].in, rld[j].in)))
4568 {
4569 rld[i].when_needed = type;
4570 break;
4571 }
4572 }
4573 }
4574 }
4575 }
4576 }
4577
4578 /* See if we have any reloads that are now allowed to be merged
4579 because we've changed when the reload is needed to
4580 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4581 check for the most common cases. */
4582
4583 for (i = 0; i < n_reloads; i++)
4584 if (rld[i].in != 0 && rld[i].out == 0
4585 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4586 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4587 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4588 for (j = 0; j < n_reloads; j++)
4589 if (i != j && rld[j].in != 0 && rld[j].out == 0
4590 && rld[j].when_needed == rld[i].when_needed
4591 && MATCHES (rld[i].in, rld[j].in)
4592 && rld[i].rclass == rld[j].rclass
4593 && !rld[i].nocombine && !rld[j].nocombine
4594 && rld[i].reg_rtx == rld[j].reg_rtx)
4595 {
4596 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4597 transfer_replacements (i, j);
4598 rld[j].in = 0;
4599 }
4600
4601 #ifdef HAVE_cc0
4602 /* If we made any reloads for addresses, see if they violate a
4603 "no input reloads" requirement for this insn. But loads that we
4604 do after the insn (such as for output addresses) are fine. */
4605 if (no_input_reloads)
4606 for (i = 0; i < n_reloads; i++)
4607 gcc_assert (rld[i].in == 0
4608 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4609 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4610 #endif
4611
4612 /* Compute reload_mode and reload_nregs. */
4613 for (i = 0; i < n_reloads; i++)
4614 {
4615 rld[i].mode
4616 = (rld[i].inmode == VOIDmode
4617 || (GET_MODE_SIZE (rld[i].outmode)
4618 > GET_MODE_SIZE (rld[i].inmode)))
4619 ? rld[i].outmode : rld[i].inmode;
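/* I.e. the wider of the input and output modes; VOIDmode has size
   zero, so a reload used in only one direction simply gets the mode
   of that direction.  */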
4620
4621 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4622 }
4623
4624 /* Special case a simple move with an input reload and a
4625 destination of a hard reg; if the hard reg is OK, use it. */
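/* For example, for (set (reg:SI 2) (reg:SI 117)) where pseudo 117
   needs an input reload and hard register 2 belongs to the reload's
   class, the destination itself can serve as the reload register,
   saving a separate spill register.  (The register numbers here are
   only illustrative.)  */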
4626 for (i = 0; i < n_reloads; i++)
4627 if (rld[i].when_needed == RELOAD_FOR_INPUT
4628 && GET_CODE (PATTERN (insn)) == SET
4629 && REG_P (SET_DEST (PATTERN (insn)))
4630 && (SET_SRC (PATTERN (insn)) == rld[i].in
4631 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4632 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4633 {
4634 rtx dest = SET_DEST (PATTERN (insn));
4635 unsigned int regno = REGNO (dest);
4636
4637 if (regno < FIRST_PSEUDO_REGISTER
4638 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4639 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4640 {
4641 int nr = hard_regno_nregs[regno][rld[i].mode];
4642 int ok = 1, nri;
4643
4644 for (nri = 1; nri < nr; nri ++)
4645 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4646 {
4647 ok = 0;
4648 break;
4649 }
4650
4651 if (ok)
4652 rld[i].reg_rtx = dest;
4653 }
4654 }
4655
4656 return retval;
4657 }
4658
4659 /* Return true if alternative number ALTNUM in constraint-string
4660 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4661 MEM gives the reference if it didn't need any reloads, otherwise it
4662 is null. */
4663
4664 static bool
4665 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4666 const char *constraint, int altnum)
4667 {
4668 int c;
4669
4670 /* Skip alternatives before the one requested. */
4671 while (altnum > 0)
4672 {
4673 while (*constraint++ != ',')
4674 ;
4675 altnum--;
4676 }
4677 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4678 If one of them is present, this alternative accepts the result of
4679 passing a constant-pool reference through find_reloads_toplev.
4680
4681 The same is true of extra memory constraints if the address
4682 was reloaded into a register. However, the target may elect
4683 to disallow the original constant address, forcing it to be
4684 reloaded into a register instead. */
4685 for (; (c = *constraint) && c != ',' && c != '#';
4686 constraint += CONSTRAINT_LEN (c, constraint))
4687 {
4688 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4689 return true;
4690 #ifdef EXTRA_CONSTRAINT_STR
4691 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4692 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4693 return true;
4694 #endif
4695 }
4696 return false;
4697 }
4698 \f
4699 /* Scan X for memory references and scan the addresses for reloading.
4700 Also checks for references to "constant" regs that we want to eliminate
4701 and replaces them with the values they stand for.
4702 We may alter X destructively if it contains a reference to such.
4703 If X is just a constant reg, we return the equivalent value
4704 instead of X.
4705
4706 IND_LEVELS says how many levels of indirect addressing this machine
4707 supports.
4708
4709 OPNUM and TYPE identify the purpose of the reload.
4710
4711 IS_SET_DEST is true if X is the destination of a SET, which is not
4712 appropriate to be replaced by a constant.
4713
4714 INSN, if nonzero, is the insn in which we do the reload. It is used
4715 to determine if we may generate output reloads, and where to put USEs
4716 for pseudos that we have to replace with stack slots.
4717
4718 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4719 result of find_reloads_address. */
4720
4721 static rtx
4722 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4723 int ind_levels, int is_set_dest, rtx insn,
4724 int *address_reloaded)
4725 {
4726 RTX_CODE code = GET_CODE (x);
4727
4728 const char *fmt = GET_RTX_FORMAT (code);
4729 int i;
4730 int copied;
4731
4732 if (code == REG)
4733 {
4734 /* This code is duplicated for speed in find_reloads. */
4735 int regno = REGNO (x);
4736 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4737 x = reg_equiv_constant (regno);
4738 #if 0
4739 /* This creates (subreg (mem...)) which would cause an unnecessary
4740 reload of the mem. */
4741 else if (reg_equiv_mem (regno) != 0)
4742 x = reg_equiv_mem (regno);
4743 #endif
4744 else if (reg_equiv_memory_loc (regno)
4745 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4746 {
4747 rtx mem = make_memloc (x, regno);
4748 if (reg_equiv_address (regno)
4749 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4750 {
4751 /* If this is not a toplevel operand, find_reloads doesn't see
4752 this substitution. We have to emit a USE of the pseudo so
4753 that delete_output_reload can see it. */
4754 if (replace_reloads && recog_data.operand[opnum] != x)
4755 /* We mark the USE with QImode so that we recognize it
4756 as one that can be safely deleted at the end of
4757 reload. */
4758 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4759 QImode);
4760 x = mem;
4761 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4762 opnum, type, ind_levels, insn);
4763 if (!rtx_equal_p (x, mem))
4764 push_reg_equiv_alt_mem (regno, x);
4765 if (address_reloaded)
4766 *address_reloaded = i;
4767 }
4768 }
4769 return x;
4770 }
4771 if (code == MEM)
4772 {
4773 rtx tem = x;
4774
4775 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4776 opnum, type, ind_levels, insn);
4777 if (address_reloaded)
4778 *address_reloaded = i;
4779
4780 return tem;
4781 }
4782
4783 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4784 {
4785 /* Check for SUBREG containing a REG that's equivalent to a
4786 constant. If the constant has a known value, truncate it
4787 right now. Similarly if we are extracting a single-word of a
4788 multi-word constant. If the constant is symbolic, allow it
4789 to be substituted normally. push_reload will strip the
4790 subreg later. The constant must not be VOIDmode, because we
4791 will lose the mode of the register (this should never happen
4792 because one of the cases above should handle it). */
4793
4794 int regno = REGNO (SUBREG_REG (x));
4795 rtx tem;
4796
4797 if (regno >= FIRST_PSEUDO_REGISTER
4798 && reg_renumber[regno] < 0
4799 && reg_equiv_constant (regno) != 0)
4800 {
4801 tem =
4802 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4803 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4804 gcc_assert (tem);
4805 if (CONSTANT_P (tem)
4806 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4807 {
4808 tem = force_const_mem (GET_MODE (x), tem);
4809 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4810 &XEXP (tem, 0), opnum, type,
4811 ind_levels, insn);
4812 if (address_reloaded)
4813 *address_reloaded = i;
4814 }
4815 return tem;
4816 }
4817
4818 /* If the subreg contains a reg that will be converted to a mem,
4819 attempt to convert the whole subreg to a (narrower or wider)
4820 memory reference instead. If this succeeds, we're done --
4821 otherwise fall through to check whether the inner reg still
4822 needs address reloads anyway. */
4823
4824 if (regno >= FIRST_PSEUDO_REGISTER
4825 && reg_equiv_memory_loc (regno) != 0)
4826 {
4827 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4828 insn, address_reloaded);
4829 if (tem)
4830 return tem;
4831 }
4832 }
4833
4834 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4835 {
4836 if (fmt[i] == 'e')
4837 {
4838 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4839 ind_levels, is_set_dest, insn,
4840 address_reloaded);
4841 	  /* If we have replaced a reg with its equivalent memory loc -
4842 that can still be handled here e.g. if it's in a paradoxical
4843 subreg - we must make the change in a copy, rather than using
4844 a destructive change. This way, find_reloads can still elect
4845 not to do the change. */
4846 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4847 {
4848 x = shallow_copy_rtx (x);
4849 copied = 1;
4850 }
4851 XEXP (x, i) = new_part;
4852 }
4853 }
4854 return x;
4855 }
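/* For example, if pseudo (reg 117) did not get a hard register and
   reg_equiv_constant (117) is (const_int 5), an operand such as
   (plus:SI (reg:SI 117) (reg:SI 3)) comes back from find_reloads_toplev as
   (plus:SI (const_int 5) (reg:SI 3)).  If the pseudo's equivalence is instead
   a memory location, the replacement is made in a shallow copy of the PLUS,
   so find_reloads can still decide not to use it.  (The register numbers here
   are only illustrative.)  */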
4856
4857 /* Return a mem ref for the memory equivalent of reg REGNO.
4858 This mem ref is not shared with anything. */
4859
4860 static rtx
4861 make_memloc (rtx ad, int regno)
4862 {
4863 /* We must rerun eliminate_regs, in case the elimination
4864 offsets have changed. */
4865 rtx tem
4866 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4867 0);
4868
4869 /* If TEM might contain a pseudo, we must copy it to avoid
4870 modifying it when we do the substitution for the reload. */
4871 if (rtx_varies_p (tem, 0))
4872 tem = copy_rtx (tem);
4873
4874 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4875 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4876
4877 /* Copy the result if it's still the same as the equivalence, to avoid
4878 modifying it when we do the substitution for the reload. */
4879 if (tem == reg_equiv_memory_loc (regno))
4880 tem = copy_rtx (tem);
4881 return tem;
4882 }
4883
4884 /* Returns true if AD could be turned into a valid memory reference
4885 to mode MODE in address space AS by reloading the part pointed to
4886 by PART into a register. */
4887
4888 static int
4889 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4890 addr_space_t as, rtx *part)
4891 {
4892 int retv;
4893 rtx tem = *part;
4894 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4895
4896 *part = reg;
4897 retv = memory_address_addr_space_p (mode, ad, as);
4898 *part = tem;
4899
4900 return retv;
4901 }
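/* For instance, if AD is (plus (reg 117) (const_int 8)) and PART points at
   the inner (reg 117), the question asked above is whether
   (plus (reg N) (const_int 8)) -- with N a brand-new register number --
   would pass the non-strict legitimate-address check, i.e. roughly whether
   the address could be made valid by reloading that part into some suitable
   register.  The original contents of *PART are restored before returning,
   so AD itself is left unchanged.  (Register numbers are only
   illustrative.)  */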
4902
4903 /* Record all reloads needed for handling memory address AD
4904 which appears in *LOC in a memory reference to mode MODE
4905 which itself is found in location *MEMREFLOC.
4906 Note that we take shortcuts assuming that no multi-reg machine mode
4907 occurs as part of an address.
4908
4909 OPNUM and TYPE specify the purpose of this reload.
4910
4911 IND_LEVELS says how many levels of indirect addressing this machine
4912 supports.
4913
4914 INSN, if nonzero, is the insn in which we do the reload. It is used
4915 to determine if we may generate output reloads, and where to put USEs
4916 for pseudos that we have to replace with stack slots.
4917
4918 Value is one if this address is reloaded or replaced as a whole; it is
4919 zero if the top level of this address was not reloaded or replaced, and
4920 it is -1 if it may or may not have been reloaded or replaced.
4921
4922 Note that there is no verification that the address will be valid after
4923 this routine does its work. Instead, we rely on the fact that the address
4924 was valid when reload started. So we need only undo things that reload
4925 could have broken. These are wrong register types, pseudos not allocated
4926 to a hard register, and frame pointer elimination. */
4927
4928 static int
4929 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4930 rtx *loc, int opnum, enum reload_type type,
4931 int ind_levels, rtx insn)
4932 {
4933 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4934 : ADDR_SPACE_GENERIC;
4935 int regno;
4936 int removed_and = 0;
4937 int op_index;
4938 rtx tem;
4939
4940 /* If the address is a register, see if it is a legitimate address and
4941 reload if not. We first handle the cases where we need not reload
4942 or where we must reload in a non-standard way. */
4943
4944 if (REG_P (ad))
4945 {
4946 regno = REGNO (ad);
4947
4948 if (reg_equiv_constant (regno) != 0)
4949 {
4950 find_reloads_address_part (reg_equiv_constant (regno), loc,
4951 base_reg_class (mode, as, MEM, SCRATCH),
4952 GET_MODE (ad), opnum, type, ind_levels);
4953 return 1;
4954 }
4955
4956 tem = reg_equiv_memory_loc (regno);
4957 if (tem != 0)
4958 {
4959 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4960 {
4961 tem = make_memloc (ad, regno);
4962 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4963 XEXP (tem, 0),
4964 MEM_ADDR_SPACE (tem)))
4965 {
4966 rtx orig = tem;
4967
4968 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4969 &XEXP (tem, 0), opnum,
4970 ADDR_TYPE (type), ind_levels, insn);
4971 if (!rtx_equal_p (tem, orig))
4972 push_reg_equiv_alt_mem (regno, tem);
4973 }
4974 /* We can avoid a reload if the register's equivalent memory
4975 expression is valid as an indirect memory address.
4976 But not all addresses are valid in a mem used as an indirect
4977 address: only reg or reg+constant. */
4978
4979 if (ind_levels > 0
4980 && strict_memory_address_addr_space_p (mode, tem, as)
4981 && (REG_P (XEXP (tem, 0))
4982 || (GET_CODE (XEXP (tem, 0)) == PLUS
4983 && REG_P (XEXP (XEXP (tem, 0), 0))
4984 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4985 {
4986 /* TEM is not the same as what we'll be replacing the
4987 		     pseudo with after reload, so put a USE in front of INSN
4988 in the final reload pass. */
4989 if (replace_reloads
4990 && num_not_at_initial_offset
4991 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4992 {
4993 *loc = tem;
4994 /* We mark the USE with QImode so that we
4995 recognize it as one that can be safely
4996 deleted at the end of reload. */
4997 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4998 insn), QImode);
4999
5000 /* This doesn't really count as replacing the address
5001 as a whole, since it is still a memory access. */
5002 }
5003 return 0;
5004 }
5005 ad = tem;
5006 }
5007 }
5008
5009 /* The only remaining case where we can avoid a reload is if this is a
5010 hard register that is valid as a base register and which is not the
5011 subject of a CLOBBER in this insn. */
5012
5013 else if (regno < FIRST_PSEUDO_REGISTER
5014 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
5015 && ! regno_clobbered_p (regno, this_insn, mode, 0))
5016 return 0;
5017
5018 /* If we do not have one of the cases above, we must do the reload. */
5019 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
5020 base_reg_class (mode, as, MEM, SCRATCH),
5021 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
5022 return 1;
5023 }
5024
5025 if (strict_memory_address_addr_space_p (mode, ad, as))
5026 {
5027 /* The address appears valid, so reloads are not needed.
5028 But the address may contain an eliminable register.
5029 This can happen because a machine with indirect addressing
5030 may consider a pseudo register by itself a valid address even when
5031 it has failed to get a hard reg.
5032 So do a tree-walk to find and eliminate all such regs. */
5033
5034 /* But first quickly dispose of a common case. */
5035 if (GET_CODE (ad) == PLUS
5036 && CONST_INT_P (XEXP (ad, 1))
5037 && REG_P (XEXP (ad, 0))
5038 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
5039 return 0;
5040
5041 subst_reg_equivs_changed = 0;
5042 *loc = subst_reg_equivs (ad, insn);
5043
5044 if (! subst_reg_equivs_changed)
5045 return 0;
5046
5047 /* Check result for validity after substitution. */
5048 if (strict_memory_address_addr_space_p (mode, ad, as))
5049 return 0;
5050 }
5051
5052 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5053 do
5054 {
5055 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5056 {
5057 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5058 ind_levels, win);
5059 }
5060 break;
5061 win:
5062 *memrefloc = copy_rtx (*memrefloc);
5063 XEXP (*memrefloc, 0) = ad;
5064 move_replacements (&ad, &XEXP (*memrefloc, 0));
5065 return -1;
5066 }
5067 while (0);
5068 #endif
5069
5070 /* The address is not valid. We have to figure out why. First see if
5071 we have an outer AND and remove it if so. Then analyze what's inside. */
5072
5073 if (GET_CODE (ad) == AND)
5074 {
5075 removed_and = 1;
5076 loc = &XEXP (ad, 0);
5077 ad = *loc;
5078 }
5079
5080 /* One possibility for why the address is invalid is that it is itself
5081 a MEM. This can happen when the frame pointer is being eliminated, a
5082 pseudo is not allocated to a hard register, and the offset between the
5083 frame and stack pointers is not its initial value. In that case the
5084 pseudo will have been replaced by a MEM referring to the
5085 stack pointer. */
5086 if (MEM_P (ad))
5087 {
5088 /* First ensure that the address in this MEM is valid. Then, unless
5089 indirect addresses are valid, reload the MEM into a register. */
5090 tem = ad;
5091 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5092 opnum, ADDR_TYPE (type),
5093 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5094
5095 /* If tem was changed, then we must create a new memory reference to
5096 hold it and store it back into memrefloc. */
5097 if (tem != ad && memrefloc)
5098 {
5099 *memrefloc = copy_rtx (*memrefloc);
5100 copy_replacements (tem, XEXP (*memrefloc, 0));
5101 loc = &XEXP (*memrefloc, 0);
5102 if (removed_and)
5103 loc = &XEXP (*loc, 0);
5104 }
5105
5106       /* Check cases similar to the indirect-address cases above, except
5107 that we can allow pseudos and a MEM since they should have been
5108 taken care of above. */
5109
5110 if (ind_levels == 0
5111 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5112 || MEM_P (XEXP (tem, 0))
5113 || ! (REG_P (XEXP (tem, 0))
5114 || (GET_CODE (XEXP (tem, 0)) == PLUS
5115 && REG_P (XEXP (XEXP (tem, 0), 0))
5116 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5117 {
5118 /* Must use TEM here, not AD, since it is the one that will
5119 have any subexpressions reloaded, if needed. */
5120 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5121 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5122 VOIDmode, 0,
5123 0, opnum, type);
5124 return ! removed_and;
5125 }
5126 else
5127 return 0;
5128 }
5129
5130   /* If we have the address of a stack slot but it's not valid because the
5131 displacement is too large, compute the sum in a register.
5132 Handle all base registers here, not just fp/ap/sp, because on some
5133 targets (namely SH) we can also get too large displacements from
5134 big-endian corrections. */
5135 else if (GET_CODE (ad) == PLUS
5136 && REG_P (XEXP (ad, 0))
5137 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5138 && CONST_INT_P (XEXP (ad, 1))
5139 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5140 CONST_INT)
5141 /* Similarly, if we were to reload the base register and the
5142 mem+offset address is still invalid, then we want to reload
5143 the whole address, not just the base register. */
5144 || ! maybe_memory_address_addr_space_p
5145 (mode, ad, as, &(XEXP (ad, 0)))))
5146
5147 {
5148 /* Unshare the MEM rtx so we can safely alter it. */
5149 if (memrefloc)
5150 {
5151 *memrefloc = copy_rtx (*memrefloc);
5152 loc = &XEXP (*memrefloc, 0);
5153 if (removed_and)
5154 loc = &XEXP (*loc, 0);
5155 }
5156
5157 if (double_reg_address_ok
5158 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5159 PLUS, CONST_INT))
5160 {
5161 /* Unshare the sum as well. */
5162 *loc = ad = copy_rtx (ad);
5163
5164 /* Reload the displacement into an index reg.
5165 We assume the frame pointer or arg pointer is a base reg. */
5166 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5167 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5168 type, ind_levels);
5169 return 0;
5170 }
5171 else
5172 {
5173 /* If the sum of two regs is not necessarily valid,
5174 reload the sum into a base reg.
5175 That will at least work. */
5176 find_reloads_address_part (ad, loc,
5177 base_reg_class (mode, as, MEM, SCRATCH),
5178 GET_MODE (ad), opnum, type, ind_levels);
5179 }
5180 return ! removed_and;
5181 }
5182
5183 /* If we have an indexed stack slot, there are three possible reasons why
5184 it might be invalid: The index might need to be reloaded, the address
5185 might have been made by frame pointer elimination and hence have a
5186 constant out of range, or both reasons might apply.
5187
5188 We can easily check for an index needing reload, but even if that is the
5189 case, we might also have an invalid constant. To avoid making the
5190 conservative assumption and requiring two reloads, we see if this address
5191 is valid when not interpreted strictly. If it is, the only problem is
5192 that the index needs a reload and find_reloads_address_1 will take care
5193 of it.
5194
5195 Handle all base registers here, not just fp/ap/sp, because on some
5196 targets (namely SPARC) we can also get invalid addresses from preventive
5197 subreg big-endian corrections made by find_reloads_toplev. We
5198 can also get expressions involving LO_SUM (rather than PLUS) from
5199 find_reloads_subreg_address.
5200
5201 If we decide to do something, it must be that `double_reg_address_ok'
5202 is true. We generate a reload of the base register + constant and
5203 rework the sum so that the reload register will be added to the index.
5204 This is safe because we know the address isn't shared.
5205
5206 We check for the base register as both the first and second operand of
5207 the innermost PLUS and/or LO_SUM. */
5208
5209 for (op_index = 0; op_index < 2; ++op_index)
5210 {
5211 rtx operand, addend;
5212 enum rtx_code inner_code;
5213
5214 if (GET_CODE (ad) != PLUS)
5215 continue;
5216
5217 inner_code = GET_CODE (XEXP (ad, 0));
5218 if (!(GET_CODE (ad) == PLUS
5219 && CONST_INT_P (XEXP (ad, 1))
5220 && (inner_code == PLUS || inner_code == LO_SUM)))
5221 continue;
5222
5223 operand = XEXP (XEXP (ad, 0), op_index);
5224 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5225 continue;
5226
5227 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5228
5229 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5230 GET_CODE (addend))
5231 || operand == frame_pointer_rtx
5232 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5233 || operand == hard_frame_pointer_rtx
5234 #endif
5235 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5236 || operand == arg_pointer_rtx
5237 #endif
5238 || operand == stack_pointer_rtx)
5239 && ! maybe_memory_address_addr_space_p
5240 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5241 {
5242 rtx offset_reg;
5243 enum reg_class cls;
5244
5245 offset_reg = plus_constant (GET_MODE (ad), operand,
5246 INTVAL (XEXP (ad, 1)));
5247
5248 /* Form the adjusted address. */
5249 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5250 ad = gen_rtx_PLUS (GET_MODE (ad),
5251 op_index == 0 ? offset_reg : addend,
5252 op_index == 0 ? addend : offset_reg);
5253 else
5254 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5255 op_index == 0 ? offset_reg : addend,
5256 op_index == 0 ? addend : offset_reg);
5257 *loc = ad;
5258
5259 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5260 find_reloads_address_part (XEXP (ad, op_index),
5261 &XEXP (ad, op_index), cls,
5262 GET_MODE (ad), opnum, type, ind_levels);
5263 find_reloads_address_1 (mode, as,
5264 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5265 GET_CODE (XEXP (ad, op_index)),
5266 &XEXP (ad, 1 - op_index), opnum,
5267 type, 0, insn);
5268
5269 return 0;
5270 }
5271 }
5272
5273 /* See if address becomes valid when an eliminable register
5274 in a sum is replaced. */
5275
5276 tem = ad;
5277 if (GET_CODE (ad) == PLUS)
5278 tem = subst_indexed_address (ad);
5279 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5280 {
5281 /* Ok, we win that way. Replace any additional eliminable
5282 registers. */
5283
5284 subst_reg_equivs_changed = 0;
5285 tem = subst_reg_equivs (tem, insn);
5286
5287 /* Make sure that didn't make the address invalid again. */
5288
5289 if (! subst_reg_equivs_changed
5290 || strict_memory_address_addr_space_p (mode, tem, as))
5291 {
5292 *loc = tem;
5293 return 0;
5294 }
5295 }
5296
5297 /* If constants aren't valid addresses, reload the constant address
5298 into a register. */
5299 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5300 {
5301 enum machine_mode address_mode = GET_MODE (ad);
5302 if (address_mode == VOIDmode)
5303 address_mode = targetm.addr_space.address_mode (as);
5304
5305 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5306 Unshare it so we can safely alter it. */
5307 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5308 && CONSTANT_POOL_ADDRESS_P (ad))
5309 {
5310 *memrefloc = copy_rtx (*memrefloc);
5311 loc = &XEXP (*memrefloc, 0);
5312 if (removed_and)
5313 loc = &XEXP (*loc, 0);
5314 }
5315
5316 find_reloads_address_part (ad, loc,
5317 base_reg_class (mode, as, MEM, SCRATCH),
5318 address_mode, opnum, type, ind_levels);
5319 return ! removed_and;
5320 }
5321
5322 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5323 opnum, type, ind_levels, insn);
5324 }
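/* Examples of the return-value convention above: when AD is just a pseudo
   that got no hard register and has no usable equivalence, the pseudo itself
   is reloaded into a base register and 1 is returned (the address was
   replaced as a whole); when AD is already strictly valid, or when only the
   out-of-range displacement of a stack-slot address is reloaded into an
   index register, 0 is returned; -1 can only come from a target's
   LEGITIMIZE_RELOAD_ADDRESS.  */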
5325 \f
5326 /* Find all pseudo regs appearing in AD
5327 that are eliminable in favor of equivalent values
5328 and do not have hard regs; replace them by their equivalents.
5329 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5330 front of it for pseudos that we have to replace with stack slots. */
5331
5332 static rtx
5333 subst_reg_equivs (rtx ad, rtx insn)
5334 {
5335 RTX_CODE code = GET_CODE (ad);
5336 int i;
5337 const char *fmt;
5338
5339 switch (code)
5340 {
5341 case HIGH:
5342 case CONST:
5343 CASE_CONST_ANY:
5344 case SYMBOL_REF:
5345 case LABEL_REF:
5346 case PC:
5347 case CC0:
5348 return ad;
5349
5350 case REG:
5351 {
5352 int regno = REGNO (ad);
5353
5354 if (reg_equiv_constant (regno) != 0)
5355 {
5356 subst_reg_equivs_changed = 1;
5357 return reg_equiv_constant (regno);
5358 }
5359 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5360 {
5361 rtx mem = make_memloc (ad, regno);
5362 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5363 {
5364 subst_reg_equivs_changed = 1;
5365 /* We mark the USE with QImode so that we recognize it
5366 as one that can be safely deleted at the end of
5367 reload. */
5368 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5369 QImode);
5370 return mem;
5371 }
5372 }
5373 }
5374 return ad;
5375
5376 case PLUS:
5377 /* Quickly dispose of a common case. */
5378 if (XEXP (ad, 0) == frame_pointer_rtx
5379 && CONST_INT_P (XEXP (ad, 1)))
5380 return ad;
5381 break;
5382
5383 default:
5384 break;
5385 }
5386
5387 fmt = GET_RTX_FORMAT (code);
5388 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5389 if (fmt[i] == 'e')
5390 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5391 return ad;
5392 }
5393 \f
5394 /* Compute the sum of X and Y, making canonicalizations assumed in an
5395 address, namely: sum constant integers, surround the sum of two
5396 constants with a CONST, put the constant as the second operand, and
5397 group the constant on the outermost sum.
5398
5399 This routine assumes both inputs are already in canonical form. */
5400
5401 rtx
5402 form_sum (enum machine_mode mode, rtx x, rtx y)
5403 {
5404 rtx tem;
5405
5406 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5407 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5408
5409 if (CONST_INT_P (x))
5410 return plus_constant (mode, y, INTVAL (x));
5411 else if (CONST_INT_P (y))
5412 return plus_constant (mode, x, INTVAL (y));
5413 else if (CONSTANT_P (x))
5414 tem = x, x = y, y = tem;
5415
5416 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5417 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5418
5419 /* Note that if the operands of Y are specified in the opposite
5420 order in the recursive calls below, infinite recursion will occur. */
5421 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5422 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5423
5424 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5425 constant will have been placed second. */
5426 if (CONSTANT_P (x) && CONSTANT_P (y))
5427 {
5428 if (GET_CODE (x) == CONST)
5429 x = XEXP (x, 0);
5430 if (GET_CODE (y) == CONST)
5431 y = XEXP (y, 0);
5432
5433 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5434 }
5435
5436 return gen_rtx_PLUS (mode, x, y);
5437 }
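/* A couple of concrete cases, assuming Pmode addresses and purely
   illustrative register numbers:

     form_sum (Pmode, gen_rtx_REG (Pmode, 3), GEN_INT (4))
       => (plus (reg 3) (const_int 4))
     form_sum (Pmode, gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 3),
				    GEN_INT (4)), GEN_INT (8))
       => (plus (reg 3) (const_int 12))

   while a fully constant sum, such as a SYMBOL_REF plus a CONST_INT, comes
   back wrapped in a CONST so the result stays in canonical address form.  */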
5438 \f
5439 /* If ADDR is a sum containing a pseudo register that should be
5440 replaced with a constant (from reg_equiv_constant),
5441 return the result of doing so, and also apply the associative
5442 law so that the result is more likely to be a valid address.
5443 (But it is not guaranteed to be one.)
5444
5445 Note that at most one register is replaced, even if more are
5446 replaceable. Also, we try to put the result into a canonical form
5447 so it is more likely to be a valid address.
5448
5449 In all other cases, return ADDR. */
5450
5451 static rtx
5452 subst_indexed_address (rtx addr)
5453 {
5454 rtx op0 = 0, op1 = 0, op2 = 0;
5455 rtx tem;
5456 int regno;
5457
5458 if (GET_CODE (addr) == PLUS)
5459 {
5460 /* Try to find a register to replace. */
5461 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5462 if (REG_P (op0)
5463 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5464 && reg_renumber[regno] < 0
5465 && reg_equiv_constant (regno) != 0)
5466 op0 = reg_equiv_constant (regno);
5467 else if (REG_P (op1)
5468 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5469 && reg_renumber[regno] < 0
5470 && reg_equiv_constant (regno) != 0)
5471 op1 = reg_equiv_constant (regno);
5472 else if (GET_CODE (op0) == PLUS
5473 && (tem = subst_indexed_address (op0)) != op0)
5474 op0 = tem;
5475 else if (GET_CODE (op1) == PLUS
5476 && (tem = subst_indexed_address (op1)) != op1)
5477 op1 = tem;
5478 else
5479 return addr;
5480
5481 /* Pick out up to three things to add. */
5482 if (GET_CODE (op1) == PLUS)
5483 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5484 else if (GET_CODE (op0) == PLUS)
5485 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5486
5487 /* Compute the sum. */
5488 if (op2 != 0)
5489 op1 = form_sum (GET_MODE (addr), op1, op2);
5490 if (op1 != 0)
5491 op0 = form_sum (GET_MODE (addr), op0, op1);
5492
5493 return op0;
5494 }
5495 return addr;
5496 }
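/* For example, if pseudo (reg 117) got no hard register and
   reg_equiv_constant (117) is (const_int 64), then subst_indexed_address
   applied to (plus (reg 117) (reg 3)) substitutes the constant and re-forms
   the sum, yielding (plus (reg 3) (const_int 64)).  Whether that is actually
   a valid address is still up to the caller to check.  (Register numbers are
   only illustrative.)  */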
5497 \f
5498 /* Update the REG_INC notes for an insn. It updates all REG_INC
5499    notes for the instruction which refer to REGNO so that they refer
5500 to the reload number.
5501
5502 INSN is the insn for which any REG_INC notes need updating.
5503
5504 REGNO is the register number which has been reloaded.
5505
5506 RELOADNUM is the reload number. */
5507
5508 static void
5509 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5510 int reloadnum ATTRIBUTE_UNUSED)
5511 {
5512 #ifdef AUTO_INC_DEC
5513 rtx link;
5514
5515 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5516 if (REG_NOTE_KIND (link) == REG_INC
5517 && (int) REGNO (XEXP (link, 0)) == regno)
5518 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5519 #endif
5520 }
5521 \f
5522 /* Record the pseudo registers we must reload into hard registers in a
5523 subexpression of a would-be memory address, X referring to a value
5524 in mode MODE. (This function is not called if the address we find
5525 is strictly valid.)
5526
5527 CONTEXT = 1 means we are considering regs as index regs,
5528 = 0 means we are considering them as base regs.
5529 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5530 or an autoinc code.
5531 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5532 is the code of the index part of the address. Otherwise, pass SCRATCH
5533 for this argument.
5534 OPNUM and TYPE specify the purpose of any reloads made.
5535
5536 IND_LEVELS says how many levels of indirect addressing are
5537 supported at this point in the address.
5538
5539 INSN, if nonzero, is the insn in which we do the reload. It is used
5540 to determine if we may generate output reloads.
5541
5542 We return nonzero if X, as a whole, is reloaded or replaced. */
5543
5544 /* Note that we take shortcuts assuming that no multi-reg machine mode
5545 occurs as part of an address.
5546 Also, this is not fully machine-customizable; it works for machines
5547 such as VAXen and 68000's and 32000's, but other possible machines
5548 could have addressing modes that this does not handle right.
5549 If you add push_reload calls here, you need to make sure gen_reload
5550 handles those cases gracefully. */
5551
5552 static int
5553 find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
5554 rtx x, int context,
5555 enum rtx_code outer_code, enum rtx_code index_code,
5556 rtx *loc, int opnum, enum reload_type type,
5557 int ind_levels, rtx insn)
5558 {
5559 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5560 ((CONTEXT) == 0 \
5561 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5562 : REGNO_OK_FOR_INDEX_P (REGNO))
5563
5564 enum reg_class context_reg_class;
5565 RTX_CODE code = GET_CODE (x);
5566
5567 if (context == 1)
5568 context_reg_class = INDEX_REG_CLASS;
5569 else
5570 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5571
5572 switch (code)
5573 {
5574 case PLUS:
5575 {
5576 rtx orig_op0 = XEXP (x, 0);
5577 rtx orig_op1 = XEXP (x, 1);
5578 RTX_CODE code0 = GET_CODE (orig_op0);
5579 RTX_CODE code1 = GET_CODE (orig_op1);
5580 rtx op0 = orig_op0;
5581 rtx op1 = orig_op1;
5582
5583 if (GET_CODE (op0) == SUBREG)
5584 {
5585 op0 = SUBREG_REG (op0);
5586 code0 = GET_CODE (op0);
5587 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5588 op0 = gen_rtx_REG (word_mode,
5589 (REGNO (op0) +
5590 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5591 GET_MODE (SUBREG_REG (orig_op0)),
5592 SUBREG_BYTE (orig_op0),
5593 GET_MODE (orig_op0))));
5594 }
5595
5596 if (GET_CODE (op1) == SUBREG)
5597 {
5598 op1 = SUBREG_REG (op1);
5599 code1 = GET_CODE (op1);
5600 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5601 /* ??? Why is this given op1's mode and above for
5602 ??? op0 SUBREGs we use word_mode? */
5603 op1 = gen_rtx_REG (GET_MODE (op1),
5604 (REGNO (op1) +
5605 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5606 GET_MODE (SUBREG_REG (orig_op1)),
5607 SUBREG_BYTE (orig_op1),
5608 GET_MODE (orig_op1))));
5609 }
5610 	/* A PLUS in the index register may be created only as a result of
5611 	   register rematerialization for an expression like &localvar*4.  Reload it.
5612 It may be possible to combine the displacement on the outer level,
5613 but it is probably not worthwhile to do so. */
5614 if (context == 1)
5615 {
5616 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5617 opnum, ADDR_TYPE (type), ind_levels, insn);
5618 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5619 context_reg_class,
5620 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5621 return 1;
5622 }
5623
5624 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5625 || code0 == ZERO_EXTEND || code1 == MEM)
5626 {
5627 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5633 }
5634
5635 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5636 || code1 == ZERO_EXTEND || code0 == MEM)
5637 {
5638 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5642 &XEXP (x, 1), opnum, type, ind_levels,
5643 insn);
5644 }
5645
5646 else if (code0 == CONST_INT || code0 == CONST
5647 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5648 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5649 &XEXP (x, 1), opnum, type, ind_levels,
5650 insn);
5651
5652 else if (code1 == CONST_INT || code1 == CONST
5653 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5654 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5655 &XEXP (x, 0), opnum, type, ind_levels,
5656 insn);
5657
5658 else if (code0 == REG && code1 == REG)
5659 {
5660 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5661 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5662 return 0;
5663 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5664 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5665 return 0;
5666 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5667 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5668 &XEXP (x, 1), opnum, type, ind_levels,
5669 insn);
5670 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5671 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5672 &XEXP (x, 0), opnum, type, ind_levels,
5673 insn);
5674 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5675 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5676 &XEXP (x, 0), opnum, type, ind_levels,
5677 insn);
5678 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5679 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5680 &XEXP (x, 1), opnum, type, ind_levels,
5681 insn);
5682 else
5683 {
5684 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5685 &XEXP (x, 0), opnum, type, ind_levels,
5686 insn);
5687 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5688 &XEXP (x, 1), opnum, type, ind_levels,
5689 insn);
5690 }
5691 }
5692
5693 else if (code0 == REG)
5694 {
5695 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5696 &XEXP (x, 0), opnum, type, ind_levels,
5697 insn);
5698 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5699 &XEXP (x, 1), opnum, type, ind_levels,
5700 insn);
5701 }
5702
5703 else if (code1 == REG)
5704 {
5705 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5706 &XEXP (x, 1), opnum, type, ind_levels,
5707 insn);
5708 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5709 &XEXP (x, 0), opnum, type, ind_levels,
5710 insn);
5711 }
5712 }
5713
5714 return 0;
5715
5716 case POST_MODIFY:
5717 case PRE_MODIFY:
5718 {
5719 rtx op0 = XEXP (x, 0);
5720 rtx op1 = XEXP (x, 1);
5721 enum rtx_code index_code;
5722 int regno;
5723 int reloadnum;
5724
5725 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5726 return 0;
5727
5728 /* Currently, we only support {PRE,POST}_MODIFY constructs
5729 where a base register is {inc,dec}remented by the contents
5730 of another register or by a constant value. Thus, these
5731 operands must match. */
5732 gcc_assert (op0 == XEXP (op1, 0));
5733
5734 /* Require index register (or constant). Let's just handle the
5735 register case in the meantime... If the target allows
5736 auto-modify by a constant then we could try replacing a pseudo
5737 register with its equivalent constant where applicable.
5738
5739 We also handle the case where the register was eliminated
5740 resulting in a PLUS subexpression.
5741
5742 If we later decide to reload the whole PRE_MODIFY or
5743 POST_MODIFY, inc_for_reload might clobber the reload register
5744 before reading the index. The index register might therefore
5745 need to live longer than a TYPE reload normally would, so be
5746 conservative and class it as RELOAD_OTHER. */
5747 if ((REG_P (XEXP (op1, 1))
5748 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5749 || GET_CODE (XEXP (op1, 1)) == PLUS)
5750 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5751 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5752 ind_levels, insn);
5753
5754 gcc_assert (REG_P (XEXP (op1, 0)));
5755
5756 regno = REGNO (XEXP (op1, 0));
5757 index_code = GET_CODE (XEXP (op1, 1));
5758
5759 /* A register that is incremented cannot be constant! */
5760 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5761 || reg_equiv_constant (regno) == 0);
5762
5763 /* Handle a register that is equivalent to a memory location
5764 which cannot be addressed directly. */
5765 if (reg_equiv_memory_loc (regno) != 0
5766 && (reg_equiv_address (regno) != 0
5767 || num_not_at_initial_offset))
5768 {
5769 rtx tem = make_memloc (XEXP (x, 0), regno);
5770
5771 if (reg_equiv_address (regno)
5772 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5773 {
5774 rtx orig = tem;
5775
5776 /* First reload the memory location's address.
5777 We can't use ADDR_TYPE (type) here, because we need to
5778 write back the value after reading it, hence we actually
5779 need two registers. */
5780 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5781 &XEXP (tem, 0), opnum,
5782 RELOAD_OTHER,
5783 ind_levels, insn);
5784
5785 if (!rtx_equal_p (tem, orig))
5786 push_reg_equiv_alt_mem (regno, tem);
5787
5788 /* Then reload the memory location into a base
5789 register. */
5790 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5791 &XEXP (op1, 0),
5792 base_reg_class (mode, as,
5793 code, index_code),
5794 GET_MODE (x), GET_MODE (x), 0,
5795 0, opnum, RELOAD_OTHER);
5796
5797 update_auto_inc_notes (this_insn, regno, reloadnum);
5798 return 0;
5799 }
5800 }
5801
5802 if (reg_renumber[regno] >= 0)
5803 regno = reg_renumber[regno];
5804
5805 /* We require a base register here... */
5806 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5807 {
5808 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5809 &XEXP (op1, 0), &XEXP (x, 0),
5810 base_reg_class (mode, as,
5811 code, index_code),
5812 GET_MODE (x), GET_MODE (x), 0, 0,
5813 opnum, RELOAD_OTHER);
5814
5815 update_auto_inc_notes (this_insn, regno, reloadnum);
5816 return 0;
5817 }
5818 }
5819 return 0;
5820
5821 case POST_INC:
5822 case POST_DEC:
5823 case PRE_INC:
5824 case PRE_DEC:
5825 if (REG_P (XEXP (x, 0)))
5826 {
5827 int regno = REGNO (XEXP (x, 0));
5828 int value = 0;
5829 rtx x_orig = x;
5830
5831 /* A register that is incremented cannot be constant! */
5832 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5833 || reg_equiv_constant (regno) == 0);
5834
5835 /* Handle a register that is equivalent to a memory location
5836 which cannot be addressed directly. */
5837 if (reg_equiv_memory_loc (regno) != 0
5838 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5839 {
5840 rtx tem = make_memloc (XEXP (x, 0), regno);
5841 if (reg_equiv_address (regno)
5842 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5843 {
5844 rtx orig = tem;
5845
5846 /* First reload the memory location's address.
5847 We can't use ADDR_TYPE (type) here, because we need to
5848 write back the value after reading it, hence we actually
5849 need two registers. */
5850 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5851 &XEXP (tem, 0), opnum, type,
5852 ind_levels, insn);
5853 if (!rtx_equal_p (tem, orig))
5854 push_reg_equiv_alt_mem (regno, tem);
5855 /* Put this inside a new increment-expression. */
5856 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5857 /* Proceed to reload that, as if it contained a register. */
5858 }
5859 }
5860
5861 	  /* If we have a hard register that is ok in this incdec context,
5862 	     don't make a reload.  If the register isn't nice enough for
5863 	     autoincdec, we can reload it.  But if an autoincrement of a
5864 	     register that we have just verified as acceptable here still
5865 	     isn't "valid" to the outside, then no autoincrement can be
5866 	     "valid" at all.  If that is true and something made an
5867 	     autoincrement anyway, this must be a special context where one
5868 	     is allowed.  (For example, a "push" instruction.)
5869 	     We can't improve this address, so leave it alone.  */
5870
5871 /* Otherwise, reload the autoincrement into a suitable hard reg
5872 and record how much to increment by. */
5873
5874 if (reg_renumber[regno] >= 0)
5875 regno = reg_renumber[regno];
5876 if (regno >= FIRST_PSEUDO_REGISTER
5877 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5878 index_code))
5879 {
5880 int reloadnum;
5881
5882 	      /* If we can output the register afterwards, do so; this
5883 saves the extra update.
5884 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5885 CALL_INSN - and it does not set CC0.
5886 But don't do this if we cannot directly address the
5887 memory location, since this will make it harder to
5888 reuse address reloads, and increases register pressure.
5889 Also don't do this if we can probably update x directly. */
5890 rtx equiv = (MEM_P (XEXP (x, 0))
5891 ? XEXP (x, 0)
5892 : reg_equiv_mem (regno));
5893 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5894 if (insn && NONJUMP_INSN_P (insn) && equiv
5895 && memory_operand (equiv, GET_MODE (equiv))
5896 #ifdef HAVE_cc0
5897 && ! sets_cc0_p (PATTERN (insn))
5898 #endif
5899 && ! (icode != CODE_FOR_nothing
5900 && insn_operand_matches (icode, 0, equiv)
5901 && insn_operand_matches (icode, 1, equiv)))
5902 {
5903 /* We use the original pseudo for loc, so that
5904 emit_reload_insns() knows which pseudo this
5905 reload refers to and updates the pseudo rtx, not
5906 its equivalent memory location, as well as the
5907 corresponding entry in reg_last_reload_reg. */
5908 loc = &XEXP (x_orig, 0);
5909 x = XEXP (x, 0);
5910 reloadnum
5911 = push_reload (x, x, loc, loc,
5912 context_reg_class,
5913 GET_MODE (x), GET_MODE (x), 0, 0,
5914 opnum, RELOAD_OTHER);
5915 }
5916 else
5917 {
5918 reloadnum
5919 = push_reload (x, x, loc, (rtx*) 0,
5920 context_reg_class,
5921 GET_MODE (x), GET_MODE (x), 0, 0,
5922 opnum, type);
5923 rld[reloadnum].inc
5924 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5925
5926 value = 1;
5927 }
5928
5929 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5930 reloadnum);
5931 }
5932 return value;
5933 }
5934 return 0;
5935
5936 case TRUNCATE:
5937 case SIGN_EXTEND:
5938 case ZERO_EXTEND:
5939 /* Look for parts to reload in the inner expression and reload them
5940 too, in addition to this operation. Reloading all inner parts in
5941 addition to this one shouldn't be necessary, but at this point,
5942 we don't know if we can possibly omit any part that *can* be
5943 reloaded. Targets that are better off reloading just either part
5944 (or perhaps even a different part of an outer expression), should
5945 define LEGITIMIZE_RELOAD_ADDRESS. */
5946 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5947 context, code, SCRATCH, &XEXP (x, 0), opnum,
5948 type, ind_levels, insn);
5949 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5950 context_reg_class,
5951 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5952 return 1;
5953
5954 case MEM:
5955 /* This is probably the result of a substitution, by eliminate_regs, of
5956 an equivalent address for a pseudo that was not allocated to a hard
5957 register. Verify that the specified address is valid and reload it
5958 into a register.
5959
5960 Since we know we are going to reload this item, don't decrement for
5961 the indirection level.
5962
5963 Note that this is actually conservative: it would be slightly more
5964 efficient to use the value of SPILL_INDIRECT_LEVELS from
5965 reload1.c here. */
5966
5967 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5968 opnum, ADDR_TYPE (type), ind_levels, insn);
5969 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5970 context_reg_class,
5971 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5972 return 1;
5973
5974 case REG:
5975 {
5976 int regno = REGNO (x);
5977
5978 if (reg_equiv_constant (regno) != 0)
5979 {
5980 find_reloads_address_part (reg_equiv_constant (regno), loc,
5981 context_reg_class,
5982 GET_MODE (x), opnum, type, ind_levels);
5983 return 1;
5984 }
5985
5986 #if 0 /* This might confuse the code in reload1.c that deletes a prior
5987 	 output-reload feeding this insn.  */
5988 if (reg_equiv_mem (regno) != 0)
5989 {
5990 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5991 context_reg_class,
5992 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5993 return 1;
5994 }
5995 #endif
5996
5997 if (reg_equiv_memory_loc (regno)
5998 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5999 {
6000 rtx tem = make_memloc (x, regno);
6001 if (reg_equiv_address (regno) != 0
6002 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6003 {
6004 x = tem;
6005 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
6006 &XEXP (x, 0), opnum, ADDR_TYPE (type),
6007 ind_levels, insn);
6008 if (!rtx_equal_p (x, tem))
6009 push_reg_equiv_alt_mem (regno, x);
6010 }
6011 }
6012
6013 if (reg_renumber[regno] >= 0)
6014 regno = reg_renumber[regno];
6015
6016 if (regno >= FIRST_PSEUDO_REGISTER
6017 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6018 index_code))
6019 {
6020 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6021 context_reg_class,
6022 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6023 return 1;
6024 }
6025
6026 /* If a register appearing in an address is the subject of a CLOBBER
6027 in this insn, reload it into some other register to be safe.
6028 The CLOBBER is supposed to make the register unavailable
6029 from before this insn to after it. */
6030 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
6031 {
6032 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6033 context_reg_class,
6034 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6035 return 1;
6036 }
6037 }
6038 return 0;
6039
6040 case SUBREG:
6041 if (REG_P (SUBREG_REG (x)))
6042 {
6043 /* If this is a SUBREG of a hard register and the resulting register
6044 is of the wrong class, reload the whole SUBREG. This avoids
6045 needless copies if SUBREG_REG is multi-word. */
6046 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6047 {
6048 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6049
6050 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6051 index_code))
6052 {
6053 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6054 context_reg_class,
6055 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6056 return 1;
6057 }
6058 }
6059 	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6060 	     needs more registers than this class has, reload the whole SUBREG.  */
6061 else
6062 {
6063 enum reg_class rclass = context_reg_class;
6064 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6065 > reg_class_size[(int) rclass])
6066 {
6067 /* If the inner register will be replaced by a memory
6068 reference, we can do this only if we can replace the
6069 whole subreg by a (narrower) memory reference. If
6070 this is not possible, fall through and reload just
6071 the inner register (including address reloads). */
6072 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6073 {
6074 rtx tem = find_reloads_subreg_address (x, opnum,
6075 ADDR_TYPE (type),
6076 ind_levels, insn,
6077 NULL);
6078 if (tem)
6079 {
6080 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6081 GET_MODE (tem), VOIDmode, 0, 0,
6082 opnum, type);
6083 return 1;
6084 }
6085 }
6086 else
6087 {
6088 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6089 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6090 return 1;
6091 }
6092 }
6093 }
6094 }
6095 break;
6096
6097 default:
6098 break;
6099 }
6100
6101 {
6102 const char *fmt = GET_RTX_FORMAT (code);
6103 int i;
6104
6105 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6106 {
6107 if (fmt[i] == 'e')
6108 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6109 we get here. */
6110 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6111 code, SCRATCH, &XEXP (x, i),
6112 opnum, type, ind_levels, insn);
6113 }
6114 }
6115
6116 #undef REG_OK_FOR_CONTEXT
6117 return 0;
6118 }
6119 \f
6120 /* X, which is found at *LOC, is a part of an address that needs to be
6121 reloaded into a register of class RCLASS. If X is a constant, or if
6122 X is a PLUS that contains a constant, check that the constant is a
6123 legitimate operand and that we are supposed to be able to load
6124 it into the register.
6125
6126 If not, force the constant into memory and reload the MEM instead.
6127
6128 MODE is the mode to use, in case X is an integer constant.
6129
6130 OPNUM and TYPE describe the purpose of any reloads made.
6131
6132 IND_LEVELS says how many levels of indirect addressing this machine
6133 supports. */
6134
6135 static void
6136 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6137 enum machine_mode mode, int opnum,
6138 enum reload_type type, int ind_levels)
6139 {
6140 if (CONSTANT_P (x)
6141 && (!targetm.legitimate_constant_p (mode, x)
6142 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6143 {
6144 x = force_const_mem (mode, x);
6145 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6146 opnum, type, ind_levels, 0);
6147 }
6148
6149 else if (GET_CODE (x) == PLUS
6150 && CONSTANT_P (XEXP (x, 1))
6151 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6152 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6153 == NO_REGS))
6154 {
6155 rtx tem;
6156
6157 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6158 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6159 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6160 opnum, type, ind_levels, 0);
6161 }
6162
6163 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6164 mode, VOIDmode, 0, 0, opnum, type);
6165 }
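/* For instance, on a target where a particular wide integer constant is not
   a legitimate operand (or where its preferred reload class is NO_REGS), a
   part such as (const_int 0x12345678) is first forced into the constant pool
   by force_const_mem, any reloads needed by the resulting pool address are
   pushed, and the MEM -- rather than the bare constant -- is what gets
   reloaded into RCLASS.  Constants the target can load directly are simply
   pushed as reloads of class RCLASS themselves.  */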
6166 \f
6167 /* X, a subreg of a pseudo, is a part of an address that needs to be
6168    reloaded, and the pseudo is equivalent to a memory location.
6169
6170 Attempt to replace the whole subreg by a (possibly narrower or wider)
6171 memory reference. If this is possible, return this new memory
6172 reference, and push all required address reloads. Otherwise,
6173 return NULL.
6174
6175 OPNUM and TYPE identify the purpose of the reload.
6176
6177 IND_LEVELS says how many levels of indirect addressing are
6178 supported at this point in the address.
6179
6180 INSN, if nonzero, is the insn in which we do the reload. It is used
6181 to determine where to put USEs for pseudos that we have to replace with
6182 stack slots. */
6183
6184 static rtx
6185 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6186 int ind_levels, rtx insn, int *address_reloaded)
6187 {
6188 enum machine_mode outer_mode = GET_MODE (x);
6189 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6190 int regno = REGNO (SUBREG_REG (x));
6191 int reloaded = 0;
6192 rtx tem, orig;
6193 int offset;
6194
6195 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6196
6197 /* We cannot replace the subreg with a modified memory reference if:
6198
6199 - we have a paradoxical subreg that implicitly acts as a zero or
6200 sign extension operation due to LOAD_EXTEND_OP;
6201
6202 - we have a subreg that is implicitly supposed to act on the full
6203 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6204
6205 - the address of the equivalent memory location is mode-dependent; or
6206
6207 - we have a paradoxical subreg and the resulting memory is not
6208 sufficiently aligned to allow access in the wider mode.
6209
6210 In addition, we choose not to perform the replacement for *any*
6211 paradoxical subreg, even if it were possible in principle. This
6212 is to avoid generating wider memory references than necessary.
6213
6214 This corresponds to how previous versions of reload used to handle
6215 paradoxical subregs where no address reload was required. */
6216
6217 if (paradoxical_subreg_p (x))
6218 return NULL;
6219
6220 #ifdef WORD_REGISTER_OPERATIONS
6221 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6222 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6223 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6224 return NULL;
6225 #endif
6226
6227 /* Since we don't attempt to handle paradoxical subregs, we can just
6228 call into simplify_subreg, which will handle all remaining checks
6229 for us. */
6230 orig = make_memloc (SUBREG_REG (x), regno);
6231 offset = SUBREG_BYTE (x);
6232 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6233 if (!tem || !MEM_P (tem))
6234 return NULL;
6235
6236 /* Now push all required address reloads, if any. */
6237 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6238 XEXP (tem, 0), &XEXP (tem, 0),
6239 opnum, type, ind_levels, insn);
6240 /* ??? Do we need to handle nonzero offsets somehow? */
6241 if (!offset && !rtx_equal_p (tem, orig))
6242 push_reg_equiv_alt_mem (regno, tem);
6243
6244 /* For some processors an address may be valid in the original mode but
6245 not in a smaller mode. For example, ARM accepts a scaled index register
6246 in SImode but not in HImode. Note that this is only a problem if the
6247 address in reg_equiv_mem is already invalid in the new mode; other
6248 cases would be fixed by find_reloads_address as usual.
6249
6250 ??? We attempt to handle such cases here by doing an additional reload
6251 of the full address after the usual processing by find_reloads_address.
6252 Note that this may not work in the general case, but it seems to cover
6253 the cases where this situation currently occurs. A more general fix
6254 might be to reload the *value* instead of the address, but this would
6255 not be expected by the callers of this routine as-is.
6256
6257      If find_reloads_address already completely replaced the address, there
6258 is nothing further to do. */
6259 if (reloaded == 0
6260 && reg_equiv_mem (regno) != 0
6261 && !strict_memory_address_addr_space_p
6262 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6263 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6264 {
6265 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6266 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6267 MEM, SCRATCH),
6268 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6269 reloaded = 1;
6270 }
6271
6272 /* If this is not a toplevel operand, find_reloads doesn't see this
6273 substitution. We have to emit a USE of the pseudo so that
6274 delete_output_reload can see it. */
6275 if (replace_reloads && recog_data.operand[opnum] != x)
6276 /* We mark the USE with QImode so that we recognize it as one that
6277 can be safely deleted at the end of reload. */
6278 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6279 QImode);
6280
6281 if (address_reloaded)
6282 *address_reloaded = reloaded;
6283
6284 return tem;
6285 }
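/* As an example (on a target without WORD_REGISTER_OPERATIONS), if pseudo
   (reg:SI 117) is equivalent to a stack slot such as
   (mem:SI (plus (reg fp) (const_int -16))), then
   (subreg:HI (reg:SI 117) 2) can be replaced by the narrower reference
   (mem:HI (plus (reg fp) (const_int -14))), with any reloads that the
   adjusted address needs pushed by find_reloads_address above.  A
   paradoxical subreg of the same pseudo is never replaced this way; the
   function returns NULL and the callers fall back to their other
   strategies.  (The register number and frame offset are only
   illustrative.)  */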
6286 \f
6287 /* Substitute into the current INSN the registers into which we have reloaded
6288 the things that need reloading. The array `replacements'
6289 contains the locations of all pointers that must be changed
6290 and says what to replace them with.
6291
6292    INSN is the insn within which the substitutions are being made.  */
6293
6294 void
6295 subst_reloads (rtx insn)
6296 {
6297 int i;
6298
6299 for (i = 0; i < n_replacements; i++)
6300 {
6301 struct replacement *r = &replacements[i];
6302 rtx reloadreg = rld[r->what].reg_rtx;
6303 if (reloadreg)
6304 {
6305 #ifdef DEBUG_RELOAD
6306 	  /* This checking takes a very long time on some platforms,
6307 causing the gcc.c-torture/compile/limits-fnargs.c test
6308 to time out during testing. See PR 31850.
6309
6310 Internal consistency test. Check that we don't modify
6311 anything in the equivalence arrays. Whenever something from
6312 those arrays needs to be reloaded, it must be unshared before
6313 being substituted into; the equivalence must not be modified.
6314 Otherwise, if the equivalence is used after that, it will
6315 have been modified, and the thing substituted (probably a
6316 register) is likely overwritten and not a usable equivalence. */
6317 int check_regno;
6318
6319 for (check_regno = 0; check_regno < max_regno; check_regno++)
6320 {
6321 #define CHECK_MODF(ARRAY) \
6322 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6323 || !loc_mentioned_in_p (r->where, \
6324 (*reg_equivs)[check_regno].ARRAY))
6325
6326 CHECK_MODF (constant);
6327 CHECK_MODF (memory_loc);
6328 CHECK_MODF (address);
6329 CHECK_MODF (mem);
6330 #undef CHECK_MODF
6331 }
6332 #endif /* DEBUG_RELOAD */
6333
6334 /* If we're replacing a LABEL_REF with a register, there must
6335 	     already be an indication (to e.g. flow) of which label this
6336 register refers to. */
6337 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6338 || !JUMP_P (insn)
6339 || find_reg_note (insn,
6340 REG_LABEL_OPERAND,
6341 XEXP (*r->where, 0))
6342 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6343
6344 /* Encapsulate RELOADREG so its machine mode matches what
6345 used to be there. Note that gen_lowpart_common will
6346 do the wrong thing if RELOADREG is multi-word. RELOADREG
6347 will always be a REG here. */
6348 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6349 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6350
6351 *r->where = reloadreg;
6352 }
6353 /* If reload got no reg and isn't optional, something's wrong. */
6354 else
6355 gcc_assert (rld[r->what].optional);
6356 }
6357 }
6358 \f
6359 /* Make a copy of any replacements being done into X and move those
6360 copies to locations in Y, a copy of X. */
6361
6362 void
6363 copy_replacements (rtx x, rtx y)
6364 {
6365 copy_replacements_1 (&x, &y, n_replacements);
6366 }
6367
6368 static void
6369 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6370 {
6371 int i, j;
6372 rtx x, y;
6373 struct replacement *r;
6374 enum rtx_code code;
6375 const char *fmt;
6376
6377 for (j = 0; j < orig_replacements; j++)
6378 if (replacements[j].where == px)
6379 {
6380 r = &replacements[n_replacements++];
6381 r->where = py;
6382 r->what = replacements[j].what;
6383 r->mode = replacements[j].mode;
6384 }
6385
6386 x = *px;
6387 y = *py;
6388 code = GET_CODE (x);
6389 fmt = GET_RTX_FORMAT (code);
6390
6391 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6392 {
6393 if (fmt[i] == 'e')
6394 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6395 else if (fmt[i] == 'E')
6396 for (j = XVECLEN (x, i); --j >= 0; )
6397 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6398 orig_replacements);
6399 }
6400 }
6401
6402 /* Change any replacements being done to *X to be done to *Y. */
6403
6404 void
6405 move_replacements (rtx *x, rtx *y)
6406 {
6407 int i;
6408
6409 for (i = 0; i < n_replacements; i++)
6410 if (replacements[i].where == x)
6411 replacements[i].where = y;
6412 }
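/* Editorial sketch with hypothetical rtl, not from the original source:
   when a recorded location is rewritten so that the old expression ends
   up embedded inside a new one, pending replacements must be redirected
   to the new location, e.g.

     rtx wrapped = gen_rtx_PLUS (GET_MODE (*loc), *loc, const0_rtx);
     move_replacements (loc, &XEXP (wrapped, 0));
     *loc = wrapped;

   where LOC is the rtx pointer being rewritten.  */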
6413 \f
6414 /* If LOC was scheduled to be replaced by something, return the replacement.
6415 Otherwise, return *LOC. */
6416
6417 rtx
6418 find_replacement (rtx *loc)
6419 {
6420 struct replacement *r;
6421
6422 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6423 {
6424 rtx reloadreg = rld[r->what].reg_rtx;
6425
6426 if (reloadreg && r->where == loc)
6427 {
6428 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6429 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6430
6431 return reloadreg;
6432 }
6433 else if (reloadreg && GET_CODE (*loc) == SUBREG
6434 && r->where == &SUBREG_REG (*loc))
6435 {
6436 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6437 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6438
6439 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6440 GET_MODE (SUBREG_REG (*loc)),
6441 SUBREG_BYTE (*loc));
6442 }
6443 }
6444
6445 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6446 what's inside and make a new rtx if so. */
6447 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6448 || GET_CODE (*loc) == MULT)
6449 {
6450 rtx x = find_replacement (&XEXP (*loc, 0));
6451 rtx y = find_replacement (&XEXP (*loc, 1));
6452
6453 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6454 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6455 }
6456
6457 return *loc;
6458 }
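/* Editorial sketch, not part of the original source: code that runs while
   reloads are still pending, such as the insn-generation helpers in
   reload1.c, asks for the scheduled replacement of an operand instead of
   using the operand directly, e.g.

     rtx op0 = find_replacement (&XEXP (in, 0));
     rtx op1 = find_replacement (&XEXP (in, 1));

   for a two-operand expression IN.  */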
6459 \f
6460 /* Return nonzero if register in range [REGNO, ENDREGNO)
6461 appears either explicitly or implicitly in X
6462 other than being stored into (except for earlyclobber operands).
6463
6464 References contained within the substructure at LOC do not count.
6465 LOC may be zero, meaning don't ignore anything.
6466
6467 This is similar to refers_to_regno_p in rtlanal.c except that we
6468 look at equivalences for pseudos that didn't get hard registers. */
6469
6470 static int
6471 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6472 rtx x, rtx *loc)
6473 {
6474 int i;
6475 unsigned int r;
6476 RTX_CODE code;
6477 const char *fmt;
6478
6479 if (x == 0)
6480 return 0;
6481
6482 repeat:
6483 code = GET_CODE (x);
6484
6485 switch (code)
6486 {
6487 case REG:
6488 r = REGNO (x);
6489
6490 /* If this is a pseudo, a hard register must not have been allocated.
6491 X must therefore either be a constant or be in memory. */
6492 if (r >= FIRST_PSEUDO_REGISTER)
6493 {
6494 if (reg_equiv_memory_loc (r))
6495 return refers_to_regno_for_reload_p (regno, endregno,
6496 reg_equiv_memory_loc (r),
6497 (rtx*) 0);
6498
6499 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6500 return 0;
6501 }
6502
6503 return (endregno > r
6504 && regno < r + (r < FIRST_PSEUDO_REGISTER
6505 ? hard_regno_nregs[r][GET_MODE (x)]
6506 : 1));
6507
6508 case SUBREG:
6509 /* If this is a SUBREG of a hard reg, we can see exactly which
6510 registers are being modified. Otherwise, handle normally. */
6511 if (REG_P (SUBREG_REG (x))
6512 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6513 {
6514 unsigned int inner_regno = subreg_regno (x);
6515 unsigned int inner_endregno
6516 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6517 ? subreg_nregs (x) : 1);
6518
6519 return endregno > inner_regno && regno < inner_endregno;
6520 }
6521 break;
6522
6523 case CLOBBER:
6524 case SET:
6525 if (&SET_DEST (x) != loc
6526 /* Note setting a SUBREG counts as referring to the REG it is in for
6527 a pseudo but not for hard registers since we can
6528 treat each word individually. */
6529 && ((GET_CODE (SET_DEST (x)) == SUBREG
6530 && loc != &SUBREG_REG (SET_DEST (x))
6531 && REG_P (SUBREG_REG (SET_DEST (x)))
6532 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6533 && refers_to_regno_for_reload_p (regno, endregno,
6534 SUBREG_REG (SET_DEST (x)),
6535 loc))
6536 /* If the output is an earlyclobber operand, this is
6537 a conflict. */
6538 || ((!REG_P (SET_DEST (x))
6539 || earlyclobber_operand_p (SET_DEST (x)))
6540 && refers_to_regno_for_reload_p (regno, endregno,
6541 SET_DEST (x), loc))))
6542 return 1;
6543
6544 if (code == CLOBBER || loc == &SET_SRC (x))
6545 return 0;
6546 x = SET_SRC (x);
6547 goto repeat;
6548
6549 default:
6550 break;
6551 }
6552
6553 /* X does not match, so try its subexpressions. */
6554
6555 fmt = GET_RTX_FORMAT (code);
6556 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6557 {
6558 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6559 {
6560 if (i == 0)
6561 {
6562 x = XEXP (x, 0);
6563 goto repeat;
6564 }
6565 else
6566 if (refers_to_regno_for_reload_p (regno, endregno,
6567 XEXP (x, i), loc))
6568 return 1;
6569 }
6570 else if (fmt[i] == 'E')
6571 {
6572 int j;
6573 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6574 if (loc != &XVECEXP (x, i, j)
6575 && refers_to_regno_for_reload_p (regno, endregno,
6576 XVECEXP (x, i, j), loc))
6577 return 1;
6578 }
6579 }
6580 return 0;
6581 }
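/* Editorial sketch, not part of the original source: a query such as

     refers_to_regno_for_reload_p (regno, end_hard_regno (mode, regno),
                                   PATTERN (insn), (rtx *) 0)

   asks whether any hard register in the half-open range [REGNO, ENDREGNO)
   is referenced in the pattern other than by being stored into, looking
   through the memory and constant equivalences of pseudos that did not
   get hard registers.  */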
6582
6583 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6584 we check if any register number in X conflicts with the relevant register
6585 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6586 contains a MEM (we don't bother checking for memory addresses that can't
6587 conflict because we expect this to be a rare case).
6588
6589 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6590 that we look at equivalences for pseudos that didn't get hard registers. */
6591
6592 int
6593 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6594 {
6595 int regno, endregno;
6596
6597 /* Overly conservative. */
6598 if (GET_CODE (x) == STRICT_LOW_PART
6599 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6600 x = XEXP (x, 0);
6601
6602 /* If either argument is a constant, then modifying X cannot affect IN. */
6603 if (CONSTANT_P (x) || CONSTANT_P (in))
6604 return 0;
6605 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6606 return refers_to_mem_for_reload_p (in);
6607 else if (GET_CODE (x) == SUBREG)
6608 {
6609 regno = REGNO (SUBREG_REG (x));
6610 if (regno < FIRST_PSEUDO_REGISTER)
6611 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6612 GET_MODE (SUBREG_REG (x)),
6613 SUBREG_BYTE (x),
6614 GET_MODE (x));
6615 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6616 ? subreg_nregs (x) : 1);
6617
6618 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6619 }
6620 else if (REG_P (x))
6621 {
6622 regno = REGNO (x);
6623
6624 /* If this is a pseudo, it must not have been assigned a hard register.
6625 Therefore, it must either be in memory or be a constant. */
6626
6627 if (regno >= FIRST_PSEUDO_REGISTER)
6628 {
6629 if (reg_equiv_memory_loc (regno))
6630 return refers_to_mem_for_reload_p (in);
6631 gcc_assert (reg_equiv_constant (regno));
6632 return 0;
6633 }
6634
6635 endregno = END_HARD_REGNO (x);
6636
6637 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6638 }
6639 else if (MEM_P (x))
6640 return refers_to_mem_for_reload_p (in);
6641 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6642 || GET_CODE (x) == CC0)
6643 return reg_mentioned_p (x, in);
6644 else
6645 {
6646 gcc_assert (GET_CODE (x) == PLUS);
6647
6648 /* We actually want to know if X is mentioned somewhere inside IN.
6649 We must not say that (plus (sp) (const_int 124)) is in
6650 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6651 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6652 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6653 while (MEM_P (in))
6654 in = XEXP (in, 0);
6655 if (REG_P (in))
6656 return 0;
6657 else if (GET_CODE (in) == PLUS)
6658 return (rtx_equal_p (x, in)
6659 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6660 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6661 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6662 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6663 }
6664
6665 gcc_unreachable ();
6666 }
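/* Editorial sketch with hypothetical reload indices, not from the original
   source: a typical conflict check during reload looks like

     if (reg_overlap_mentioned_for_reload_p (rld[i].reg_rtx, rld[j].in))
       conflict = 1;

   and, unlike reg_overlap_mentioned_p, it resolves pseudos that did not
   get hard registers through their memory or constant equivalences.  */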
6667
6668 /* Return nonzero if anything in X contains a MEM. A pseudo register
6669 whose equivalent value lives in memory counts as a MEM here. */
6670
6671 static int
6672 refers_to_mem_for_reload_p (rtx x)
6673 {
6674 const char *fmt;
6675 int i;
6676
6677 if (MEM_P (x))
6678 return 1;
6679
6680 if (REG_P (x))
6681 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6682 && reg_equiv_memory_loc (REGNO (x)));
6683
6684 fmt = GET_RTX_FORMAT (GET_CODE (x));
6685 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6686 if (fmt[i] == 'e'
6687 && (MEM_P (XEXP (x, i))
6688 || refers_to_mem_for_reload_p (XEXP (x, i))))
6689 return 1;
6690
6691 return 0;
6692 }
6693 \f
6694 /* Check the insns before INSN to see if there is a suitable register
6695 containing the same value as GOAL.
6696 If OTHER is -1, look for a register in class RCLASS.
6697 Otherwise, just see if register number OTHER shares GOAL's value.
6698
6699 Return an rtx for the register found, or zero if none is found.
6700
6701 If RELOAD_REG_P is (short *)1,
6702 we reject any hard reg that appears in reload_reg_rtx
6703 because such a hard reg is also needed coming into this insn.
6704
6705 If RELOAD_REG_P is any other nonzero value,
6706 it is a vector indexed by hard reg number
6707 and we reject any hard reg whose element in the vector is nonnegative
6708 as well as any that appears in reload_reg_rtx.
6709
6710 If GOAL is zero, then GOALREG is a register number; we look
6711 for an equivalent for that register.
6712
6713 MODE is the machine mode of the value we want an equivalence for.
6714 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6715
6716 This function is used by jump.c as well as in the reload pass.
6717
6718 If GOAL is the sum of the stack pointer and a constant, we treat it
6719 as if it were a constant except that sp is required to be unchanging. */
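/* Editorial sketch, not part of the original source: a hypothetical query
   for a GENERAL_REGS register that already holds the value of GOAL, while
   rejecting the registers named in reload_reg_rtx, could look like

     rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                                 (short *) 1, 0, GET_MODE (goal));

   The result is either 0 or an rtx for a register whose contents can be
   reused instead of reloading GOAL again.  */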
6720
6721 rtx
6722 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6723 short *reload_reg_p, int goalreg, enum machine_mode mode)
6724 {
6725 rtx p = insn;
6726 rtx goaltry, valtry, value, where;
6727 rtx pat;
6728 int regno = -1;
6729 int valueno;
6730 int goal_mem = 0;
6731 int goal_const = 0;
6732 int goal_mem_addr_varies = 0;
6733 int need_stable_sp = 0;
6734 int nregs;
6735 int valuenregs;
6736 int num = 0;
6737
6738 if (goal == 0)
6739 regno = goalreg;
6740 else if (REG_P (goal))
6741 regno = REGNO (goal);
6742 else if (MEM_P (goal))
6743 {
6744 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6745 if (MEM_VOLATILE_P (goal))
6746 return 0;
6747 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6748 return 0;
6749 /* An address with side effects must be reexecuted. */
6750 switch (code)
6751 {
6752 case POST_INC:
6753 case PRE_INC:
6754 case POST_DEC:
6755 case PRE_DEC:
6756 case POST_MODIFY:
6757 case PRE_MODIFY:
6758 return 0;
6759 default:
6760 break;
6761 }
6762 goal_mem = 1;
6763 }
6764 else if (CONSTANT_P (goal))
6765 goal_const = 1;
6766 else if (GET_CODE (goal) == PLUS
6767 && XEXP (goal, 0) == stack_pointer_rtx
6768 && CONSTANT_P (XEXP (goal, 1)))
6769 goal_const = need_stable_sp = 1;
6770 else if (GET_CODE (goal) == PLUS
6771 && XEXP (goal, 0) == frame_pointer_rtx
6772 && CONSTANT_P (XEXP (goal, 1)))
6773 goal_const = 1;
6774 else
6775 return 0;
6776
6777 num = 0;
6778 /* Scan insns back from INSN, looking for one that copies
6779 a value into or out of GOAL.
6780 Stop and give up if we reach a label. */
6781
6782 while (1)
6783 {
6784 p = PREV_INSN (p);
6785 if (p && DEBUG_INSN_P (p))
6786 continue;
6787 num++;
6788 if (p == 0 || LABEL_P (p)
6789 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6790 return 0;
6791
6792 /* Don't reuse register contents from before a setjmp-type
6793 function call; on the second return (from the longjmp) it
6794 might have been clobbered by a later reuse. It doesn't
6795 seem worthwhile to go and check whether it is actually
6796 reused even if that information were readily available;
6797 just don't reuse it across the setjmp call. */
6798 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6799 return 0;
6800
6801 if (NONJUMP_INSN_P (p)
6802 /* If we don't want spill regs ... */
6803 && (! (reload_reg_p != 0
6804 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6805 /* ... then ignore insns introduced by reload; they aren't
6806 useful and can cause results in reload_as_needed to be
6807 different from what they were when calculating the need for
6808 spills. If we notice an input-reload insn here, we will
6809 reject it below, but it might hide a usable equivalent.
6810 That makes bad code. It may even fail: perhaps no reg was
6811 spilled for this insn because it was assumed we would find
6812 that equivalent. */
6813 || INSN_UID (p) < reload_first_uid))
6814 {
6815 rtx tem;
6816 pat = single_set (p);
6817
6818 /* First check for something that sets some reg equal to GOAL. */
6819 if (pat != 0
6820 && ((regno >= 0
6821 && true_regnum (SET_SRC (pat)) == regno
6822 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6823 ||
6824 (regno >= 0
6825 && true_regnum (SET_DEST (pat)) == regno
6826 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6827 ||
6828 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6829 /* When looking for stack pointer + const,
6830 make sure we don't use a stack adjust. */
6831 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6832 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6833 || (goal_mem
6834 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6835 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6836 || (goal_mem
6837 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6838 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6839 /* If we are looking for a constant,
6840 and something equivalent to that constant was copied
6841 into a reg, we can use that reg. */
6842 || (goal_const && REG_NOTES (p) != 0
6843 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6844 && ((rtx_equal_p (XEXP (tem, 0), goal)
6845 && (valueno
6846 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6847 || (REG_P (SET_DEST (pat))
6848 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6849 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6850 && CONST_INT_P (goal)
6851 && 0 != (goaltry
6852 = operand_subword (XEXP (tem, 0), 0, 0,
6853 VOIDmode))
6854 && rtx_equal_p (goal, goaltry)
6855 && (valtry
6856 = operand_subword (SET_DEST (pat), 0, 0,
6857 VOIDmode))
6858 && (valueno = true_regnum (valtry)) >= 0)))
6859 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6860 NULL_RTX))
6861 && REG_P (SET_DEST (pat))
6862 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6863 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6864 && CONST_INT_P (goal)
6865 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6866 VOIDmode))
6867 && rtx_equal_p (goal, goaltry)
6868 && (valtry
6869 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6870 && (valueno = true_regnum (valtry)) >= 0)))
6871 {
6872 if (other >= 0)
6873 {
6874 if (valueno != other)
6875 continue;
6876 }
6877 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6878 continue;
6879 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6880 mode, valueno))
6881 continue;
6882 value = valtry;
6883 where = p;
6884 break;
6885 }
6886 }
6887 }
6888
6889 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6890 (or copying VALUE into GOAL, if GOAL is also a register).
6891 Now verify that VALUE is really valid. */
6892
6893 /* VALUENO is the register number of VALUE; a hard register. */
6894
6895 /* Don't try to re-use something that is killed in this insn. We want
6896 to be able to trust REG_UNUSED notes. */
6897 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6898 return 0;
6899
6900 /* If we propose to get the value from the stack pointer or if GOAL is
6901 a MEM based on the stack pointer, we need a stable SP. */
6902 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6903 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6904 goal)))
6905 need_stable_sp = 1;
6906
6907 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6908 if (GET_MODE (value) != mode)
6909 return 0;
6910
6911 /* Reject VALUE if it was loaded from GOAL
6912 and is also a register that appears in the address of GOAL. */
6913
6914 if (goal_mem && value == SET_DEST (single_set (where))
6915 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6916 goal, (rtx*) 0))
6917 return 0;
6918
6919 /* Reject registers that overlap GOAL. */
6920
6921 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6922 nregs = hard_regno_nregs[regno][mode];
6923 else
6924 nregs = 1;
6925 valuenregs = hard_regno_nregs[valueno][mode];
6926
6927 if (!goal_mem && !goal_const
6928 && regno + nregs > valueno && regno < valueno + valuenregs)
6929 return 0;
6930
6931 /* Reject VALUE if it is one of the regs reserved for reloads.
6932 Reload1 knows how to reuse them anyway, and it would get
6933 confused if we allocated one without its knowledge.
6934 (Now that insns introduced by reload are ignored above,
6935 this case shouldn't happen, but I'm not positive.) */
6936
6937 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6938 {
6939 int i;
6940 for (i = 0; i < valuenregs; ++i)
6941 if (reload_reg_p[valueno + i] >= 0)
6942 return 0;
6943 }
6944
6945 /* Reject VALUE if it is a register being used for an input reload
6946 even if it is not one of those reserved. */
6947
6948 if (reload_reg_p != 0)
6949 {
6950 int i;
6951 for (i = 0; i < n_reloads; i++)
6952 if (rld[i].reg_rtx != 0 && rld[i].in)
6953 {
6954 int regno1 = REGNO (rld[i].reg_rtx);
6955 int nregs1 = hard_regno_nregs[regno1]
6956 [GET_MODE (rld[i].reg_rtx)];
6957 if (regno1 < valueno + valuenregs
6958 && regno1 + nregs1 > valueno)
6959 return 0;
6960 }
6961 }
6962
6963 if (goal_mem)
6964 /* We must treat the frame pointer as varying here,
6965 since it can vary--in a nonlocal goto as generated by expand_goto. */
6966 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6967
6968 /* Now verify that the values of GOAL and VALUE remain unaltered
6969 until INSN is reached. */
6970
6971 p = insn;
6972 while (1)
6973 {
6974 p = PREV_INSN (p);
6975 if (p == where)
6976 return value;
6977
6978 /* Don't trust the conversion past a function call
6979 if either of the two is in a call-clobbered register or in memory. */
6980 if (CALL_P (p))
6981 {
6982 int i;
6983
6984 if (goal_mem || need_stable_sp)
6985 return 0;
6986
6987 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6988 for (i = 0; i < nregs; ++i)
6989 if (call_used_regs[regno + i]
6990 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6991 return 0;
6992
6993 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6994 for (i = 0; i < valuenregs; ++i)
6995 if (call_used_regs[valueno + i]
6996 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6997 return 0;
6998 }
6999
7000 if (INSN_P (p))
7001 {
7002 pat = PATTERN (p);
7003
7004 /* Watch out for unspec_volatile, and volatile asms. */
7005 if (volatile_insn_p (pat))
7006 return 0;
7007
7008 /* If this insn P stores in either GOAL or VALUE, return 0.
7009 If GOAL is a memory ref and this insn writes memory, return 0.
7010 If GOAL is a memory ref and its address is not constant,
7011 and this insn P changes a register used in GOAL, return 0. */
7012
7013 if (GET_CODE (pat) == COND_EXEC)
7014 pat = COND_EXEC_CODE (pat);
7015 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7016 {
7017 rtx dest = SET_DEST (pat);
7018 while (GET_CODE (dest) == SUBREG
7019 || GET_CODE (dest) == ZERO_EXTRACT
7020 || GET_CODE (dest) == STRICT_LOW_PART)
7021 dest = XEXP (dest, 0);
7022 if (REG_P (dest))
7023 {
7024 int xregno = REGNO (dest);
7025 int xnregs;
7026 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7027 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7028 else
7029 xnregs = 1;
7030 if (xregno < regno + nregs && xregno + xnregs > regno)
7031 return 0;
7032 if (xregno < valueno + valuenregs
7033 && xregno + xnregs > valueno)
7034 return 0;
7035 if (goal_mem_addr_varies
7036 && reg_overlap_mentioned_for_reload_p (dest, goal))
7037 return 0;
7038 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7039 return 0;
7040 }
7041 else if (goal_mem && MEM_P (dest)
7042 && ! push_operand (dest, GET_MODE (dest)))
7043 return 0;
7044 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7045 && reg_equiv_memory_loc (regno) != 0)
7046 return 0;
7047 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7048 return 0;
7049 }
7050 else if (GET_CODE (pat) == PARALLEL)
7051 {
7052 int i;
7053 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7054 {
7055 rtx v1 = XVECEXP (pat, 0, i);
7056 if (GET_CODE (v1) == COND_EXEC)
7057 v1 = COND_EXEC_CODE (v1);
7058 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7059 {
7060 rtx dest = SET_DEST (v1);
7061 while (GET_CODE (dest) == SUBREG
7062 || GET_CODE (dest) == ZERO_EXTRACT
7063 || GET_CODE (dest) == STRICT_LOW_PART)
7064 dest = XEXP (dest, 0);
7065 if (REG_P (dest))
7066 {
7067 int xregno = REGNO (dest);
7068 int xnregs;
7069 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7070 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7071 else
7072 xnregs = 1;
7073 if (xregno < regno + nregs
7074 && xregno + xnregs > regno)
7075 return 0;
7076 if (xregno < valueno + valuenregs
7077 && xregno + xnregs > valueno)
7078 return 0;
7079 if (goal_mem_addr_varies
7080 && reg_overlap_mentioned_for_reload_p (dest,
7081 goal))
7082 return 0;
7083 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7084 return 0;
7085 }
7086 else if (goal_mem && MEM_P (dest)
7087 && ! push_operand (dest, GET_MODE (dest)))
7088 return 0;
7089 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7090 && reg_equiv_memory_loc (regno) != 0)
7091 return 0;
7092 else if (need_stable_sp
7093 && push_operand (dest, GET_MODE (dest)))
7094 return 0;
7095 }
7096 }
7097 }
7098
7099 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7100 {
7101 rtx link;
7102
7103 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7104 link = XEXP (link, 1))
7105 {
7106 pat = XEXP (link, 0);
7107 if (GET_CODE (pat) == CLOBBER)
7108 {
7109 rtx dest = SET_DEST (pat);
7110
7111 if (REG_P (dest))
7112 {
7113 int xregno = REGNO (dest);
7114 int xnregs
7115 = hard_regno_nregs[xregno][GET_MODE (dest)];
7116
7117 if (xregno < regno + nregs
7118 && xregno + xnregs > regno)
7119 return 0;
7120 else if (xregno < valueno + valuenregs
7121 && xregno + xnregs > valueno)
7122 return 0;
7123 else if (goal_mem_addr_varies
7124 && reg_overlap_mentioned_for_reload_p (dest,
7125 goal))
7126 return 0;
7127 }
7128
7129 else if (goal_mem && MEM_P (dest)
7130 && ! push_operand (dest, GET_MODE (dest)))
7131 return 0;
7132 else if (need_stable_sp
7133 && push_operand (dest, GET_MODE (dest)))
7134 return 0;
7135 }
7136 }
7137 }
7138
7139 #ifdef AUTO_INC_DEC
7140 /* If this insn auto-increments or auto-decrements
7141 either regno or valueno, return 0 now.
7142 If GOAL is a memory ref and its address is not constant,
7143 and this insn P increments a register used in GOAL, return 0. */
7144 {
7145 rtx link;
7146
7147 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7148 if (REG_NOTE_KIND (link) == REG_INC
7149 && REG_P (XEXP (link, 0)))
7150 {
7151 int incno = REGNO (XEXP (link, 0));
7152 if (incno < regno + nregs && incno >= regno)
7153 return 0;
7154 if (incno < valueno + valuenregs && incno >= valueno)
7155 return 0;
7156 if (goal_mem_addr_varies
7157 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7158 goal))
7159 return 0;
7160 }
7161 }
7162 #endif
7163 }
7164 }
7165 }
7166 \f
7167 /* Find a place where INCED appears in an increment or decrement operator
7168 within X, and return the amount INCED is incremented or decremented by.
7169 The value is always positive. */
7170
7171 static int
7172 find_inc_amount (rtx x, rtx inced)
7173 {
7174 enum rtx_code code = GET_CODE (x);
7175 const char *fmt;
7176 int i;
7177
7178 if (code == MEM)
7179 {
7180 rtx addr = XEXP (x, 0);
7181 if ((GET_CODE (addr) == PRE_DEC
7182 || GET_CODE (addr) == POST_DEC
7183 || GET_CODE (addr) == PRE_INC
7184 || GET_CODE (addr) == POST_INC)
7185 && XEXP (addr, 0) == inced)
7186 return GET_MODE_SIZE (GET_MODE (x));
7187 else if ((GET_CODE (addr) == PRE_MODIFY
7188 || GET_CODE (addr) == POST_MODIFY)
7189 && GET_CODE (XEXP (addr, 1)) == PLUS
7190 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7191 && XEXP (addr, 0) == inced
7192 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7193 {
7194 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7195 return i < 0 ? -i : i;
7196 }
7197 }
7198
7199 fmt = GET_RTX_FORMAT (code);
7200 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7201 {
7202 if (fmt[i] == 'e')
7203 {
7204 int tem = find_inc_amount (XEXP (x, i), inced);
7205 if (tem != 0)
7206 return tem;
7207 }
7208 if (fmt[i] == 'E')
7209 {
7210 int j;
7211 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7212 {
7213 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7214 if (tem != 0)
7215 return tem;
7216 }
7217 }
7218 }
7219
7220 return 0;
7221 }
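/* Editorial example, not part of the original source (assuming SImode is
   4 bytes): for INCED = (reg:SI 1), a memory reference
   (mem:SI (post_inc:SI (reg:SI 1))) inside X yields
   GET_MODE_SIZE (SImode) == 4, while an address
   (post_modify:SI (reg:SI 1) (plus:SI (reg:SI 1) (const_int -8)))
   yields 8, since the result is the absolute value of the adjustment.  */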
7222 \f
7223 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7224 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7225
7226 #ifdef AUTO_INC_DEC
7227 static int
7228 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7229 rtx insn)
7230 {
7231 rtx link;
7232
7233 gcc_assert (insn);
7234
7235 if (! INSN_P (insn))
7236 return 0;
7237
7238 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7239 if (REG_NOTE_KIND (link) == REG_INC)
7240 {
7241 unsigned int test = (int) REGNO (XEXP (link, 0));
7242 if (test >= regno && test < endregno)
7243 return 1;
7244 }
7245 return 0;
7246 }
7247 #else
7248
7249 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7250
7251 #endif
7252
7253 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7254 If SETS is 1, also consider SETs. If SETS is 2, also treat auto-increments
7255 (REG_INC notes) as clobbers. REGNO must refer to a hard register. */
7256
7257 int
7258 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7259 int sets)
7260 {
7261 unsigned int nregs, endregno;
7262
7263 /* regno must be a hard register. */
7264 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7265
7266 nregs = hard_regno_nregs[regno][mode];
7267 endregno = regno + nregs;
7268
7269 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7270 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7271 && REG_P (XEXP (PATTERN (insn), 0)))
7272 {
7273 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7274
7275 return test >= regno && test < endregno;
7276 }
7277
7278 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7279 return 1;
7280
7281 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7282 {
7283 int i = XVECLEN (PATTERN (insn), 0) - 1;
7284
7285 for (; i >= 0; i--)
7286 {
7287 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7288 if ((GET_CODE (elt) == CLOBBER
7289 || (sets == 1 && GET_CODE (elt) == SET))
7290 && REG_P (XEXP (elt, 0)))
7291 {
7292 unsigned int test = REGNO (XEXP (elt, 0));
7293
7294 if (test >= regno && test < endregno)
7295 return 1;
7296 }
7297 if (sets == 2
7298 && reg_inc_found_and_valid_p (regno, endregno, elt))
7299 return 1;
7300 }
7301 }
7302
7303 return 0;
7304 }
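/* Editorial sketch, not part of the original source: reload code typically
   asks whether the insn itself wipes out a register it is about to rely
   on, e.g.

     if (regno_clobbered_p (REGNO (reg), insn, GET_MODE (reg), 2))
       give_up = 1;

   where REG is a hard register rtx and SETS == 2 additionally treats an
   auto-increment of the register (a REG_INC note) as a clobber.  */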
7305
7306 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
7307 rtx
7308 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7309 {
7310 int regno;
7311
7312 if (GET_MODE (reloadreg) == mode)
7313 return reloadreg;
7314
7315 regno = REGNO (reloadreg);
7316
7317 if (REG_WORDS_BIG_ENDIAN)
7318 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7319 - (int) hard_regno_nregs[regno][mode];
7320
7321 return gen_rtx_REG (mode, regno);
7322 }
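/* Editorial example, not part of the original source (assuming 32-bit hard
   registers, so DImode occupies two of them): if RELOADREG is (reg:DI 10),
   then on a target where REG_WORDS_BIG_ENDIAN is true the low SImode word
   lives in the second register, and

     reload_adjust_reg_for_mode (reloadreg, SImode)

   returns (reg:SI 11); with REG_WORDS_BIG_ENDIAN false it returns
   (reg:SI 10).  */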
7323
7324 static const char *const reload_when_needed_name[] =
7325 {
7326 "RELOAD_FOR_INPUT",
7327 "RELOAD_FOR_OUTPUT",
7328 "RELOAD_FOR_INSN",
7329 "RELOAD_FOR_INPUT_ADDRESS",
7330 "RELOAD_FOR_INPADDR_ADDRESS",
7331 "RELOAD_FOR_OUTPUT_ADDRESS",
7332 "RELOAD_FOR_OUTADDR_ADDRESS",
7333 "RELOAD_FOR_OPERAND_ADDRESS",
7334 "RELOAD_FOR_OPADDR_ADDR",
7335 "RELOAD_OTHER",
7336 "RELOAD_FOR_OTHER_ADDRESS"
7337 };
7338
7339 /* These functions are used to print the variables set by 'find_reloads'. */
7340
7341 DEBUG_FUNCTION void
7342 debug_reload_to_stream (FILE *f)
7343 {
7344 int r;
7345 const char *prefix;
7346
7347 if (! f)
7348 f = stderr;
7349 for (r = 0; r < n_reloads; r++)
7350 {
7351 fprintf (f, "Reload %d: ", r);
7352
7353 if (rld[r].in != 0)
7354 {
7355 fprintf (f, "reload_in (%s) = ",
7356 GET_MODE_NAME (rld[r].inmode));
7357 print_inline_rtx (f, rld[r].in, 24);
7358 fprintf (f, "\n\t");
7359 }
7360
7361 if (rld[r].out != 0)
7362 {
7363 fprintf (f, "reload_out (%s) = ",
7364 GET_MODE_NAME (rld[r].outmode));
7365 print_inline_rtx (f, rld[r].out, 24);
7366 fprintf (f, "\n\t");
7367 }
7368
7369 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7370
7371 fprintf (f, "%s (opnum = %d)",
7372 reload_when_needed_name[(int) rld[r].when_needed],
7373 rld[r].opnum);
7374
7375 if (rld[r].optional)
7376 fprintf (f, ", optional");
7377
7378 if (rld[r].nongroup)
7379 fprintf (f, ", nongroup");
7380
7381 if (rld[r].inc != 0)
7382 fprintf (f, ", inc by %d", rld[r].inc);
7383
7384 if (rld[r].nocombine)
7385 fprintf (f, ", can't combine");
7386
7387 if (rld[r].secondary_p)
7388 fprintf (f, ", secondary_reload_p");
7389
7390 if (rld[r].in_reg != 0)
7391 {
7392 fprintf (f, "\n\treload_in_reg: ");
7393 print_inline_rtx (f, rld[r].in_reg, 24);
7394 }
7395
7396 if (rld[r].out_reg != 0)
7397 {
7398 fprintf (f, "\n\treload_out_reg: ");
7399 print_inline_rtx (f, rld[r].out_reg, 24);
7400 }
7401
7402 if (rld[r].reg_rtx != 0)
7403 {
7404 fprintf (f, "\n\treload_reg_rtx: ");
7405 print_inline_rtx (f, rld[r].reg_rtx, 24);
7406 }
7407
7408 prefix = "\n\t";
7409 if (rld[r].secondary_in_reload != -1)
7410 {
7411 fprintf (f, "%ssecondary_in_reload = %d",
7412 prefix, rld[r].secondary_in_reload);
7413 prefix = ", ";
7414 }
7415
7416 if (rld[r].secondary_out_reload != -1)
7417 fprintf (f, "%ssecondary_out_reload = %d\n",
7418 prefix, rld[r].secondary_out_reload);
7419
7420 prefix = "\n\t";
7421 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7422 {
7423 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7424 insn_data[rld[r].secondary_in_icode].name);
7425 prefix = ", ";
7426 }
7427
7428 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7429 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7430 insn_data[rld[r].secondary_out_icode].name);
7431
7432 fprintf (f, "\n");
7433 }
7434 }
7435
7436 DEBUG_FUNCTION void
7437 debug_reload (void)
7438 {
7439 debug_reload_to_stream (stderr);
7440 }
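/* Editorial note, not part of the original source: these DEBUG_FUNCTION
   helpers are convenient to call from a debugger while stopped inside
   reload, e.g. from gdb:

     (gdb) call debug_reload ()

   which dumps the reloads currently recorded in rld[] to stderr.  */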