[ARM] Add ACLE 2.0 predefined macro __ARM_FEATURE_IDIV
[gcc.git] / gcc / reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "hashtab.h"
108 #include "hash-set.h"
109 #include "vec.h"
110 #include "machmode.h"
111 #include "input.h"
112 #include "function.h"
113 #include "params.h"
114 #include "target.h"
115 #include "ira.h"
116
117 /* True if X is a constant that can be forced into the constant pool.
118 MODE is the mode of the operand, or VOIDmode if not known. */
119 #define CONST_POOL_OK_P(MODE, X) \
120 ((MODE) != VOIDmode \
121 && CONSTANT_P (X) \
122 && GET_CODE (X) != HIGH \
123 && !targetm.cannot_force_const_mem (MODE, X))
124
125 /* True if C is a non-empty register class that has too few registers
126 to be safely used as a reload target class. */
127
128 static inline bool
129 small_register_class_p (reg_class_t rclass)
130 {
131 return (reg_class_size [(int) rclass] == 1
132 || (reg_class_size [(int) rclass] >= 1
133 && targetm.class_likely_spilled_p (rclass)));
134 }
135
136 \f
137 /* All reloads of the current insn are recorded here. See reload.h for
138 comments. */
139 int n_reloads;
140 struct reload rld[MAX_RELOADS];
141
142 /* All the "earlyclobber" operands of the current insn
143 are recorded here. */
144 int n_earlyclobbers;
145 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
146
147 int reload_n_operands;
148
149 /* Replacing reloads.
150
151 If `replace_reloads' is nonzero, then as each reload is recorded
152 an entry is made for it in the table `replacements'.
153 Then later `subst_reloads' can look through that table and
154 perform all the replacements needed. */
155
156 /* Nonzero means record the places to replace. */
157 static int replace_reloads;
158
159 /* Each replacement is recorded with a structure like this. */
160 struct replacement
161 {
162 rtx *where; /* Location to store in */
163 int what; /* which reload this is for */
164 enum machine_mode mode; /* mode it must have */
165 };
166
167 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
168
169 /* Number of replacements currently recorded. */
170 static int n_replacements;
171
172 /* Used to track what is modified by an operand. */
173 struct decomposition
174 {
175 int reg_flag; /* Nonzero if referencing a register. */
176 int safe; /* Nonzero if this can't conflict with anything. */
177 rtx base; /* Base address for MEM. */
178 HOST_WIDE_INT start; /* Starting offset or register number. */
179 HOST_WIDE_INT end; /* Ending offset or register number. */
180 };
181
182 #ifdef SECONDARY_MEMORY_NEEDED
183
184 /* Save MEMs needed to copy from one class of registers to another. One MEM
185 is used per mode, but normally only one or two modes are ever used.
186
187 We keep two versions, before and after register elimination. The one
188 after register elimination is record separately for each operand. This
189 is done in case the address is not valid to be sure that we separately
190 reload each. */
191
192 static rtx secondary_memlocs[NUM_MACHINE_MODES];
193 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
194 static int secondary_memlocs_elim_used = 0;
195 #endif
196
197 /* The instruction we are doing reloads for;
198 so we can test whether a register dies in it. */
199 static rtx_insn *this_insn;
200
201 /* Nonzero if this instruction is a user-specified asm with operands. */
202 static int this_insn_is_asm;
203
204 /* If hard_regs_live_known is nonzero,
205 we can tell which hard regs are currently live,
206 at least enough to succeed in choosing dummy reloads. */
207 static int hard_regs_live_known;
208
209 /* Indexed by hard reg number,
210 element is nonnegative if hard reg has been spilled.
211 This vector is passed to `find_reloads' as an argument
212 and is not changed here. */
213 static short *static_reload_reg_p;
214
215 /* Set to 1 in subst_reg_equivs if it changes anything. */
216 static int subst_reg_equivs_changed;
217
218 /* On return from push_reload, holds the reload-number for the OUT
219 operand, which can be different for that from the input operand. */
220 static int output_reloadnum;
221
222 /* Compare two RTX's. */
223 #define MATCHES(x, y) \
224 (x == y || (x != 0 && (REG_P (x) \
225 ? REG_P (y) && REGNO (x) == REGNO (y) \
226 : rtx_equal_p (x, y) && ! side_effects_p (x))))
227
228 /* Indicates if two reloads purposes are for similar enough things that we
229 can merge their reloads. */
230 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
231 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
232 || ((when1) == (when2) && (op1) == (op2)) \
233 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
234 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
236 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
237 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
238
239 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
240 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
241 ((when1) != (when2) \
242 || ! ((op1) == (op2) \
243 || (when1) == RELOAD_FOR_INPUT \
244 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
245 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
246
247 /* If we are going to reload an address, compute the reload type to
248 use. */
249 #define ADDR_TYPE(type) \
250 ((type) == RELOAD_FOR_INPUT_ADDRESS \
251 ? RELOAD_FOR_INPADDR_ADDRESS \
252 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
253 ? RELOAD_FOR_OUTADDR_ADDRESS \
254 : (type)))
255
256 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
257 enum machine_mode, enum reload_type,
258 enum insn_code *, secondary_reload_info *);
259 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
260 int, unsigned int);
261 static void push_replacement (rtx *, int, enum machine_mode);
262 static void dup_replacements (rtx *, rtx *);
263 static void combine_reloads (void);
264 static int find_reusable_reload (rtx *, rtx, enum reg_class,
265 enum reload_type, int, int);
266 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
267 enum machine_mode, reg_class_t, int, int);
268 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
269 static struct decomposition decompose (rtx);
270 static int immune_p (rtx, rtx, struct decomposition);
271 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
272 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
273 rtx_insn *, int *);
274 static rtx make_memloc (rtx, int);
275 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
276 addr_space_t, rtx *);
277 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
278 int, enum reload_type, int, rtx_insn *);
279 static rtx subst_reg_equivs (rtx, rtx_insn *);
280 static rtx subst_indexed_address (rtx);
281 static void update_auto_inc_notes (rtx_insn *, int, int);
282 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
283 enum rtx_code, enum rtx_code, rtx *,
284 int, enum reload_type,int, rtx_insn *);
285 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
286 enum machine_mode, int,
287 enum reload_type, int);
288 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
289 int, rtx_insn *, int *);
290 static void copy_replacements_1 (rtx *, rtx *, int);
291 static int find_inc_amount (rtx, rtx);
292 static int refers_to_mem_for_reload_p (rtx);
293 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
294 rtx, rtx *);
295
296 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
297 list yet. */
298
299 static void
300 push_reg_equiv_alt_mem (int regno, rtx mem)
301 {
302 rtx it;
303
304 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
305 if (rtx_equal_p (XEXP (it, 0), mem))
306 return;
307
308 reg_equiv_alt_mem_list (regno)
309 = alloc_EXPR_LIST (REG_EQUIV, mem,
310 reg_equiv_alt_mem_list (regno));
311 }
312 \f
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   OPNUM is the operand number of the original reload; OPTIONAL is nonzero
   if the original reload is optional; TYPE is its reload_type; PREV_SRI
   chains to the secondary_reload_info of an outer invocation (for the
   target hook), or is NULL at top level.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       enum machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  enum machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  /* A secondary reload of an address keeps the address reload type;
     anything else becomes an input- or output-address reload depending
     on direction.  */
  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  /* Ask the target whether an intermediate register class and/or a
     special reload pattern is needed.  */
  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  /* The intermediate class may itself need a secondary reload (a
     "tertiary" reload); push that one first and remember its number.  */
  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.   */
      gcc_assert (rclass == NO_REGS);

      /* The scratch register's class comes from the constraint of
	 operand 2 of the reload pattern, after the "=" and any "&".  */
      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      scratch_class = (reg_class_for_constraint
		       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     can not use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload: its class must be
     comparable to ours, modes and chained tertiary reload/icode must
     match, and the reload purposes must be mergeable.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	/* Narrow the class of the merged reload if ours is smaller.  */
	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
#ifdef SECONDARY_MEMORY_NEEDED
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}
#endif

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (! in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
	get_secondary_mem (x, mode, opnum, type);
#endif
    }

  *picode = icode;
  return s_reload;
}
517
518 /* If a secondary reload is needed, return its class. If both an intermediate
519 register and a scratch register is needed, we return the class of the
520 intermediate register. */
521 reg_class_t
522 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
523 rtx x)
524 {
525 enum insn_code icode;
526 secondary_reload_info sri;
527
528 sri.icode = CODE_FOR_nothing;
529 sri.prev_sri = NULL;
530 rclass
531 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
532 icode = (enum insn_code) sri.icode;
533
534 /* If there are no secondary reloads at all, we return NO_REGS.
535 If an intermediate register is needed, we return its class. */
536 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
537 return rclass;
538
539 /* No intermediate register is needed, but we have a special reload
540 pattern, which we assume for now needs a scratch register. */
541 return scratch_reload_class (icode);
542 }
543
544 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
545 three operands, verify that operand 2 is an output operand, and return
546 its register class.
547 ??? We'd like to be able to handle any pattern with at least 2 operands,
548 for zero or more scratch registers, but that needs more infrastructure. */
549 enum reg_class
550 scratch_reload_class (enum insn_code icode)
551 {
552 const char *scratch_constraint;
553 enum reg_class rclass;
554
555 gcc_assert (insn_data[(int) icode].n_operands == 3);
556 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
557 gcc_assert (*scratch_constraint == '=');
558 scratch_constraint++;
559 if (*scratch_constraint == '&')
560 scratch_constraint++;
561 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
562 gcc_assert (rclass != NO_REGS);
563 return rclass;
564 }
565 \f
566 #ifdef SECONDARY_MEMORY_NEEDED
567
/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.
   OPNUM and TYPE identify the reload the location is for.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

#ifdef SECONDARY_MEMORY_NEEDED_MODE
  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
#else
  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
#endif

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  /* Don't modify the cached pre-elimination MEM in place.  */
  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  /* Cache the (possibly reloaded) location per mode and operand, and
     track the high-water mark of modes used so callers can clear it.  */
  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int)mode)
    secondary_memlocs_elim_used = (int)mode + 1;
  return loc;
}
641
642 /* Clear any secondary memory locations we've made. */
643
644 void
645 clear_secondary_mem (void)
646 {
647 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
648 }
649 #endif /* SECONDARY_MEMORY_NEEDED */
650 \f
651
652 /* Find the largest class which has at least one register valid in
653 mode INNER, and which for every such register, that register number
654 plus N is also valid in OUTER (if in range) and is cheap to move
655 into REGNO. Such a class must exist. */
656
657 static enum reg_class
658 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
659 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
660 unsigned int dest_regno ATTRIBUTE_UNUSED)
661 {
662 int best_cost = -1;
663 int rclass;
664 int regno;
665 enum reg_class best_class = NO_REGS;
666 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
667 unsigned int best_size = 0;
668 int cost;
669
670 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
671 {
672 int bad = 0;
673 int good = 0;
674 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
675 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
676 {
677 if (HARD_REGNO_MODE_OK (regno, inner))
678 {
679 good = 1;
680 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
681 && ! HARD_REGNO_MODE_OK (regno + n, outer))
682 bad = 1;
683 }
684 }
685
686 if (bad || !good)
687 continue;
688 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
689
690 if ((reg_class_size[rclass] > best_size
691 && (best_cost < 0 || best_cost >= cost))
692 || best_cost > cost)
693 {
694 best_class = (enum reg_class) rclass;
695 best_size = reg_class_size[rclass];
696 best_cost = register_move_cost (outer, (enum reg_class) rclass,
697 dest_class);
698 }
699 }
700
701 gcc_assert (best_size != 0);
702
703 return best_class;
704 }
705
706 /* We are trying to reload a subreg of something that is not a register.
707 Find the largest class which contains only registers valid in
708 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
709 which we would eventually like to obtain the object. */
710
711 static enum reg_class
712 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
713 enum machine_mode mode ATTRIBUTE_UNUSED,
714 enum reg_class dest_class ATTRIBUTE_UNUSED)
715 {
716 int best_cost = -1;
717 int rclass;
718 int regno;
719 enum reg_class best_class = NO_REGS;
720 unsigned int best_size = 0;
721 int cost;
722
723 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
724 {
725 int bad = 0;
726 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
727 {
728 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
729 && !HARD_REGNO_MODE_OK (regno, mode))
730 bad = 1;
731 }
732
733 if (bad)
734 continue;
735
736 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
737
738 if ((reg_class_size[rclass] > best_size
739 && (best_cost < 0 || best_cost >= cost))
740 || best_cost > cost)
741 {
742 best_class = (enum reg_class) rclass;
743 best_size = reg_class_size[rclass];
744 best_cost = register_move_cost (outer, (enum reg_class) rclass,
745 dest_class);
746 }
747 }
748
749 gcc_assert (best_size != 0);
750
751 #ifdef LIMIT_RELOAD_CLASS
752 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
753 #endif
754 return best_class;
755 }
756 \f
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	/* Either IN matches (and OUT is neutral), or OUT matches (and IN
	   is neutral).  */
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	/* Only input-only reloads qualify here.  */
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	/* Either the existing reload autoincrements our register, or we
	   autoincrement the existing reload's register.  */
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
837
838 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
839 expression. MODE is the mode that X will be used in. OUTPUT is true if
840 the function is invoked for the output part of an enclosing reload. */
841
842 static bool
843 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
844 {
845 rtx inner;
846
847 /* Only SUBREGs are problematical. */
848 if (GET_CODE (x) != SUBREG)
849 return false;
850
851 inner = SUBREG_REG (x);
852
853 /* If INNER is a constant or PLUS, then INNER will need reloading. */
854 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
855 return true;
856
857 /* If INNER is not a hard register, then INNER will not need reloading. */
858 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
859 return false;
860
861 /* If INNER is not ok for MODE, then INNER will need reloading. */
862 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
863 return true;
864
865 /* If this is for an output, and the outer part is a word or smaller,
866 INNER is larger than a word and the number of registers in INNER is
867 not the same as the number of words in INNER, then INNER will need
868 reloading (with an in-out reload). */
869 return (output
870 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
871 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
872 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
873 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
874 }
875
876 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
877 requiring an extra reload register. The caller has already found that
878 IN contains some reference to REGNO, so check that we can produce the
879 new value in a single step. E.g. if we have
880 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
881 instruction that adds one to a register, this should succeed.
882 However, if we have something like
883 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
884 needs to be loaded into a register first, we need a separate reload
885 register.
886 Such PLUS reloads are generated by find_reload_address_part.
887 The out-of-range PLUS expressions are usually introduced in the instruction
888 patterns by register elimination and substituting pseudos without a home
889 by their function-invariant equivalences. */
890 static int
891 can_reload_into (rtx in, int regno, enum machine_mode mode)
892 {
893 rtx dst;
894 rtx_insn *test_insn;
895 int r = 0;
896 struct recog_data_d save_recog_data;
897
898 /* For matching constraints, we often get notional input reloads where
899 we want to use the original register as the reload register. I.e.
900 technically this is a non-optional input-output reload, but IN is
901 already a valid register, and has been chosen as the reload register.
902 Speed this up, since it trivially works. */
903 if (REG_P (in))
904 return 1;
905
906 /* To test MEMs properly, we'd have to take into account all the reloads
907 that are already scheduled, which can become quite complicated.
908 And since we've already handled address reloads for this MEM, it
909 should always succeed anyway. */
910 if (MEM_P (in))
911 return 1;
912
913 /* If we can make a simple SET insn that does the job, everything should
914 be fine. */
915 dst = gen_rtx_REG (mode, regno);
916 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
917 save_recog_data = recog_data;
918 if (recog_memoized (test_insn) >= 0)
919 {
920 extract_insn (test_insn);
921 r = constrain_operands (1);
922 }
923 recog_data = save_recog_data;
924 return r;
925 }
926
927 /* Record one reload that needs to be performed.
928 IN is an rtx saying where the data are to be found before this instruction.
929 OUT says where they must be stored after the instruction.
930 (IN is zero for data not read, and OUT is zero for data not written.)
931 INLOC and OUTLOC point to the places in the instructions where
932 IN and OUT were found.
933 If IN and OUT are both nonzero, it means the same register must be used
934 to reload both IN and OUT.
935
936 RCLASS is a register class required for the reloaded data.
937 INMODE is the machine mode that the instruction requires
938 for the reg that replaces IN and OUTMODE is likewise for OUT.
939
940 If IN is zero, then OUT's location and mode should be passed as
941 INLOC and INMODE.
942
943 STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
944
945 OPTIONAL nonzero means this reload does not need to be performed:
946 it can be discarded if that is more convenient.
947
948 OPNUM and TYPE say what the purpose of this reload is.
949
950 The return value is the reload-number for this reload.
951
952 If both IN and OUT are nonzero, in some rare cases we might
953 want to make two separate reloads. (Actually we never do this now.)
954 Therefore, the reload-number for OUT is stored in
955 output_reloadnum when we return; the return value applies to IN.
956 Usually (presently always), when IN and OUT are nonzero,
957 the two reload-numbers are equal, but the caller should be careful to
958 distinguish them. */
959
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, enum machine_mode inmode,
	     enum machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  /* Set when an inner reload of a SUBREG has been pushed, so the SUBREG
     wrapper itself must be kept (not simplified to a plain hard REG).  */
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends until now missed to replace a pseudo
     with a constant of reg_equiv_constant something went wrong
     beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }

  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && ((GET_MODE_PRECISION (inmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
#ifdef LOAD_EXTEND_OP
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			  <= UNITS_PER_WORD)
		      && (GET_MODE_PRECISION (inmode)
			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
#endif
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (inmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
						[GET_MODE (SUBREG_REG (in))]))
		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P
	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
#endif
	  ))
    {
      /* Strip the SUBREG: reload the inner expression in its own mode.  */
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      if (MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
#endif
      inmode = GET_MODE (in);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }

  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && ((GET_MODE_PRECISION (outmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (outmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   outmode))
#endif
	  ))
    {
      /* Strip the SUBREG on the output side as well.  */
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      gcc_assert (!MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
#endif
      outmode = GET_MODE (out);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }

  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif

  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      enum machine_mode mode;
      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (HARD_REGNO_MODE_OK (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */

	  return 0;
	}
    }

  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  /* Try to merge with a reload already recorded for this insn.  */
  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

#ifdef SECONDARY_MEMORY_NEEDED
      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
	get_secondary_mem (in, inmode, opnum, type);
#endif

      /* Record the new reload in the rld[] array.  */
      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && SECONDARY_MEMORY_NEEDED (rclass,
				      REGNO_REG_CLASS (reg_or_subregno (out)),
				      outmode))
	get_secondary_mem (out, outmode, opnum, type);
#endif
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || GET_MODE_SIZE (GET_MODE (in))
		   > GET_MODE_SIZE (GET_MODE (rld[i].in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && GET_MODE_SIZE (GET_MODE (in_reg))
		     > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && GET_MODE_SIZE (GET_MODE (out))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || GET_MODE_SIZE (GET_MODE (*outloc))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
	    rld[i].out_reg = *outloc;
	}
      /* Narrow the class to the more restrictive of the two, and merge
	 the remaining bookkeeping fields.  */
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }

  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
      gcc_assert (rld[i].inc != 0);
    }
#endif

  /* If we will replace IN and OUT with the reload-reg,
     record where they are located so that substitution need
     not do a tree walk.  */

  if (replace_reloads)
    {
      if (inloc != 0)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = inloc;
	  r->mode = inmode;
	}
      if (outloc != 0 && outloc != inloc)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = outloc;
	  r->mode = outmode;
	}
    }

  /* If this reload is just being introduced and it has both
     an incoming quantity and an outgoing quantity that are
     supposed to be made to match, see if either one of the two
     can serve as the place to reload into.

     If one of them is acceptable, set rld[i].reg_rtx
     to that one.  */

  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
    {
      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
					  inmode, outmode,
					  rld[i].rclass, i,
					  earlyclobber_operand_p (out));

      /* If the outgoing register already contains the same value
	 as the incoming one, we can dispense with loading it.
	 The easiest way to tell the caller that is to give a phony
	 value for the incoming operand (same as outgoing one).  */
      if (rld[i].reg_rtx == out
	  && (REG_P (in) || CONSTANT_P (in))
	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
				  static_reload_reg_p, i, inmode))
	rld[i].in = out;
    }

  /* If this is an input reload and the operand contains a register that
     dies in this insn and is used nowhere else, see if it is the right class
     to be used for this reload.  Use it if so.  (This occurs most commonly
     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
     this if it is also an output reload that mentions the register unless
     the output is a SUBREG that clobbers an entire register.

     Note that the operand might be one of the spill regs, if it is a
     pseudo reg and we are in a block where spilling has not taken place.
     But if there is no spilling in this block, that is OK.
     An explicitly used hard reg cannot be a spill reg.  */

  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
    {
      rtx note;
      int regno;
      enum machine_mode rel_mode = inmode;

      if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
	rel_mode = outmode;

      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD
	    && REG_P (XEXP (note, 0))
	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	    && reg_mentioned_p (XEXP (note, 0), in)
	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    ORIGINAL_REGNO (XEXP (note, 0)))
		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
	    && ! refers_to_regno_for_reload_p (regno,
					       end_hard_regno (rel_mode,
							       regno),
					       PATTERN (this_insn), inloc)
	    /* If this is also an output reload, IN cannot be used as
	       the reload register if it is set in this insn unless IN
	       is also OUT.  */
	    && (out == 0 || in == out
		|| ! hard_reg_set_here_p (regno,
					  end_hard_regno (rel_mode, regno),
					  PATTERN (this_insn)))
	    /* ??? Why is this code so different from the previous?
	       Is there any simple coherent way to describe the two together?
	       What's going on here.  */
	    && (in != out
		|| (GET_CODE (in) == SUBREG
		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
			 / UNITS_PER_WORD)
			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	    /* Make sure the operand fits in the reg that dies.  */
	    && (GET_MODE_SIZE (rel_mode)
		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
	    && HARD_REGNO_MODE_OK (regno, inmode)
	    && HARD_REGNO_MODE_OK (regno, outmode))
	  {
	    unsigned int offs;
	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
				      hard_regno_nregs[regno][outmode]);

	    /* Every hard reg the candidate occupies must be non-fixed
	       and belong to RCLASS.  */
	    for (offs = 0; offs < nregs; offs++)
	      if (fixed_regs[regno + offs]
		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
					  regno + offs))
		break;

	    if (offs == nregs
		&& (! (refers_to_regno_for_reload_p
		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
		    || can_reload_into (in, regno, inmode)))
	      {
		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
		break;
	      }
	  }
    }

  /* Tell the caller which reload number was used for OUT (see the
     function comment: output_reloadnum is the OUT reload's index).  */
  if (out)
    output_reloadnum = i;

  return i;
}
1671
1672 /* Record an additional place we must replace a value
1673 for which we have already recorded a reload.
1674 RELOADNUM is the value returned by push_reload
1675 when the reload was recorded.
1676 This is used in insn patterns that use match_dup. */
1677
1678 static void
1679 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1680 {
1681 if (replace_reloads)
1682 {
1683 struct replacement *r = &replacements[n_replacements++];
1684 r->what = reloadnum;
1685 r->where = loc;
1686 r->mode = mode;
1687 }
1688 }
1689
1690 /* Duplicate any replacement we have recorded to apply at
1691 location ORIG_LOC to also be performed at DUP_LOC.
1692 This is used in insn patterns that use match_dup. */
1693
1694 static void
1695 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1696 {
1697 int i, n = n_replacements;
1698
1699 for (i = 0; i < n; i++)
1700 {
1701 struct replacement *r = &replacements[i];
1702 if (r->where == orig_loc)
1703 push_replacement (dup_loc, r->what, r->mode);
1704 }
1705 }
1706 \f
1707 /* Transfer all replacements that used to be in reload FROM to be in
1708 reload TO. */
1709
1710 void
1711 transfer_replacements (int to, int from)
1712 {
1713 int i;
1714
1715 for (i = 0; i < n_replacements; i++)
1716 if (replacements[i].what == from)
1717 replacements[i].what = to;
1718 }
1719 \f
/* IN_RTX is the value loaded by a reload that we now decided to inherit,
   or a subpart of it.  If we have any replacements registered for IN_RTX,
   cancel the reloads that were supposed to load them.
   Return nonzero if we canceled any reloads.  */
int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  /* Two bits per reload: bit 0 set means some replacement for that
     reload lies inside IN_RTX; bit 1 set means some replacement lies
     outside IN_RTX (so the reload is still needed elsewhere).  */
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  memset (reload_flags, 0, sizeof reload_flags);
  /* Compact the replacements array in place, dropping every entry whose
     location is mentioned within IN_RTX and classifying each reload.  */
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      /* Cancel only reloads all of whose replacements were inside IN_RTX
	 (flags == 1); a reload also referenced elsewhere (bit 1 set)
	 must be kept.  */
      if (reload_flags[i] == 1)
	{
	  deallocate_reload_reg (i);
	  /* The canceled reload's own input may have registered
	     replacements too; cancel those recursively.  */
	  remove_address_replacements (rld[i].in);
	  rld[i].in = 0;
	  something_changed = 1;
	}
    }
  return something_changed;
}
1757 \f
/* If there is only one output reload, and it is not for an earlyclobber
   operand, try to combine it with a (logically unrelated) input reload
   to reduce the number of reload registers needed.

   This is safe if the input reload does not appear in
   the value being output-reloaded, because this implies
   it is not needed any more once the original insn completes.

   If that doesn't work, see we can use any of the registers that
   die in this insn as a reload register.  We can if it is of the right
   class and does not appear in the value being output-reloaded.  */

static void
combine_reloads (void)
{
  int i, regno;
  int output_reload = -1;
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;
	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
	/* Life span of this reload must not extend past main insn.  */
	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
	&& rld[i].when_needed != RELOAD_OTHER
	/* Both reloads must occupy the same number of registers.  */
	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
				       [(int) rld[output_reload].outmode])
	/* No auto-increment side effect, and no reload register has
	   been assigned yet.  */
	&& rld[i].inc == 0
	&& rld[i].reg_rtx == 0
#ifdef SECONDARY_MEMORY_NEEDED
	/* Don't combine two reloads with different secondary
	   memory locations.  */
	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
#endif
	/* The register classes must be compatible: identical on targets
	   with small classes, otherwise one a subset of the other.  */
	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
	    ? (rld[i].rclass == rld[output_reload].rclass)
	    : (reg_class_subset_p (rld[i].rclass,
				   rld[output_reload].rclass)
	       || reg_class_subset_p (rld[output_reload].rclass,
				      rld[i].rclass)))
	&& (MATCHES (rld[i].in, rld[output_reload].out)
	    /* Args reversed because the first arg seems to be
	       the one that we imagine being modified
	       while the second is the one that might be affected.  */
	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
						      rld[i].in)
		/* However, if the input is a register that appears inside
		   the output, then we also can't share.
		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
		   If the same reload reg is used for both reg 69 and the
		   result to be stored in memory, then that result
		   will clobber the address of the memory ref.  */
		&& ! (REG_P (rld[i].in)
		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
							     rld[output_reload].out))))
	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
					 rld[i].when_needed != RELOAD_FOR_INPUT)
	&& (reg_class_size[(int) rld[i].rclass]
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	/* We will allow making things slightly worse by combining an
	   input and an output, but no worse than that.  */
	&& (rld[i].when_needed == RELOAD_FOR_INPUT
	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
	int j;

	/* We have found a reload to combine with!  */
	rld[i].out = rld[output_reload].out;
	rld[i].out_reg = rld[output_reload].out_reg;
	rld[i].outmode = rld[output_reload].outmode;
	/* Mark the old output reload as inoperative.  */
	rld[output_reload].out = 0;
	/* The combined reload is needed for the entire insn.  */
	rld[i].when_needed = RELOAD_OTHER;
	/* If the output reload had a secondary reload, copy it.  */
	if (rld[output_reload].secondary_out_reload != -1)
	  {
	    rld[i].secondary_out_reload
	      = rld[output_reload].secondary_out_reload;
	    rld[i].secondary_out_icode
	      = rld[output_reload].secondary_out_icode;
	  }

#ifdef SECONDARY_MEMORY_NEEDED
	/* Copy any secondary MEM.  */
	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
#endif
	/* If required, minimize the register class.  */
	if (reg_class_subset_p (rld[output_reload].rclass,
				rld[i].rclass))
	  rld[i].rclass = rld[output_reload].rclass;

	/* Transfer all replacements from the old reload to the combined.  */
	for (j = 0; j < n_replacements; j++)
	  if (replacements[j].what == output_reload)
	    replacements[j].what = i;

	return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
   is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber.  If this is an asm insn, give up.  */

  /* INSN_CODE is -1 for an asm or unrecognizable insn.  */
  if (INSN_CODE (this_insn) == -1)
    return;

  /* Give up if any operand other than operand 0 is written ('=' or '+'
     constraint), since then operand 0 may not be the one this output
     reload corresponds to.  */
  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;

  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
}
1949 \f
/* Try to find a reload register for an in-out reload (expressions IN and OUT).
   See if one of IN and OUT is a register that may be used;
   this is desirable since a spill-register won't be needed.
   If so, return the register rtx that proves acceptable.

   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
   RCLASS is the register class required for the reload.

   If FOR_REAL is >= 0, it is the number of the reload,
   and in some cases when it can be discovered that OUT doesn't need
   to be computed, clear out rld[FOR_REAL].out.

   If FOR_REAL is -1, this should not be done, because this call
   is just to see if a register can be found, not to find and install it.

   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
   puts an additional constraint on being able to use IN for OUT since
   IN must not appear elsewhere in the insn (it is assumed that IN itself
   is safe from the earlyclobber).  */

static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   enum machine_mode inmode, enum machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
{
  rtx in = real_in;
  rtx out = real_out;
  int in_offset = 0;
  int out_offset = 0;
  /* The register we settle on, or 0 if none is found.  */
  rtx value = 0;

  /* If operands exceed a word, we can't use either of them
     unless they have the same size.  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   SUBREG_BYTE (out),
					   GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_BYTE (in),
					  GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs[regno][outmode];
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
	 ignore references within IN.  They don't prevent us
	 from copying IN into OUT, because those refs would
	 move into the insn that reloads IN.

	 However, we only ignore IN in its role as this reload.
	 If the insn uses IN elsewhere and it contains OUT,
	 that counts.  We can't be sure it's the "same" operand
	 so it might not go through this reload.

	 We also need to avoid using OUT if it, or part of it, is a
	 fixed register.  Modifying such registers, even transiently,
	 may have undefined effects on the machine, such as modifying
	 the stack pointer.  */
      /* Temporarily hide IN from the refers_to_regno check below;
	 restored before leaving this block.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_MODE_OK (regno, outmode)
	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
					     PATTERN (this_insn), outloc))
	{
	  unsigned int i;

	  /* Every hard reg of the group must be in RCLASS and not fixed.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i)
		|| fixed_regs[regno + i])
	      break;

	  if (i == nwords)
	    {
	      if (REG_P (real_out))
		value = real_out;
	      else
		value = gen_rtx_REG (outmode, regno);
	    }
	}

      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it is dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
  if (hard_regs_live_known
      && REG_P (in)
      && REGNO (in) < FIRST_PSEUDO_REGISTER
      && (value == 0
	  || find_reg_note (this_insn, REG_UNUSED, real_out))
      && find_reg_note (this_insn, REG_DEAD, real_in)
      && !fixed_regs[REGNO (in)]
      && HARD_REGNO_MODE_OK (REGNO (in),
			     /* The only case where out and real_out might
				have different modes is where real_out
				is a subreg, and in that case, out
				has a real mode.  */
			     (GET_MODE (out) != VOIDmode
			      ? GET_MODE (out) : outmode))
      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
	  /* However only do this if we can be sure that this input
	     operand doesn't correspond with an uninitialized pseudo.
	     global can assign some hardreg to it that is the same as
	     the one assigned to a different, also live pseudo (as it
	     can ignore the conflict).  We must never introduce writes
	     to such hardregs, as they would clobber the other live
	     pseudo.  See PR 20973.  */
	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			     ORIGINAL_REGNO (in))
	      /* Similarly, only do this if we can be sure that the death
		 note is still valid.  global can assign some hardreg to
		 the pseudo referenced in the note and simultaneously a
		 subword of this hardreg to a different, also live pseudo,
		 because only another subword of the hardreg is actually
		 used in the insn.  This cannot happen if the pseudo has
		 been assigned exactly one hardreg.  See PR 33732.  */
	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
    {
      unsigned int regno = REGNO (in) + in_offset;
      unsigned int nwords = hard_regno_nregs[regno][inmode];

      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
	  && ! hard_reg_set_here_p (regno, regno + nwords,
				    PATTERN (this_insn))
	  && (! earlyclobber
	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
						 PATTERN (this_insn), inloc)))
	{
	  unsigned int i;

	  /* Every hard reg of the group must belong to RCLASS.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      /* If we were going to use OUT as the reload reg
		 and changed our mind, it means OUT is a dummy that
		 dies here.  So don't bother copying value to it.  */
	      if (for_real >= 0 && value == real_out)
		rld[for_real].out = 0;
	      if (REG_P (real_in))
		value = real_in;
	      else
		value = gen_rtx_REG (inmode, regno);
	    }
	}
    }

  return value;
}
2144 \f
2145 /* This page contains subroutines used mainly for determining
2146 whether the IN or an OUT of a reload can serve as the
2147 reload register. */
2148
2149 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2150
2151 int
2152 earlyclobber_operand_p (rtx x)
2153 {
2154 int i;
2155
2156 for (i = 0; i < n_earlyclobbers; i++)
2157 if (reload_earlyclobbers[i] == x)
2158 return 1;
2159
2160 return 0;
2161 }
2162
2163 /* Return 1 if expression X alters a hard reg in the range
2164 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2165 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2166 X should be the body of an instruction. */
2167
2168 static int
2169 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2170 {
2171 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2172 {
2173 rtx op0 = SET_DEST (x);
2174
2175 while (GET_CODE (op0) == SUBREG)
2176 op0 = SUBREG_REG (op0);
2177 if (REG_P (op0))
2178 {
2179 unsigned int r = REGNO (op0);
2180
2181 /* See if this reg overlaps range under consideration. */
2182 if (r < end_regno
2183 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2184 return 1;
2185 }
2186 }
2187 else if (GET_CODE (x) == PARALLEL)
2188 {
2189 int i = XVECLEN (x, 0) - 1;
2190
2191 for (; i >= 0; i--)
2192 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2193 return 1;
2194 }
2195
2196 return 0;
2197 }
2198
/* Return 1 if ADDR is a valid memory address for mode MODE
   in address space AS, and check that each pseudo reg has the
   proper kind of hard reg.  */

int
strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				    rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style targets supply a macro that jumps to the given label
     (here `win') when the address is legitimate.  Such targets only
     support the generic address space.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Third argument 1 requests strict checking: every pseudo must
     already have a suitable hard register.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
#endif
}
2218 \f
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   autoincrement and autodecrement.
   This is specifically intended for find_reloads to use
   in determining whether two operands match.
   X is the operand whose number is the lower of the two.

   The value is 2 if Y contains a pre-increment that matches
   a non-incrementing address in X.  */

/* ??? To be completely correct, we should arrange to pass
   for X the output operand and for Y the input operand.
   For now, we assume that the output operand has the lower number
   because that is natural in (SET output (... input ...)).  */

int
operands_match_p (rtx x, rtx y)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  /* Set if any subexpression matched with value 2 (pre-increment
     matching a plain address); propagated into our return value.  */
  int success_2;

  if (x == y)
    return 1;
  /* Fast path: both operands are (possibly SUBREGs of) registers.
     Reduce each to an effective register number and compare.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int j;

      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  /* SUBREG of a pseudo: no hard regno to offset; compare
	     structurally instead.  */
	  if (i >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x),
				    GET_MODE (x));
	}
      else
	i = REGNO (x);

      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (j >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
				    GET_MODE (SUBREG_REG (y)),
				    SUBREG_BYTE (y),
				    GET_MODE (y));
	}
      else
	j = REGNO (y);

      /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
	 multiple hard register group of scalar integer registers, so that
	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
	 register.  */
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && i < FIRST_PSEUDO_REGISTER)
	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (y))
	  && j < FIRST_PSEUDO_REGISTER)
	j += hard_regno_nregs[j][GET_MODE (y)] - 1;

      return i == j;
    }
  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two postincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a postincrement matches ordinary indexing
     if the postincrement is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y);
  /* Two preincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a preincrement matches ordinary indexing
     if the preincrement is the input operand.
     In this case, return 2, since some callers need to do special
     things when this happens.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;

 slow:

  /* Now we have disposed of all the cases in which different rtx codes
     can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so pointer inequality (x != y,
	 checked at entry) means they differ.  */
      return 0;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  success_2 = 0;
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
	  if (val == 0)
	    return 0;
	  /* If any subexpression returns 2,
	     we should return 2 if we are successful.  */
	  if (val == 2)
	    success_2 = 1;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
	      if (val == 0)
		return 0;
	      if (val == 2)
		success_2 = 1;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1 + success_2;
}
2392 \f
/* Describe the range of registers or memory referenced by X.
   If X is a register, set REG_FLAG and put the first register
   number into START and the last plus one into END.
   If X is a memory reference, put a base address into BASE
   and a range of integer offsets into START and END.
   If X is pushing on the stack, we can assume it causes no trouble,
   so we set the SAFE field.  */

static struct decomposition
decompose (rtx x)
{
  struct decomposition val;
  /* Nonzero if the address was wrapped in a CONST; if so, a PLUS base
     built below is re-wrapped in CONST at the end.  */
  int all_const = 0;

  memset (&val, 0, sizeof (val));

  switch (GET_CODE (x))
    {
    case MEM:
      {
	rtx base = NULL_RTX, offset = 0;
	rtx addr = XEXP (x, 0);

	/* Simple auto-inc/dec: the accessed range is one mode-size on
	   either side of the base register.  Safe if pushing on the
	   stack pointer.  */
	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
	  {
	    val.base = XEXP (addr, 0);
	    val.start = -GET_MODE_SIZE (GET_MODE (x));
	    val.end = GET_MODE_SIZE (GET_MODE (x));
	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
	    return val;
	  }

	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
	  {
	    /* Only handle (base_reg + const) modifications.  */
	    if (GET_CODE (XEXP (addr, 1)) == PLUS
		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
	      {
		val.base  = XEXP (addr, 0);
		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
		return val;
	      }
	  }

	if (GET_CODE (addr) == CONST)
	  {
	    addr = XEXP (addr, 0);
	    all_const = 1;
	  }
	/* Split a PLUS address into a base part and a constant part.  */
	if (GET_CODE (addr) == PLUS)
	  {
	    if (CONSTANT_P (XEXP (addr, 0)))
	      {
		base = XEXP (addr, 1);
		offset = XEXP (addr, 0);
	      }
	    else if (CONSTANT_P (XEXP (addr, 1)))
	      {
		base = XEXP (addr, 0);
		offset = XEXP (addr, 1);
	      }
	  }

	if (offset == 0)
	  {
	    base = addr;
	    offset = const0_rtx;
	  }
	if (GET_CODE (offset) == CONST)
	  offset = XEXP (offset, 0);
	/* Normalize until OFFSET is a plain CONST_INT, folding any
	   non-integer parts back into BASE.  */
	if (GET_CODE (offset) == PLUS)
	  {
	    if (CONST_INT_P (XEXP (offset, 0)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
		offset = XEXP (offset, 0);
	      }
	    else if (CONST_INT_P (XEXP (offset, 1)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
		offset = XEXP (offset, 1);
	      }
	    else
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
		offset = const0_rtx;
	      }
	  }
	else if (!CONST_INT_P (offset))
	  {
	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
	    offset = const0_rtx;
	  }

	if (all_const && GET_CODE (base) == PLUS)
	  base = gen_rtx_CONST (GET_MODE (base), base);

	gcc_assert (CONST_INT_P (offset));

	val.start = INTVAL (offset);
	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
	val.base = base;
      }
      break;

    case REG:
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo with no hard reg.  */
	  val.start = REGNO (x);
	  val.end = val.start + 1;
	}
      else
	/* A hard reg.  */
	val.end = end_hard_regno (GET_MODE (x), val.start);
      break;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	/* This could be more precise, but it's good enough.  */
	return decompose (SUBREG_REG (x));
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	return decompose (SUBREG_REG (x));
      else
	/* A hard reg.  */
	val.end = val.start + subreg_nregs (x);
      break;

    case SCRATCH:
      /* This hasn't been assigned yet, so it can't conflict yet.  */
      val.safe = 1;
      break;

    default:
      gcc_assert (CONSTANT_P (x));
      val.safe = 1;
      break;
    }
  return val;
}
2540
2541 /* Return 1 if altering Y will not modify the value of X.
2542 Y is also described by YDATA, which should be decompose (Y). */
2543
2544 static int
2545 immune_p (rtx x, rtx y, struct decomposition ydata)
2546 {
2547 struct decomposition xdata;
2548
2549 if (ydata.reg_flag)
2550 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2551 if (ydata.safe)
2552 return 1;
2553
2554 gcc_assert (MEM_P (y));
2555 /* If Y is memory and X is not, Y can't affect X. */
2556 if (!MEM_P (x))
2557 return 1;
2558
2559 xdata = decompose (x);
2560
2561 if (! rtx_equal_p (xdata.base, ydata.base))
2562 {
2563 /* If bases are distinct symbolic constants, there is no overlap. */
2564 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2565 return 1;
2566 /* Constants and stack slots never overlap. */
2567 if (CONSTANT_P (xdata.base)
2568 && (ydata.base == frame_pointer_rtx
2569 || ydata.base == hard_frame_pointer_rtx
2570 || ydata.base == stack_pointer_rtx))
2571 return 1;
2572 if (CONSTANT_P (ydata.base)
2573 && (xdata.base == frame_pointer_rtx
2574 || xdata.base == hard_frame_pointer_rtx
2575 || xdata.base == stack_pointer_rtx))
2576 return 1;
2577 /* If either base is variable, we don't know anything. */
2578 return 0;
2579 }
2580
2581 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2582 }
2583
2584 /* Similar, but calls decompose. */
2585
2586 int
2587 safe_from_earlyclobber (rtx op, rtx clobber)
2588 {
2589 struct decomposition early_data;
2590
2591 early_data = decompose (clobber);
2592 return immune_p (op, clobber, early_data);
2593 }
2594 \f
2595 /* Main entry point of this file: search the body of INSN
2596 for values that need reloading and record them with push_reload.
2597 REPLACE nonzero means record also where the values occur
2598 so that subst_reloads can be used.
2599
2600 IND_LEVELS says how many levels of indirection are supported by this
2601 machine; a value of zero means that a memory reference is not a valid
2602 memory address.
2603
2604 LIVE_KNOWN says we have valid information about which hard
2605 regs are live at each point in the program; this is true when
2606 we are called from global_alloc but false when stupid register
2607 allocation has been done.
2608
2609 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2610 which is nonnegative if the reg has been commandeered for reloading into.
2611 It is copied into STATIC_RELOAD_REG_P and referenced from there
2612 by various subroutines.
2613
2614 Return TRUE if some operands need to be changed, because of swapping
2615 commutative operands, reg_equiv_address substitution, or whatever. */
2616
2617 int
2618 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2619 short *reload_reg_p)
2620 {
2621 int insn_code_number;
2622 int i, j;
2623 int noperands;
2624 /* These start out as the constraints for the insn
2625 and they are chewed up as we consider alternatives. */
2626 const char *constraints[MAX_RECOG_OPERANDS];
2627 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2628 a register. */
2629 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2630 char pref_or_nothing[MAX_RECOG_OPERANDS];
2631 /* Nonzero for a MEM operand whose entire address needs a reload.
2632 May be -1 to indicate the entire address may or may not need a reload. */
2633 int address_reloaded[MAX_RECOG_OPERANDS];
2634 /* Nonzero for an address operand that needs to be completely reloaded.
2635 May be -1 to indicate the entire operand may or may not need a reload. */
2636 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2637 /* Value of enum reload_type to use for operand. */
2638 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2639 /* Value of enum reload_type to use within address of operand. */
2640 enum reload_type address_type[MAX_RECOG_OPERANDS];
2641 /* Save the usage of each operand. */
2642 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2643 int no_input_reloads = 0, no_output_reloads = 0;
2644 int n_alternatives;
2645 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2646 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2647 char this_alternative_win[MAX_RECOG_OPERANDS];
2648 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2649 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2650 int this_alternative_matches[MAX_RECOG_OPERANDS];
2651 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2652 int this_alternative_number;
2653 int goal_alternative_number = 0;
2654 int operand_reloadnum[MAX_RECOG_OPERANDS];
2655 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2656 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2657 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2658 char goal_alternative_win[MAX_RECOG_OPERANDS];
2659 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2660 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2661 int goal_alternative_swapped;
2662 int best;
2663 int commutative;
2664 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2665 rtx substed_operand[MAX_RECOG_OPERANDS];
2666 rtx body = PATTERN (insn);
2667 rtx set = single_set (insn);
2668 int goal_earlyclobber = 0, this_earlyclobber;
2669 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2670 int retval = 0;
2671
2672 this_insn = insn;
2673 n_reloads = 0;
2674 n_replacements = 0;
2675 n_earlyclobbers = 0;
2676 replace_reloads = replace;
2677 hard_regs_live_known = live_known;
2678 static_reload_reg_p = reload_reg_p;
2679
2680 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2681 neither are insns that SET cc0. Insns that use CC0 are not allowed
2682 to have any input reloads. */
2683 if (JUMP_P (insn) || CALL_P (insn))
2684 no_output_reloads = 1;
2685
2686 #ifdef HAVE_cc0
2687 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2688 no_input_reloads = 1;
2689 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2690 no_output_reloads = 1;
2691 #endif
2692
2693 #ifdef SECONDARY_MEMORY_NEEDED
2694 /* The eliminated forms of any secondary memory locations are per-insn, so
2695 clear them out here. */
2696
2697 if (secondary_memlocs_elim_used)
2698 {
2699 memset (secondary_memlocs_elim, 0,
2700 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2701 secondary_memlocs_elim_used = 0;
2702 }
2703 #endif
2704
2705 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2706 is cheap to move between them. If it is not, there may not be an insn
2707 to do the copy, so we may need a reload. */
2708 if (GET_CODE (body) == SET
2709 && REG_P (SET_DEST (body))
2710 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2711 && REG_P (SET_SRC (body))
2712 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2713 && register_move_cost (GET_MODE (SET_SRC (body)),
2714 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2715 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2716 return 0;
2717
2718 extract_insn (insn);
2719
2720 noperands = reload_n_operands = recog_data.n_operands;
2721 n_alternatives = recog_data.n_alternatives;
2722
2723 /* Just return "no reloads" if insn has no operands with constraints. */
2724 if (noperands == 0 || n_alternatives == 0)
2725 return 0;
2726
2727 insn_code_number = INSN_CODE (insn);
2728 this_insn_is_asm = insn_code_number < 0;
2729
2730 memcpy (operand_mode, recog_data.operand_mode,
2731 noperands * sizeof (enum machine_mode));
2732 memcpy (constraints, recog_data.constraints,
2733 noperands * sizeof (const char *));
2734
2735 commutative = -1;
2736
2737 /* If we will need to know, later, whether some pair of operands
2738 are the same, we must compare them now and save the result.
2739 Reloading the base and index registers will clobber them
2740 and afterward they will fail to match. */
2741
2742 for (i = 0; i < noperands; i++)
2743 {
2744 const char *p;
2745 int c;
2746 char *end;
2747
2748 substed_operand[i] = recog_data.operand[i];
2749 p = constraints[i];
2750
2751 modified[i] = RELOAD_READ;
2752
2753 /* Scan this operand's constraint to see if it is an output operand,
2754 an in-out operand, is commutative, or should match another. */
2755
2756 while ((c = *p))
2757 {
2758 p += CONSTRAINT_LEN (c, p);
2759 switch (c)
2760 {
2761 case '=':
2762 modified[i] = RELOAD_WRITE;
2763 break;
2764 case '+':
2765 modified[i] = RELOAD_READ_WRITE;
2766 break;
2767 case '%':
2768 {
2769 /* The last operand should not be marked commutative. */
2770 gcc_assert (i != noperands - 1);
2771
2772 /* We currently only support one commutative pair of
2773 operands. Some existing asm code currently uses more
2774 than one pair. Previously, that would usually work,
2775 but sometimes it would crash the compiler. We
2776 continue supporting that case as well as we can by
2777 silently ignoring all but the first pair. In the
2778 future we may handle it correctly. */
2779 if (commutative < 0)
2780 commutative = i;
2781 else
2782 gcc_assert (this_insn_is_asm);
2783 }
2784 break;
2785 /* Use of ISDIGIT is tempting here, but it may get expensive because
2786 of locale support we don't want. */
2787 case '0': case '1': case '2': case '3': case '4':
2788 case '5': case '6': case '7': case '8': case '9':
2789 {
2790 c = strtoul (p - 1, &end, 10);
2791 p = end;
2792
2793 operands_match[c][i]
2794 = operands_match_p (recog_data.operand[c],
2795 recog_data.operand[i]);
2796
2797 /* An operand may not match itself. */
2798 gcc_assert (c != i);
2799
2800 /* If C can be commuted with C+1, and C might need to match I,
2801 then C+1 might also need to match I. */
2802 if (commutative >= 0)
2803 {
2804 if (c == commutative || c == commutative + 1)
2805 {
2806 int other = c + (c == commutative ? 1 : -1);
2807 operands_match[other][i]
2808 = operands_match_p (recog_data.operand[other],
2809 recog_data.operand[i]);
2810 }
2811 if (i == commutative || i == commutative + 1)
2812 {
2813 int other = i + (i == commutative ? 1 : -1);
2814 operands_match[c][other]
2815 = operands_match_p (recog_data.operand[c],
2816 recog_data.operand[other]);
2817 }
2818 /* Note that C is supposed to be less than I.
2819 No need to consider altering both C and I because in
2820 that case we would alter one into the other. */
2821 }
2822 }
2823 }
2824 }
2825 }
2826
2827 /* Examine each operand that is a memory reference or memory address
2828 and reload parts of the addresses into index registers.
2829 Also here any references to pseudo regs that didn't get hard regs
2830 but are equivalent to constants get replaced in the insn itself
2831 with those constants. Nobody will ever see them again.
2832
2833 Finally, set up the preferred classes of each operand. */
2834
2835 for (i = 0; i < noperands; i++)
2836 {
2837 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2838
2839 address_reloaded[i] = 0;
2840 address_operand_reloaded[i] = 0;
2841 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2842 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2843 : RELOAD_OTHER);
2844 address_type[i]
2845 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2846 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2847 : RELOAD_OTHER);
2848
2849 if (*constraints[i] == 0)
2850 /* Ignore things like match_operator operands. */
2851 ;
2852 else if (insn_extra_address_constraint
2853 (lookup_constraint (constraints[i])))
2854 {
2855 address_operand_reloaded[i]
2856 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2857 recog_data.operand[i],
2858 recog_data.operand_loc[i],
2859 i, operand_type[i], ind_levels, insn);
2860
2861 /* If we now have a simple operand where we used to have a
2862 PLUS or MULT, re-recognize and try again. */
2863 if ((OBJECT_P (*recog_data.operand_loc[i])
2864 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2865 && (GET_CODE (recog_data.operand[i]) == MULT
2866 || GET_CODE (recog_data.operand[i]) == PLUS))
2867 {
2868 INSN_CODE (insn) = -1;
2869 retval = find_reloads (insn, replace, ind_levels, live_known,
2870 reload_reg_p);
2871 return retval;
2872 }
2873
2874 recog_data.operand[i] = *recog_data.operand_loc[i];
2875 substed_operand[i] = recog_data.operand[i];
2876
2877 /* Address operands are reloaded in their existing mode,
2878 no matter what is specified in the machine description. */
2879 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2880
2881 /* If the address is a single CONST_INT pick address mode
2882 instead otherwise we will later not know in which mode
2883 the reload should be performed. */
2884 if (operand_mode[i] == VOIDmode)
2885 operand_mode[i] = Pmode;
2886
2887 }
2888 else if (code == MEM)
2889 {
2890 address_reloaded[i]
2891 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2892 recog_data.operand_loc[i],
2893 XEXP (recog_data.operand[i], 0),
2894 &XEXP (recog_data.operand[i], 0),
2895 i, address_type[i], ind_levels, insn);
2896 recog_data.operand[i] = *recog_data.operand_loc[i];
2897 substed_operand[i] = recog_data.operand[i];
2898 }
2899 else if (code == SUBREG)
2900 {
2901 rtx reg = SUBREG_REG (recog_data.operand[i]);
2902 rtx op
2903 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2904 ind_levels,
2905 set != 0
2906 && &SET_DEST (set) == recog_data.operand_loc[i],
2907 insn,
2908 &address_reloaded[i]);
2909
2910 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2911 that didn't get a hard register, emit a USE with a REG_EQUAL
2912 note in front so that we might inherit a previous, possibly
2913 wider reload. */
2914
2915 if (replace
2916 && MEM_P (op)
2917 && REG_P (reg)
2918 && (GET_MODE_SIZE (GET_MODE (reg))
2919 >= GET_MODE_SIZE (GET_MODE (op)))
2920 && reg_equiv_constant (REGNO (reg)) == 0)
2921 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2922 insn),
2923 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2924
2925 substed_operand[i] = recog_data.operand[i] = op;
2926 }
2927 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2928 /* We can get a PLUS as an "operand" as a result of register
2929 elimination. See eliminate_regs and gen_reload. We handle
2930 a unary operator by reloading the operand. */
2931 substed_operand[i] = recog_data.operand[i]
2932 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2933 ind_levels, 0, insn,
2934 &address_reloaded[i]);
2935 else if (code == REG)
2936 {
2937 /* This is equivalent to calling find_reloads_toplev.
2938 The code is duplicated for speed.
2939 When we find a pseudo always equivalent to a constant,
2940 we replace it by the constant. We must be sure, however,
2941 that we don't try to replace it in the insn in which it
2942 is being set. */
2943 int regno = REGNO (recog_data.operand[i]);
2944 if (reg_equiv_constant (regno) != 0
2945 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2946 {
2947 /* Record the existing mode so that the check if constants are
2948 allowed will work when operand_mode isn't specified. */
2949
2950 if (operand_mode[i] == VOIDmode)
2951 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2952
2953 substed_operand[i] = recog_data.operand[i]
2954 = reg_equiv_constant (regno);
2955 }
2956 if (reg_equiv_memory_loc (regno) != 0
2957 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2958 /* We need not give a valid is_set_dest argument since the case
2959 of a constant equivalence was checked above. */
2960 substed_operand[i] = recog_data.operand[i]
2961 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2962 ind_levels, 0, insn,
2963 &address_reloaded[i]);
2964 }
2965 /* If the operand is still a register (we didn't replace it with an
2966 equivalent), get the preferred class to reload it into. */
2967 code = GET_CODE (recog_data.operand[i]);
2968 preferred_class[i]
2969 = ((code == REG && REGNO (recog_data.operand[i])
2970 >= FIRST_PSEUDO_REGISTER)
2971 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2972 : NO_REGS);
2973 pref_or_nothing[i]
2974 = (code == REG
2975 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2976 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2977 }
2978
2979 /* If this is simply a copy from operand 1 to operand 0, merge the
2980 preferred classes for the operands. */
2981 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2982 && recog_data.operand[1] == SET_SRC (set))
2983 {
2984 preferred_class[0] = preferred_class[1]
2985 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2986 pref_or_nothing[0] |= pref_or_nothing[1];
2987 pref_or_nothing[1] |= pref_or_nothing[0];
2988 }
2989
2990 /* Now see what we need for pseudo-regs that didn't get hard regs
2991 or got the wrong kind of hard reg. For this, we must consider
2992 all the operands together against the register constraints. */
2993
2994 best = MAX_RECOG_OPERANDS * 2 + 600;
2995
2996 goal_alternative_swapped = 0;
2997
2998 /* The constraints are made of several alternatives.
2999 Each operand's constraint looks like foo,bar,... with commas
3000 separating the alternatives. The first alternatives for all
3001 operands go together, the second alternatives go together, etc.
3002
3003 First loop over alternatives. */
3004
3005 for (this_alternative_number = 0;
3006 this_alternative_number < n_alternatives;
3007 this_alternative_number++)
3008 {
3009 int swapped;
3010
3011 if (!TEST_BIT (recog_data.enabled_alternatives, this_alternative_number))
3012 {
3013 int i;
3014
3015 for (i = 0; i < recog_data.n_operands; i++)
3016 constraints[i] = skip_alternative (constraints[i]);
3017
3018 continue;
3019 }
3020
3021 /* If insn is commutative (it's safe to exchange a certain pair
3022 of operands) then we need to try each alternative twice, the
3023 second time matching those two operands as if we had
3024 exchanged them. To do this, really exchange them in
3025 operands. */
3026 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3027 {
3028 /* Loop over operands for one constraint alternative. */
3029 /* LOSERS counts those that don't fit this alternative
3030 and would require loading. */
3031 int losers = 0;
3032 /* BAD is set to 1 if it some operand can't fit this alternative
3033 even after reloading. */
3034 int bad = 0;
3035 /* REJECT is a count of how undesirable this alternative says it is
3036 if any reloading is required. If the alternative matches exactly
3037 then REJECT is ignored, but otherwise it gets this much
3038 counted against it in addition to the reloading needed. Each
3039 ? counts three times here since we want the disparaging caused by
3040 a bad register class to only count 1/3 as much. */
3041 int reject = 0;
3042
3043 if (swapped)
3044 {
3045 enum reg_class tclass;
3046 int t;
3047
3048 recog_data.operand[commutative] = substed_operand[commutative + 1];
3049 recog_data.operand[commutative + 1] = substed_operand[commutative];
3050 /* Swap the duplicates too. */
3051 for (i = 0; i < recog_data.n_dups; i++)
3052 if (recog_data.dup_num[i] == commutative
3053 || recog_data.dup_num[i] == commutative + 1)
3054 *recog_data.dup_loc[i]
3055 = recog_data.operand[(int) recog_data.dup_num[i]];
3056
3057 tclass = preferred_class[commutative];
3058 preferred_class[commutative] = preferred_class[commutative + 1];
3059 preferred_class[commutative + 1] = tclass;
3060
3061 t = pref_or_nothing[commutative];
3062 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3063 pref_or_nothing[commutative + 1] = t;
3064
3065 t = address_reloaded[commutative];
3066 address_reloaded[commutative] = address_reloaded[commutative + 1];
3067 address_reloaded[commutative + 1] = t;
3068 }
3069
3070 this_earlyclobber = 0;
3071
3072 for (i = 0; i < noperands; i++)
3073 {
3074 const char *p = constraints[i];
3075 char *end;
3076 int len;
3077 int win = 0;
3078 int did_match = 0;
3079 /* 0 => this operand can be reloaded somehow for this alternative. */
3080 int badop = 1;
3081 /* 0 => this operand can be reloaded if the alternative allows regs. */
3082 int winreg = 0;
3083 int c;
3084 int m;
3085 rtx operand = recog_data.operand[i];
3086 int offset = 0;
3087 /* Nonzero means this is a MEM that must be reloaded into a reg
3088 regardless of what the constraint says. */
3089 int force_reload = 0;
3090 int offmemok = 0;
3091 /* Nonzero if a constant forced into memory would be OK for this
3092 operand. */
3093 int constmemok = 0;
3094 int earlyclobber = 0;
3095 enum constraint_num cn;
3096 enum reg_class cl;
3097
3098 /* If the predicate accepts a unary operator, it means that
3099 we need to reload the operand, but do not do this for
3100 match_operator and friends. */
3101 if (UNARY_P (operand) && *p != 0)
3102 operand = XEXP (operand, 0);
3103
3104 /* If the operand is a SUBREG, extract
3105 the REG or MEM (or maybe even a constant) within.
3106 (Constants can occur as a result of reg_equiv_constant.) */
3107
3108 while (GET_CODE (operand) == SUBREG)
3109 {
3110 /* Offset only matters when operand is a REG and
3111 it is a hard reg. This is because it is passed
3112 to reg_fits_class_p if it is a REG and all pseudos
3113 return 0 from that function. */
3114 if (REG_P (SUBREG_REG (operand))
3115 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3116 {
3117 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3118 GET_MODE (SUBREG_REG (operand)),
3119 SUBREG_BYTE (operand),
3120 GET_MODE (operand)) < 0)
3121 force_reload = 1;
3122 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3123 GET_MODE (SUBREG_REG (operand)),
3124 SUBREG_BYTE (operand),
3125 GET_MODE (operand));
3126 }
3127 operand = SUBREG_REG (operand);
3128 /* Force reload if this is a constant or PLUS or if there may
3129 be a problem accessing OPERAND in the outer mode. */
3130 if (CONSTANT_P (operand)
3131 || GET_CODE (operand) == PLUS
3132 /* We must force a reload of paradoxical SUBREGs
3133 of a MEM because the alignment of the inner value
3134 may not be enough to do the outer reference. On
3135 big-endian machines, it may also reference outside
3136 the object.
3137
3138 On machines that extend byte operations and we have a
3139 SUBREG where both the inner and outer modes are no wider
3140 than a word and the inner mode is narrower, is integral,
3141 and gets extended when loaded from memory, combine.c has
3142 made assumptions about the behavior of the machine in such
3143 register access. If the data is, in fact, in memory we
3144 must always load using the size assumed to be in the
3145 register and let the insn do the different-sized
3146 accesses.
3147
3148 This is doubly true if WORD_REGISTER_OPERATIONS. In
3149 this case eliminate_regs has left non-paradoxical
3150 subregs for push_reload to see. Make sure it does
3151 by forcing the reload.
3152
3153 ??? When is it right at this stage to have a subreg
3154 of a mem that is _not_ to be handled specially? IMO
3155 those should have been reduced to just a mem. */
3156 || ((MEM_P (operand)
3157 || (REG_P (operand)
3158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3159 #ifndef WORD_REGISTER_OPERATIONS
3160 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3161 < BIGGEST_ALIGNMENT)
3162 && (GET_MODE_SIZE (operand_mode[i])
3163 > GET_MODE_SIZE (GET_MODE (operand))))
3164 || BYTES_BIG_ENDIAN
3165 #ifdef LOAD_EXTEND_OP
3166 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3167 && (GET_MODE_SIZE (GET_MODE (operand))
3168 <= UNITS_PER_WORD)
3169 && (GET_MODE_SIZE (operand_mode[i])
3170 > GET_MODE_SIZE (GET_MODE (operand)))
3171 && INTEGRAL_MODE_P (GET_MODE (operand))
3172 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3173 #endif
3174 )
3175 #endif
3176 )
3177 )
3178 force_reload = 1;
3179 }
3180
3181 this_alternative[i] = NO_REGS;
3182 this_alternative_win[i] = 0;
3183 this_alternative_match_win[i] = 0;
3184 this_alternative_offmemok[i] = 0;
3185 this_alternative_earlyclobber[i] = 0;
3186 this_alternative_matches[i] = -1;
3187
3188 /* An empty constraint or empty alternative
3189 allows anything which matched the pattern. */
3190 if (*p == 0 || *p == ',')
3191 win = 1, badop = 0;
3192
3193 /* Scan this alternative's specs for this operand;
3194 set WIN if the operand fits any letter in this alternative.
3195 Otherwise, clear BADOP if this operand could
3196 fit some letter after reloads,
3197 or set WINREG if this operand could fit after reloads
3198 provided the constraint allows some registers. */
3199
3200 do
3201 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3202 {
3203 case '\0':
3204 len = 0;
3205 break;
3206 case ',':
3207 c = '\0';
3208 break;
3209
3210 case '?':
3211 reject += 6;
3212 break;
3213
3214 case '!':
3215 reject = 600;
3216 break;
3217
3218 case '#':
3219 /* Ignore rest of this alternative as far as
3220 reloading is concerned. */
3221 do
3222 p++;
3223 while (*p && *p != ',');
3224 len = 0;
3225 break;
3226
3227 case '0': case '1': case '2': case '3': case '4':
3228 case '5': case '6': case '7': case '8': case '9':
3229 m = strtoul (p, &end, 10);
3230 p = end;
3231 len = 0;
3232
3233 this_alternative_matches[i] = m;
3234 /* We are supposed to match a previous operand.
3235 If we do, we win if that one did.
3236 If we do not, count both of the operands as losers.
3237 (This is too conservative, since most of the time
3238 only a single reload insn will be needed to make
3239 the two operands win. As a result, this alternative
3240 may be rejected when it is actually desirable.) */
3241 if ((swapped && (m != commutative || i != commutative + 1))
3242 /* If we are matching as if two operands were swapped,
3243 also pretend that operands_match had been computed
3244 with swapped.
3245 But if I is the second of those and C is the first,
3246 don't exchange them, because operands_match is valid
3247 only on one side of its diagonal. */
3248 ? (operands_match
3249 [(m == commutative || m == commutative + 1)
3250 ? 2 * commutative + 1 - m : m]
3251 [(i == commutative || i == commutative + 1)
3252 ? 2 * commutative + 1 - i : i])
3253 : operands_match[m][i])
3254 {
3255 /* If we are matching a non-offsettable address where an
3256 offsettable address was expected, then we must reject
3257 this combination, because we can't reload it. */
3258 if (this_alternative_offmemok[m]
3259 && MEM_P (recog_data.operand[m])
3260 && this_alternative[m] == NO_REGS
3261 && ! this_alternative_win[m])
3262 bad = 1;
3263
3264 did_match = this_alternative_win[m];
3265 }
3266 else
3267 {
3268 /* Operands don't match. */
3269 rtx value;
3270 int loc1, loc2;
3271 /* Retroactively mark the operand we had to match
3272 as a loser, if it wasn't already. */
3273 if (this_alternative_win[m])
3274 losers++;
3275 this_alternative_win[m] = 0;
3276 if (this_alternative[m] == NO_REGS)
3277 bad = 1;
3278 /* But count the pair only once in the total badness of
3279 this alternative, if the pair can be a dummy reload.
3280 The pointers in operand_loc are not swapped; swap
3281 them by hand if necessary. */
3282 if (swapped && i == commutative)
3283 loc1 = commutative + 1;
3284 else if (swapped && i == commutative + 1)
3285 loc1 = commutative;
3286 else
3287 loc1 = i;
3288 if (swapped && m == commutative)
3289 loc2 = commutative + 1;
3290 else if (swapped && m == commutative + 1)
3291 loc2 = commutative;
3292 else
3293 loc2 = m;
3294 value
3295 = find_dummy_reload (recog_data.operand[i],
3296 recog_data.operand[m],
3297 recog_data.operand_loc[loc1],
3298 recog_data.operand_loc[loc2],
3299 operand_mode[i], operand_mode[m],
3300 this_alternative[m], -1,
3301 this_alternative_earlyclobber[m]);
3302
3303 if (value != 0)
3304 losers--;
3305 }
3306 /* This can be fixed with reloads if the operand
3307 we are supposed to match can be fixed with reloads. */
3308 badop = 0;
3309 this_alternative[i] = this_alternative[m];
3310
3311 /* If we have to reload this operand and some previous
3312 operand also had to match the same thing as this
3313 operand, we don't know how to do that. So reject this
3314 alternative. */
3315 if (! did_match || force_reload)
3316 for (j = 0; j < i; j++)
3317 if (this_alternative_matches[j]
3318 == this_alternative_matches[i])
3319 {
3320 badop = 1;
3321 break;
3322 }
3323 break;
3324
3325 case 'p':
3326 /* All necessary reloads for an address_operand
3327 were handled in find_reloads_address. */
3328 this_alternative[i]
3329 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3330 ADDRESS, SCRATCH);
3331 win = 1;
3332 badop = 0;
3333 break;
3334
3335 case TARGET_MEM_CONSTRAINT:
3336 if (force_reload)
3337 break;
3338 if (MEM_P (operand)
3339 || (REG_P (operand)
3340 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3341 && reg_renumber[REGNO (operand)] < 0))
3342 win = 1;
3343 if (CONST_POOL_OK_P (operand_mode[i], operand))
3344 badop = 0;
3345 constmemok = 1;
3346 break;
3347
3348 case '<':
3349 if (MEM_P (operand)
3350 && ! address_reloaded[i]
3351 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3352 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3353 win = 1;
3354 break;
3355
3356 case '>':
3357 if (MEM_P (operand)
3358 && ! address_reloaded[i]
3359 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3360 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3361 win = 1;
3362 break;
3363
3364 /* Memory operand whose address is not offsettable. */
3365 case 'V':
3366 if (force_reload)
3367 break;
3368 if (MEM_P (operand)
3369 && ! (ind_levels ? offsettable_memref_p (operand)
3370 : offsettable_nonstrict_memref_p (operand))
3371 /* Certain mem addresses will become offsettable
3372 after they themselves are reloaded. This is important;
3373 we don't want our own handling of unoffsettables
3374 to override the handling of reg_equiv_address. */
3375 && !(REG_P (XEXP (operand, 0))
3376 && (ind_levels == 0
3377 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3378 win = 1;
3379 break;
3380
3381 /* Memory operand whose address is offsettable. */
3382 case 'o':
3383 if (force_reload)
3384 break;
3385 if ((MEM_P (operand)
3386 /* If IND_LEVELS, find_reloads_address won't reload a
3387 pseudo that didn't get a hard reg, so we have to
3388 reject that case. */
3389 && ((ind_levels ? offsettable_memref_p (operand)
3390 : offsettable_nonstrict_memref_p (operand))
3391 /* A reloaded address is offsettable because it is now
3392 just a simple register indirect. */
3393 || address_reloaded[i] == 1))
3394 || (REG_P (operand)
3395 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3396 && reg_renumber[REGNO (operand)] < 0
3397 /* If reg_equiv_address is nonzero, we will be
3398 loading it into a register; hence it will be
3399 offsettable, but we cannot say that reg_equiv_mem
3400 is offsettable without checking. */
3401 && ((reg_equiv_mem (REGNO (operand)) != 0
3402 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3403 || (reg_equiv_address (REGNO (operand)) != 0))))
3404 win = 1;
3405 if (CONST_POOL_OK_P (operand_mode[i], operand)
3406 || MEM_P (operand))
3407 badop = 0;
3408 constmemok = 1;
3409 offmemok = 1;
3410 break;
3411
3412 case '&':
3413 /* Output operand that is stored before the need for the
3414 input operands (and their index registers) is over. */
3415 earlyclobber = 1, this_earlyclobber = 1;
3416 break;
3417
3418 case 'X':
3419 force_reload = 0;
3420 win = 1;
3421 break;
3422
3423 case 'g':
3424 if (! force_reload
3425 /* A PLUS is never a valid operand, but reload can make
3426 it from a register when eliminating registers. */
3427 && GET_CODE (operand) != PLUS
3428 /* A SCRATCH is not a valid operand. */
3429 && GET_CODE (operand) != SCRATCH
3430 && (! CONSTANT_P (operand)
3431 || ! flag_pic
3432 || LEGITIMATE_PIC_OPERAND_P (operand))
3433 && (GENERAL_REGS == ALL_REGS
3434 || !REG_P (operand)
3435 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3436 && reg_renumber[REGNO (operand)] < 0)))
3437 win = 1;
3438 cl = GENERAL_REGS;
3439 goto reg;
3440
3441 default:
3442 cn = lookup_constraint (p);
3443 switch (get_constraint_type (cn))
3444 {
3445 case CT_REGISTER:
3446 cl = reg_class_for_constraint (cn);
3447 if (cl != NO_REGS)
3448 goto reg;
3449 break;
3450
3451 case CT_CONST_INT:
3452 if (CONST_INT_P (operand)
3453 && (insn_const_int_ok_for_constraint
3454 (INTVAL (operand), cn)))
3455 win = true;
3456 break;
3457
3458 case CT_MEMORY:
3459 if (force_reload)
3460 break;
3461 if (constraint_satisfied_p (operand, cn))
3462 win = 1;
3463 /* If the address was already reloaded,
3464 we win as well. */
3465 else if (MEM_P (operand) && address_reloaded[i] == 1)
3466 win = 1;
3467 /* Likewise if the address will be reloaded because
3468 reg_equiv_address is nonzero. For reg_equiv_mem
3469 we have to check. */
3470 else if (REG_P (operand)
3471 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3472 && reg_renumber[REGNO (operand)] < 0
3473 && ((reg_equiv_mem (REGNO (operand)) != 0
3474 && (constraint_satisfied_p
3475 (reg_equiv_mem (REGNO (operand)),
3476 cn)))
3477 || (reg_equiv_address (REGNO (operand))
3478 != 0)))
3479 win = 1;
3480
3481 /* If we didn't already win, we can reload
3482 constants via force_const_mem, and other
3483 MEMs by reloading the address like for 'o'. */
3484 if (CONST_POOL_OK_P (operand_mode[i], operand)
3485 || MEM_P (operand))
3486 badop = 0;
3487 constmemok = 1;
3488 offmemok = 1;
3489 break;
3490
3491 case CT_ADDRESS:
3492 if (constraint_satisfied_p (operand, cn))
3493 win = 1;
3494
3495 /* If we didn't already win, we can reload
3496 the address into a base register. */
3497 this_alternative[i]
3498 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3499 ADDRESS, SCRATCH);
3500 badop = 0;
3501 break;
3502
3503 case CT_FIXED_FORM:
3504 if (constraint_satisfied_p (operand, cn))
3505 win = 1;
3506 break;
3507 }
3508 break;
3509
3510 reg:
3511 this_alternative[i]
3512 = reg_class_subunion[this_alternative[i]][cl];
3513 if (GET_MODE (operand) == BLKmode)
3514 break;
3515 winreg = 1;
3516 if (REG_P (operand)
3517 && reg_fits_class_p (operand, this_alternative[i],
3518 offset, GET_MODE (recog_data.operand[i])))
3519 win = 1;
3520 break;
3521 }
3522 while ((p += len), c);
3523
3524 if (swapped == (commutative >= 0 ? 1 : 0))
3525 constraints[i] = p;
3526
3527 /* If this operand could be handled with a reg,
3528 and some reg is allowed, then this operand can be handled. */
3529 if (winreg && this_alternative[i] != NO_REGS
3530 && (win || !class_only_fixed_regs[this_alternative[i]]))
3531 badop = 0;
3532
3533 /* Record which operands fit this alternative. */
3534 this_alternative_earlyclobber[i] = earlyclobber;
3535 if (win && ! force_reload)
3536 this_alternative_win[i] = 1;
3537 else if (did_match && ! force_reload)
3538 this_alternative_match_win[i] = 1;
3539 else
3540 {
3541 int const_to_mem = 0;
3542
3543 this_alternative_offmemok[i] = offmemok;
3544 losers++;
3545 if (badop)
3546 bad = 1;
3547 /* Alternative loses if it has no regs for a reg operand. */
3548 if (REG_P (operand)
3549 && this_alternative[i] == NO_REGS
3550 && this_alternative_matches[i] < 0)
3551 bad = 1;
3552
3553 /* If this is a constant that is reloaded into the desired
3554 class by copying it to memory first, count that as another
3555 reload. This is consistent with other code and is
3556 required to avoid choosing another alternative when
3557 the constant is moved into memory by this function on
3558 an early reload pass. Note that the test here is
3559 precisely the same as in the code below that calls
3560 force_const_mem. */
3561 if (CONST_POOL_OK_P (operand_mode[i], operand)
3562 && ((targetm.preferred_reload_class (operand,
3563 this_alternative[i])
3564 == NO_REGS)
3565 || no_input_reloads))
3566 {
3567 const_to_mem = 1;
3568 if (this_alternative[i] != NO_REGS)
3569 losers++;
3570 }
3571
3572 /* Alternative loses if it requires a type of reload not
3573 permitted for this insn. We can always reload SCRATCH
3574 and objects with a REG_UNUSED note. */
3575 if (GET_CODE (operand) != SCRATCH
3576 && modified[i] != RELOAD_READ && no_output_reloads
3577 && ! find_reg_note (insn, REG_UNUSED, operand))
3578 bad = 1;
3579 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3580 && ! const_to_mem)
3581 bad = 1;
3582
3583 /* If we can't reload this value at all, reject this
3584 alternative. Note that we could also lose due to
3585 LIMIT_RELOAD_CLASS, but we don't check that
3586 here. */
3587
3588 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3589 {
3590 if (targetm.preferred_reload_class (operand,
3591 this_alternative[i])
3592 == NO_REGS)
3593 reject = 600;
3594
3595 if (operand_type[i] == RELOAD_FOR_OUTPUT
3596 && (targetm.preferred_output_reload_class (operand,
3597 this_alternative[i])
3598 == NO_REGS))
3599 reject = 600;
3600 }
3601
3602 /* We prefer to reload pseudos over reloading other things,
3603 since such reloads may be able to be eliminated later.
3604 If we are reloading a SCRATCH, we won't be generating any
3605 insns, just using a register, so it is also preferred.
3606 So bump REJECT in other cases. Don't do this in the
3607 case where we are forcing a constant into memory and
3608 it will then win since we don't want to have a different
3609 alternative match then. */
3610 if (! (REG_P (operand)
3611 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3612 && GET_CODE (operand) != SCRATCH
3613 && ! (const_to_mem && constmemok))
3614 reject += 2;
3615
3616 /* Input reloads can be inherited more often than output
3617 reloads can be removed, so penalize output reloads. */
3618 if (operand_type[i] != RELOAD_FOR_INPUT
3619 && GET_CODE (operand) != SCRATCH)
3620 reject++;
3621 }
3622
3623 /* If this operand is a pseudo register that didn't get
3624 a hard reg and this alternative accepts some
3625 register, see if the class that we want is a subset
3626 of the preferred class for this register. If not,
3627 but it intersects that class, use the preferred class
3628 instead. If it does not intersect the preferred
3629 class, show that usage of this alternative should be
3630 discouraged; it will be discouraged more still if the
3631 register is `preferred or nothing'. We do this
3632 because it increases the chance of reusing our spill
3633 register in a later insn and avoiding a pair of
3634 memory stores and loads.
3635
3636 Don't bother with this if this alternative will
3637 accept this operand.
3638
3639 Don't do this for a multiword operand, since it is
3640 only a small win and has the risk of requiring more
3641 spill registers, which could cause a large loss.
3642
3643 Don't do this if the preferred class has only one
3644 register because we might otherwise exhaust the
3645 class. */
3646
3647 if (! win && ! did_match
3648 && this_alternative[i] != NO_REGS
3649 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3650 && reg_class_size [(int) preferred_class[i]] > 0
3651 && ! small_register_class_p (preferred_class[i]))
3652 {
3653 if (! reg_class_subset_p (this_alternative[i],
3654 preferred_class[i]))
3655 {
3656 /* Since we don't have a way of forming the intersection,
3657 we just do something special if the preferred class
3658 is a subset of the class we have; that's the most
3659 common case anyway. */
3660 if (reg_class_subset_p (preferred_class[i],
3661 this_alternative[i]))
3662 this_alternative[i] = preferred_class[i];
3663 else
3664 reject += (2 + 2 * pref_or_nothing[i]);
3665 }
3666 }
3667 }
3668
3669 /* Now see if any output operands that are marked "earlyclobber"
3670 in this alternative conflict with any input operands
3671 or any memory addresses. */
3672
3673 for (i = 0; i < noperands; i++)
3674 if (this_alternative_earlyclobber[i]
3675 && (this_alternative_win[i] || this_alternative_match_win[i]))
3676 {
3677 struct decomposition early_data;
3678
3679 early_data = decompose (recog_data.operand[i]);
3680
3681 gcc_assert (modified[i] != RELOAD_READ);
3682
3683 if (this_alternative[i] == NO_REGS)
3684 {
3685 this_alternative_earlyclobber[i] = 0;
3686 gcc_assert (this_insn_is_asm);
3687 error_for_asm (this_insn,
3688 "%<&%> constraint used with no register class");
3689 }
3690
3691 for (j = 0; j < noperands; j++)
3692 /* Is this an input operand or a memory ref? */
3693 if ((MEM_P (recog_data.operand[j])
3694 || modified[j] != RELOAD_WRITE)
3695 && j != i
3696 /* Ignore things like match_operator operands. */
3697 && !recog_data.is_operator[j]
3698 /* Don't count an input operand that is constrained to match
3699 the early clobber operand. */
3700 && ! (this_alternative_matches[j] == i
3701 && rtx_equal_p (recog_data.operand[i],
3702 recog_data.operand[j]))
3703 /* Is it altered by storing the earlyclobber operand? */
3704 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3705 early_data))
3706 {
3707 /* If the output is in a non-empty few-regs class,
3708 it's costly to reload it, so reload the input instead. */
3709 if (small_register_class_p (this_alternative[i])
3710 && (REG_P (recog_data.operand[j])
3711 || GET_CODE (recog_data.operand[j]) == SUBREG))
3712 {
3713 losers++;
3714 this_alternative_win[j] = 0;
3715 this_alternative_match_win[j] = 0;
3716 }
3717 else
3718 break;
3719 }
3720 /* If an earlyclobber operand conflicts with something,
3721 it must be reloaded, so request this and count the cost. */
3722 if (j != noperands)
3723 {
3724 losers++;
3725 this_alternative_win[i] = 0;
3726 this_alternative_match_win[j] = 0;
3727 for (j = 0; j < noperands; j++)
3728 if (this_alternative_matches[j] == i
3729 && this_alternative_match_win[j])
3730 {
3731 this_alternative_win[j] = 0;
3732 this_alternative_match_win[j] = 0;
3733 losers++;
3734 }
3735 }
3736 }
3737
3738 /* If one alternative accepts all the operands, no reload required,
3739 choose that alternative; don't consider the remaining ones. */
3740 if (losers == 0)
3741 {
3742 /* Unswap these so that they are never swapped at `finish'. */
3743 if (swapped)
3744 {
3745 recog_data.operand[commutative] = substed_operand[commutative];
3746 recog_data.operand[commutative + 1]
3747 = substed_operand[commutative + 1];
3748 }
3749 for (i = 0; i < noperands; i++)
3750 {
3751 goal_alternative_win[i] = this_alternative_win[i];
3752 goal_alternative_match_win[i] = this_alternative_match_win[i];
3753 goal_alternative[i] = this_alternative[i];
3754 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3755 goal_alternative_matches[i] = this_alternative_matches[i];
3756 goal_alternative_earlyclobber[i]
3757 = this_alternative_earlyclobber[i];
3758 }
3759 goal_alternative_number = this_alternative_number;
3760 goal_alternative_swapped = swapped;
3761 goal_earlyclobber = this_earlyclobber;
3762 goto finish;
3763 }
3764
3765 /* REJECT, set by the ! and ? constraint characters and when a register
3766 would be reloaded into a non-preferred class, discourages the use of
3767 this alternative for a reload goal. REJECT is incremented by six
3768 for each ? and two for each non-preferred class. */
3769 losers = losers * 6 + reject;
3770
3771 /* If this alternative can be made to work by reloading,
3772 and it needs less reloading than the others checked so far,
3773 record it as the chosen goal for reloading. */
3774 if (! bad)
3775 {
3776 if (best > losers)
3777 {
3778 for (i = 0; i < noperands; i++)
3779 {
3780 goal_alternative[i] = this_alternative[i];
3781 goal_alternative_win[i] = this_alternative_win[i];
3782 goal_alternative_match_win[i]
3783 = this_alternative_match_win[i];
3784 goal_alternative_offmemok[i]
3785 = this_alternative_offmemok[i];
3786 goal_alternative_matches[i] = this_alternative_matches[i];
3787 goal_alternative_earlyclobber[i]
3788 = this_alternative_earlyclobber[i];
3789 }
3790 goal_alternative_swapped = swapped;
3791 best = losers;
3792 goal_alternative_number = this_alternative_number;
3793 goal_earlyclobber = this_earlyclobber;
3794 }
3795 }
3796
3797 if (swapped)
3798 {
3799 enum reg_class tclass;
3800 int t;
3801
3802 /* If the commutative operands have been swapped, swap
3803 them back in order to check the next alternative. */
3804 recog_data.operand[commutative] = substed_operand[commutative];
3805 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3806 /* Unswap the duplicates too. */
3807 for (i = 0; i < recog_data.n_dups; i++)
3808 if (recog_data.dup_num[i] == commutative
3809 || recog_data.dup_num[i] == commutative + 1)
3810 *recog_data.dup_loc[i]
3811 = recog_data.operand[(int) recog_data.dup_num[i]];
3812
3813 /* Unswap the operand related information as well. */
3814 tclass = preferred_class[commutative];
3815 preferred_class[commutative] = preferred_class[commutative + 1];
3816 preferred_class[commutative + 1] = tclass;
3817
3818 t = pref_or_nothing[commutative];
3819 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3820 pref_or_nothing[commutative + 1] = t;
3821
3822 t = address_reloaded[commutative];
3823 address_reloaded[commutative] = address_reloaded[commutative + 1];
3824 address_reloaded[commutative + 1] = t;
3825 }
3826 }
3827 }
3828
3829 /* The operands don't meet the constraints.
3830 goal_alternative describes the alternative
3831 that we could reach by reloading the fewest operands.
3832 Reload so as to fit it. */
3833
3834 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3835 {
3836 /* No alternative works with reloads?? */
3837 if (insn_code_number >= 0)
3838 fatal_insn ("unable to generate reloads for:", insn);
3839 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3840 /* Avoid further trouble with this insn. */
3841 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3842 n_reloads = 0;
3843 return 0;
3844 }
3845
3846 /* Jump to `finish' from above if all operands are valid already.
3847 In that case, goal_alternative_win is all 1. */
3848 finish:
3849
3850 /* Right now, for any pair of operands I and J that are required to match,
3851 with I < J,
3852 goal_alternative_matches[J] is I.
3853 Set up goal_alternative_matched as the inverse function:
3854 goal_alternative_matched[I] = J. */
3855
3856 for (i = 0; i < noperands; i++)
3857 goal_alternative_matched[i] = -1;
3858
3859 for (i = 0; i < noperands; i++)
3860 if (! goal_alternative_win[i]
3861 && goal_alternative_matches[i] >= 0)
3862 goal_alternative_matched[goal_alternative_matches[i]] = i;
3863
3864 for (i = 0; i < noperands; i++)
3865 goal_alternative_win[i] |= goal_alternative_match_win[i];
3866
3867 /* If the best alternative is with operands 1 and 2 swapped,
3868 consider them swapped before reporting the reloads. Update the
3869 operand numbers of any reloads already pushed. */
3870
3871 if (goal_alternative_swapped)
3872 {
3873 rtx tem;
3874
3875 tem = substed_operand[commutative];
3876 substed_operand[commutative] = substed_operand[commutative + 1];
3877 substed_operand[commutative + 1] = tem;
3878 tem = recog_data.operand[commutative];
3879 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3880 recog_data.operand[commutative + 1] = tem;
3881 tem = *recog_data.operand_loc[commutative];
3882 *recog_data.operand_loc[commutative]
3883 = *recog_data.operand_loc[commutative + 1];
3884 *recog_data.operand_loc[commutative + 1] = tem;
3885
3886 for (i = 0; i < n_reloads; i++)
3887 {
3888 if (rld[i].opnum == commutative)
3889 rld[i].opnum = commutative + 1;
3890 else if (rld[i].opnum == commutative + 1)
3891 rld[i].opnum = commutative;
3892 }
3893 }
3894
3895 for (i = 0; i < noperands; i++)
3896 {
3897 operand_reloadnum[i] = -1;
3898
3899 /* If this is an earlyclobber operand, we need to widen the scope.
3900 The reload must remain valid from the start of the insn being
3901 reloaded until after the operand is stored into its destination.
3902 We approximate this with RELOAD_OTHER even though we know that we
3903 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3904
3905 One special case that is worth checking is when we have an
3906 output that is earlyclobber but isn't used past the insn (typically
3907 a SCRATCH). In this case, we only need have the reload live
3908 through the insn itself, but not for any of our input or output
3909 reloads.
3910 But we must not accidentally narrow the scope of an existing
3911 RELOAD_OTHER reload - leave these alone.
3912
3913 In any case, anything needed to address this operand can remain
3914 however they were previously categorized. */
3915
3916 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3917 operand_type[i]
3918 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3919 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3920 }
3921
3922 /* Any constants that aren't allowed and can't be reloaded
3923 into registers are here changed into memory references. */
3924 for (i = 0; i < noperands; i++)
3925 if (! goal_alternative_win[i])
3926 {
3927 rtx op = recog_data.operand[i];
3928 rtx subreg = NULL_RTX;
3929 rtx plus = NULL_RTX;
3930 enum machine_mode mode = operand_mode[i];
3931
3932 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3933 push_reload so we have to let them pass here. */
3934 if (GET_CODE (op) == SUBREG)
3935 {
3936 subreg = op;
3937 op = SUBREG_REG (op);
3938 mode = GET_MODE (op);
3939 }
3940
3941 if (GET_CODE (op) == PLUS)
3942 {
3943 plus = op;
3944 op = XEXP (op, 1);
3945 }
3946
3947 if (CONST_POOL_OK_P (mode, op)
3948 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3949 == NO_REGS)
3950 || no_input_reloads))
3951 {
3952 int this_address_reloaded;
3953 rtx tem = force_const_mem (mode, op);
3954
3955 /* If we stripped a SUBREG or a PLUS above add it back. */
3956 if (plus != NULL_RTX)
3957 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3958
3959 if (subreg != NULL_RTX)
3960 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3961
3962 this_address_reloaded = 0;
3963 substed_operand[i] = recog_data.operand[i]
3964 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3965 0, insn, &this_address_reloaded);
3966
3967 /* If the alternative accepts constant pool refs directly
3968 there will be no reload needed at all. */
3969 if (plus == NULL_RTX
3970 && subreg == NULL_RTX
3971 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3972 ? substed_operand[i]
3973 : NULL,
3974 recog_data.constraints[i],
3975 goal_alternative_number))
3976 goal_alternative_win[i] = 1;
3977 }
3978 }
3979
3980 /* Record the values of the earlyclobber operands for the caller. */
3981 if (goal_earlyclobber)
3982 for (i = 0; i < noperands; i++)
3983 if (goal_alternative_earlyclobber[i])
3984 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3985
3986 /* Now record reloads for all the operands that need them. */
3987 for (i = 0; i < noperands; i++)
3988 if (! goal_alternative_win[i])
3989 {
3990 /* Operands that match previous ones have already been handled. */
3991 if (goal_alternative_matches[i] >= 0)
3992 ;
3993 /* Handle an operand with a nonoffsettable address
3994 appearing where an offsettable address will do
3995 by reloading the address into a base register.
3996
3997 ??? We can also do this when the operand is a register and
3998 reg_equiv_mem is not offsettable, but this is a bit tricky,
3999 so we don't bother with it. It may not be worth doing. */
4000 else if (goal_alternative_matched[i] == -1
4001 && goal_alternative_offmemok[i]
4002 && MEM_P (recog_data.operand[i]))
4003 {
4004 /* If the address to be reloaded is a VOIDmode constant,
4005 use the default address mode as mode of the reload register,
4006 as would have been done by find_reloads_address. */
4007 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4008 enum machine_mode address_mode;
4009
4010 address_mode = get_address_mode (recog_data.operand[i]);
4011 operand_reloadnum[i]
4012 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4013 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4014 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4015 address_mode,
4016 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4017 rld[operand_reloadnum[i]].inc
4018 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4019
4020 /* If this operand is an output, we will have made any
4021 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4022 now we are treating part of the operand as an input, so
4023 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4024
4025 if (modified[i] == RELOAD_WRITE)
4026 {
4027 for (j = 0; j < n_reloads; j++)
4028 {
4029 if (rld[j].opnum == i)
4030 {
4031 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4032 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4033 else if (rld[j].when_needed
4034 == RELOAD_FOR_OUTADDR_ADDRESS)
4035 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4036 }
4037 }
4038 }
4039 }
4040 else if (goal_alternative_matched[i] == -1)
4041 {
4042 operand_reloadnum[i]
4043 = push_reload ((modified[i] != RELOAD_WRITE
4044 ? recog_data.operand[i] : 0),
4045 (modified[i] != RELOAD_READ
4046 ? recog_data.operand[i] : 0),
4047 (modified[i] != RELOAD_WRITE
4048 ? recog_data.operand_loc[i] : 0),
4049 (modified[i] != RELOAD_READ
4050 ? recog_data.operand_loc[i] : 0),
4051 (enum reg_class) goal_alternative[i],
4052 (modified[i] == RELOAD_WRITE
4053 ? VOIDmode : operand_mode[i]),
4054 (modified[i] == RELOAD_READ
4055 ? VOIDmode : operand_mode[i]),
4056 (insn_code_number < 0 ? 0
4057 : insn_data[insn_code_number].operand[i].strict_low),
4058 0, i, operand_type[i]);
4059 }
4060 /* In a matching pair of operands, one must be input only
4061 and the other must be output only.
4062 Pass the input operand as IN and the other as OUT. */
4063 else if (modified[i] == RELOAD_READ
4064 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4065 {
4066 operand_reloadnum[i]
4067 = push_reload (recog_data.operand[i],
4068 recog_data.operand[goal_alternative_matched[i]],
4069 recog_data.operand_loc[i],
4070 recog_data.operand_loc[goal_alternative_matched[i]],
4071 (enum reg_class) goal_alternative[i],
4072 operand_mode[i],
4073 operand_mode[goal_alternative_matched[i]],
4074 0, 0, i, RELOAD_OTHER);
4075 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4076 }
4077 else if (modified[i] == RELOAD_WRITE
4078 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4079 {
4080 operand_reloadnum[goal_alternative_matched[i]]
4081 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4082 recog_data.operand[i],
4083 recog_data.operand_loc[goal_alternative_matched[i]],
4084 recog_data.operand_loc[i],
4085 (enum reg_class) goal_alternative[i],
4086 operand_mode[goal_alternative_matched[i]],
4087 operand_mode[i],
4088 0, 0, i, RELOAD_OTHER);
4089 operand_reloadnum[i] = output_reloadnum;
4090 }
4091 else
4092 {
4093 gcc_assert (insn_code_number < 0);
4094 error_for_asm (insn, "inconsistent operand constraints "
4095 "in an %<asm%>");
4096 /* Avoid further trouble with this insn. */
4097 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4098 n_reloads = 0;
4099 return 0;
4100 }
4101 }
4102 else if (goal_alternative_matched[i] < 0
4103 && goal_alternative_matches[i] < 0
4104 && address_operand_reloaded[i] != 1
4105 && optimize)
4106 {
4107 /* For each non-matching operand that's a MEM or a pseudo-register
4108 that didn't get a hard register, make an optional reload.
4109 This may get done even if the insn needs no reloads otherwise. */
4110
4111 rtx operand = recog_data.operand[i];
4112
4113 while (GET_CODE (operand) == SUBREG)
4114 operand = SUBREG_REG (operand);
4115 if ((MEM_P (operand)
4116 || (REG_P (operand)
4117 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4118 /* If this is only for an output, the optional reload would not
4119 actually cause us to use a register now, just note that
4120 something is stored here. */
4121 && (goal_alternative[i] != NO_REGS
4122 || modified[i] == RELOAD_WRITE)
4123 && ! no_input_reloads
4124 /* An optional output reload might allow to delete INSN later.
4125 We mustn't make in-out reloads on insns that are not permitted
4126 output reloads.
4127 If this is an asm, we can't delete it; we must not even call
4128 push_reload for an optional output reload in this case,
4129 because we can't be sure that the constraint allows a register,
4130 and push_reload verifies the constraints for asms. */
4131 && (modified[i] == RELOAD_READ
4132 || (! no_output_reloads && ! this_insn_is_asm)))
4133 operand_reloadnum[i]
4134 = push_reload ((modified[i] != RELOAD_WRITE
4135 ? recog_data.operand[i] : 0),
4136 (modified[i] != RELOAD_READ
4137 ? recog_data.operand[i] : 0),
4138 (modified[i] != RELOAD_WRITE
4139 ? recog_data.operand_loc[i] : 0),
4140 (modified[i] != RELOAD_READ
4141 ? recog_data.operand_loc[i] : 0),
4142 (enum reg_class) goal_alternative[i],
4143 (modified[i] == RELOAD_WRITE
4144 ? VOIDmode : operand_mode[i]),
4145 (modified[i] == RELOAD_READ
4146 ? VOIDmode : operand_mode[i]),
4147 (insn_code_number < 0 ? 0
4148 : insn_data[insn_code_number].operand[i].strict_low),
4149 1, i, operand_type[i]);
4150 /* If a memory reference remains (either as a MEM or a pseudo that
4151 did not get a hard register), yet we can't make an optional
4152 reload, check if this is actually a pseudo register reference;
4153 we then need to emit a USE and/or a CLOBBER so that reload
4154 inheritance will do the right thing. */
4155 else if (replace
4156 && (MEM_P (operand)
4157 || (REG_P (operand)
4158 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4159 && reg_renumber [REGNO (operand)] < 0)))
4160 {
4161 operand = *recog_data.operand_loc[i];
4162
4163 while (GET_CODE (operand) == SUBREG)
4164 operand = SUBREG_REG (operand);
4165 if (REG_P (operand))
4166 {
4167 if (modified[i] != RELOAD_WRITE)
4168 /* We mark the USE with QImode so that we recognize
4169 it as one that can be safely deleted at the end
4170 of reload. */
4171 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4172 insn), QImode);
4173 if (modified[i] != RELOAD_READ)
4174 emit_insn_after (gen_clobber (operand), insn);
4175 }
4176 }
4177 }
4178 else if (goal_alternative_matches[i] >= 0
4179 && goal_alternative_win[goal_alternative_matches[i]]
4180 && modified[i] == RELOAD_READ
4181 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4182 && ! no_input_reloads && ! no_output_reloads
4183 && optimize)
4184 {
4185 /* Similarly, make an optional reload for a pair of matching
4186 objects that are in MEM or a pseudo that didn't get a hard reg. */
4187
4188 rtx operand = recog_data.operand[i];
4189
4190 while (GET_CODE (operand) == SUBREG)
4191 operand = SUBREG_REG (operand);
4192 if ((MEM_P (operand)
4193 || (REG_P (operand)
4194 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4195 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4196 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4197 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4198 recog_data.operand[i],
4199 recog_data.operand_loc[goal_alternative_matches[i]],
4200 recog_data.operand_loc[i],
4201 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4202 operand_mode[goal_alternative_matches[i]],
4203 operand_mode[i],
4204 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4205 }
4206
4207 /* Perform whatever substitutions on the operands we are supposed
4208 to make due to commutativity or replacement of registers
4209 with equivalent constants or memory slots. */
4210
4211 for (i = 0; i < noperands; i++)
4212 {
4213 /* We only do this on the last pass through reload, because it is
4214 possible for some data (like reg_equiv_address) to be changed during
4215 later passes. Moreover, we lose the opportunity to get a useful
4216 reload_{in,out}_reg when we do these replacements. */
4217
4218 if (replace)
4219 {
4220 rtx substitution = substed_operand[i];
4221
4222 *recog_data.operand_loc[i] = substitution;
4223
4224 /* If we're replacing an operand with a LABEL_REF, we need to
4225 make sure that there's a REG_LABEL_OPERAND note attached to
4226 this instruction. */
4227 if (GET_CODE (substitution) == LABEL_REF
4228 && !find_reg_note (insn, REG_LABEL_OPERAND,
4229 LABEL_REF_LABEL (substitution))
4230 /* For a JUMP_P, if it was a branch target it must have
4231 already been recorded as such. */
4232 && (!JUMP_P (insn)
4233 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4234 insn)))
4235 {
4236 add_reg_note (insn, REG_LABEL_OPERAND,
4237 LABEL_REF_LABEL (substitution));
4238 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4239 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4240 }
4241
4242 }
4243 else
4244 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4245 }
4246
4247 /* If this insn pattern contains any MATCH_DUP's, make sure that
4248 they will be substituted if the operands they match are substituted.
4249 Also do now any substitutions we already did on the operands.
4250
4251 Don't do this if we aren't making replacements because we might be
4252 propagating things allocated by frame pointer elimination into places
4253 it doesn't expect. */
4254
4255 if (insn_code_number >= 0 && replace)
4256 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4257 {
4258 int opno = recog_data.dup_num[i];
4259 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4260 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4261 }
4262
4263 #if 0
4264 /* This loses because reloading of prior insns can invalidate the equivalence
4265 (or at least find_equiv_reg isn't smart enough to find it any more),
4266 causing this insn to need more reload regs than it needed before.
4267 It may be too late to make the reload regs available.
4268 Now this optimization is done safely in choose_reload_regs. */
4269
4270 /* For each reload of a reg into some other class of reg,
4271 search for an existing equivalent reg (same value now) in the right class.
4272 We can use it as long as we don't need to change its contents. */
4273 for (i = 0; i < n_reloads; i++)
4274 if (rld[i].reg_rtx == 0
4275 && rld[i].in != 0
4276 && REG_P (rld[i].in)
4277 && rld[i].out == 0)
4278 {
4279 rld[i].reg_rtx
4280 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4281 static_reload_reg_p, 0, rld[i].inmode);
4282 /* Prevent generation of insn to load the value
4283 because the one we found already has the value. */
4284 if (rld[i].reg_rtx)
4285 rld[i].in = rld[i].reg_rtx;
4286 }
4287 #endif
4288
4289 /* If we detected error and replaced asm instruction by USE, forget about the
4290 reloads. */
4291 if (GET_CODE (PATTERN (insn)) == USE
4292 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4293 n_reloads = 0;
4294
4295 /* Perhaps an output reload can be combined with another
4296 to reduce needs by one. */
4297 if (!goal_earlyclobber)
4298 combine_reloads ();
4299
4300 /* If we have a pair of reloads for parts of an address, they are reloading
4301 the same object, the operands themselves were not reloaded, and they
4302 are for two operands that are supposed to match, merge the reloads and
4303 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4304
4305 for (i = 0; i < n_reloads; i++)
4306 {
4307 int k;
4308
4309 for (j = i + 1; j < n_reloads; j++)
4310 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4311 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4312 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4315 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4316 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4317 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4318 && rtx_equal_p (rld[i].in, rld[j].in)
4319 && (operand_reloadnum[rld[i].opnum] < 0
4320 || rld[operand_reloadnum[rld[i].opnum]].optional)
4321 && (operand_reloadnum[rld[j].opnum] < 0
4322 || rld[operand_reloadnum[rld[j].opnum]].optional)
4323 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4324 || (goal_alternative_matches[rld[j].opnum]
4325 == rld[i].opnum)))
4326 {
4327 for (k = 0; k < n_replacements; k++)
4328 if (replacements[k].what == j)
4329 replacements[k].what = i;
4330
4331 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4333 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4334 else
4335 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4336 rld[j].in = 0;
4337 }
4338 }
4339
4340 /* Scan all the reloads and update their type.
4341 If a reload is for the address of an operand and we didn't reload
4342 that operand, change the type. Similarly, change the operand number
4343 of a reload when two operands match. If a reload is optional, treat it
4344 as though the operand isn't reloaded.
4345
4346 ??? This latter case is somewhat odd because if we do the optional
4347 reload, it means the object is hanging around. Thus we need only
4348 do the address reload if the optional reload was NOT done.
4349
4350 Change secondary reloads to be the address type of their operand, not
4351 the normal type.
4352
4353 If an operand's reload is now RELOAD_OTHER, change any
4354 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4355 RELOAD_FOR_OTHER_ADDRESS. */
4356
4357 for (i = 0; i < n_reloads; i++)
4358 {
4359 if (rld[i].secondary_p
4360 && rld[i].when_needed == operand_type[rld[i].opnum])
4361 rld[i].when_needed = address_type[rld[i].opnum];
4362
4363 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4364 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4365 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4367 && (operand_reloadnum[rld[i].opnum] < 0
4368 || rld[operand_reloadnum[rld[i].opnum]].optional))
4369 {
4370 /* If we have a secondary reload to go along with this reload,
4371 change its type to RELOAD_FOR_OPADDR_ADDR. */
4372
4373 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4374 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4375 && rld[i].secondary_in_reload != -1)
4376 {
4377 int secondary_in_reload = rld[i].secondary_in_reload;
4378
4379 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4380
4381 /* If there's a tertiary reload we have to change it also. */
4382 if (secondary_in_reload > 0
4383 && rld[secondary_in_reload].secondary_in_reload != -1)
4384 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4385 = RELOAD_FOR_OPADDR_ADDR;
4386 }
4387
4388 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4389 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4390 && rld[i].secondary_out_reload != -1)
4391 {
4392 int secondary_out_reload = rld[i].secondary_out_reload;
4393
4394 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4395
4396 /* If there's a tertiary reload we have to change it also. */
4397 if (secondary_out_reload
4398 && rld[secondary_out_reload].secondary_out_reload != -1)
4399 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4400 = RELOAD_FOR_OPADDR_ADDR;
4401 }
4402
4403 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4404 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4405 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4406 else
4407 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4408 }
4409
4410 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4411 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4412 && operand_reloadnum[rld[i].opnum] >= 0
4413 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4414 == RELOAD_OTHER))
4415 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4416
4417 if (goal_alternative_matches[rld[i].opnum] >= 0)
4418 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4419 }
4420
4421 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4422 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4423 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4424
4425 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4426 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4427 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4428 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4429 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4430 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4431 This is complicated by the fact that a single operand can have more
4432 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4433 choose_reload_regs without affecting code quality, and cases that
4434 actually fail are extremely rare, so it turns out to be better to fix
4435 the problem here by not generating cases that choose_reload_regs will
4436 fail for. */
4437 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4438 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4439 a single operand.
4440 We can reduce the register pressure by exploiting that a
4441 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4442 does not conflict with any of them, if it is only used for the first of
4443 the RELOAD_FOR_X_ADDRESS reloads. */
4444 {
4445 int first_op_addr_num = -2;
4446 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4447 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4448 int need_change = 0;
4449 /* We use last_op_addr_reload and the contents of the above arrays
4450 first as flags - -2 means no instance encountered, -1 means exactly
4451 one instance encountered.
4452 If more than one instance has been encountered, we store the reload
4453 number of the first reload of the kind in question; reload numbers
4454 are known to be non-negative. */
4455 for (i = 0; i < noperands; i++)
4456 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4457 for (i = n_reloads - 1; i >= 0; i--)
4458 {
4459 switch (rld[i].when_needed)
4460 {
4461 case RELOAD_FOR_OPERAND_ADDRESS:
4462 if (++first_op_addr_num >= 0)
4463 {
4464 first_op_addr_num = i;
4465 need_change = 1;
4466 }
4467 break;
4468 case RELOAD_FOR_INPUT_ADDRESS:
4469 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4470 {
4471 first_inpaddr_num[rld[i].opnum] = i;
4472 need_change = 1;
4473 }
4474 break;
4475 case RELOAD_FOR_OUTPUT_ADDRESS:
4476 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4477 {
4478 first_outpaddr_num[rld[i].opnum] = i;
4479 need_change = 1;
4480 }
4481 break;
4482 default:
4483 break;
4484 }
4485 }
4486
4487 if (need_change)
4488 {
4489 for (i = 0; i < n_reloads; i++)
4490 {
4491 int first_num;
4492 enum reload_type type;
4493
4494 switch (rld[i].when_needed)
4495 {
4496 case RELOAD_FOR_OPADDR_ADDR:
4497 first_num = first_op_addr_num;
4498 type = RELOAD_FOR_OPERAND_ADDRESS;
4499 break;
4500 case RELOAD_FOR_INPADDR_ADDRESS:
4501 first_num = first_inpaddr_num[rld[i].opnum];
4502 type = RELOAD_FOR_INPUT_ADDRESS;
4503 break;
4504 case RELOAD_FOR_OUTADDR_ADDRESS:
4505 first_num = first_outpaddr_num[rld[i].opnum];
4506 type = RELOAD_FOR_OUTPUT_ADDRESS;
4507 break;
4508 default:
4509 continue;
4510 }
4511 if (first_num < 0)
4512 continue;
4513 else if (i > first_num)
4514 rld[i].when_needed = type;
4515 else
4516 {
4517 /* Check if the only TYPE reload that uses reload I is
4518 reload FIRST_NUM. */
4519 for (j = n_reloads - 1; j > first_num; j--)
4520 {
4521 if (rld[j].when_needed == type
4522 && (rld[i].secondary_p
4523 ? rld[j].secondary_in_reload == i
4524 : reg_mentioned_p (rld[i].in, rld[j].in)))
4525 {
4526 rld[i].when_needed = type;
4527 break;
4528 }
4529 }
4530 }
4531 }
4532 }
4533 }
4534
4535 /* See if we have any reloads that are now allowed to be merged
4536 because we've changed when the reload is needed to
4537 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4538 check for the most common cases. */
4539
4540 for (i = 0; i < n_reloads; i++)
4541 if (rld[i].in != 0 && rld[i].out == 0
4542 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4543 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4544 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4545 for (j = 0; j < n_reloads; j++)
4546 if (i != j && rld[j].in != 0 && rld[j].out == 0
4547 && rld[j].when_needed == rld[i].when_needed
4548 && MATCHES (rld[i].in, rld[j].in)
4549 && rld[i].rclass == rld[j].rclass
4550 && !rld[i].nocombine && !rld[j].nocombine
4551 && rld[i].reg_rtx == rld[j].reg_rtx)
4552 {
4553 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4554 transfer_replacements (i, j);
4555 rld[j].in = 0;
4556 }
4557
4558 #ifdef HAVE_cc0
4559 /* If we made any reloads for addresses, see if they violate a
4560 "no input reloads" requirement for this insn. But loads that we
4561 do after the insn (such as for output addresses) are fine. */
4562 if (no_input_reloads)
4563 for (i = 0; i < n_reloads; i++)
4564 gcc_assert (rld[i].in == 0
4565 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4566 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4567 #endif
4568
4569 /* Compute reload_mode and reload_nregs. */
4570 for (i = 0; i < n_reloads; i++)
4571 {
4572 rld[i].mode
4573 = (rld[i].inmode == VOIDmode
4574 || (GET_MODE_SIZE (rld[i].outmode)
4575 > GET_MODE_SIZE (rld[i].inmode)))
4576 ? rld[i].outmode : rld[i].inmode;
4577
4578 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4579 }
4580
4581 /* Special case a simple move with an input reload and a
4582 destination of a hard reg, if the hard reg is ok, use it. */
4583 for (i = 0; i < n_reloads; i++)
4584 if (rld[i].when_needed == RELOAD_FOR_INPUT
4585 && GET_CODE (PATTERN (insn)) == SET
4586 && REG_P (SET_DEST (PATTERN (insn)))
4587 && (SET_SRC (PATTERN (insn)) == rld[i].in
4588 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4589 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4590 {
4591 rtx dest = SET_DEST (PATTERN (insn));
4592 unsigned int regno = REGNO (dest);
4593
4594 if (regno < FIRST_PSEUDO_REGISTER
4595 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4596 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4597 {
4598 int nr = hard_regno_nregs[regno][rld[i].mode];
4599 int ok = 1, nri;
4600
4601 for (nri = 1; nri < nr; nri ++)
4602 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4603 {
4604 ok = 0;
4605 break;
4606 }
4607
4608 if (ok)
4609 rld[i].reg_rtx = dest;
4610 }
4611 }
4612
4613 return retval;
4614 }
4615
4616 /* Return true if alternative number ALTNUM in constraint-string
4617 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4618 MEM gives the reference if it didn't need any reloads, otherwise it
4619 is null. */
4620
4621 static bool
4622 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4623 const char *constraint, int altnum)
4624 {
4625 int c;
4626
4627 /* Skip alternatives before the one requested. */
4628 while (altnum > 0)
4629 {
4630 while (*constraint++ != ',')
4631 ;
4632 altnum--;
4633 }
4634 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4635 If one of them is present, this alternative accepts the result of
4636 passing a constant-pool reference through find_reloads_toplev.
4637
4638 The same is true of extra memory constraints if the address
4639 was reloaded into a register. However, the target may elect
4640 to disallow the original constant address, forcing it to be
4641 reloaded into a register instead. */
4642 for (; (c = *constraint) && c != ',' && c != '#';
4643 constraint += CONSTRAINT_LEN (c, constraint))
4644 {
4645 enum constraint_num cn = lookup_constraint (constraint);
4646 if (insn_extra_memory_constraint (cn)
4647 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4648 return true;
4649 }
4650 return false;
4651 }
4652 \f
4653 /* Scan X for memory references and scan the addresses for reloading.
4654 Also checks for references to "constant" regs that we want to eliminate
4655 and replaces them with the values they stand for.
4656 We may alter X destructively if it contains a reference to such.
4657 If X is just a constant reg, we return the equivalent value
4658 instead of X.
4659
4660 IND_LEVELS says how many levels of indirect addressing this machine
4661 supports.
4662
4663 OPNUM and TYPE identify the purpose of the reload.
4664
4665 IS_SET_DEST is true if X is the destination of a SET, which is not
4666 appropriate to be replaced by a constant.
4667
4668 INSN, if nonzero, is the insn in which we do the reload. It is used
4669 to determine if we may generate output reloads, and where to put USEs
4670 for pseudos that we have to replace with stack slots.
4671
4672 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4673 result of find_reloads_address. */
4674
4675 static rtx
4676 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4677 int ind_levels, int is_set_dest, rtx_insn *insn,
4678 int *address_reloaded)
4679 {
4680 RTX_CODE code = GET_CODE (x);
4681
4682 const char *fmt = GET_RTX_FORMAT (code);
4683 int i;
4684 int copied;
4685
/* Case 1: a bare pseudo register.  Substitute its constant equivalence
   (unless it is a SET destination), or its memory equivalence when the
   elimination offsets may have shifted since the equivalence was made,
   reloading the resulting address as needed.  */
4686 if (code == REG)
4687 {
4688 /* This code is duplicated for speed in find_reloads. */
4689 int regno = REGNO (x);
4690 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4691 x = reg_equiv_constant (regno);
4692 #if 0
4693 /* This creates (subreg (mem...)) which would cause an unnecessary
4694 reload of the mem. */
4695 else if (reg_equiv_mem (regno) != 0)
4696 x = reg_equiv_mem (regno);
4697 #endif
4698 else if (reg_equiv_memory_loc (regno)
4699 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4700 {
4701 rtx mem = make_memloc (x, regno);
4702 if (reg_equiv_address (regno)
4703 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4704 {
4705 /* If this is not a toplevel operand, find_reloads doesn't see
4706 this substitution. We have to emit a USE of the pseudo so
4707 that delete_output_reload can see it. */
4708 if (replace_reloads && recog_data.operand[opnum] != x)
4709 /* We mark the USE with QImode so that we recognize it
4710 as one that can be safely deleted at the end of
4711 reload. */
4712 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4713 QImode);
4714 x = mem;
4715 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4716 opnum, type, ind_levels, insn);
4717 if (!rtx_equal_p (x, mem))
4718 push_reg_equiv_alt_mem (regno, x);
4719 if (address_reloaded)
4720 *address_reloaded = i;
4721 }
4722 }
4723 return x;
4724 }
/* Case 2: a memory reference.  Only its address may need reloading;
   the (possibly new) MEM is returned.  */
4725 if (code == MEM)
4726 {
4727 rtx tem = x;
4728
4729 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4730 opnum, type, ind_levels, insn);
4731 if (address_reloaded)
4732 *address_reloaded = i;
4733
4734 return tem;
4735 }
4736
/* Case 3: a SUBREG of a register.  Try constant simplification first,
   then conversion of the whole subreg into a memory reference; if
   neither applies, fall through to the generic recursion below.  */
4737 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4738 {
4739 /* Check for SUBREG containing a REG that's equivalent to a
4740 constant. If the constant has a known value, truncate it
4741 right now. Similarly if we are extracting a single-word of a
4742 multi-word constant. If the constant is symbolic, allow it
4743 to be substituted normally. push_reload will strip the
4744 subreg later. The constant must not be VOIDmode, because we
4745 will lose the mode of the register (this should never happen
4746 because one of the cases above should handle it). */
4747
4748 int regno = REGNO (SUBREG_REG (x));
4749 rtx tem;
4750
4751 if (regno >= FIRST_PSEUDO_REGISTER
4752 && reg_renumber[regno] < 0
4753 && reg_equiv_constant (regno) != 0)
4754 {
4755 tem =
4756 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4757 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4758 gcc_assert (tem);
4759 if (CONSTANT_P (tem)
4760 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4761 {
4762 tem = force_const_mem (GET_MODE (x), tem);
4763 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4764 &XEXP (tem, 0), opnum, type,
4765 ind_levels, insn);
4766 if (address_reloaded)
4767 *address_reloaded = i;
4768 }
4769 return tem;
4770 }
4771
4772 /* If the subreg contains a reg that will be converted to a mem,
4773 attempt to convert the whole subreg to a (narrower or wider)
4774 memory reference instead. If this succeeds, we're done --
4775 otherwise fall through to check whether the inner reg still
4776 needs address reloads anyway. */
4777
4778 if (regno >= FIRST_PSEUDO_REGISTER
4779 && reg_equiv_memory_loc (regno) != 0)
4780 {
4781 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4782 insn, address_reloaded);
4783 if (tem)
4784 return tem;
4785 }
4786 }
4787
/* Default: recurse on every rtx subexpression, copying X lazily on the
   first substitution so the change stays reversible.  */
4788 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4789 {
4790 if (fmt[i] == 'e')
4791 {
4792 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4793 ind_levels, is_set_dest, insn,
4794 address_reloaded);
4795 /* If we have replaced a reg with its equivalent memory loc -
4796 that can still be handled here e.g. if it's in a paradoxical
4797 subreg - we must make the change in a copy, rather than using
4798 a destructive change. This way, find_reloads can still elect
4799 not to do the change. */
4800 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4801 {
4802 x = shallow_copy_rtx (x);
4803 copied = 1;
4804 }
4805 XEXP (x, i) = new_part;
4806 }
4807 }
4808 return x;
4809 }
4810
4811 /* Return a mem ref for the memory equivalent of reg REGNO.
4812 This mem ref is not shared with anything. */
4813
4814 static rtx
4815 make_memloc (rtx ad, int regno)
4816 {
4817 /* We must rerun eliminate_regs, in case the elimination
4818 offsets have changed. */
4819 rtx tem
4820 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4821 0);
4822
4823 /* If TEM might contain a pseudo, we must copy it to avoid
4824 modifying it when we do the substitution for the reload. */
4825 if (rtx_varies_p (tem, 0))
4826 tem = copy_rtx (tem);
4827
4828 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4829 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4830
4831 /* Copy the result if it's still the same as the equivalence, to avoid
4832 modifying it when we do the substitution for the reload. */
4833 if (tem == reg_equiv_memory_loc (regno))
4834 tem = copy_rtx (tem);
4835 return tem;
4836 }
4837
4838 /* Returns true if AD could be turned into a valid memory reference
4839 to mode MODE in address space AS by reloading the part pointed to
4840 by PART into a register. */
4841
4842 static int
4843 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4844 addr_space_t as, rtx *part)
4845 {
4846 int retv;
4847 rtx tem = *part;
4848 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4849
4850 *part = reg;
4851 retv = memory_address_addr_space_p (mode, ad, as);
4852 *part = tem;
4853
4854 return retv;
4855 }
4856
4857 /* Record all reloads needed for handling memory address AD
4858 which appears in *LOC in a memory reference to mode MODE
4859 which itself is found in location *MEMREFLOC.
4860 Note that we take shortcuts assuming that no multi-reg machine mode
4861 occurs as part of an address.
4862
4863 OPNUM and TYPE specify the purpose of this reload.
4864
4865 IND_LEVELS says how many levels of indirect addressing this machine
4866 supports.
4867
4868 INSN, if nonzero, is the insn in which we do the reload. It is used
4869 to determine if we may generate output reloads, and where to put USEs
4870 for pseudos that we have to replace with stack slots.
4871
4872 Value is one if this address is reloaded or replaced as a whole; it is
4873 zero if the top level of this address was not reloaded or replaced, and
4874 it is -1 if it may or may not have been reloaded or replaced.
4875
4876 Note that there is no verification that the address will be valid after
4877 this routine does its work. Instead, we rely on the fact that the address
4878 was valid when reload started. So we need only undo things that reload
4879 could have broken. These are wrong register types, pseudos not allocated
4880 to a hard register, and frame pointer elimination. */
4881
4882 static int
4883 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4884 rtx *loc, int opnum, enum reload_type type,
4885 int ind_levels, rtx_insn *insn)
4886 {
4887 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4888 : ADDR_SPACE_GENERIC;
4889 int regno;
4890 int removed_and = 0;
4891 int op_index;
4892 rtx tem;
4893
4894 /* If the address is a register, see if it is a legitimate address and
4895 reload if not. We first handle the cases where we need not reload
4896 or where we must reload in a non-standard way. */
4897
4898 if (REG_P (ad))
4899 {
4900 regno = REGNO (ad);
4901
4902 if (reg_equiv_constant (regno) != 0)
4903 {
4904 find_reloads_address_part (reg_equiv_constant (regno), loc,
4905 base_reg_class (mode, as, MEM, SCRATCH),
4906 GET_MODE (ad), opnum, type, ind_levels);
4907 return 1;
4908 }
4909
/* A pseudo with a memory equivalence: try to use (a possibly refreshed
   copy of) that memory location as the address instead of reloading.  */
4910 tem = reg_equiv_memory_loc (regno);
4911 if (tem != 0)
4912 {
4913 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4914 {
4915 tem = make_memloc (ad, regno);
4916 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4917 XEXP (tem, 0),
4918 MEM_ADDR_SPACE (tem)))
4919 {
4920 rtx orig = tem;
4921
4922 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4923 &XEXP (tem, 0), opnum,
4924 ADDR_TYPE (type), ind_levels, insn);
4925 if (!rtx_equal_p (tem, orig))
4926 push_reg_equiv_alt_mem (regno, tem);
4927 }
4928 /* We can avoid a reload if the register's equivalent memory
4929 expression is valid as an indirect memory address.
4930 But not all addresses are valid in a mem used as an indirect
4931 address: only reg or reg+constant. */
4932
4933 if (ind_levels > 0
4934 && strict_memory_address_addr_space_p (mode, tem, as)
4935 && (REG_P (XEXP (tem, 0))
4936 || (GET_CODE (XEXP (tem, 0)) == PLUS
4937 && REG_P (XEXP (XEXP (tem, 0), 0))
4938 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4939 {
4940 /* TEM is not the same as what we'll be replacing the
4941 pseudo with after reload, put a USE in front of INSN
4942 in the final reload pass. */
4943 if (replace_reloads
4944 && num_not_at_initial_offset
4945 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4946 {
4947 *loc = tem;
4948 /* We mark the USE with QImode so that we
4949 recognize it as one that can be safely
4950 deleted at the end of reload. */
4951 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4952 insn), QImode);
4953
4954 /* This doesn't really count as replacing the address
4955 as a whole, since it is still a memory access. */
4956 }
4957 return 0;
4958 }
4959 ad = tem;
4960 }
4961 }
4962
4963 /* The only remaining case where we can avoid a reload is if this is a
4964 hard register that is valid as a base register and which is not the
4965 subject of a CLOBBER in this insn. */
4966
4967 else if (regno < FIRST_PSEUDO_REGISTER
4968 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4969 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4970 return 0;
4971
4972 /* If we do not have one of the cases above, we must do the reload. */
4973 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4974 base_reg_class (mode, as, MEM, SCRATCH),
4975 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4976 return 1;
4977 }
4978
4979 if (strict_memory_address_addr_space_p (mode, ad, as))
4980 {
4981 /* The address appears valid, so reloads are not needed.
4982 But the address may contain an eliminable register.
4983 This can happen because a machine with indirect addressing
4984 may consider a pseudo register by itself a valid address even when
4985 it has failed to get a hard reg.
4986 So do a tree-walk to find and eliminate all such regs. */
4987
4988 /* But first quickly dispose of a common case. */
4989 if (GET_CODE (ad) == PLUS
4990 && CONST_INT_P (XEXP (ad, 1))
4991 && REG_P (XEXP (ad, 0))
4992 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4993 return 0;
4994
4995 subst_reg_equivs_changed = 0;
4996 *loc = subst_reg_equivs (ad, insn);
4997
4998 if (! subst_reg_equivs_changed)
4999 return 0;
5000
5001 /* Check result for validity after substitution. */
5002 if (strict_memory_address_addr_space_p (mode, ad, as))
5003 return 0;
5004 }
5005
/* Give the target a chance to legitimize the reloaded address itself;
   on success the macro jumps to the `win' label below and we report the
   ambiguous result -1.  */
5006 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5007 do
5008 {
5009 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5010 {
5011 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5012 ind_levels, win);
5013 }
5014 break;
5015 win:
5016 *memrefloc = copy_rtx (*memrefloc);
5017 XEXP (*memrefloc, 0) = ad;
5018 move_replacements (&ad, &XEXP (*memrefloc, 0));
5019 return -1;
5020 }
5021 while (0);
5022 #endif
5023
5024 /* The address is not valid. We have to figure out why. First see if
5025 we have an outer AND and remove it if so. Then analyze what's inside. */
5026
5027 if (GET_CODE (ad) == AND)
5028 {
5029 removed_and = 1;
5030 loc = &XEXP (ad, 0);
5031 ad = *loc;
5032 }
5033
5034 /* One possibility for why the address is invalid is that it is itself
5035 a MEM. This can happen when the frame pointer is being eliminated, a
5036 pseudo is not allocated to a hard register, and the offset between the
5037 frame and stack pointers is not its initial value. In that case the
5038 pseudo will have been replaced by a MEM referring to the
5039 stack pointer. */
5040 if (MEM_P (ad))
5041 {
5042 /* First ensure that the address in this MEM is valid. Then, unless
5043 indirect addresses are valid, reload the MEM into a register. */
5044 tem = ad;
5045 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5046 opnum, ADDR_TYPE (type),
5047 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5048
5049 /* If tem was changed, then we must create a new memory reference to
5050 hold it and store it back into memrefloc. */
5051 if (tem != ad && memrefloc)
5052 {
5053 *memrefloc = copy_rtx (*memrefloc);
5054 copy_replacements (tem, XEXP (*memrefloc, 0));
5055 loc = &XEXP (*memrefloc, 0);
5056 if (removed_and)
5057 loc = &XEXP (*loc, 0);
5058 }
5059
5060 /* Check similar cases as for indirect addresses as above except
5061 that we can allow pseudos and a MEM since they should have been
5062 taken care of above. */
5063
5064 if (ind_levels == 0
5065 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5066 || MEM_P (XEXP (tem, 0))
5067 || ! (REG_P (XEXP (tem, 0))
5068 || (GET_CODE (XEXP (tem, 0)) == PLUS
5069 && REG_P (XEXP (XEXP (tem, 0), 0))
5070 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5071 {
5072 /* Must use TEM here, not AD, since it is the one that will
5073 have any subexpressions reloaded, if needed. */
5074 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5075 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5076 VOIDmode, 0,
5077 0, opnum, type);
5078 return ! removed_and;
5079 }
5080 else
5081 return 0;
5082 }
5083
5084 /* If we have address of a stack slot but it's not valid because the
5085 displacement is too large, compute the sum in a register.
5086 Handle all base registers here, not just fp/ap/sp, because on some
5087 targets (namely SH) we can also get too large displacements from
5088 big-endian corrections. */
5089 else if (GET_CODE (ad) == PLUS
5090 && REG_P (XEXP (ad, 0))
5091 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5092 && CONST_INT_P (XEXP (ad, 1))
5093 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5094 CONST_INT)
5095 /* Similarly, if we were to reload the base register and the
5096 mem+offset address is still invalid, then we want to reload
5097 the whole address, not just the base register. */
5098 || ! maybe_memory_address_addr_space_p
5099 (mode, ad, as, &(XEXP (ad, 0)))))
5100
5101 {
5102 /* Unshare the MEM rtx so we can safely alter it. */
5103 if (memrefloc)
5104 {
5105 *memrefloc = copy_rtx (*memrefloc);
5106 loc = &XEXP (*memrefloc, 0);
5107 if (removed_and)
5108 loc = &XEXP (*loc, 0);
5109 }
5110
5111 if (double_reg_address_ok
5112 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5113 PLUS, CONST_INT))
5114 {
5115 /* Unshare the sum as well. */
5116 *loc = ad = copy_rtx (ad);
5117
5118 /* Reload the displacement into an index reg.
5119 We assume the frame pointer or arg pointer is a base reg. */
5120 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5121 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5122 type, ind_levels);
5123 return 0;
5124 }
5125 else
5126 {
5127 /* If the sum of two regs is not necessarily valid,
5128 reload the sum into a base reg.
5129 That will at least work. */
5130 find_reloads_address_part (ad, loc,
5131 base_reg_class (mode, as, MEM, SCRATCH),
5132 GET_MODE (ad), opnum, type, ind_levels);
5133 }
5134 return ! removed_and;
5135 }
5136
5137 /* If we have an indexed stack slot, there are three possible reasons why
5138 it might be invalid: The index might need to be reloaded, the address
5139 might have been made by frame pointer elimination and hence have a
5140 constant out of range, or both reasons might apply.
5141
5142 We can easily check for an index needing reload, but even if that is the
5143 case, we might also have an invalid constant. To avoid making the
5144 conservative assumption and requiring two reloads, we see if this address
5145 is valid when not interpreted strictly. If it is, the only problem is
5146 that the index needs a reload and find_reloads_address_1 will take care
5147 of it.
5148
5149 Handle all base registers here, not just fp/ap/sp, because on some
5150 targets (namely SPARC) we can also get invalid addresses from preventive
5151 subreg big-endian corrections made by find_reloads_toplev. We
5152 can also get expressions involving LO_SUM (rather than PLUS) from
5153 find_reloads_subreg_address.
5154
5155 If we decide to do something, it must be that `double_reg_address_ok'
5156 is true. We generate a reload of the base register + constant and
5157 rework the sum so that the reload register will be added to the index.
5158 This is safe because we know the address isn't shared.
5159
5160 We check for the base register as both the first and second operand of
5161 the innermost PLUS and/or LO_SUM. */
5162
5163 for (op_index = 0; op_index < 2; ++op_index)
5164 {
5165 rtx operand, addend;
5166 enum rtx_code inner_code;
5167
5168 if (GET_CODE (ad) != PLUS)
5169 continue;
5170
5171 inner_code = GET_CODE (XEXP (ad, 0));
5172 if (!(GET_CODE (ad) == PLUS
5173 && CONST_INT_P (XEXP (ad, 1))
5174 && (inner_code == PLUS || inner_code == LO_SUM)))
5175 continue;
5176
5177 operand = XEXP (XEXP (ad, 0), op_index);
5178 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5179 continue;
5180
5181 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5182
5183 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5184 GET_CODE (addend))
5185 || operand == frame_pointer_rtx
5186 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5187 || operand == hard_frame_pointer_rtx
5188 #endif
5189 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5190 || operand == arg_pointer_rtx
5191 #endif
5192 || operand == stack_pointer_rtx)
5193 && ! maybe_memory_address_addr_space_p
5194 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5195 {
5196 rtx offset_reg;
5197 enum reg_class cls;
5198
5199 offset_reg = plus_constant (GET_MODE (ad), operand,
5200 INTVAL (XEXP (ad, 1)));
5201
5202 /* Form the adjusted address. */
5203 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5204 ad = gen_rtx_PLUS (GET_MODE (ad),
5205 op_index == 0 ? offset_reg : addend,
5206 op_index == 0 ? addend : offset_reg);
5207 else
5208 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5209 op_index == 0 ? offset_reg : addend,
5210 op_index == 0 ? addend : offset_reg);
5211 *loc = ad;
5212
5213 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5214 find_reloads_address_part (XEXP (ad, op_index),
5215 &XEXP (ad, op_index), cls,
5216 GET_MODE (ad), opnum, type, ind_levels);
5217 find_reloads_address_1 (mode, as,
5218 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5219 GET_CODE (XEXP (ad, op_index)),
5220 &XEXP (ad, 1 - op_index), opnum,
5221 type, 0, insn);
5222
5223 return 0;
5224 }
5225 }
5226
5227 /* See if address becomes valid when an eliminable register
5228 in a sum is replaced. */
5229
5230 tem = ad;
5231 if (GET_CODE (ad) == PLUS)
5232 tem = subst_indexed_address (ad);
5233 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5234 {
5235 /* Ok, we win that way. Replace any additional eliminable
5236 registers. */
5237
5238 subst_reg_equivs_changed = 0;
5239 tem = subst_reg_equivs (tem, insn);
5240
5241 /* Make sure that didn't make the address invalid again. */
5242
5243 if (! subst_reg_equivs_changed
5244 || strict_memory_address_addr_space_p (mode, tem, as))
5245 {
5246 *loc = tem;
5247 return 0;
5248 }
5249 }
5250
5251 /* If constants aren't valid addresses, reload the constant address
5252 into a register. */
5253 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5254 {
5255 enum machine_mode address_mode = GET_MODE (ad);
5256 if (address_mode == VOIDmode)
5257 address_mode = targetm.addr_space.address_mode (as);
5258
5259 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5260 Unshare it so we can safely alter it. */
5261 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5262 && CONSTANT_POOL_ADDRESS_P (ad))
5263 {
5264 *memrefloc = copy_rtx (*memrefloc);
5265 loc = &XEXP (*memrefloc, 0);
5266 if (removed_and)
5267 loc = &XEXP (*loc, 0);
5268 }
5269
5270 find_reloads_address_part (ad, loc,
5271 base_reg_class (mode, as, MEM, SCRATCH),
5272 address_mode, opnum, type, ind_levels);
5273 return ! removed_and;
5274 }
5275
/* Last resort: let the general address walker reload whatever pieces of
   the address still need it.  */
5276 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5277 opnum, type, ind_levels, insn);
5278 }
5279 \f
5280 /* Find all pseudo regs appearing in AD
5281 that are eliminable in favor of equivalent values
5282 and do not have hard regs; replace them by their equivalents.
5283 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5284 front of it for pseudos that we have to replace with stack slots. */
5285
5286 static rtx
5287 subst_reg_equivs (rtx ad, rtx_insn *insn)
5288 {
5289 RTX_CODE code = GET_CODE (ad);
5290 int i;
5291 const char *fmt;
5292
5293 switch (code)
5294 {
5295 case HIGH:
5296 case CONST:
5297 CASE_CONST_ANY:
5298 case SYMBOL_REF:
5299 case LABEL_REF:
5300 case PC:
5301 case CC0:
5302 return ad;
5303
5304 case REG:
5305 {
5306 int regno = REGNO (ad);
5307
5308 if (reg_equiv_constant (regno) != 0)
5309 {
5310 subst_reg_equivs_changed = 1;
5311 return reg_equiv_constant (regno);
5312 }
5313 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5314 {
5315 rtx mem = make_memloc (ad, regno);
5316 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5317 {
5318 subst_reg_equivs_changed = 1;
5319 /* We mark the USE with QImode so that we recognize it
5320 as one that can be safely deleted at the end of
5321 reload. */
5322 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5323 QImode);
5324 return mem;
5325 }
5326 }
5327 }
5328 return ad;
5329
5330 case PLUS:
5331 /* Quickly dispose of a common case. */
5332 if (XEXP (ad, 0) == frame_pointer_rtx
5333 && CONST_INT_P (XEXP (ad, 1)))
5334 return ad;
5335 break;
5336
5337 default:
5338 break;
5339 }
5340
5341 fmt = GET_RTX_FORMAT (code);
5342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5343 if (fmt[i] == 'e')
5344 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5345 return ad;
5346 }
5347 \f
5348 /* Compute the sum of X and Y, making canonicalizations assumed in an
5349 address, namely: sum constant integers, surround the sum of two
5350 constants with a CONST, put the constant as the second operand, and
5351 group the constant on the outermost sum.
5352
5353 This routine assumes both inputs are already in canonical form. */
5354
5355 rtx
5356 form_sum (enum machine_mode mode, rtx x, rtx y)
5357 {
5358 rtx tem;
5359
5360 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5361 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5362
5363 if (CONST_INT_P (x))
5364 return plus_constant (mode, y, INTVAL (x));
5365 else if (CONST_INT_P (y))
5366 return plus_constant (mode, x, INTVAL (y));
5367 else if (CONSTANT_P (x))
5368 tem = x, x = y, y = tem;
5369
5370 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5371 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5372
5373 /* Note that if the operands of Y are specified in the opposite
5374 order in the recursive calls below, infinite recursion will occur. */
5375 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5376 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5377
5378 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5379 constant will have been placed second. */
5380 if (CONSTANT_P (x) && CONSTANT_P (y))
5381 {
5382 if (GET_CODE (x) == CONST)
5383 x = XEXP (x, 0);
5384 if (GET_CODE (y) == CONST)
5385 y = XEXP (y, 0);
5386
5387 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5388 }
5389
5390 return gen_rtx_PLUS (mode, x, y);
5391 }
5392 \f
5393 /* If ADDR is a sum containing a pseudo register that should be
5394 replaced with a constant (from reg_equiv_constant),
5395 return the result of doing so, and also apply the associative
5396 law so that the result is more likely to be a valid address.
5397 (But it is not guaranteed to be one.)
5398
5399 Note that at most one register is replaced, even if more are
5400 replaceable. Also, we try to put the result into a canonical form
5401 so it is more likely to be a valid address.
5402
5403 In all other cases, return ADDR. */
5404
5405 static rtx
5406 subst_indexed_address (rtx addr)
5407 {
5408 rtx op0 = 0, op1 = 0, op2 = 0;
5409 rtx tem;
5410 int regno;
5411
5412 if (GET_CODE (addr) == PLUS)
5413 {
5414 /* Try to find a register to replace. */
5415 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5416 if (REG_P (op0)
5417 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5418 && reg_renumber[regno] < 0
5419 && reg_equiv_constant (regno) != 0)
5420 op0 = reg_equiv_constant (regno);
5421 else if (REG_P (op1)
5422 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5423 && reg_renumber[regno] < 0
5424 && reg_equiv_constant (regno) != 0)
5425 op1 = reg_equiv_constant (regno);
5426 else if (GET_CODE (op0) == PLUS
5427 && (tem = subst_indexed_address (op0)) != op0)
5428 op0 = tem;
5429 else if (GET_CODE (op1) == PLUS
5430 && (tem = subst_indexed_address (op1)) != op1)
5431 op1 = tem;
5432 else
5433 return addr;
5434
5435 /* Pick out up to three things to add. */
5436 if (GET_CODE (op1) == PLUS)
5437 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5438 else if (GET_CODE (op0) == PLUS)
5439 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5440
5441 /* Compute the sum. */
5442 if (op2 != 0)
5443 op1 = form_sum (GET_MODE (addr), op1, op2);
5444 if (op1 != 0)
5445 op0 = form_sum (GET_MODE (addr), op0, op1);
5446
5447 return op0;
5448 }
5449 return addr;
5450 }
5451 \f
5452 /* Update the REG_INC notes for an insn. It updates all REG_INC
5453 notes for the instruction which refer to REGNO the to refer
5454 to the reload number.
5455
5456 INSN is the insn for which any REG_INC notes need updating.
5457
5458 REGNO is the register number which has been reloaded.
5459
5460 RELOADNUM is the reload number. */
5461
5462 static void
5463 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5464 int reloadnum ATTRIBUTE_UNUSED)
5465 {
5466 #ifdef AUTO_INC_DEC
5467 rtx link;
5468
5469 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5470 if (REG_NOTE_KIND (link) == REG_INC
5471 && (int) REGNO (XEXP (link, 0)) == regno)
5472 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5473 #endif
5474 }
5475 \f
/* Record the pseudo registers we must reload into hard registers in a
   subexpression of a would-be memory address, X referring to a value
   in mode MODE.  (This function is not called if the address we find
   is strictly valid.)

   CONTEXT = 1 means we are considering regs as index regs,
   = 0 means we are considering them as base regs.
   OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
   or an autoinc code.
   If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
   is the code of the index part of the address.  Otherwise, pass SCRATCH
   for this argument.
   OPNUM and TYPE specify the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads.

   We return nonzero if X, as a whole, is reloaded or replaced.  */

/* Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.
   Also, this is not fully machine-customizable; it works for machines
   such as VAXen and 68000's and 32000's, but other possible machines
   could have addressing modes that this does not handle right.
   If you add push_reload calls here, you need to make sure gen_reload
   handles those cases gracefully.  */

static int
find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
                        rtx x, int context,
                        enum rtx_code outer_code, enum rtx_code index_code,
                        rtx *loc, int opnum, enum reload_type type,
                        int ind_levels, rtx_insn *insn)
{
/* Nonzero if hard register REGNO is usable in the current CONTEXT:
   as a base register when CONTEXT == 0, as an index register when
   CONTEXT == 1.  */
#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
  ((CONTEXT) == 0							\
   ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
   : REGNO_OK_FOR_INDEX_P (REGNO))

  enum reg_class context_reg_class;
  RTX_CODE code = GET_CODE (x);
  bool reloaded_inner_of_autoinc = false;

  /* The register class any reload made here must use, determined by
     whether X is being used as an index or as a base.  */
  if (context == 1)
    context_reg_class = INDEX_REG_CLASS;
  else
    context_reg_class = base_reg_class (mode, as, outer_code, index_code);

  switch (code)
    {
    case PLUS:
      {
        rtx orig_op0 = XEXP (x, 0);
        rtx orig_op1 = XEXP (x, 1);
        RTX_CODE code0 = GET_CODE (orig_op0);
        RTX_CODE code1 = GET_CODE (orig_op1);
        rtx op0 = orig_op0;
        rtx op1 = orig_op1;

        /* Look through a SUBREG of a hard reg, treating it as the
           underlying hard register (adjusted for the subreg offset).  */
        if (GET_CODE (op0) == SUBREG)
          {
            op0 = SUBREG_REG (op0);
            code0 = GET_CODE (op0);
            if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
              op0 = gen_rtx_REG (word_mode,
                                 (REGNO (op0) +
                                  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
                                                       GET_MODE (SUBREG_REG (orig_op0)),
                                                       SUBREG_BYTE (orig_op0),
                                                       GET_MODE (orig_op0))));
          }

        if (GET_CODE (op1) == SUBREG)
          {
            op1 = SUBREG_REG (op1);
            code1 = GET_CODE (op1);
            if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
              /* ??? Why is this given op1's mode and above for
                 ??? op0 SUBREGs we use word_mode?  */
              op1 = gen_rtx_REG (GET_MODE (op1),
                                 (REGNO (op1) +
                                  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
                                                       GET_MODE (SUBREG_REG (orig_op1)),
                                                       SUBREG_BYTE (orig_op1),
                                                       GET_MODE (orig_op1))));
          }
        /* Plus in the index register may be created only as a result of
           register rematerialization for expression like &localvar*4.  Reload it.
           It may be possible to combine the displacement on the outer level,
           but it is probably not worthwhile to do so.  */
        if (context == 1)
          {
            find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
                                  opnum, ADDR_TYPE (type), ind_levels, insn);
            push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
                         context_reg_class,
                         GET_MODE (x), VOIDmode, 0, 0, opnum, type);
            return 1;
          }

        /* If one operand's code forces it into the index role (MULT,
           an extension, a truncation) or the other operand is a MEM,
           the base/index assignment is determined; recurse with the
           roles fixed accordingly.  */
        if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
            || code0 == ZERO_EXTEND || code1 == MEM)
          {
            find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
                                    &XEXP (x, 0), opnum, type, ind_levels,
                                    insn);
            find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
                                    &XEXP (x, 1), opnum, type, ind_levels,
                                    insn);
          }

        else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
                 || code1 == ZERO_EXTEND || code0 == MEM)
          {
            find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
                                    &XEXP (x, 0), opnum, type, ind_levels,
                                    insn);
            find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
                                    &XEXP (x, 1), opnum, type, ind_levels,
                                    insn);
          }

        /* A constant operand needs no reload itself; only process the
           non-constant operand, as a base register.  */
        else if (code0 == CONST_INT || code0 == CONST
                 || code0 == SYMBOL_REF || code0 == LABEL_REF)
          find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
                                  &XEXP (x, 1), opnum, type, ind_levels,
                                  insn);

        else if (code1 == CONST_INT || code1 == CONST
                 || code1 == SYMBOL_REF || code1 == LABEL_REF)
          find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
                                  &XEXP (x, 0), opnum, type, ind_levels,
                                  insn);

        /* Two registers: decide which plays base and which plays index.
           If either assignment already satisfies the target, no reload
           is needed; otherwise reload whichever operand prevents a
           valid assignment.  */
        else if (code0 == REG && code1 == REG)
          {
            if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
                && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
              return 0;
            else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
                     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
              return 0;
            else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
              find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
                                      &XEXP (x, 1), opnum, type, ind_levels,
                                      insn);
            else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
              find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
                                      &XEXP (x, 0), opnum, type, ind_levels,
                                      insn);
            else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
              find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
                                      &XEXP (x, 0), opnum, type, ind_levels,
                                      insn);
            else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
              find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
                                      &XEXP (x, 1), opnum, type, ind_levels,
                                      insn);
            else
              {
                /* Neither register fits either role: reload op0 as the
                   base and op1 as the index.  */
                find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
                                        &XEXP (x, 0), opnum, type, ind_levels,
                                        insn);
                find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
                                        &XEXP (x, 1), opnum, type, ind_levels,
                                        insn);
              }
          }

        /* One REG and one arbitrary expression: take the REG as the
           index and process the other operand as a base.  */
        else if (code0 == REG)
          {
            find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
                                    &XEXP (x, 0), opnum, type, ind_levels,
                                    insn);
            find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
                                    &XEXP (x, 1), opnum, type, ind_levels,
                                    insn);
          }

        else if (code1 == REG)
          {
            find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
                                    &XEXP (x, 1), opnum, type, ind_levels,
                                    insn);
            find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
                                    &XEXP (x, 0), opnum, type, ind_levels,
                                    insn);
          }
      }

      return 0;

    case POST_MODIFY:
    case PRE_MODIFY:
      {
        rtx op0 = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);
        enum rtx_code index_code;
        int regno;
        int reloadnum;

        if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
          return 0;

        /* Currently, we only support {PRE,POST}_MODIFY constructs
           where a base register is {inc,dec}remented by the contents
           of another register or by a constant value.  Thus, these
           operands must match.  */
        gcc_assert (op0 == XEXP (op1, 0));

        /* Require index register (or constant).  Let's just handle the
           register case in the meantime... If the target allows
           auto-modify by a constant then we could try replacing a pseudo
           register with its equivalent constant where applicable.

           We also handle the case where the register was eliminated
           resulting in a PLUS subexpression.

           If we later decide to reload the whole PRE_MODIFY or
           POST_MODIFY, inc_for_reload might clobber the reload register
           before reading the index.  The index register might therefore
           need to live longer than a TYPE reload normally would, so be
           conservative and class it as RELOAD_OTHER.  */
        if ((REG_P (XEXP (op1, 1))
             && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
            || GET_CODE (XEXP (op1, 1)) == PLUS)
          find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
                                  &XEXP (op1, 1), opnum, RELOAD_OTHER,
                                  ind_levels, insn);

        gcc_assert (REG_P (XEXP (op1, 0)));

        regno = REGNO (XEXP (op1, 0));
        index_code = GET_CODE (XEXP (op1, 1));

        /* A register that is incremented cannot be constant!  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER
                    || reg_equiv_constant (regno) == 0);

        /* Handle a register that is equivalent to a memory location
            which cannot be addressed directly.  */
        if (reg_equiv_memory_loc (regno) != 0
            && (reg_equiv_address (regno) != 0
                || num_not_at_initial_offset))
          {
            rtx tem = make_memloc (XEXP (x, 0), regno);

            if (reg_equiv_address (regno)
                || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
              {
                rtx orig = tem;

                /* First reload the memory location's address.
                   We can't use ADDR_TYPE (type) here, because we need to
                   write back the value after reading it, hence we actually
                   need two registers.  */
                find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
                                      &XEXP (tem, 0), opnum,
                                      RELOAD_OTHER,
                                      ind_levels, insn);

                if (!rtx_equal_p (tem, orig))
                  push_reg_equiv_alt_mem (regno, tem);

                /* Then reload the memory location into a base
                   register.  */
                reloadnum = push_reload (tem, tem, &XEXP (x, 0),
                                         &XEXP (op1, 0),
                                         base_reg_class (mode, as,
                                                         code, index_code),
                                         GET_MODE (x), GET_MODE (x), 0,
                                         0, opnum, RELOAD_OTHER);

                update_auto_inc_notes (this_insn, regno, reloadnum);
                return 0;
              }
          }

        if (reg_renumber[regno] >= 0)
          regno = reg_renumber[regno];

        /* We require a base register here...  */
        if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
          {
            reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
                                     &XEXP (op1, 0), &XEXP (x, 0),
                                     base_reg_class (mode, as,
                                                     code, index_code),
                                     GET_MODE (x), GET_MODE (x), 0, 0,
                                     opnum, RELOAD_OTHER);

            update_auto_inc_notes (this_insn, regno, reloadnum);
            return 0;
          }
      }
      return 0;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      if (REG_P (XEXP (x, 0)))
        {
          int regno = REGNO (XEXP (x, 0));
          int value = 0;
          rtx x_orig = x;

          /* A register that is incremented cannot be constant!  */
          gcc_assert (regno < FIRST_PSEUDO_REGISTER
                      || reg_equiv_constant (regno) == 0);

          /* Handle a register that is equivalent to a memory location
             which cannot be addressed directly.  */
          if (reg_equiv_memory_loc (regno) != 0
              && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
            {
              rtx tem = make_memloc (XEXP (x, 0), regno);
              if (reg_equiv_address (regno)
                  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
                {
                  rtx orig = tem;

                  /* First reload the memory location's address.
                     We can't use ADDR_TYPE (type) here, because we need to
                     write back the value after reading it, hence we actually
                     need two registers.  */
                  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
                                        &XEXP (tem, 0), opnum, type,
                                        ind_levels, insn);
                  reloaded_inner_of_autoinc = true;
                  if (!rtx_equal_p (tem, orig))
                    push_reg_equiv_alt_mem (regno, tem);
                  /* Put this inside a new increment-expression.  */
                  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
                  /* Proceed to reload that, as if it contained a register.  */
                }
            }

          /* If we have a hard register that is ok in this incdec context,
             don't make a reload.  If the register isn't nice enough for
             autoincdec, we can reload it.  But, if an autoincrement of a
             register that we here verified as playing nice, still outside
             isn't "valid", it must be that no autoincrement is "valid".
             If that is true and something made an autoincrement anyway,
             this must be a special context where one is allowed.
             (For example, a "push" instruction.)
             We can't improve this address, so leave it alone.  */

          /* Otherwise, reload the autoincrement into a suitable hard reg
             and record how much to increment by.  */

          if (reg_renumber[regno] >= 0)
            regno = reg_renumber[regno];
          if (regno >= FIRST_PSEUDO_REGISTER
              || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
                                      index_code))
            {
              int reloadnum;

              /* If we can output the register afterwards, do so, this
                 saves the extra update.
                 We can do so if we have an INSN - i.e. no JUMP_INSN nor
                 CALL_INSN - and it does not set CC0.
                 But don't do this if we cannot directly address the
                 memory location, since this will make it harder to
                 reuse address reloads, and increases register pressure.
                 Also don't do this if we can probably update x directly.  */
              rtx equiv = (MEM_P (XEXP (x, 0))
                           ? XEXP (x, 0)
                           : reg_equiv_mem (regno));
              enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
              if (insn && NONJUMP_INSN_P (insn) && equiv
                  && memory_operand (equiv, GET_MODE (equiv))
#ifdef HAVE_cc0
                  && ! sets_cc0_p (PATTERN (insn))
#endif
                  && ! (icode != CODE_FOR_nothing
                        && insn_operand_matches (icode, 0, equiv)
                        && insn_operand_matches (icode, 1, equiv))
                  /* Using RELOAD_OTHER means we emit this and the reload we
                     made earlier in the wrong order.  */
                  && !reloaded_inner_of_autoinc)
                {
                  /* We use the original pseudo for loc, so that
                     emit_reload_insns() knows which pseudo this
                     reload refers to and updates the pseudo rtx, not
                     its equivalent memory location, as well as the
                     corresponding entry in reg_last_reload_reg.  */
                  loc = &XEXP (x_orig, 0);
                  x = XEXP (x, 0);
                  reloadnum
                    = push_reload (x, x, loc, loc,
                                   context_reg_class,
                                   GET_MODE (x), GET_MODE (x), 0, 0,
                                   opnum, RELOAD_OTHER);
                }
              else
                {
                  reloadnum
                    = push_reload (x, x, loc, (rtx*) 0,
                                   context_reg_class,
                                   GET_MODE (x), GET_MODE (x), 0, 0,
                                   opnum, type);
                  rld[reloadnum].inc
                    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));

                  value = 1;
                }

              update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
                                     reloadnum);
            }
          return value;
        }
      return 0;

    case TRUNCATE:
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* Look for parts to reload in the inner expression and reload them
         too, in addition to this operation.  Reloading all inner parts in
         addition to this one shouldn't be necessary, but at this point,
         we don't know if we can possibly omit any part that *can* be
         reloaded.  Targets that are better off reloading just either part
         (or perhaps even a different part of an outer expression), should
         define LEGITIMIZE_RELOAD_ADDRESS.  */
      find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
                              context, code, SCRATCH, &XEXP (x, 0), opnum,
                              type, ind_levels, insn);
      push_reload (x, NULL_RTX, loc, (rtx*) 0,
                   context_reg_class,
                   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
      return 1;

    case MEM:
      /* This is probably the result of a substitution, by eliminate_regs, of
         an equivalent address for a pseudo that was not allocated to a hard
         register.  Verify that the specified address is valid and reload it
         into a register.

         Since we know we are going to reload this item, don't decrement for
         the indirection level.

         Note that this is actually conservative:  it would be slightly more
         efficient to use the value of SPILL_INDIRECT_LEVELS from
         reload1.c here.  */

      find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
                            opnum, ADDR_TYPE (type), ind_levels, insn);
      push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
                   context_reg_class,
                   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
      return 1;

    case REG:
      {
        int regno = REGNO (x);

        /* A pseudo equivalent to a constant: reload the constant.  */
        if (reg_equiv_constant (regno) != 0)
          {
            find_reloads_address_part (reg_equiv_constant (regno), loc,
                                       context_reg_class,
                                       GET_MODE (x), opnum, type, ind_levels);
            return 1;
          }

#if 0 /* This might screw code in reload1.c to delete prior output-reload
         that feeds this insn.  */
        if (reg_equiv_mem (regno) != 0)
          {
            push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
                         context_reg_class,
                         GET_MODE (x), VOIDmode, 0, 0, opnum, type);
            return 1;
          }
#endif

        /* A pseudo equivalent to a (possibly relocated) memory location:
           substitute the memory location and reload its address.  */
        if (reg_equiv_memory_loc (regno)
            && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
          {
            rtx tem = make_memloc (x, regno);
            if (reg_equiv_address (regno) != 0
                || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
              {
                x = tem;
                find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
                                      &XEXP (x, 0), opnum, ADDR_TYPE (type),
                                      ind_levels, insn);
                if (!rtx_equal_p (x, tem))
                  push_reg_equiv_alt_mem (regno, x);
              }
          }

        if (reg_renumber[regno] >= 0)
          regno = reg_renumber[regno];

        /* Reload the register if it did not get a suitable hard reg for
           this base/index context.  */
        if (regno >= FIRST_PSEUDO_REGISTER
            || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
                                    index_code))
          {
            push_reload (x, NULL_RTX, loc, (rtx*) 0,
                         context_reg_class,
                         GET_MODE (x), VOIDmode, 0, 0, opnum, type);
            return 1;
          }

        /* If a register appearing in an address is the subject of a CLOBBER
           in this insn, reload it into some other register to be safe.
           The CLOBBER is supposed to make the register unavailable
           from before this insn to after it.  */
        if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
          {
            push_reload (x, NULL_RTX, loc, (rtx*) 0,
                         context_reg_class,
                         GET_MODE (x), VOIDmode, 0, 0, opnum, type);
            return 1;
          }
      }
      return 0;

    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          /* If this is a SUBREG of a hard register and the resulting register
             is of the wrong class, reload the whole SUBREG.  This avoids
             needless copies if SUBREG_REG is multi-word.  */
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
            {
              int regno ATTRIBUTE_UNUSED = subreg_regno (x);

              if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
                                       index_code))
                {
                  push_reload (x, NULL_RTX, loc, (rtx*) 0,
                               context_reg_class,
                               GET_MODE (x), VOIDmode, 0, 0, opnum, type);
                  return 1;
                }
            }
          /* If this is a SUBREG of a pseudo-register, and the pseudo-register
             is larger than the class size, then reload the whole SUBREG.  */
          else
            {
              enum reg_class rclass = context_reg_class;
              if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
                  > reg_class_size[(int) rclass])
                {
                  /* If the inner register will be replaced by a memory
                     reference, we can do this only if we can replace the
                     whole subreg by a (narrower) memory reference.  If
                     this is not possible, fall through and reload just
                     the inner register (including address reloads).  */
                  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
                    {
                      rtx tem = find_reloads_subreg_address (x, opnum,
                                                             ADDR_TYPE (type),
                                                             ind_levels, insn,
                                                             NULL);
                      if (tem)
                        {
                          push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
                                       GET_MODE (tem), VOIDmode, 0, 0,
                                       opnum, type);
                          return 1;
                        }
                    }
                  else
                    {
                      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
                                   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
                      return 1;
                    }
                }
            }
        }
      break;

    default:
      break;
    }

  /* For any code not handled above, recurse into all rtx operands.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
             we get here.  */
          find_reloads_address_1 (mode, as, XEXP (x, i), context,
                                  code, SCRATCH, &XEXP (x, i),
                                  opnum, type, ind_levels, insn);
      }
  }

#undef REG_OK_FOR_CONTEXT
  return 0;
}
6078 \f
/* X, which is found at *LOC, is a part of an address that needs to be
   reloaded into a register of class RCLASS.  If X is a constant, or if
   X is a PLUS that contains a constant, check that the constant is a
   legitimate operand and that we are supposed to be able to load
   it into the register.

   If not, force the constant into memory and reload the MEM instead.

   MODE is the mode to use, in case X is an integer constant.

   OPNUM and TYPE describe the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.  */

static void
find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
                           enum machine_mode mode, int opnum,
                           enum reload_type type, int ind_levels)
{
  /* A constant that can't be legitimately loaded into RCLASS: put it
     in the constant pool and reload the resulting MEM's address.  */
  if (CONSTANT_P (x)
      && (!targetm.legitimate_constant_p (mode, x)
          || targetm.preferred_reload_class (x, rclass) == NO_REGS))
    {
      x = force_const_mem (mode, x);
      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
                            opnum, type, ind_levels, 0);
    }

  /* Likewise when only the constant term of a PLUS is unloadable:
     force just that term into memory and rebuild the PLUS around the
     new MEM.  */
  else if (GET_CODE (x) == PLUS
           && CONSTANT_P (XEXP (x, 1))
           && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
               || targetm.preferred_reload_class (XEXP (x, 1), rclass)
               == NO_REGS))
    {
      rtx tem;

      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
                            opnum, type, ind_levels, 0);
    }

  /* Finally reload X (possibly rewritten above) into RCLASS.  */
  push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
               mode, VOIDmode, 0, 0, opnum, type);
}
6125 \f
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded, and the pseudo is equivalent to a memory location.

   Attempt to replace the whole subreg by a (possibly narrower or wider)
   memory reference.  If this is possible, return this new memory
   reference, and push all required address reloads.  Otherwise,
   return NULL.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.

   If ADDRESS_RELOADED is nonnull, *ADDRESS_RELOADED is set to nonzero
   when the address of the resulting MEM had to be reloaded.  */

static rtx
find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
                             int ind_levels, rtx_insn *insn,
                             int *address_reloaded)
{
  enum machine_mode outer_mode = GET_MODE (x);
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  int regno = REGNO (SUBREG_REG (x));
  int reloaded = 0;
  rtx tem, orig;
  int offset;

  gcc_assert (reg_equiv_memory_loc (regno) != 0);

  /* We cannot replace the subreg with a modified memory reference if:

     - we have a paradoxical subreg that implicitly acts as a zero or
       sign extension operation due to LOAD_EXTEND_OP;

     - we have a subreg that is implicitly supposed to act on the full
       register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);

     - the address of the equivalent memory location is mode-dependent;  or

     - we have a paradoxical subreg and the resulting memory is not
       sufficiently aligned to allow access in the wider mode.

    In addition, we choose not to perform the replacement for *any*
    paradoxical subreg, even if it were possible in principle.  This
    is to avoid generating wider memory references than necessary.

    This corresponds to how previous versions of reload used to handle
    paradoxical subregs where no address reload was required.  */

  if (paradoxical_subreg_p (x))
    return NULL;

#ifdef WORD_REGISTER_OPERATIONS
  /* A narrowing subreg that stays within the same number of words acts
     on the full register on WORD_REGISTER_OPERATIONS targets; do not
     replace it with a narrower memory reference.  */
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
      && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
          == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
    return NULL;
#endif

  /* Since we don't attempt to handle paradoxical subregs, we can just
     call into simplify_subreg, which will handle all remaining checks
     for us.  */
  orig = make_memloc (SUBREG_REG (x), regno);
  offset = SUBREG_BYTE (x);
  tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
  if (!tem || !MEM_P (tem))
    return NULL;

  /* Now push all required address reloads, if any.  */
  reloaded = find_reloads_address (GET_MODE (tem), &tem,
                                   XEXP (tem, 0), &XEXP (tem, 0),
                                   opnum, type, ind_levels, insn);
  /* ??? Do we need to handle nonzero offsets somehow?  */
  if (!offset && !rtx_equal_p (tem, orig))
    push_reg_equiv_alt_mem (regno, tem);

  /* For some processors an address may be valid in the original mode but
     not in a smaller mode.  For example, ARM accepts a scaled index register
     in SImode but not in HImode.  Note that this is only a problem if the
     address in reg_equiv_mem is already invalid in the new mode;  other
     cases would be fixed by find_reloads_address as usual.

     ??? We attempt to handle such cases here by doing an additional reload
     of the full address after the usual processing by find_reloads_address.
     Note that this may not work in the general case, but it seems to cover
     the cases where this situation currently occurs.  A more general fix
     might be to reload the *value* instead of the address, but this would
     not be expected by the callers of this routine as-is.

     If find_reloads_address already completely replaced the address, there
     is nothing further to do.  */
  if (reloaded == 0
      && reg_equiv_mem (regno) != 0
      && !strict_memory_address_addr_space_p
                (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
                 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
    {
      push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
                   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
                                   MEM, SCRATCH),
                   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
      reloaded = 1;
    }

  /* If this is not a toplevel operand, find_reloads doesn't see this
     substitution.  We have to emit a USE of the pseudo so that
     delete_output_reload can see it.  */
  if (replace_reloads && recog_data.operand[opnum] != x)
    /* We mark the USE with QImode so that we recognize it as one that
       can be safely deleted at the end of reload.  */
    PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
              QImode);

  if (address_reloaded)
    *address_reloaded = reloaded;

  return tem;
}
6246 \f
/* Substitute into the current INSN the registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.

   Return the rtx that X translates into; usually X, but modified.  */

void
subst_reloads (rtx_insn *insn)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;
      if (reloadreg)
	{
#ifdef DEBUG_RELOAD
	  /* This checking takes a very long time on some platforms
	     causing the gcc.c-torture/compile/limits-fnargs.c test
	     to time out during testing.  See PR 31850.

	     Internal consistency test.  Check that we don't modify
	     anything in the equivalence arrays.  Whenever something from
	     those arrays needs to be reloaded, it must be unshared before
	     being substituted into; the equivalence must not be modified.
	     Otherwise, if the equivalence is used after that, it will
	     have been modified, and the thing substituted (probably a
	     register) is likely overwritten and not a usable equivalence.  */
	  int check_regno;

	  for (check_regno = 0; check_regno < max_regno; check_regno++)
	    {
#define CHECK_MODF(ARRAY)						\
	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
			  || !loc_mentioned_in_p (r->where,		\
						  (*reg_equivs)[check_regno].ARRAY))

	      CHECK_MODF (constant);
	      CHECK_MODF (memory_loc);
	      CHECK_MODF (address);
	      CHECK_MODF (mem);
#undef CHECK_MODF
	    }
#endif /* DEBUG_RELOAD */

	  /* If we're replacing a LABEL_REF with a register, there must
	     already be an indication (to e.g. flow) which label this
	     register refers to.  */
	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
		      || !JUMP_P (insn)
		      || find_reg_note (insn,
					REG_LABEL_OPERAND,
					XEXP (*r->where, 0))
		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));

	  /* Encapsulate RELOADREG so its machine mode matches what
	     used to be there.  Note that gen_lowpart_common will
	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
	     will always be a REG here.  */
	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  /* Perform the actual substitution.  */
	  *r->where = reloadreg;
	}
      /* If reload got no reg and isn't optional, something's wrong.  */
      else
	gcc_assert (rld[r->what].optional);
    }
}
6318 \f
/* Make a copy of any replacements being done into X and move those
   copies to locations in Y, a copy of X.  */

void
copy_replacements (rtx x, rtx y)
{
  /* Pass the current list length: entries appended during the recursion
     already refer to Y and must not be scanned again.  */
  copy_replacements_1 (&x, &y, n_replacements);
}
6327
/* Subroutine of copy_replacements.  Walk *PX and *PY (structurally
   identical rtx trees) in parallel; for every location inside *PX that
   appears among the first ORIG_REPLACEMENTS recorded replacements,
   append an identical replacement targeting the corresponding location
   inside *PY.  */
static void
copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
{
  int i, j;
  rtx x, y;
  struct replacement *r;
  enum rtx_code code;
  const char *fmt;

  /* Only the entries that existed on entry to copy_replacements are
     candidates; entries appended below refer to Y and need no copy.  */
  for (j = 0; j < orig_replacements; j++)
    if (replacements[j].where == px)
      {
	r = &replacements[n_replacements++];
	r->where = py;
	r->what = replacements[j].what;
	r->mode = replacements[j].mode;
      }

  x = *px;
  y = *py;
  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  /* Recurse into every rtx operand and rtx-vector element, keeping the
     two trees in lockstep.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i); --j >= 0; )
	  copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
			       orig_replacements);
    }
}
6361
6362 /* Change any replacements being done to *X to be done to *Y. */
6363
6364 void
6365 move_replacements (rtx *x, rtx *y)
6366 {
6367 int i;
6368
6369 for (i = 0; i < n_replacements; i++)
6370 if (replacements[i].where == x)
6371 replacements[i].where = y;
6372 }
6373 \f
/* If LOC was scheduled to be replaced by something, return the replacement.
   Otherwise, return *LOC.  */

rtx
find_replacement (rtx *loc)
{
  struct replacement *r;

  for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
    {
      rtx reloadreg = rld[r->what].reg_rtx;

      /* Direct hit: the replacement was recorded against LOC itself.  */
      if (reloadreg && r->where == loc)
	{
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  return reloadreg;
	}
      /* The replacement was recorded against the inner expression of a
	 SUBREG sitting at LOC; rebuild the SUBREG around the (possibly
	 mode-adjusted) reload register.  */
      else if (reloadreg && GET_CODE (*loc) == SUBREG
	       && r->where == &SUBREG_REG (*loc))
	{
	  if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
				      GET_MODE (SUBREG_REG (*loc)),
				      SUBREG_BYTE (*loc));
	}
    }

  /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
     what's inside and make a new rtl if so.  */
  if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
      || GET_CODE (*loc) == MULT)
    {
      rtx x = find_replacement (&XEXP (*loc, 0));
      rtx y = find_replacement (&XEXP (*loc, 1));

      /* Only build a fresh rtx when one of the operands actually changed,
	 to avoid unsharing unchanged expressions.  */
      if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
	return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
    }

  return *loc;
}
6419 \f
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into (except for earlyclobber operands).

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.

   This is similar to refers_to_regno_p in rtlanal.c except that we
   look at equivalences for pseudos that didn't get hard registers.  */

static int
refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
			      rtx x, rtx *loc)
{
  int i;
  unsigned int r;
  RTX_CODE code;
  const char *fmt;

  if (x == 0)
    return 0;

 repeat:
  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      r = REGNO (x);

      /* If this is a pseudo, a hard register must not have been allocated.
	 X must therefore either be a constant or be in memory.  */
      if (r >= FIRST_PSEUDO_REGISTER)
	{
	  if (reg_equiv_memory_loc (r))
	    return refers_to_regno_for_reload_p (regno, endregno,
						 reg_equiv_memory_loc (r),
						 (rtx*) 0);

	  gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
	  return 0;
	}

      /* Hard register: test for overlap with [REGNO, ENDREGNO).  (R is
	 known < FIRST_PSEUDO_REGISTER here; the pseudo case returned
	 above, so the conditional below always picks the nregs arm.)  */
      return (endregno > r
	      && regno < r + (r < FIRST_PSEUDO_REGISTER
			      ? hard_regno_nregs[r][GET_MODE (x)]
			      : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? subreg_nregs (x) : 1);

	  return endregno > inner_regno && regno < inner_endregno;
	}
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_for_reload_p (regno, endregno,
						SUBREG_REG (SET_DEST (x)),
						loc))
	      /* If the output is an earlyclobber operand, this is
		 a conflict.  */
	      || ((!REG_P (SET_DEST (x))
		   || earlyclobber_operand_p (SET_DEST (x)))
		  && refers_to_regno_for_reload_p (regno, endregno,
						   SET_DEST (x), loc))))
	return 1;

      /* For a SET, continue the scan in the source; a CLOBBER has no
	 source to scan.  */
      if (code == CLOBBER || loc == &SET_SRC (x))
	return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	{
	  /* Tail-recurse on the first operand instead of calling, to
	     bound stack depth on long chains.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }
	  else
	    if (refers_to_regno_for_reload_p (regno, endregno,
					      XEXP (x, i), loc))
	      return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_for_reload_p (regno, endregno,
						 XVECEXP (x, i, j), loc))
	      return 1;
	}
    }
  return 0;
}
6542
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case.

   This function is similar to reg_overlap_mentioned_p in rtlanal.c except
   that we look at equivalences for pseudos that didn't get hard registers.  */

int
reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
{
  int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
    x = XEXP (x, 0);

  /* If either argument is a constant, then modifying X can not affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))
    return 0;
  else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SUBREG)
    {
      regno = REGNO (SUBREG_REG (x));
      /* For a SUBREG of a hard register, translate to the first hard
	 register actually covered by the SUBREG.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				      GET_MODE (SUBREG_REG (x)),
				      SUBREG_BYTE (x),
				      GET_MODE (x));
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? subreg_nregs (x) : 1);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
    }
  else if (REG_P (x))
    {
      regno = REGNO (x);

      /* If this is a pseudo, it must not have been assigned a hard register.
	 Therefore, it must either be in memory or be a constant.  */

      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  if (reg_equiv_memory_loc (regno))
	    return refers_to_mem_for_reload_p (in);
	  gcc_assert (reg_equiv_constant (regno));
	  return 0;
	}

      endregno = END_HARD_REGNO (x);

      return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
    }
  else if (MEM_P (x))
    return refers_to_mem_for_reload_p (in);
  else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
	   || GET_CODE (x) == CC0)
    return reg_mentioned_p (x, in);
  else
    {
      gcc_assert (GET_CODE (x) == PLUS);

      /* We actually want to know if X is mentioned somewhere inside IN.
	 We must not say that (plus (sp) (const_int 124)) is in
	 (plus (sp) (const_int 64)), since that can lead to incorrect reload
	 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
	 into a RELOAD_OTHER on behalf of another RELOAD_OTHER.  */
      while (MEM_P (in))
	in = XEXP (in, 0);
      if (REG_P (in))
	return 0;
      else if (GET_CODE (in) == PLUS)
	return (rtx_equal_p (x, in)
		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
		|| reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
      else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
		   || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
    }

  /* Not reached: every branch of the if-chain above returns.  */
  gcc_unreachable ();
}
6627
6628 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6629 registers. */
6630
6631 static int
6632 refers_to_mem_for_reload_p (rtx x)
6633 {
6634 const char *fmt;
6635 int i;
6636
6637 if (MEM_P (x))
6638 return 1;
6639
6640 if (REG_P (x))
6641 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6642 && reg_equiv_memory_loc (REGNO (x)));
6643
6644 fmt = GET_RTX_FORMAT (GET_CODE (x));
6645 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6646 if (fmt[i] == 'e'
6647 && (MEM_P (XEXP (x, i))
6648 || refers_to_mem_for_reload_p (XEXP (x, i))))
6649 return 1;
6650
6651 return 0;
6652 }
6653 \f
/* Check the insns before INSN to see if there is a suitable register
   containing the same value as GOAL.
   If OTHER is -1, look for a register in class RCLASS.
   Otherwise, just see if register number OTHER shares GOAL's value.

   Return an rtx for the register found, or zero if none is found.

   If RELOAD_REG_P is (short *)1,
   we reject any hard reg that appears in reload_reg_rtx
   because such a hard reg is also needed coming into this insn.

   If RELOAD_REG_P is any other nonzero value,
   it is a vector indexed by hard reg number
   and we reject any hard reg whose element in the vector is nonnegative
   as well as any that appears in reload_reg_rtx.

   If GOAL is zero, then GOALREG is a register number; we look
   for an equivalent for that register.

   MODE is the machine mode of the value we want an equivalence for.
   If GOAL is nonzero and not VOIDmode, then it must have mode MODE.

   This function is used by jump.c as well as in the reload pass.

   If GOAL is the sum of the stack pointer and a constant, we treat it
   as if it were a constant except that sp is required to be unchanging.  */

rtx
find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
		short *reload_reg_p, int goalreg, enum machine_mode mode)
{
  rtx_insn *p = insn;
  rtx goaltry, valtry, value;
  rtx_insn *where;
  rtx pat;
  int regno = -1;
  int valueno;
  int goal_mem = 0;
  int goal_const = 0;
  int goal_mem_addr_varies = 0;
  int need_stable_sp = 0;
  int nregs;
  int valuenregs;
  int num = 0;

  /* Classify GOAL: a register number, a memory reference, a constant,
     or sp/fp plus a constant.  Anything else cannot be matched.  */
  if (goal == 0)
    regno = goalreg;
  else if (REG_P (goal))
    regno = REGNO (goal);
  else if (MEM_P (goal))
    {
      enum rtx_code code = GET_CODE (XEXP (goal, 0));
      if (MEM_VOLATILE_P (goal))
	return 0;
      if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
	return 0;
      /* An address with side effects must be reexecuted.  */
      switch (code)
	{
	case POST_INC:
	case PRE_INC:
	case POST_DEC:
	case PRE_DEC:
	case POST_MODIFY:
	case PRE_MODIFY:
	  return 0;
	default:
	  break;
	}
      goal_mem = 1;
    }
  else if (CONSTANT_P (goal))
    goal_const = 1;
  else if (GET_CODE (goal) == PLUS
	   && XEXP (goal, 0) == stack_pointer_rtx
	   && CONSTANT_P (XEXP (goal, 1)))
    goal_const = need_stable_sp = 1;
  else if (GET_CODE (goal) == PLUS
	   && XEXP (goal, 0) == frame_pointer_rtx
	   && CONSTANT_P (XEXP (goal, 1)))
    goal_const = 1;
  else
    return 0;

  num = 0;
  /* Scan insns back from INSN, looking for one that copies
     a value into or out of GOAL.
     Stop and give up if we reach a label.  */

  while (1)
    {
      p = PREV_INSN (p);
      if (p && DEBUG_INSN_P (p))
	continue;
      num++;
      /* Bound the backward search; give up at a label, at the start of
	 the function, or past the search-length parameter.  */
      if (p == 0 || LABEL_P (p)
	  || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
	return 0;

      /* Don't reuse register contents from before a setjmp-type
	 function call; on the second return (from the longjmp) it
	 might have been clobbered by a later reuse.  It doesn't
	 seem worthwhile to actually go and see if it is actually
	 reused even if that information would be readily available;
	 just don't reuse it across the setjmp call.  */
      if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
	return 0;

      if (NONJUMP_INSN_P (p)
	  /* If we don't want spill regs ...  */
	  && (! (reload_reg_p != 0
		 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
	  /* ... then ignore insns introduced by reload; they aren't
	     useful and can cause results in reload_as_needed to be
	     different from what they were when calculating the need for
	     spills.  If we notice an input-reload insn here, we will
	     reject it below, but it might hide a usable equivalent.
	     That makes bad code.  It may even fail: perhaps no reg was
	     spilled for this insn because it was assumed we would find
	     that equivalent.  */
	      || INSN_UID (p) < reload_first_uid))
	{
	  rtx tem;
	  pat = single_set (p);

	  /* First check for something that sets some reg equal to GOAL.
	     Note the embedded assignments: each alternative that matches
	     leaves VALTRY/VALUENO set to the candidate register.  */
	  if (pat != 0
	      && ((regno >= 0
		   && true_regnum (SET_SRC (pat)) == regno
		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
		  ||
		  (regno >= 0
		   && true_regnum (SET_DEST (pat)) == regno
		   && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
		  ||
		  (goal_const && rtx_equal_p (SET_SRC (pat), goal)
		   /* When looking for stack pointer + const,
		      make sure we don't use a stack adjust.  */
		   && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
		   && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
		  || (goal_mem
		      && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
		      && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
		  || (goal_mem
		      && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
		      && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
		  /* If we are looking for a constant,
		     and something equivalent to that constant was copied
		     into a reg, we can use that reg.  */
		  || (goal_const && REG_NOTES (p) != 0
		      && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
		      && ((rtx_equal_p (XEXP (tem, 0), goal)
			   && (valueno
			       = true_regnum (valtry = SET_DEST (pat))) >= 0)
			  || (REG_P (SET_DEST (pat))
			      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
			      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
			      && CONST_INT_P (goal)
			      && 0 != (goaltry
				       = operand_subword (XEXP (tem, 0), 0, 0,
							  VOIDmode))
			      && rtx_equal_p (goal, goaltry)
			      && (valtry
				  = operand_subword (SET_DEST (pat), 0, 0,
						     VOIDmode))
			      && (valueno = true_regnum (valtry)) >= 0)))
		  || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
							  NULL_RTX))
		      && REG_P (SET_DEST (pat))
		      && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
		      && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
		      && CONST_INT_P (goal)
		      && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
							  VOIDmode))
		      && rtx_equal_p (goal, goaltry)
		      && (valtry
			  = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
		      && (valueno = true_regnum (valtry)) >= 0)))
	    {
	      /* Candidate found; check it against OTHER or RCLASS.  */
	      if (other >= 0)
		{
		  if (valueno != other)
		    continue;
		}
	      else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
		continue;
	      else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
					   mode, valueno))
		continue;
	      value = valtry;
	      where = p;
	      break;
	    }
	}
    }

  /* We found a previous insn copying GOAL into a suitable other reg VALUE
     (or copying VALUE into GOAL, if GOAL is also a register).
     Now verify that VALUE is really valid.  */

  /* VALUENO is the register number of VALUE; a hard register.  */

  /* Don't try to re-use something that is killed in this insn.  We want
     to be able to trust REG_UNUSED notes.  */
  if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
    return 0;

  /* If we propose to get the value from the stack pointer or if GOAL is
     a MEM based on the stack pointer, we need a stable SP.  */
  if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
      || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
							  goal)))
    need_stable_sp = 1;

  /* Reject VALUE if the copy-insn moved the wrong sort of datum.  */
  if (GET_MODE (value) != mode)
    return 0;

  /* Reject VALUE if it was loaded from GOAL
     and is also a register that appears in the address of GOAL.  */

  if (goal_mem && value == SET_DEST (single_set (where))
      && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
				       goal, (rtx*) 0))
    return 0;

  /* Reject registers that overlap GOAL.  */

  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs[regno][mode];
  else
    nregs = 1;
  valuenregs = hard_regno_nregs[valueno][mode];

  if (!goal_mem && !goal_const
      && regno + nregs > valueno && regno < valueno + valuenregs)
    return 0;

  /* Reject VALUE if it is one of the regs reserved for reloads.
     Reload1 knows how to reuse them anyway, and it would get
     confused if we allocated one without its knowledge.
     (Now that insns introduced by reload are ignored above,
     this case shouldn't happen, but I'm not positive.)  */

  if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
    {
      int i;
      for (i = 0; i < valuenregs; ++i)
	if (reload_reg_p[valueno + i] >= 0)
	  return 0;
    }

  /* Reject VALUE if it is a register being used for an input reload
     even if it is not one of those reserved.  */

  if (reload_reg_p != 0)
    {
      int i;
      for (i = 0; i < n_reloads; i++)
	if (rld[i].reg_rtx != 0 && rld[i].in)
	  {
	    int regno1 = REGNO (rld[i].reg_rtx);
	    int nregs1 = hard_regno_nregs[regno1]
			 [GET_MODE (rld[i].reg_rtx)];
	    if (regno1 < valueno + valuenregs
		&& regno1 + nregs1 > valueno)
	      return 0;
	  }
    }

  if (goal_mem)
    /* We must treat frame pointer as varying here,
       since it can vary--in a nonlocal goto as generated by expand_goto.  */
    goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));

  /* Now verify that the values of GOAL and VALUE remain unaltered
     until INSN is reached.  The loop terminates: WHERE lies strictly
     between the function start and INSN, so P reaches it before
     running off the insn chain.  */

  p = insn;
  while (1)
    {
      p = PREV_INSN (p);
      if (p == where)
	return value;

      /* Don't trust the conversion past a function call
	 if either of the two is in a call-clobbered register, or memory.  */
      if (CALL_P (p))
	{
	  int i;

	  if (goal_mem || need_stable_sp)
	    return 0;

	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
	    for (i = 0; i < nregs; ++i)
	      if (call_used_regs[regno + i]
		  || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
		return 0;

	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
	    for (i = 0; i < valuenregs; ++i)
	      if (call_used_regs[valueno + i]
		  || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
		return 0;
	}

      if (INSN_P (p))
	{
	  pat = PATTERN (p);

	  /* Watch out for unspec_volatile, and volatile asms.  */
	  if (volatile_insn_p (pat))
	    return 0;

	  /* If this insn P stores in either GOAL or VALUE, return 0.
	     If GOAL is a memory ref and this insn writes memory, return 0.
	     If GOAL is a memory ref and its address is not constant,
	     and this insn P changes a register used in GOAL, return 0.  */

	  if (GET_CODE (pat) == COND_EXEC)
	    pat = COND_EXEC_CODE (pat);
	  if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
	    {
	      rtx dest = SET_DEST (pat);
	      /* Strip wrappers to find the register or MEM actually
		 being written.  */
	      while (GET_CODE (dest) == SUBREG
		     || GET_CODE (dest) == ZERO_EXTRACT
		     || GET_CODE (dest) == STRICT_LOW_PART)
		dest = XEXP (dest, 0);
	      if (REG_P (dest))
		{
		  int xregno = REGNO (dest);
		  int xnregs;
		  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
		    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
		  else
		    xnregs = 1;
		  if (xregno < regno + nregs && xregno + xnregs > regno)
		    return 0;
		  if (xregno < valueno + valuenregs
		      && xregno + xnregs > valueno)
		    return 0;
		  if (goal_mem_addr_varies
		      && reg_overlap_mentioned_for_reload_p (dest, goal))
		    return 0;
		  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
		    return 0;
		}
	      else if (goal_mem && MEM_P (dest)
		       && ! push_operand (dest, GET_MODE (dest)))
		return 0;
	      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
		       && reg_equiv_memory_loc (regno) != 0)
		return 0;
	      else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
		return 0;
	    }
	  else if (GET_CODE (pat) == PARALLEL)
	    {
	      /* Apply the same checks to every SET/CLOBBER element.  */
	      int i;
	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
		{
		  rtx v1 = XVECEXP (pat, 0, i);
		  if (GET_CODE (v1) == COND_EXEC)
		    v1 = COND_EXEC_CODE (v1);
		  if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
		    {
		      rtx dest = SET_DEST (v1);
		      while (GET_CODE (dest) == SUBREG
			     || GET_CODE (dest) == ZERO_EXTRACT
			     || GET_CODE (dest) == STRICT_LOW_PART)
			dest = XEXP (dest, 0);
		      if (REG_P (dest))
			{
			  int xregno = REGNO (dest);
			  int xnregs;
			  if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
			    xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
			  else
			    xnregs = 1;
			  if (xregno < regno + nregs
			      && xregno + xnregs > regno)
			    return 0;
			  if (xregno < valueno + valuenregs
			      && xregno + xnregs > valueno)
			    return 0;
			  if (goal_mem_addr_varies
			      && reg_overlap_mentioned_for_reload_p (dest,
								     goal))
			    return 0;
			  if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
			    return 0;
			}
		      else if (goal_mem && MEM_P (dest)
			       && ! push_operand (dest, GET_MODE (dest)))
			return 0;
		      else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
			       && reg_equiv_memory_loc (regno) != 0)
			return 0;
		      else if (need_stable_sp
			       && push_operand (dest, GET_MODE (dest)))
			return 0;
		    }
		}
	    }

	  /* Clobbers recorded in a call's function-usage list count as
	     writes too.  */
	  if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
	    {
	      rtx link;

	      for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
		   link = XEXP (link, 1))
		{
		  pat = XEXP (link, 0);
		  if (GET_CODE (pat) == CLOBBER)
		    {
		      rtx dest = SET_DEST (pat);

		      if (REG_P (dest))
			{
			  int xregno = REGNO (dest);
			  int xnregs
			    = hard_regno_nregs[xregno][GET_MODE (dest)];

			  if (xregno < regno + nregs
			      && xregno + xnregs > regno)
			    return 0;
			  else if (xregno < valueno + valuenregs
				   && xregno + xnregs > valueno)
			    return 0;
			  else if (goal_mem_addr_varies
				   && reg_overlap_mentioned_for_reload_p (dest,
									  goal))
			    return 0;
			}

		      else if (goal_mem && MEM_P (dest)
			       && ! push_operand (dest, GET_MODE (dest)))
			return 0;
		      else if (need_stable_sp
			       && push_operand (dest, GET_MODE (dest)))
			return 0;
		    }
		}
	    }

#ifdef AUTO_INC_DEC
	  /* If this insn auto-increments or auto-decrements
	     either regno or valueno, return 0 now.
	     If GOAL is a memory ref and its address is not constant,
	     and this insn P increments a register used in GOAL, return 0.  */
	  {
	    rtx link;

	    for (link = REG_NOTES (p); link; link = XEXP (link, 1))
	      if (REG_NOTE_KIND (link) == REG_INC
		  && REG_P (XEXP (link, 0)))
		{
		  int incno = REGNO (XEXP (link, 0));
		  if (incno < regno + nregs && incno >= regno)
		    return 0;
		  if (incno < valueno + valuenregs && incno >= valueno)
		    return 0;
		  if (goal_mem_addr_varies
		      && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
							     goal))
		    return 0;
		}
	  }
#endif
	}
    }
}
7127 \f
7128 /* Find a place where INCED appears in an increment or decrement operator
7129 within X, and return the amount INCED is incremented or decremented by.
7130 The value is always positive. */
7131
7132 static int
7133 find_inc_amount (rtx x, rtx inced)
7134 {
7135 enum rtx_code code = GET_CODE (x);
7136 const char *fmt;
7137 int i;
7138
7139 if (code == MEM)
7140 {
7141 rtx addr = XEXP (x, 0);
7142 if ((GET_CODE (addr) == PRE_DEC
7143 || GET_CODE (addr) == POST_DEC
7144 || GET_CODE (addr) == PRE_INC
7145 || GET_CODE (addr) == POST_INC)
7146 && XEXP (addr, 0) == inced)
7147 return GET_MODE_SIZE (GET_MODE (x));
7148 else if ((GET_CODE (addr) == PRE_MODIFY
7149 || GET_CODE (addr) == POST_MODIFY)
7150 && GET_CODE (XEXP (addr, 1)) == PLUS
7151 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7152 && XEXP (addr, 0) == inced
7153 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7154 {
7155 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7156 return i < 0 ? -i : i;
7157 }
7158 }
7159
7160 fmt = GET_RTX_FORMAT (code);
7161 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7162 {
7163 if (fmt[i] == 'e')
7164 {
7165 int tem = find_inc_amount (XEXP (x, i), inced);
7166 if (tem != 0)
7167 return tem;
7168 }
7169 if (fmt[i] == 'E')
7170 {
7171 int j;
7172 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7173 {
7174 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7175 if (tem != 0)
7176 return tem;
7177 }
7178 }
7179 }
7180
7181 return 0;
7182 }
7183 \f
/* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
   REG_INC note in insn INSN.  REGNO must refer to a hard register.  */

#ifdef AUTO_INC_DEC
static int
reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
			   rtx insn)
{
  rtx link;

  gcc_assert (insn);

  if (! INSN_P (insn))
    return 0;

  /* Scan the notes for a REG_INC whose register falls in
     [REGNO, ENDREGNO).  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
	unsigned int test = (int) REGNO (XEXP (link, 0));
	if (test >= regno && test < endregno)
	  return 1;
      }
  return 0;
}
#else

/* Without AUTO_INC_DEC there are no REG_INC notes, so the answer is
   always "no".  */
#define reg_inc_found_and_valid_p(regno,endregno,insn) 0

#endif
7213
/* Return 1 if register REGNO is the subject of a clobber in insn INSN.
   If SETS is 1, also consider SETs.  If SETS is 2, enable checking
   REG_INC.  REGNO must refer to a hard register.  */

int
regno_clobbered_p (unsigned int regno, rtx_insn *insn, enum machine_mode mode,
		   int sets)
{
  unsigned int nregs, endregno;

  /* regno must be a hard register.  */
  gcc_assert (regno < FIRST_PSEUDO_REGISTER);

  nregs = hard_regno_nregs[regno][mode];
  endregno = regno + nregs;

  /* A bare SET/CLOBBER of a register at the top level of the pattern.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER
       || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
      && REG_P (XEXP (PATTERN (insn), 0)))
    {
      unsigned int test = REGNO (XEXP (PATTERN (insn), 0));

      return test >= regno && test < endregno;
    }

  if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
    return 1;

  /* Otherwise examine each element of a PARALLEL the same way.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      int i = XVECLEN (PATTERN (insn), 0) - 1;

      for (; i >= 0; i--)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  if ((GET_CODE (elt) == CLOBBER
	       || (sets == 1 && GET_CODE (elt) == SET))
	      && REG_P (XEXP (elt, 0)))
	    {
	      unsigned int test = REGNO (XEXP (elt, 0));

	      if (test >= regno && test < endregno)
		return 1;
	    }
	  if (sets == 2
	      && reg_inc_found_and_valid_p (regno, endregno, elt))
	    return 1;
	}
    }

  return 0;
}
7266
7267 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7268 rtx
7269 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7270 {
7271 int regno;
7272
7273 if (GET_MODE (reloadreg) == mode)
7274 return reloadreg;
7275
7276 regno = REGNO (reloadreg);
7277
7278 if (REG_WORDS_BIG_ENDIAN)
7279 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7280 - (int) hard_regno_nregs[regno][mode];
7281
7282 return gen_rtx_REG (mode, regno);
7283 }
7284
/* Printable names for the reload timing classes, indexed by the
   when_needed value stored in each reload; used only for debug dumps.  */
static const char *const reload_when_needed_name[] =
{
  "RELOAD_FOR_INPUT",
  "RELOAD_FOR_OUTPUT",
  "RELOAD_FOR_INSN",
  "RELOAD_FOR_INPUT_ADDRESS",
  "RELOAD_FOR_INPADDR_ADDRESS",
  "RELOAD_FOR_OUTPUT_ADDRESS",
  "RELOAD_FOR_OUTADDR_ADDRESS",
  "RELOAD_FOR_OPERAND_ADDRESS",
  "RELOAD_FOR_OPADDR_ADDR",
  "RELOAD_OTHER",
  "RELOAD_FOR_OTHER_ADDRESS"
};
7299
/* These functions are used to print the variables set by 'find_reloads' */

/* Dump every entry of the rld[] array to stream F (stderr if F is
   null), one "Reload N: ..." record per reload.  */
DEBUG_FUNCTION void
debug_reload_to_stream (FILE *f)
{
  int r;
  const char *prefix;

  /* Default to stderr when no stream was supplied.  */
  if (! f)
    f = stderr;
  for (r = 0; r < n_reloads; r++)
    {
      fprintf (f, "Reload %d: ", r);

      if (rld[r].in != 0)
	{
	  fprintf (f, "reload_in (%s) = ",
		   GET_MODE_NAME (rld[r].inmode));
	  print_inline_rtx (f, rld[r].in, 24);
	  fprintf (f, "\n\t");
	}

      if (rld[r].out != 0)
	{
	  fprintf (f, "reload_out (%s) = ",
		   GET_MODE_NAME (rld[r].outmode));
	  print_inline_rtx (f, rld[r].out, 24);
	  fprintf (f, "\n\t");
	}

      fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);

      fprintf (f, "%s (opnum = %d)",
	       reload_when_needed_name[(int) rld[r].when_needed],
	       rld[r].opnum);

      /* Optional boolean attributes, printed only when set.  */
      if (rld[r].optional)
	fprintf (f, ", optional");

      if (rld[r].nongroup)
	fprintf (f, ", nongroup");

      if (rld[r].inc != 0)
	fprintf (f, ", inc by %d", rld[r].inc);

      if (rld[r].nocombine)
	fprintf (f, ", can't combine");

      if (rld[r].secondary_p)
	fprintf (f, ", secondary_reload_p");

      if (rld[r].in_reg != 0)
	{
	  fprintf (f, "\n\treload_in_reg: ");
	  print_inline_rtx (f, rld[r].in_reg, 24);
	}

      if (rld[r].out_reg != 0)
	{
	  fprintf (f, "\n\treload_out_reg: ");
	  print_inline_rtx (f, rld[r].out_reg, 24);
	}

      if (rld[r].reg_rtx != 0)
	{
	  fprintf (f, "\n\treload_reg_rtx: ");
	  print_inline_rtx (f, rld[r].reg_rtx, 24);
	}

      /* PREFIX starts a new line for the first secondary item and
	 switches to ", " for any that follow it.  */
      prefix = "\n\t";
      if (rld[r].secondary_in_reload != -1)
	{
	  fprintf (f, "%ssecondary_in_reload = %d",
		   prefix, rld[r].secondary_in_reload);
	  prefix = ", ";
	}

      if (rld[r].secondary_out_reload != -1)
	fprintf (f, "%ssecondary_out_reload = %d\n",
		 prefix, rld[r].secondary_out_reload);

      prefix = "\n\t";
      if (rld[r].secondary_in_icode != CODE_FOR_nothing)
	{
	  fprintf (f, "%ssecondary_in_icode = %s", prefix,
		   insn_data[rld[r].secondary_in_icode].name);
	  prefix = ", ";
	}

      if (rld[r].secondary_out_icode != CODE_FOR_nothing)
	fprintf (f, "%ssecondary_out_icode = %s", prefix,
		 insn_data[rld[r].secondary_out_icode].name);

      fprintf (f, "\n");
    }
}
7396
/* Convenience wrapper: dump all reloads to stderr.  */
DEBUG_FUNCTION void
debug_reload (void)
{
  debug_reload_to_stream (stderr);
}