1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
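/* The fragment below is a minimal, hypothetical sketch, kept under
   "#if 0" so it is never compiled, of the calling sequence described in
   the comment above.  The real driver lives in reload1.c and differs in
   detail; REPLACE, IND_LEVELS, LIVE_KNOWN and SPILLED_REGS here simply
   echo the arguments discussed above (see reload.h for the real
   prototypes).  */
#if 0
static void
reload_insn_sketch (rtx insn, int replace, int ind_levels,
                    int live_known, short *spilled_regs)
{
  int i;

  /* `init_reload' must already have been called, once, before the
     first insn of the function.  */

  /* 1.  Record the reloads INSN needs; this fills n_reloads and rld[]
     and, if REPLACE is nonzero, also records the locations to
     substitute into later.  */
  find_reloads (insn, replace, ind_levels, live_known, spilled_regs);

  /* 2.  Choose a hard register for each reload that did not already
     get one, and emit the loads before INSN (and the stores after it,
     for output reloads).  */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      {
        /* ... pick a register of class rld[i].rclass, set
           rld[i].reg_rtx to it, and emit the copies ...  */
      }

  /* 3.  Substitute the chosen reload registers into the recorded
     locations.  */
  subst_reloads (insn);
}
#endif
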
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "reload.h"
104 #include "regs.h"
105 #include "addresses.h"
106 #include "hard-reg-set.h"
107 #include "flags.h"
108 #include "real.h"
109 #include "output.h"
110 #include "function.h"
111 #include "toplev.h"
112 #include "params.h"
113 #include "target.h"
114 #include "df.h"
115
116 /* True if X is a constant that can be forced into the constant pool. */
117 #define CONST_POOL_OK_P(X) \
118 (CONSTANT_P (X) \
119 && GET_CODE (X) != HIGH \
120 && !targetm.cannot_force_const_mem (X))
121
122 /* True if C is a non-empty register class that has too few registers
123 to be safely used as a reload target class. */
124 #define SMALL_REGISTER_CLASS_P(C) \
125 (reg_class_size [(C)] == 1 \
126 || (reg_class_size [(C)] >= 1 && CLASS_LIKELY_SPILLED_P (C)))
127
128 \f
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
133
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
138
139 int reload_n_operands;
140
141 /* Replacing reloads.
142
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
147
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
150
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
153 {
154 rtx *where; /* Location to store in */
155 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
156 a SUBREG; 0 otherwise. */
157 int what; /* which reload this is for */
158 enum machine_mode mode; /* mode it must have */
159 };
160
161 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162
163 /* Number of replacements currently recorded. */
164 static int n_replacements;
165
166 /* Used to track what is modified by an operand. */
167 struct decomposition
168 {
169 int reg_flag; /* Nonzero if referencing a register. */
170 int safe; /* Nonzero if this can't conflict with anything. */
171 rtx base; /* Base address for MEM. */
172 HOST_WIDE_INT start; /* Starting offset or register number. */
173 HOST_WIDE_INT end; /* Ending offset or register number. */
174 };
175
176 #ifdef SECONDARY_MEMORY_NEEDED
177
178 /* Save MEMs needed to copy from one class of registers to another. One MEM
179 is used per mode, but normally only one or two modes are ever used.
180
181 We keep two versions, before and after register elimination. The one
182 after register elimination is recorded separately for each operand. This
183 is done in case the address is not valid, to be sure that we reload
184 each one separately. */
185
186 static rtx secondary_memlocs[NUM_MACHINE_MODES];
187 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
188 static int secondary_memlocs_elim_used = 0;
189 #endif
190
191 /* The instruction we are doing reloads for;
192 so we can test whether a register dies in it. */
193 static rtx this_insn;
194
195 /* Nonzero if this instruction is a user-specified asm with operands. */
196 static int this_insn_is_asm;
197
198 /* If hard_regs_live_known is nonzero,
199 we can tell which hard regs are currently live,
200 at least enough to succeed in choosing dummy reloads. */
201 static int hard_regs_live_known;
202
203 /* Indexed by hard reg number,
204 element is nonnegative if hard reg has been spilled.
205 This vector is passed to `find_reloads' as an argument
206 and is not changed here. */
207 static short *static_reload_reg_p;
208
209 /* Set to 1 in subst_reg_equivs if it changes anything. */
210 static int subst_reg_equivs_changed;
211
212 /* On return from push_reload, holds the reload-number for the OUT
213 operand, which can be different from the reload-number for the input operand. */
214 static int output_reloadnum;
215
216 /* Compare two RTX's. */
217 #define MATCHES(x, y) \
218 (x == y || (x != 0 && (REG_P (x) \
219 ? REG_P (y) && REGNO (x) == REGNO (y) \
220 : rtx_equal_p (x, y) && ! side_effects_p (x))))
221
222 /* Nonzero if the purposes of two reloads are similar enough that we
223 can merge the two reloads. */
224 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
225 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
226 || ((when1) == (when2) && (op1) == (op2)) \
227 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
228 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
229 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
230 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
231 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
232
233 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
234 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
235 ((when1) != (when2) \
236 || ! ((op1) == (op2) \
237 || (when1) == RELOAD_FOR_INPUT \
238 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
239 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
240
241 /* If we are going to reload an address, compute the reload type to
242 use. */
243 #define ADDR_TYPE(type) \
244 ((type) == RELOAD_FOR_INPUT_ADDRESS \
245 ? RELOAD_FOR_INPADDR_ADDRESS \
246 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
247 ? RELOAD_FOR_OUTADDR_ADDRESS \
248 : (type)))
249
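/* A few worked examples of the macros above (illustrative only; the
   operand numbers used are arbitrary):

   MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1) is
   nonzero: two ordinary input reloads may share a reload register even
   when they are for different operands, and since MERGE_TO_OTHER is
   zero for the same arguments, the merged reload stays
   RELOAD_FOR_INPUT.

   MERGE_TO_OTHER (RELOAD_OTHER, RELOAD_FOR_INPUT, 0, 0) is nonzero:
   merging anything with a RELOAD_OTHER reload yields RELOAD_OTHER.

   ADDR_TYPE (RELOAD_FOR_OUTPUT_ADDRESS) is RELOAD_FOR_OUTADDR_ADDRESS,
   while ADDR_TYPE (RELOAD_FOR_INPUT) is just RELOAD_FOR_INPUT.  */
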
250 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
251 enum machine_mode, enum reload_type,
252 enum insn_code *, secondary_reload_info *);
253 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
254 int, unsigned int);
255 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
256 static void push_replacement (rtx *, int, enum machine_mode);
257 static void dup_replacements (rtx *, rtx *);
258 static void combine_reloads (void);
259 static int find_reusable_reload (rtx *, rtx, enum reg_class,
260 enum reload_type, int, int);
261 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
262 enum machine_mode, enum reg_class, int, int);
263 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
264 static struct decomposition decompose (rtx);
265 static int immune_p (rtx, rtx, struct decomposition);
266 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
267 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
268 int *);
269 static rtx make_memloc (rtx, int);
270 static int maybe_memory_address_p (enum machine_mode, rtx, rtx *);
271 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
272 int, enum reload_type, int, rtx);
273 static rtx subst_reg_equivs (rtx, rtx);
274 static rtx subst_indexed_address (rtx);
275 static void update_auto_inc_notes (rtx, int, int);
276 static int find_reloads_address_1 (enum machine_mode, rtx, int,
277 enum rtx_code, enum rtx_code, rtx *,
278 int, enum reload_type,int, rtx);
279 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
280 enum machine_mode, int,
281 enum reload_type, int);
282 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
283 int, rtx);
284 static void copy_replacements_1 (rtx *, rtx *, int);
285 static int find_inc_amount (rtx, rtx);
286 static int refers_to_mem_for_reload_p (rtx);
287 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
288 rtx, rtx *);
289
290 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
291 list yet. */
292
293 static void
294 push_reg_equiv_alt_mem (int regno, rtx mem)
295 {
296 rtx it;
297
298 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
299 if (rtx_equal_p (XEXP (it, 0), mem))
300 return;
301
302 reg_equiv_alt_mem_list [regno]
303 = alloc_EXPR_LIST (REG_EQUIV, mem,
304 reg_equiv_alt_mem_list [regno]);
305 }
306 \f
307 /* Determine if any secondary reloads are needed for loading (if IN_P is
308 nonzero) or storing (if IN_P is zero) X to or from a reload register of
309 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
310 are needed, push them.
311
312 Return the reload number of the secondary reload we made, or -1 if
313 we didn't need one. *PICODE is set to the insn_code to use if we do
314 need a secondary reload. */
315
316 static int
317 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
318 enum reg_class reload_class,
319 enum machine_mode reload_mode, enum reload_type type,
320 enum insn_code *picode, secondary_reload_info *prev_sri)
321 {
322 enum reg_class rclass = NO_REGS;
323 enum reg_class scratch_class;
324 enum machine_mode mode = reload_mode;
325 enum insn_code icode = CODE_FOR_nothing;
326 enum insn_code t_icode = CODE_FOR_nothing;
327 enum reload_type secondary_type;
328 int s_reload, t_reload = -1;
329 const char *scratch_constraint;
330 char letter;
331 secondary_reload_info sri;
332
333 if (type == RELOAD_FOR_INPUT_ADDRESS
334 || type == RELOAD_FOR_OUTPUT_ADDRESS
335 || type == RELOAD_FOR_INPADDR_ADDRESS
336 || type == RELOAD_FOR_OUTADDR_ADDRESS)
337 secondary_type = type;
338 else
339 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
340
341 *picode = CODE_FOR_nothing;
342
343 /* If X is a paradoxical SUBREG, use the inner value to determine both the
344 mode and object being reloaded. */
345 if (GET_CODE (x) == SUBREG
346 && (GET_MODE_SIZE (GET_MODE (x))
347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
348 {
349 x = SUBREG_REG (x);
350 reload_mode = GET_MODE (x);
351 }
352
353 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
354 is still a pseudo-register by now, it *must* have an equivalent MEM
355 but we don't want to assume that), use that equivalent when seeing if
356 a secondary reload is needed since whether or not a reload is needed
357 might be sensitive to the form of the MEM. */
358
359 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
360 && reg_equiv_mem[REGNO (x)] != 0)
361 x = reg_equiv_mem[REGNO (x)];
362
363 sri.icode = CODE_FOR_nothing;
364 sri.prev_sri = prev_sri;
365 rclass = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
366 icode = (enum insn_code) sri.icode;
367
368 /* If we don't need any secondary registers, done. */
369 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
370 return -1;
371
372 if (rclass != NO_REGS)
373 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
374 reload_mode, type, &t_icode, &sri);
375
376 /* If we will be using an insn, the secondary reload is for a
377 scratch register. */
378
379 if (icode != CODE_FOR_nothing)
380 {
381 /* If IN_P is nonzero, the reload register will be the output in
382 operand 0. If IN_P is zero, the reload register will be the input
383 in operand 1. Outputs should have an initial "=", which we must
384 skip. */
385
386 /* ??? It would be useful to be able to handle only two, or more than
387 three, operands, but for now we can only handle the case of having
388 exactly three: output, input and one temp/scratch. */
389 gcc_assert (insn_data[(int) icode].n_operands == 3);
390
391 /* ??? We currently have no way to represent a reload that needs
392 an icode to reload from an intermediate tertiary reload register.
393 We should probably have a new field in struct reload to tag a
394 chain of scratch operand reloads onto. */
395 gcc_assert (rclass == NO_REGS);
396
397 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
398 gcc_assert (*scratch_constraint == '=');
399 scratch_constraint++;
400 if (*scratch_constraint == '&')
401 scratch_constraint++;
402 letter = *scratch_constraint;
403 scratch_class = (letter == 'r' ? GENERAL_REGS
404 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
405 scratch_constraint));
406
407 rclass = scratch_class;
408 mode = insn_data[(int) icode].operand[2].mode;
409 }
410
411 /* This case isn't valid, so fail. Reload is allowed to use the same
412 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
413 in the case of a secondary register, we actually need two different
414 registers for correct code. We fail here to prevent the possibility of
415 silently generating incorrect code later.
416
417 The convention is that secondary input reloads are valid only if the
418 secondary_class is different from class. If you have such a case, you
419 cannot use secondary reloads; you must work around the problem some
420 other way.
421
422 Allow this when a reload_in/out pattern is being used. I.e. assume
423 that the generated code handles this case. */
424
425 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
426 || t_icode != CODE_FOR_nothing);
427
428 /* See if we can reuse an existing secondary reload. */
429 for (s_reload = 0; s_reload < n_reloads; s_reload++)
430 if (rld[s_reload].secondary_p
431 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
432 || reg_class_subset_p (rld[s_reload].rclass, rclass))
433 && ((in_p && rld[s_reload].inmode == mode)
434 || (! in_p && rld[s_reload].outmode == mode))
435 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
436 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
437 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
438 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
439 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
440 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
441 opnum, rld[s_reload].opnum))
442 {
443 if (in_p)
444 rld[s_reload].inmode = mode;
445 if (! in_p)
446 rld[s_reload].outmode = mode;
447
448 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
449 rld[s_reload].rclass = rclass;
450
451 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
452 rld[s_reload].optional &= optional;
453 rld[s_reload].secondary_p = 1;
454 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
455 opnum, rld[s_reload].opnum))
456 rld[s_reload].when_needed = RELOAD_OTHER;
457
458 break;
459 }
460
461 if (s_reload == n_reloads)
462 {
463 #ifdef SECONDARY_MEMORY_NEEDED
464 /* If we need a memory location to copy between the two reload regs,
465 set it up now. Note that we do the input case before making
466 the reload and the output case after. This is due to the
467 way reloads are output. */
468
469 if (in_p && icode == CODE_FOR_nothing
470 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
471 {
472 get_secondary_mem (x, reload_mode, opnum, type);
473
474 /* We may have just added new reloads. Make sure we add
475 the new reload at the end. */
476 s_reload = n_reloads;
477 }
478 #endif
479
480 /* We need to make a new secondary reload for this register class. */
481 rld[s_reload].in = rld[s_reload].out = 0;
482 rld[s_reload].rclass = rclass;
483
484 rld[s_reload].inmode = in_p ? mode : VOIDmode;
485 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
486 rld[s_reload].reg_rtx = 0;
487 rld[s_reload].optional = optional;
488 rld[s_reload].inc = 0;
489 /* Maybe we could combine these, but it seems too tricky. */
490 rld[s_reload].nocombine = 1;
491 rld[s_reload].in_reg = 0;
492 rld[s_reload].out_reg = 0;
493 rld[s_reload].opnum = opnum;
494 rld[s_reload].when_needed = secondary_type;
495 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
496 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
497 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
498 rld[s_reload].secondary_out_icode
499 = ! in_p ? t_icode : CODE_FOR_nothing;
500 rld[s_reload].secondary_p = 1;
501
502 n_reloads++;
503
504 #ifdef SECONDARY_MEMORY_NEEDED
505 if (! in_p && icode == CODE_FOR_nothing
506 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
507 get_secondary_mem (x, mode, opnum, type);
508 #endif
509 }
510
511 *picode = icode;
512 return s_reload;
513 }
514
515 /* If a secondary reload is needed, return its class. If both an intermediate
516 register and a scratch register are needed, we return the class of the
517 intermediate register. */
518 enum reg_class
519 secondary_reload_class (bool in_p, enum reg_class rclass,
520 enum machine_mode mode, rtx x)
521 {
522 enum insn_code icode;
523 secondary_reload_info sri;
524
525 sri.icode = CODE_FOR_nothing;
526 sri.prev_sri = NULL;
527 rclass = targetm.secondary_reload (in_p, x, rclass, mode, &sri);
528 icode = (enum insn_code) sri.icode;
529
530 /* If there are no secondary reloads at all, we return NO_REGS.
531 If an intermediate register is needed, we return its class. */
532 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
533 return rclass;
534
535 /* No intermediate register is needed, but we have a special reload
536 pattern, which we assume for now needs a scratch register. */
537 return scratch_reload_class (icode);
538 }
539
540 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
541 three operands, verify that operand 2 is an output operand, and return
542 its register class.
543 ??? We'd like to be able to handle any pattern with at least 2 operands,
544 for zero or more scratch registers, but that needs more infrastructure. */
545 enum reg_class
546 scratch_reload_class (enum insn_code icode)
547 {
548 const char *scratch_constraint;
549 char scratch_letter;
550 enum reg_class rclass;
551
552 gcc_assert (insn_data[(int) icode].n_operands == 3);
553 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
554 gcc_assert (*scratch_constraint == '=');
555 scratch_constraint++;
556 if (*scratch_constraint == '&')
557 scratch_constraint++;
558 scratch_letter = *scratch_constraint;
559 if (scratch_letter == 'r')
560 return GENERAL_REGS;
561 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
562 scratch_constraint);
563 gcc_assert (rclass != NO_REGS);
564 return rclass;
565 }
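/* Purely for illustration (a hypothetical target, never compiled): the
   shape of a targetm.secondary_reload hook as consumed by
   push_secondary_reload and secondary_reload_class above.  Returning a
   class asks for an intermediate register; leaving NO_REGS but setting
   SRI->icode asks for a reload pattern instead, whose scratch register
   class scratch_reload_class extracts from operand 2's constraint.
   FP_REGS and CODE_FOR_reload_outsi below are made-up names.  */
#if 0
static enum reg_class
example_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
                          enum machine_mode mode,
                          secondary_reload_info *sri)
{
  /* Hypothetical restriction: loading a MEM into FP_REGS needs a
     GENERAL_REGS intermediate register.  */
  if (in_p && rclass == FP_REGS && MEM_P (x))
    return GENERAL_REGS;

  /* Hypothetical alternative: storing SImode from FP_REGS goes through
     a reload pattern that carries its own scratch operand, so no
     intermediate class is requested.  */
  if (!in_p && rclass == FP_REGS && mode == SImode)
    {
      sri->icode = CODE_FOR_reload_outsi;
      return NO_REGS;
    }

  return NO_REGS;
}
#endif
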
566 \f
567 #ifdef SECONDARY_MEMORY_NEEDED
568
569 /* Return a memory location that will be used to copy X in mode MODE.
570 If we haven't already made a location for this mode in this insn,
571 call find_reloads_address on the location being returned. */
572
573 rtx
574 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
575 int opnum, enum reload_type type)
576 {
577 rtx loc;
578 int mem_valid;
579
580 /* By default, if MODE is narrower than a word, widen it to a word.
581 This is required because most machines that require these memory
582 locations do not support short loads and stores from all registers
583 (e.g., FP registers). */
584
585 #ifdef SECONDARY_MEMORY_NEEDED_MODE
586 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
587 #else
588 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
589 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
590 #endif
591
592 /* If we already have made a MEM for this operand in MODE, return it. */
593 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
594 return secondary_memlocs_elim[(int) mode][opnum];
595
596 /* If this is the first time we've tried to get a MEM for this mode,
597 allocate a new one. `something_changed' in reload will get set
598 by noticing that the frame size has changed. */
599
600 if (secondary_memlocs[(int) mode] == 0)
601 {
602 #ifdef SECONDARY_MEMORY_NEEDED_RTX
603 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
604 #else
605 secondary_memlocs[(int) mode]
606 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
607 #endif
608 }
609
610 /* Get a version of the address doing any eliminations needed. If that
611 didn't give us a new MEM, make a new one if it isn't valid. */
612
613 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
614 mem_valid = strict_memory_address_p (mode, XEXP (loc, 0));
615
616 if (! mem_valid && loc == secondary_memlocs[(int) mode])
617 loc = copy_rtx (loc);
618
619 /* The only time the call below will do anything is if the stack
620 offset is too large. In that case IND_LEVELS doesn't matter, so we
621 can just pass a zero. Adjust the type to be the address of the
622 corresponding object. If the address was valid, save the eliminated
623 address. If it wasn't valid, we need to make a reload each time, so
624 don't save it. */
625
626 if (! mem_valid)
627 {
628 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
629 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
630 : RELOAD_OTHER);
631
632 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
633 opnum, type, 0, 0);
634 }
635
636 secondary_memlocs_elim[(int) mode][opnum] = loc;
637 if (secondary_memlocs_elim_used <= (int)mode)
638 secondary_memlocs_elim_used = (int)mode + 1;
639 return loc;
640 }
641
642 /* Clear any secondary memory locations we've made. */
643
644 void
645 clear_secondary_mem (void)
646 {
647 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
648 }
649 #endif /* SECONDARY_MEMORY_NEEDED */
650 \f
651
652 /* Find the largest class which has at least one register valid in
653 mode INNER, and for which, for every such register, that register number
654 plus N is also valid in OUTER (if in range) and is cheap to move
655 into DEST_REGNO. Such a class must exist. */
656
657 static enum reg_class
658 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
659 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
660 unsigned int dest_regno ATTRIBUTE_UNUSED)
661 {
662 int best_cost = -1;
663 int rclass;
664 int regno;
665 enum reg_class best_class = NO_REGS;
666 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
667 unsigned int best_size = 0;
668 int cost;
669
670 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
671 {
672 int bad = 0;
673 int good = 0;
674 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
675 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
676 {
677 if (HARD_REGNO_MODE_OK (regno, inner))
678 {
679 good = 1;
680 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
681 || ! HARD_REGNO_MODE_OK (regno + n, outer))
682 bad = 1;
683 }
684 }
685
686 if (bad || !good)
687 continue;
688 cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass, dest_class);
689
690 if ((reg_class_size[rclass] > best_size
691 && (best_cost < 0 || best_cost >= cost))
692 || best_cost > cost)
693 {
694 best_class = (enum reg_class) rclass;
695 best_size = reg_class_size[rclass];
696 best_cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass,
697 dest_class);
698 }
699 }
700
701 gcc_assert (best_size != 0);
702
703 return best_class;
704 }
705 \f
706 /* Return the number of a previously made reload that can be combined with
707 a new one, or n_reloads if none of the existing reloads can be used.
708 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
709 push_reload; they determine the kind of the new reload that we try to
710 combine. P_IN points to the corresponding value of IN, which can be
711 modified by this function.
712 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
713
714 static int
715 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
716 enum reload_type type, int opnum, int dont_share)
717 {
718 rtx in = *p_in;
719 int i;
720 /* We can't merge two reloads if the output of either one is
721 earlyclobbered. */
722
723 if (earlyclobber_operand_p (out))
724 return n_reloads;
725
726 /* We can use an existing reload if the class is right
727 and at least one of IN and OUT is a match
728 and the other is at worst neutral.
729 (A zero compared against anything is neutral.)
730
731 If SMALL_REGISTER_CLASSES, don't use existing reloads unless they are
732 for the same thing since that can cause us to need more reload registers
733 than we otherwise would. */
734
735 for (i = 0; i < n_reloads; i++)
736 if ((reg_class_subset_p (rclass, rld[i].rclass)
737 || reg_class_subset_p (rld[i].rclass, rclass))
738 /* If the existing reload has a register, it must fit our class. */
739 && (rld[i].reg_rtx == 0
740 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
741 true_regnum (rld[i].reg_rtx)))
742 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
743 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
744 || (out != 0 && MATCHES (rld[i].out, out)
745 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
746 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
747 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
748 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
749 return i;
750
751 /* Reloading a plain reg for input can match a reload to postincrement
752 that reg, since the postincrement's value is the right value.
753 Likewise, it can match a preincrement reload, since we regard
754 the preincrementation as happening before any ref in this insn
755 to that register. */
756 for (i = 0; i < n_reloads; i++)
757 if ((reg_class_subset_p (rclass, rld[i].rclass)
758 || reg_class_subset_p (rld[i].rclass, rclass))
759 /* If the existing reload has a register, it must fit our
760 class. */
761 && (rld[i].reg_rtx == 0
762 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
763 true_regnum (rld[i].reg_rtx)))
764 && out == 0 && rld[i].out == 0 && rld[i].in != 0
765 && ((REG_P (in)
766 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
767 && MATCHES (XEXP (rld[i].in, 0), in))
768 || (REG_P (rld[i].in)
769 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
770 && MATCHES (XEXP (in, 0), rld[i].in)))
771 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
772 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
773 && MERGABLE_RELOADS (type, rld[i].when_needed,
774 opnum, rld[i].opnum))
775 {
776 /* Make sure reload_in ultimately has the increment,
777 not the plain register. */
778 if (REG_P (in))
779 *p_in = rld[i].in;
780 return i;
781 }
782 return n_reloads;
783 }
784
785 /* Return nonzero if X is a SUBREG which will require reloading of its
786 SUBREG_REG expression. */
787
788 static int
789 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
790 {
791 rtx inner;
792
793 /* Only SUBREGs are problematical. */
794 if (GET_CODE (x) != SUBREG)
795 return 0;
796
797 inner = SUBREG_REG (x);
798
799 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
800 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
801 return 1;
802
803 /* If INNER is not a hard register, then INNER will not need to
804 be reloaded. */
805 if (!REG_P (inner)
806 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
807 return 0;
808
809 /* If INNER is not ok for MODE, then INNER will need reloading. */
810 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
811 return 1;
812
813 /* If the outer part is a word or smaller, INNER larger than a
814 word and the number of regs for INNER is not the same as the
815 number of words in INNER, then INNER will need reloading. */
816 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
817 && output
818 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
819 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
820 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
821 }
822
823 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
824 requiring an extra reload register. The caller has already found that
825 IN contains some reference to REGNO, so check that we can produce the
826 new value in a single step. E.g. if we have
827 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
828 instruction that adds one to a register, this should succeed.
829 However, if we have something like
830 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
831 needs to be loaded into a register first, we need a separate reload
832 register.
833 Such PLUS reloads are generated by find_reloads_address_part.
834 The out-of-range PLUS expressions are usually introduced in the instruction
835 patterns by register elimination and substituting pseudos without a home
836 by their function-invariant equivalences. */
837 static int
838 can_reload_into (rtx in, int regno, enum machine_mode mode)
839 {
840 rtx dst, test_insn;
841 int r = 0;
842 struct recog_data save_recog_data;
843
844 /* For matching constraints, we often get notional input reloads where
845 we want to use the original register as the reload register. I.e.
846 technically this is a non-optional input-output reload, but IN is
847 already a valid register, and has been chosen as the reload register.
848 Speed this up, since it trivially works. */
849 if (REG_P (in))
850 return 1;
851
852 /* To test MEMs properly, we'd have to take into account all the reloads
853 that are already scheduled, which can become quite complicated.
854 And since we've already handled address reloads for this MEM, it
855 should always succeed anyway. */
856 if (MEM_P (in))
857 return 1;
858
859 /* If we can make a simple SET insn that does the job, everything should
860 be fine. */
861 dst = gen_rtx_REG (mode, regno);
862 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
863 save_recog_data = recog_data;
864 if (recog_memoized (test_insn) >= 0)
865 {
866 extract_insn (test_insn);
867 r = constrain_operands (1);
868 }
869 recog_data = save_recog_data;
870 return r;
871 }
872
873 /* Record one reload that needs to be performed.
874 IN is an rtx saying where the data are to be found before this instruction.
875 OUT says where they must be stored after the instruction.
876 (IN is zero for data not read, and OUT is zero for data not written.)
877 INLOC and OUTLOC point to the places in the instructions where
878 IN and OUT were found.
879 If IN and OUT are both nonzero, it means the same register must be used
880 to reload both IN and OUT.
881
882 RCLASS is a register class required for the reloaded data.
883 INMODE is the machine mode that the instruction requires
884 for the reg that replaces IN and OUTMODE is likewise for OUT.
885
886 If IN is zero, then OUT's location and mode should be passed as
887 INLOC and INMODE.
888
889 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
890
891 OPTIONAL nonzero means this reload does not need to be performed:
892 it can be discarded if that is more convenient.
893
894 OPNUM and TYPE say what the purpose of this reload is.
895
896 The return value is the reload-number for this reload.
897
898 If both IN and OUT are nonzero, in some rare cases we might
899 want to make two separate reloads. (Actually we never do this now.)
900 Therefore, the reload-number for OUT is stored in
901 output_reloadnum when we return; the return value applies to IN.
902 Usually (presently always), when IN and OUT are nonzero,
903 the two reload-numbers are equal, but the caller should be careful to
904 distinguish them. */
905
906 int
907 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
908 enum reg_class rclass, enum machine_mode inmode,
909 enum machine_mode outmode, int strict_low, int optional,
910 int opnum, enum reload_type type)
911 {
912 int i;
913 int dont_share = 0;
914 int dont_remove_subreg = 0;
915 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
916 int secondary_in_reload = -1, secondary_out_reload = -1;
917 enum insn_code secondary_in_icode = CODE_FOR_nothing;
918 enum insn_code secondary_out_icode = CODE_FOR_nothing;
919
920 /* INMODE and/or OUTMODE could be VOIDmode if no mode
921 has been specified for the operand. In that case,
922 use the operand's mode as the mode to reload. */
923 if (inmode == VOIDmode && in != 0)
924 inmode = GET_MODE (in);
925 if (outmode == VOIDmode && out != 0)
926 outmode = GET_MODE (out);
927
928 /* If find_reloads and friends have so far failed to replace a pseudo
929 with its constant from reg_equiv_constant, something went wrong
930 beforehand.
931 Note that it can't simply be done here if we missed it earlier
932 since the constant might need to be pushed into the literal pool
933 and the resulting memref would probably need further
934 reloading. */
935 if (in != 0 && REG_P (in))
936 {
937 int regno = REGNO (in);
938
939 gcc_assert (regno < FIRST_PSEUDO_REGISTER
940 || reg_renumber[regno] >= 0
941 || reg_equiv_constant[regno] == NULL_RTX);
942 }
943
944 /* reg_equiv_constant only contains constants which are obviously
945 not appropriate as a destination. So if we would need to replace
946 the destination pseudo with a constant, we are in real
947 trouble. */
948 if (out != 0 && REG_P (out))
949 {
950 int regno = REGNO (out);
951
952 gcc_assert (regno < FIRST_PSEUDO_REGISTER
953 || reg_renumber[regno] >= 0
954 || reg_equiv_constant[regno] == NULL_RTX);
955 }
956
957 /* If we have a read-write operand with an address side-effect,
958 change either IN or OUT so the side-effect happens only once. */
959 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
960 switch (GET_CODE (XEXP (in, 0)))
961 {
962 case POST_INC: case POST_DEC: case POST_MODIFY:
963 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
964 break;
965
966 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
967 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
968 break;
969
970 default:
971 break;
972 }
973
974 /* If we are reloading a (SUBREG constant ...), really reload just the
975 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
976 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
977 a pseudo and hence will become a MEM) with M1 wider than M2 and the
978 register is a pseudo, also reload the inside expression.
979 For machines that extend byte loads, do this for any SUBREG of a pseudo
980 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
981 M2 is an integral mode that gets extended when loaded.
982 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
983 either M1 is not valid for R or M2 is wider than a word but we only
984 need one word to store an M2-sized quantity in R.
985 (However, if OUT is nonzero, we need to reload the reg *and*
986 the subreg, so do nothing here, and let following statement handle it.)
987
988 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
989 we can't handle it here because CONST_INT does not indicate a mode.
990
991 Similarly, we must reload the inside expression if we have a
992 STRICT_LOW_PART (presumably, in == out in this case).
993
994 Also reload the inner expression if it does not require a secondary
995 reload but the SUBREG does.
996
997 Finally, reload the inner expression if it is a register that is in
998 the class whose registers cannot be referenced in a different size
999 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1000 cannot reload just the inside since we might end up with the wrong
1001 register class. But if it is inside a STRICT_LOW_PART, we have
1002 no choice, so we hope we do get the right register class there. */
1003
1004 if (in != 0 && GET_CODE (in) == SUBREG
1005 && (subreg_lowpart_p (in) || strict_low)
1006 #ifdef CANNOT_CHANGE_MODE_CLASS
1007 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1008 #endif
1009 && (CONSTANT_P (SUBREG_REG (in))
1010 || GET_CODE (SUBREG_REG (in)) == PLUS
1011 || strict_low
1012 || (((REG_P (SUBREG_REG (in))
1013 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1014 || MEM_P (SUBREG_REG (in)))
1015 && ((GET_MODE_SIZE (inmode)
1016 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1017 #ifdef LOAD_EXTEND_OP
1018 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1019 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1020 <= UNITS_PER_WORD)
1021 && (GET_MODE_SIZE (inmode)
1022 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1023 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1024 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1025 #endif
1026 #ifdef WORD_REGISTER_OPERATIONS
1027 || ((GET_MODE_SIZE (inmode)
1028 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1029 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1030 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1031 / UNITS_PER_WORD)))
1032 #endif
1033 ))
1034 || (REG_P (SUBREG_REG (in))
1035 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1036 /* The case where out is nonzero
1037 is handled differently in the following statement. */
1038 && (out == 0 || subreg_lowpart_p (in))
1039 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1040 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1041 > UNITS_PER_WORD)
1042 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1043 / UNITS_PER_WORD)
1044 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1045 [GET_MODE (SUBREG_REG (in))]))
1046 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1047 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1048 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1049 SUBREG_REG (in))
1050 == NO_REGS))
1051 #ifdef CANNOT_CHANGE_MODE_CLASS
1052 || (REG_P (SUBREG_REG (in))
1053 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1054 && REG_CANNOT_CHANGE_MODE_P
1055 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1056 #endif
1057 ))
1058 {
1059 in_subreg_loc = inloc;
1060 inloc = &SUBREG_REG (in);
1061 in = *inloc;
1062 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1063 if (MEM_P (in))
1064 /* This is supposed to happen only for paradoxical subregs made by
1065 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1066 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1067 #endif
1068 inmode = GET_MODE (in);
1069 }
1070
1071 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1072 either M1 is not valid for R or M2 is wider than a word but we only
1073 need one word to store an M2-sized quantity in R.
1074
1075 However, we must reload the inner reg *as well as* the subreg in
1076 that case. */
1077
1078 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1079 code above. This can happen if SUBREG_BYTE != 0. */
1080
1081 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1082 {
1083 enum reg_class in_class = rclass;
1084
1085 if (REG_P (SUBREG_REG (in)))
1086 in_class
1087 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1088 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1089 GET_MODE (SUBREG_REG (in)),
1090 SUBREG_BYTE (in),
1091 GET_MODE (in)),
1092 REGNO (SUBREG_REG (in)));
1093
1094 /* This relies on the fact that emit_reload_insns outputs the
1095 instructions for input reloads of type RELOAD_OTHER in the same
1096 order as the reloads. Thus if the outer reload is also of type
1097 RELOAD_OTHER, we are guaranteed that this inner reload will be
1098 output before the outer reload. */
1099 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1100 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1101 dont_remove_subreg = 1;
1102 }
1103
1104 /* Similarly for paradoxical and problematical SUBREGs on the output.
1105 Note that there is no reason we need worry about the previous value
1106 of SUBREG_REG (out); even if wider than out,
1107 storing in a subreg is entitled to clobber it all
1108 (except in the case of STRICT_LOW_PART,
1109 and in that case the constraint should label it input-output.) */
1110 if (out != 0 && GET_CODE (out) == SUBREG
1111 && (subreg_lowpart_p (out) || strict_low)
1112 #ifdef CANNOT_CHANGE_MODE_CLASS
1113 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1114 #endif
1115 && (CONSTANT_P (SUBREG_REG (out))
1116 || strict_low
1117 || (((REG_P (SUBREG_REG (out))
1118 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1119 || MEM_P (SUBREG_REG (out)))
1120 && ((GET_MODE_SIZE (outmode)
1121 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1122 #ifdef WORD_REGISTER_OPERATIONS
1123 || ((GET_MODE_SIZE (outmode)
1124 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1125 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1126 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1127 / UNITS_PER_WORD)))
1128 #endif
1129 ))
1130 || (REG_P (SUBREG_REG (out))
1131 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1132 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1133 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1134 > UNITS_PER_WORD)
1135 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1136 / UNITS_PER_WORD)
1137 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1138 [GET_MODE (SUBREG_REG (out))]))
1139 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1140 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1141 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1142 SUBREG_REG (out))
1143 == NO_REGS))
1144 #ifdef CANNOT_CHANGE_MODE_CLASS
1145 || (REG_P (SUBREG_REG (out))
1146 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1147 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1148 GET_MODE (SUBREG_REG (out)),
1149 outmode))
1150 #endif
1151 ))
1152 {
1153 out_subreg_loc = outloc;
1154 outloc = &SUBREG_REG (out);
1155 out = *outloc;
1156 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1157 gcc_assert (!MEM_P (out)
1158 || GET_MODE_SIZE (GET_MODE (out))
1159 <= GET_MODE_SIZE (outmode));
1160 #endif
1161 outmode = GET_MODE (out);
1162 }
1163
1164 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1165 either M1 is not valid for R or M2 is wider than a word but we only
1166 need one word to store an M2-sized quantity in R.
1167
1168 However, we must reload the inner reg *as well as* the subreg in
1169 that case. In this case, the inner reg is an in-out reload. */
1170
1171 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1172 {
1173 /* This relies on the fact that emit_reload_insns outputs the
1174 instructions for output reloads of type RELOAD_OTHER in reverse
1175 order of the reloads. Thus if the outer reload is also of type
1176 RELOAD_OTHER, we are guaranteed that this inner reload will be
1177 output after the outer reload. */
1178 dont_remove_subreg = 1;
1179 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1180 &SUBREG_REG (out),
1181 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1182 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1183 GET_MODE (SUBREG_REG (out)),
1184 SUBREG_BYTE (out),
1185 GET_MODE (out)),
1186 REGNO (SUBREG_REG (out))),
1187 VOIDmode, VOIDmode, 0, 0,
1188 opnum, RELOAD_OTHER);
1189 }
1190
1191 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1192 if (in != 0 && out != 0 && MEM_P (out)
1193 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1194 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1195 dont_share = 1;
1196
1197 /* If IN is a SUBREG of a hard register, make a new REG. This
1198 simplifies some of the cases below. */
1199
1200 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1201 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1202 && ! dont_remove_subreg)
1203 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1204
1205 /* Similarly for OUT. */
1206 if (out != 0 && GET_CODE (out) == SUBREG
1207 && REG_P (SUBREG_REG (out))
1208 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1209 && ! dont_remove_subreg)
1210 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1211
1212 /* Narrow down the class of register wanted if that is
1213 desirable on this machine for efficiency. */
1214 {
1215 enum reg_class preferred_class = rclass;
1216
1217 if (in != 0)
1218 preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1219
1220 /* Output reloads may need analogous treatment, different in detail. */
1221 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1222 if (out != 0)
1223 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1224 #endif
1225
1226 /* Discard what the target said if we cannot do it. */
1227 if (preferred_class != NO_REGS
1228 || (optional && type == RELOAD_FOR_OUTPUT))
1229 rclass = preferred_class;
1230 }
1231
1232 /* Make sure we use a class that can handle the actual pseudo
1233 inside any subreg. For example, on the 386, QImode regs
1234 can appear within SImode subregs. Although GENERAL_REGS
1235 can handle SImode, QImode needs a smaller class. */
1236 #ifdef LIMIT_RELOAD_CLASS
1237 if (in_subreg_loc)
1238 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1239 else if (in != 0 && GET_CODE (in) == SUBREG)
1240 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1241
1242 if (out_subreg_loc)
1243 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1244 if (out != 0 && GET_CODE (out) == SUBREG)
1245 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1246 #endif
1247
1248 /* Verify that this class is at least possible for the mode that
1249 is specified. */
1250 if (this_insn_is_asm)
1251 {
1252 enum machine_mode mode;
1253 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1254 mode = inmode;
1255 else
1256 mode = outmode;
1257 if (mode == VOIDmode)
1258 {
1259 error_for_asm (this_insn, "cannot reload integer constant "
1260 "operand in %<asm%>");
1261 mode = word_mode;
1262 if (in != 0)
1263 inmode = word_mode;
1264 if (out != 0)
1265 outmode = word_mode;
1266 }
1267 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1268 if (HARD_REGNO_MODE_OK (i, mode)
1269 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1270 break;
1271 if (i == FIRST_PSEUDO_REGISTER)
1272 {
1273 error_for_asm (this_insn, "impossible register constraint "
1274 "in %<asm%>");
1275 /* Avoid further trouble with this insn. */
1276 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1277 /* We used to continue here setting class to ALL_REGS, but it triggers
1278 a sanity check on i386 for:
1279 void foo(long double d)
1280 {
1281 asm("" :: "a" (d));
1282 }
1283 Returning zero here ought to be safe as we take care in
1284 find_reloads not to process the reloads when the instruction was
1285 replaced by a USE. */
1286
1287 return 0;
1288 }
1289 }
1290
1291 /* Optional output reloads are always OK even if we have no register class,
1292 since the function of these reloads is only to have spill_reg_store etc.
1293 set, so that the storing insn can be deleted later. */
1294 gcc_assert (rclass != NO_REGS
1295 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1296
1297 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1298
1299 if (i == n_reloads)
1300 {
1301 /* See if we need a secondary reload register to move between CLASS
1302 and IN or CLASS and OUT. Get the icode and push any reloads
1303 needed for each of them if so. */
1304
1305 if (in != 0)
1306 secondary_in_reload
1307 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1308 &secondary_in_icode, NULL);
1309 if (out != 0 && GET_CODE (out) != SCRATCH)
1310 secondary_out_reload
1311 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1312 type, &secondary_out_icode, NULL);
1313
1314 /* We found no existing reload suitable for re-use.
1315 So add an additional reload. */
1316
1317 #ifdef SECONDARY_MEMORY_NEEDED
1318 /* If a memory location is needed for the copy, make one. */
1319 if (in != 0
1320 && (REG_P (in)
1321 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1322 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1323 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1324 rclass, inmode))
1325 get_secondary_mem (in, inmode, opnum, type);
1326 #endif
1327
1328 i = n_reloads;
1329 rld[i].in = in;
1330 rld[i].out = out;
1331 rld[i].rclass = rclass;
1332 rld[i].inmode = inmode;
1333 rld[i].outmode = outmode;
1334 rld[i].reg_rtx = 0;
1335 rld[i].optional = optional;
1336 rld[i].inc = 0;
1337 rld[i].nocombine = 0;
1338 rld[i].in_reg = inloc ? *inloc : 0;
1339 rld[i].out_reg = outloc ? *outloc : 0;
1340 rld[i].opnum = opnum;
1341 rld[i].when_needed = type;
1342 rld[i].secondary_in_reload = secondary_in_reload;
1343 rld[i].secondary_out_reload = secondary_out_reload;
1344 rld[i].secondary_in_icode = secondary_in_icode;
1345 rld[i].secondary_out_icode = secondary_out_icode;
1346 rld[i].secondary_p = 0;
1347
1348 n_reloads++;
1349
1350 #ifdef SECONDARY_MEMORY_NEEDED
1351 if (out != 0
1352 && (REG_P (out)
1353 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1354 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1355 && SECONDARY_MEMORY_NEEDED (rclass,
1356 REGNO_REG_CLASS (reg_or_subregno (out)),
1357 outmode))
1358 get_secondary_mem (out, outmode, opnum, type);
1359 #endif
1360 }
1361 else
1362 {
1363 /* We are reusing an existing reload,
1364 but we may have additional information for it.
1365 For example, we may now have both IN and OUT
1366 while the old one may have just one of them. */
1367
1368 /* The modes can be different. If they are, we want to reload in
1369 the larger mode, so that the value is valid for both modes. */
1370 if (inmode != VOIDmode
1371 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1372 rld[i].inmode = inmode;
1373 if (outmode != VOIDmode
1374 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1375 rld[i].outmode = outmode;
1376 if (in != 0)
1377 {
1378 rtx in_reg = inloc ? *inloc : 0;
1379 /* If we merge reloads for two distinct rtl expressions that
1380 are identical in content, there might be duplicate address
1381 reloads. Remove the extra set now, so that if we later find
1382 that we can inherit this reload, we can get rid of the
1383 address reloads altogether.
1384
1385 Do not do this if both reloads are optional since the result
1386 would be an optional reload which could potentially leave
1387 unresolved address replacements.
1388
1389 It is not sufficient to call transfer_replacements since
1390 choose_reload_regs will remove the replacements for address
1391 reloads of inherited reloads which results in the same
1392 problem. */
1393 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1394 && ! (rld[i].optional && optional))
1395 {
1396 /* We must keep the address reload with the lower operand
1397 number alive. */
1398 if (opnum > rld[i].opnum)
1399 {
1400 remove_address_replacements (in);
1401 in = rld[i].in;
1402 in_reg = rld[i].in_reg;
1403 }
1404 else
1405 remove_address_replacements (rld[i].in);
1406 }
1407 /* When emitting reloads we don't necessarily look at the in-
1408 and outmode, but also directly at the operands (in and out).
1409 So we can't simply overwrite them with whatever we have found
1410 for this (to-be-merged) reload, we have to "merge" that too.
1411 Reusing another reload already verified that we deal with the
1412 same operands, just possibly in different modes. So we
1413 overwrite the operands only when the new mode is larger.
1414 See also PR33613. */
1415 if (!rld[i].in
1416 || GET_MODE_SIZE (GET_MODE (in))
1417 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1418 rld[i].in = in;
1419 if (!rld[i].in_reg
1420 || (in_reg
1421 && GET_MODE_SIZE (GET_MODE (in_reg))
1422 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1423 rld[i].in_reg = in_reg;
1424 }
1425 if (out != 0)
1426 {
1427 if (!rld[i].out
1428 || (out
1429 && GET_MODE_SIZE (GET_MODE (out))
1430 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1431 rld[i].out = out;
1432 if (outloc
1433 && (!rld[i].out_reg
1434 || GET_MODE_SIZE (GET_MODE (*outloc))
1435 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1436 rld[i].out_reg = *outloc;
1437 }
1438 if (reg_class_subset_p (rclass, rld[i].rclass))
1439 rld[i].rclass = rclass;
1440 rld[i].optional &= optional;
1441 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1442 opnum, rld[i].opnum))
1443 rld[i].when_needed = RELOAD_OTHER;
1444 rld[i].opnum = MIN (rld[i].opnum, opnum);
1445 }
1446
1447 /* If the ostensible rtx being reloaded differs from the rtx found
1448 in the location to substitute, this reload is not safe to combine
1449 because we cannot reliably tell whether it appears in the insn. */
1450
1451 if (in != 0 && in != *inloc)
1452 rld[i].nocombine = 1;
1453
1454 #if 0
1455 /* This was replaced by changes in find_reloads_address_1 and the new
1456 function inc_for_reload, which go with a new meaning of reload_inc. */
1457
1458 /* If this is an IN/OUT reload in an insn that sets the CC,
1459 it must be for an autoincrement. It doesn't work to store
1460 the incremented value after the insn because that would clobber the CC.
1461 So we must do the increment of the value reloaded from,
1462 increment it, store it back, then decrement again. */
1463 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1464 {
1465 out = 0;
1466 rld[i].out = 0;
1467 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1468 /* If we did not find a nonzero amount-to-increment-by,
1469 that contradicts the belief that IN is being incremented
1470 in an address in this insn. */
1471 gcc_assert (rld[i].inc != 0);
1472 }
1473 #endif
1474
1475 /* If we will replace IN and OUT with the reload-reg,
1476 record where they are located so that substitution need
1477 not do a tree walk. */
1478
1479 if (replace_reloads)
1480 {
1481 if (inloc != 0)
1482 {
1483 struct replacement *r = &replacements[n_replacements++];
1484 r->what = i;
1485 r->subreg_loc = in_subreg_loc;
1486 r->where = inloc;
1487 r->mode = inmode;
1488 }
1489 if (outloc != 0 && outloc != inloc)
1490 {
1491 struct replacement *r = &replacements[n_replacements++];
1492 r->what = i;
1493 r->where = outloc;
1494 r->subreg_loc = out_subreg_loc;
1495 r->mode = outmode;
1496 }
1497 }
1498
1499 /* If this reload is just being introduced and it has both
1500 an incoming quantity and an outgoing quantity that are
1501 supposed to be made to match, see if either one of the two
1502 can serve as the place to reload into.
1503
1504 If one of them is acceptable, set rld[i].reg_rtx
1505 to that one. */
1506
1507 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1508 {
1509 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1510 inmode, outmode,
1511 rld[i].rclass, i,
1512 earlyclobber_operand_p (out));
1513
1514 /* If the outgoing register already contains the same value
1515 as the incoming one, we can dispense with loading it.
1516 The easiest way to tell the caller that is to give a phony
1517 value for the incoming operand (same as outgoing one). */
1518 if (rld[i].reg_rtx == out
1519 && (REG_P (in) || CONSTANT_P (in))
1520 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1521 static_reload_reg_p, i, inmode))
1522 rld[i].in = out;
1523 }
1524
1525 /* If this is an input reload and the operand contains a register that
1526 dies in this insn and is used nowhere else, see if it is the right class
1527 to be used for this reload. Use it if so. (This occurs most commonly
1528 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1529 this if it is also an output reload that mentions the register unless
1530 the output is a SUBREG that clobbers an entire register.
1531
1532 Note that the operand might be one of the spill regs, if it is a
1533 pseudo reg and we are in a block where spilling has not taken place.
1534 But if there is no spilling in this block, that is OK.
1535 An explicitly used hard reg cannot be a spill reg. */
1536
1537 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1538 {
1539 rtx note;
1540 int regno;
1541 enum machine_mode rel_mode = inmode;
1542
1543 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1544 rel_mode = outmode;
1545
1546 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1547 if (REG_NOTE_KIND (note) == REG_DEAD
1548 && REG_P (XEXP (note, 0))
1549 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1550 && reg_mentioned_p (XEXP (note, 0), in)
1551 /* Check that a former pseudo is valid; see find_dummy_reload. */
1552 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1553 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1554 ORIGINAL_REGNO (XEXP (note, 0)))
1555 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1556 && ! refers_to_regno_for_reload_p (regno,
1557 end_hard_regno (rel_mode,
1558 regno),
1559 PATTERN (this_insn), inloc)
1560 /* If this is also an output reload, IN cannot be used as
1561 the reload register if it is set in this insn unless IN
1562 is also OUT. */
1563 && (out == 0 || in == out
1564 || ! hard_reg_set_here_p (regno,
1565 end_hard_regno (rel_mode, regno),
1566 PATTERN (this_insn)))
1567 /* ??? Why is this code so different from the previous?
1568 Is there any simple coherent way to describe the two together?
1569 What's going on here? */
1570 && (in != out
1571 || (GET_CODE (in) == SUBREG
1572 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1573 / UNITS_PER_WORD)
1574 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1575 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1576 /* Make sure the operand fits in the reg that dies. */
1577 && (GET_MODE_SIZE (rel_mode)
1578 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1579 && HARD_REGNO_MODE_OK (regno, inmode)
1580 && HARD_REGNO_MODE_OK (regno, outmode))
1581 {
1582 unsigned int offs;
1583 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1584 hard_regno_nregs[regno][outmode]);
1585
1586 for (offs = 0; offs < nregs; offs++)
1587 if (fixed_regs[regno + offs]
1588 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1589 regno + offs))
1590 break;
1591
1592 if (offs == nregs
1593 && (! (refers_to_regno_for_reload_p
1594 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1595 || can_reload_into (in, regno, inmode)))
1596 {
1597 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1598 break;
1599 }
1600 }
1601 }
1602
1603 if (out)
1604 output_reloadnum = i;
1605
1606 return i;
1607 }
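/* Illustrative sketch, kept under #if 0 so it is never compiled: the reuse
   behavior described in the merging code above, seen from a caller's point
   of view.  The helper name and the operand REG69 are hypothetical, and
   the argument order follows the push_reload declaration in reload.h.
   Requesting a reload twice for the same value, operand and type reuses
   the existing rld[] entry instead of creating a second one.  */
#if 0
static void
push_reload_reuse_example (rtx reg69)
{
  int r1 = push_reload (reg69, NULL_RTX, &reg69, (rtx *) 0, GENERAL_REGS,
			GET_MODE (reg69), VOIDmode, 0, 0, 1,
			RELOAD_FOR_INPUT);
  int r2 = push_reload (reg69, NULL_RTX, &reg69, (rtx *) 0, GENERAL_REGS,
			GET_MODE (reg69), VOIDmode, 0, 0, 1,
			RELOAD_FOR_INPUT);
  /* The second call merged into the first; no new reload was pushed.  */
  gcc_assert (r1 == r2);
}
#endif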
1608
1609 /* Record an additional place we must replace a value
1610 for which we have already recorded a reload.
1611 RELOADNUM is the value returned by push_reload
1612 when the reload was recorded.
1613 This is used in insn patterns that use match_dup. */
1614
1615 static void
1616 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1617 {
1618 if (replace_reloads)
1619 {
1620 struct replacement *r = &replacements[n_replacements++];
1621 r->what = reloadnum;
1622 r->where = loc;
1623 r->subreg_loc = 0;
1624 r->mode = mode;
1625 }
1626 }
1627
1628 /* Duplicate any replacement we have recorded to apply at
1629 location ORIG_LOC to also be performed at DUP_LOC.
1630 This is used in insn patterns that use match_dup. */
1631
1632 static void
1633 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1634 {
1635 int i, n = n_replacements;
1636
1637 for (i = 0; i < n; i++)
1638 {
1639 struct replacement *r = &replacements[i];
1640 if (r->where == orig_loc)
1641 push_replacement (dup_loc, r->what, r->mode);
1642 }
1643 }
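/* Illustrative sketch, kept under #if 0 so it is never compiled: the
   match_dup bookkeeping performed by the two helpers above.  The helper
   name and the ORIG_LOC/DUP_LOC/R parameters are hypothetical.  Once a
   replacement is recorded for the original operand location, the
   duplicate location receives an entry naming the same reload, so
   subst_reloads later patches both places.  */
#if 0
static void
match_dup_replacement_example (rtx *orig_loc, rtx *dup_loc, int r)
{
  push_replacement (orig_loc, r, GET_MODE (*orig_loc));
  dup_replacements (dup_loc, orig_loc);
  /* replacements[] now holds two entries with .what == r; only their
     .where pointers differ.  */
}
#endif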
1644 \f
1645 /* Transfer all replacements that used to be in reload FROM to be in
1646 reload TO. */
1647
1648 void
1649 transfer_replacements (int to, int from)
1650 {
1651 int i;
1652
1653 for (i = 0; i < n_replacements; i++)
1654 if (replacements[i].what == from)
1655 replacements[i].what = to;
1656 }
1657 \f
1658 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1659 or a subpart of it. If we have any replacements registered for IN_RTX,
1660 cancel the reloads that were supposed to load them.
1661 Return nonzero if we canceled any reloads. */
1662 int
1663 remove_address_replacements (rtx in_rtx)
1664 {
1665 int i, j;
1666 char reload_flags[MAX_RELOADS];
1667 int something_changed = 0;
1668
1669 memset (reload_flags, 0, sizeof reload_flags);
1670 for (i = 0, j = 0; i < n_replacements; i++)
1671 {
1672 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1673 reload_flags[replacements[i].what] |= 1;
1674 else
1675 {
1676 replacements[j++] = replacements[i];
1677 reload_flags[replacements[i].what] |= 2;
1678 }
1679 }
1680 /* Note that the following store must be done before the recursive calls. */
1681 n_replacements = j;
1682
1683 for (i = n_reloads - 1; i >= 0; i--)
1684 {
1685 if (reload_flags[i] == 1)
1686 {
1687 deallocate_reload_reg (i);
1688 remove_address_replacements (rld[i].in);
1689 rld[i].in = 0;
1690 something_changed = 1;
1691 }
1692 }
1693 return something_changed;
1694 }
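/* Illustrative sketch, kept under #if 0 so it is never compiled: the
   typical use of remove_address_replacements, matching the merging code
   in push_reload above.  The helper name and indices are hypothetical.
   When two reloads turn out to load rtx-equal values, the duplicate's
   address reloads are canceled so their reload registers become free.  */
#if 0
static void
drop_duplicate_address_reloads_example (int surviving, int duplicate)
{
  if (rtx_equal_p (rld[surviving].in, rld[duplicate].in)
      && remove_address_replacements (rld[duplicate].in))
    /* At least one address reload feeding rld[duplicate].in was
       canceled and its reload register deallocated.  */
    ;
}
#endif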
1695 \f
1696 /* If there is only one output reload, and it is not for an earlyclobber
1697 operand, try to combine it with a (logically unrelated) input reload
1698 to reduce the number of reload registers needed.
1699
1700 This is safe if the input reload does not appear in
1701 the value being output-reloaded, because this implies
1702 it is not needed any more once the original insn completes.
1703
1704 If that doesn't work, see if we can use any of the registers that
1705 die in this insn as a reload register. We can if it is of the right
1706 class and does not appear in the value being output-reloaded. */
1707
1708 static void
1709 combine_reloads (void)
1710 {
1711 int i, regno;
1712 int output_reload = -1;
1713 int secondary_out = -1;
1714 rtx note;
1715
1716 /* Find the output reload; return unless there is exactly one
1717 and that one is mandatory. */
1718
1719 for (i = 0; i < n_reloads; i++)
1720 if (rld[i].out != 0)
1721 {
1722 if (output_reload >= 0)
1723 return;
1724 output_reload = i;
1725 }
1726
1727 if (output_reload < 0 || rld[output_reload].optional)
1728 return;
1729
1730 /* An input-output reload isn't combinable. */
1731
1732 if (rld[output_reload].in != 0)
1733 return;
1734
1735 /* If this reload is for an earlyclobber operand, we can't do anything. */
1736 if (earlyclobber_operand_p (rld[output_reload].out))
1737 return;
1738
1739 /* If there is a reload for part of the address of this operand, we would
1740 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1741 its life to the point where doing this combine would not lower the
1742 number of spill registers needed. */
1743 for (i = 0; i < n_reloads; i++)
1744 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1745 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1746 && rld[i].opnum == rld[output_reload].opnum)
1747 return;
1748
1749 /* Check each input reload; can we combine it? */
1750
1751 for (i = 0; i < n_reloads; i++)
1752 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1753 /* Life span of this reload must not extend past main insn. */
1754 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1755 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1756 && rld[i].when_needed != RELOAD_OTHER
1757 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1758 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1759 rld[output_reload].outmode))
1760 && rld[i].inc == 0
1761 && rld[i].reg_rtx == 0
1762 #ifdef SECONDARY_MEMORY_NEEDED
1763 /* Don't combine two reloads with different secondary
1764 memory locations. */
1765 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1766 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1767 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1768 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1769 #endif
1770 && (SMALL_REGISTER_CLASSES
1771 ? (rld[i].rclass == rld[output_reload].rclass)
1772 : (reg_class_subset_p (rld[i].rclass,
1773 rld[output_reload].rclass)
1774 || reg_class_subset_p (rld[output_reload].rclass,
1775 rld[i].rclass)))
1776 && (MATCHES (rld[i].in, rld[output_reload].out)
1777 /* Args reversed because the first arg seems to be
1778 the one that we imagine being modified
1779 while the second is the one that might be affected. */
1780 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1781 rld[i].in)
1782 /* However, if the input is a register that appears inside
1783 the output, then we also can't share.
1784 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1785 If the same reload reg is used for both reg 69 and the
1786 result to be stored in memory, then that result
1787 will clobber the address of the memory ref. */
1788 && ! (REG_P (rld[i].in)
1789 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1790 rld[output_reload].out))))
1791 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1792 rld[i].when_needed != RELOAD_FOR_INPUT)
1793 && (reg_class_size[(int) rld[i].rclass]
1794 || SMALL_REGISTER_CLASSES)
1795 /* We will allow making things slightly worse by combining an
1796 input and an output, but no worse than that. */
1797 && (rld[i].when_needed == RELOAD_FOR_INPUT
1798 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1799 {
1800 int j;
1801
1802 /* We have found a reload to combine with! */
1803 rld[i].out = rld[output_reload].out;
1804 rld[i].out_reg = rld[output_reload].out_reg;
1805 rld[i].outmode = rld[output_reload].outmode;
1806 /* Mark the old output reload as inoperative. */
1807 rld[output_reload].out = 0;
1808 /* The combined reload is needed for the entire insn. */
1809 rld[i].when_needed = RELOAD_OTHER;
1810 /* If the output reload had a secondary reload, copy it. */
1811 if (rld[output_reload].secondary_out_reload != -1)
1812 {
1813 rld[i].secondary_out_reload
1814 = rld[output_reload].secondary_out_reload;
1815 rld[i].secondary_out_icode
1816 = rld[output_reload].secondary_out_icode;
1817 }
1818
1819 #ifdef SECONDARY_MEMORY_NEEDED
1820 /* Copy any secondary MEM. */
1821 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1822 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1823 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1824 #endif
1825 /* If required, minimize the register class. */
1826 if (reg_class_subset_p (rld[output_reload].rclass,
1827 rld[i].rclass))
1828 rld[i].rclass = rld[output_reload].rclass;
1829
1830 /* Transfer all replacements from the old reload to the combined. */
1831 for (j = 0; j < n_replacements; j++)
1832 if (replacements[j].what == output_reload)
1833 replacements[j].what = i;
1834
1835 return;
1836 }
1837
1838 /* If this insn has only one operand that is modified or written (assumed
1839 to be the first), it must be the one corresponding to this reload. It
1840 is safe to use anything that dies in this insn for that output provided
1841 that it does not occur in the output (we already know it isn't an
1842 earlyclobber). If this is an asm insn, give up. */
1843
1844 if (INSN_CODE (this_insn) == -1)
1845 return;
1846
1847 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1848 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1849 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1850 return;
1851
1852 /* See if some hard register that dies in this insn and is not used in
1853 the output is the right class. Only works if the register we pick
1854 up can fully hold our output reload. */
1855 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1856 if (REG_NOTE_KIND (note) == REG_DEAD
1857 && REG_P (XEXP (note, 0))
1858 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1859 rld[output_reload].out)
1860 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1861 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1862 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1863 regno)
1864 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1865 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1866 /* Ensure that a secondary or tertiary reload for this output
1867 won't want this register. */
1868 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1869 || (!(TEST_HARD_REG_BIT
1870 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1871 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1872 || !(TEST_HARD_REG_BIT
1873 (reg_class_contents[(int) rld[secondary_out].rclass],
1874 regno)))))
1875 && !fixed_regs[regno]
1876 /* Check that a former pseudo is valid; see find_dummy_reload. */
1877 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1878 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1879 ORIGINAL_REGNO (XEXP (note, 0)))
1880 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1881 {
1882 rld[output_reload].reg_rtx
1883 = gen_rtx_REG (rld[output_reload].outmode, regno);
1884 return;
1885 }
1886 }
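/* Illustrative sketch, kept under #if 0 so it is never compiled: a
   condensed restatement of the central test combine_reloads applies.
   The helper name is hypothetical, and the real code additionally checks
   register classes, modes, increments, when_needed values, and the case
   where the input and output match outright (omitted here).  An input
   reload RI may take over the single mandatory output reload RO only if
   the input value does not appear inside the value being stored, so the
   input is dead by the time the output is written.  */
#if 0
static int
combinable_example (int ri, int ro)
{
  return (rld[ri].in != 0
	  && ! rld[ri].optional
	  && ! rld[ri].nocombine
	  && rld[ro].out != 0
	  && rld[ro].in == 0
	  && ! earlyclobber_operand_p (rld[ro].out)
	  && ! reg_overlap_mentioned_for_reload_p (rld[ro].out, rld[ri].in));
}
#endif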
1887 \f
1888 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1889 See if one of IN and OUT is a register that may be used;
1890 this is desirable since a spill-register won't be needed.
1891 If so, return the register rtx that proves acceptable.
1892
1893 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1894 RCLASS is the register class required for the reload.
1895
1896 If FOR_REAL is >= 0, it is the number of the reload,
1897 and in some cases when it can be discovered that OUT doesn't need
1898 to be computed, clear out rld[FOR_REAL].out.
1899
1900 If FOR_REAL is -1, this should not be done, because this call
1901 is just to see if a register can be found, not to find and install it.
1902
1903 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1904 puts an additional constraint on being able to use IN for OUT since
1905 IN must not appear elsewhere in the insn (it is assumed that IN itself
1906 is safe from the earlyclobber). */
1907
1908 static rtx
1909 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1910 enum machine_mode inmode, enum machine_mode outmode,
1911 enum reg_class rclass, int for_real, int earlyclobber)
1912 {
1913 rtx in = real_in;
1914 rtx out = real_out;
1915 int in_offset = 0;
1916 int out_offset = 0;
1917 rtx value = 0;
1918
1919 /* If operands exceed a word, we can't use either of them
1920 unless they have the same size. */
1921 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1922 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1923 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1924 return 0;
1925
1926 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1927 respectively refers to a hard register. */
1928
1929 /* Find the inside of any subregs. */
1930 while (GET_CODE (out) == SUBREG)
1931 {
1932 if (REG_P (SUBREG_REG (out))
1933 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1934 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1935 GET_MODE (SUBREG_REG (out)),
1936 SUBREG_BYTE (out),
1937 GET_MODE (out));
1938 out = SUBREG_REG (out);
1939 }
1940 while (GET_CODE (in) == SUBREG)
1941 {
1942 if (REG_P (SUBREG_REG (in))
1943 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1944 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1945 GET_MODE (SUBREG_REG (in)),
1946 SUBREG_BYTE (in),
1947 GET_MODE (in));
1948 in = SUBREG_REG (in);
1949 }
1950
1951 /* Narrow down the reg class, the same way push_reload will;
1952 otherwise we might find a dummy now, but push_reload won't. */
1953 {
1954 enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1955 if (preferred_class != NO_REGS)
1956 rclass = preferred_class;
1957 }
1958
1959 /* See if OUT will do. */
1960 if (REG_P (out)
1961 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1962 {
1963 unsigned int regno = REGNO (out) + out_offset;
1964 unsigned int nwords = hard_regno_nregs[regno][outmode];
1965 rtx saved_rtx;
1966
1967 /* When we consider whether the insn uses OUT,
1968 ignore references within IN. They don't prevent us
1969 from copying IN into OUT, because those refs would
1970 move into the insn that reloads IN.
1971
1972 However, we only ignore IN in its role as this reload.
1973 If the insn uses IN elsewhere and it contains OUT,
1974 that counts. We can't be sure it's the "same" operand
1975 so it might not go through this reload. */
1976 saved_rtx = *inloc;
1977 *inloc = const0_rtx;
1978
1979 if (regno < FIRST_PSEUDO_REGISTER
1980 && HARD_REGNO_MODE_OK (regno, outmode)
1981 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1982 PATTERN (this_insn), outloc))
1983 {
1984 unsigned int i;
1985
1986 for (i = 0; i < nwords; i++)
1987 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1988 regno + i))
1989 break;
1990
1991 if (i == nwords)
1992 {
1993 if (REG_P (real_out))
1994 value = real_out;
1995 else
1996 value = gen_rtx_REG (outmode, regno);
1997 }
1998 }
1999
2000 *inloc = saved_rtx;
2001 }
2002
2003 /* Consider using IN if OUT was not acceptable
2004 or if OUT dies in this insn (like the quotient in a divmod insn).
2005 We can't use IN unless it dies in this insn,
2006 which means we must know accurately which hard regs are live.
2007 Also, the result can't go in IN if IN is used within OUT,
2008 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2009 if (hard_regs_live_known
2010 && REG_P (in)
2011 && REGNO (in) < FIRST_PSEUDO_REGISTER
2012 && (value == 0
2013 || find_reg_note (this_insn, REG_UNUSED, real_out))
2014 && find_reg_note (this_insn, REG_DEAD, real_in)
2015 && !fixed_regs[REGNO (in)]
2016 && HARD_REGNO_MODE_OK (REGNO (in),
2017 /* The only case where out and real_out might
2018 have different modes is where real_out
2019 is a subreg, and in that case, out
2020 has a real mode. */
2021 (GET_MODE (out) != VOIDmode
2022 ? GET_MODE (out) : outmode))
2023 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2024 /* However only do this if we can be sure that this input
2025 operand doesn't correspond with an uninitialized pseudo.
2026 global can assign some hardreg to it that is the same as
2027 the one assigned to a different, also live pseudo (as it
2028 can ignore the conflict). We must never introduce writes
2029 to such hardregs, as they would clobber the other live
2030 pseudo. See PR 20973. */
2031 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2032 ORIGINAL_REGNO (in))
2033 /* Similarly, only do this if we can be sure that the death
2034 note is still valid. global can assign some hardreg to
2035 the pseudo referenced in the note and simultaneously a
2036 subword of this hardreg to a different, also live pseudo,
2037 because only another subword of the hardreg is actually
2038 used in the insn. This cannot happen if the pseudo has
2039 been assigned exactly one hardreg. See PR 33732. */
2040 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2041 {
2042 unsigned int regno = REGNO (in) + in_offset;
2043 unsigned int nwords = hard_regno_nregs[regno][inmode];
2044
2045 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2046 && ! hard_reg_set_here_p (regno, regno + nwords,
2047 PATTERN (this_insn))
2048 && (! earlyclobber
2049 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2050 PATTERN (this_insn), inloc)))
2051 {
2052 unsigned int i;
2053
2054 for (i = 0; i < nwords; i++)
2055 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2056 regno + i))
2057 break;
2058
2059 if (i == nwords)
2060 {
2061 /* If we were going to use OUT as the reload reg
2062 and changed our mind, it means OUT is a dummy that
2063 dies here. So don't bother copying value to it. */
2064 if (for_real >= 0 && value == real_out)
2065 rld[for_real].out = 0;
2066 if (REG_P (real_in))
2067 value = real_in;
2068 else
2069 value = gen_rtx_REG (inmode, regno);
2070 }
2071 }
2072 }
2073
2074 return value;
2075 }
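/* Illustrative sketch, kept under #if 0 so it is never compiled: probing
   for a dummy reload without committing to one, as the alternative
   costing loop in find_reloads does.  The helper name is hypothetical;
   passing FOR_REAL == -1 guarantees that rld[] is left untouched.  */
#if 0
static int
dummy_reload_possible_p (rtx in, rtx out, rtx *inloc, rtx *outloc,
			 enum machine_mode mode, enum reg_class rclass)
{
  return find_dummy_reload (in, out, inloc, outloc, mode, mode,
			    rclass, -1, earlyclobber_operand_p (out)) != 0;
}
#endif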
2076 \f
2077 /* This page contains subroutines used mainly for determining
2078 whether the IN or an OUT of a reload can serve as the
2079 reload register. */
2080
2081 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2082
2083 int
2084 earlyclobber_operand_p (rtx x)
2085 {
2086 int i;
2087
2088 for (i = 0; i < n_earlyclobbers; i++)
2089 if (reload_earlyclobbers[i] == x)
2090 return 1;
2091
2092 return 0;
2093 }
2094
2095 /* Return 1 if expression X alters a hard reg in the range
2096 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2097 either explicitly or in the guise of a pseudo-reg allocated to a hard reg in that range.
2098 X should be the body of an instruction. */
2099
2100 static int
2101 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2102 {
2103 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2104 {
2105 rtx op0 = SET_DEST (x);
2106
2107 while (GET_CODE (op0) == SUBREG)
2108 op0 = SUBREG_REG (op0);
2109 if (REG_P (op0))
2110 {
2111 unsigned int r = REGNO (op0);
2112
2113 /* See if this reg overlaps range under consideration. */
2114 if (r < end_regno
2115 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2116 return 1;
2117 }
2118 }
2119 else if (GET_CODE (x) == PARALLEL)
2120 {
2121 int i = XVECLEN (x, 0) - 1;
2122
2123 for (; i >= 0; i--)
2124 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2125 return 1;
2126 }
2127
2128 return 0;
2129 }
2130
2131 /* Return 1 if ADDR is a valid memory address for mode MODE,
2132 and check that each pseudo reg has the proper kind of
2133 hard reg. */
2134
2135 int
2136 strict_memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
2137 {
2138 #ifdef GO_IF_LEGITIMATE_ADDRESS
2139 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2140 return 0;
2141
2142 win:
2143 return 1;
2144 #else
2145 return targetm.legitimate_address_p (mode, addr, 1);
2146 #endif
2147 }
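/* Illustrative sketch, kept under #if 0 so it is never compiled: after
   reload has run, every remaining memory address must pass this strict
   test, i.e. any register it mentions must be a hard register valid for
   addressing.  The helper name is hypothetical.  */
#if 0
static void
check_reloaded_address_example (rtx mem)
{
  gcc_assert (strict_memory_address_p (GET_MODE (mem), XEXP (mem, 0)));
}
#endif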
2148 \f
2149 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2150 if they are the same hard reg, and has special hacks for
2151 autoincrement and autodecrement.
2152 This is specifically intended for find_reloads to use
2153 in determining whether two operands match.
2154 X is the operand whose number is the lower of the two.
2155
2156 The value is 2 if Y contains a pre-increment that matches
2157 a non-incrementing address in X. */
2158
2159 /* ??? To be completely correct, we should arrange to pass
2160 for X the output operand and for Y the input operand.
2161 For now, we assume that the output operand has the lower number
2162 because that is natural in (SET output (... input ...)). */
2163
2164 int
2165 operands_match_p (rtx x, rtx y)
2166 {
2167 int i;
2168 RTX_CODE code = GET_CODE (x);
2169 const char *fmt;
2170 int success_2;
2171
2172 if (x == y)
2173 return 1;
2174 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2175 && (REG_P (y) || (GET_CODE (y) == SUBREG
2176 && REG_P (SUBREG_REG (y)))))
2177 {
2178 int j;
2179
2180 if (code == SUBREG)
2181 {
2182 i = REGNO (SUBREG_REG (x));
2183 if (i >= FIRST_PSEUDO_REGISTER)
2184 goto slow;
2185 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2186 GET_MODE (SUBREG_REG (x)),
2187 SUBREG_BYTE (x),
2188 GET_MODE (x));
2189 }
2190 else
2191 i = REGNO (x);
2192
2193 if (GET_CODE (y) == SUBREG)
2194 {
2195 j = REGNO (SUBREG_REG (y));
2196 if (j >= FIRST_PSEUDO_REGISTER)
2197 goto slow;
2198 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2199 GET_MODE (SUBREG_REG (y)),
2200 SUBREG_BYTE (y),
2201 GET_MODE (y));
2202 }
2203 else
2204 j = REGNO (y);
2205
2206 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2207 multiple hard register group of scalar integer registers, so that
2208 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2209 register. */
2210 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2211 && SCALAR_INT_MODE_P (GET_MODE (x))
2212 && i < FIRST_PSEUDO_REGISTER)
2213 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2214 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2215 && SCALAR_INT_MODE_P (GET_MODE (y))
2216 && j < FIRST_PSEUDO_REGISTER)
2217 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2218
2219 return i == j;
2220 }
2221 /* If two operands must match, because they are really a single
2222 operand of an assembler insn, then two postincrements are invalid
2223 because the assembler insn would increment only once.
2224 On the other hand, a postincrement matches ordinary indexing
2225 if the postincrement is the output operand. */
2226 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2227 return operands_match_p (XEXP (x, 0), y);
2228 /* Two preincrements are invalid
2229 because the assembler insn would increment only once.
2230 On the other hand, a preincrement matches ordinary indexing
2231 if the preincrement is the input operand.
2232 In this case, return 2, since some callers need to do special
2233 things when this happens. */
2234 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2235 || GET_CODE (y) == PRE_MODIFY)
2236 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2237
2238 slow:
2239
2240 /* Now we have disposed of all the cases in which different rtx codes
2241 can match. */
2242 if (code != GET_CODE (y))
2243 return 0;
2244
2245 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2246 if (GET_MODE (x) != GET_MODE (y))
2247 return 0;
2248
2249 switch (code)
2250 {
2251 case CONST_INT:
2252 case CONST_DOUBLE:
2253 case CONST_FIXED:
2254 return 0;
2255
2256 case LABEL_REF:
2257 return XEXP (x, 0) == XEXP (y, 0);
2258 case SYMBOL_REF:
2259 return XSTR (x, 0) == XSTR (y, 0);
2260
2261 default:
2262 break;
2263 }
2264
2265 /* Compare the elements. If any pair of corresponding elements
2266 fail to match, return 0 for the whole thing.
2267
2268 success_2 = 0;
2269 fmt = GET_RTX_FORMAT (code);
2270 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2271 {
2272 int val, j;
2273 switch (fmt[i])
2274 {
2275 case 'w':
2276 if (XWINT (x, i) != XWINT (y, i))
2277 return 0;
2278 break;
2279
2280 case 'i':
2281 if (XINT (x, i) != XINT (y, i))
2282 return 0;
2283 break;
2284
2285 case 'e':
2286 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2287 if (val == 0)
2288 return 0;
2289 /* If any subexpression returns 2,
2290 we should return 2 if we are successful. */
2291 if (val == 2)
2292 success_2 = 1;
2293 break;
2294
2295 case '0':
2296 break;
2297
2298 case 'E':
2299 if (XVECLEN (x, i) != XVECLEN (y, i))
2300 return 0;
2301 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2302 {
2303 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2304 if (val == 0)
2305 return 0;
2306 if (val == 2)
2307 success_2 = 1;
2308 }
2309 break;
2310
2311 /* It is believed that rtx's at this level will never
2312 contain anything but integers and other rtx's,
2313 except for within LABEL_REFs and SYMBOL_REFs. */
2314 default:
2315 gcc_unreachable ();
2316 }
2317 }
2318 return 1 + success_2;
2319 }
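/* Illustrative sketch, kept under #if 0 so it is never compiled: how a
   caller interprets the three possible results documented above.  The
   helper name is hypothetical.  */
#if 0
static void
operands_match_example (rtx x, rtx y)
{
  switch (operands_match_p (x, y))
    {
    case 0:
      /* The operands cannot be treated as the same.  */
      break;
    case 1:
      /* Ordinary match, e.g. the same hard register in both operands.  */
      break;
    case 2:
      /* Y pre-increments an address that X uses unmodified; the caller
	 must account for the side effect itself.  */
      break;
    }
}
#endif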
2320 \f
2321 /* Describe the range of registers or memory referenced by X.
2322 If X is a register, set REG_FLAG and put the first register
2323 number into START and the last plus one into END.
2324 If X is a memory reference, put a base address into BASE
2325 and a range of integer offsets into START and END.
2326 If X is pushing on the stack, we can assume it causes no trouble,
2327 so we set the SAFE field. */
2328
2329 static struct decomposition
2330 decompose (rtx x)
2331 {
2332 struct decomposition val;
2333 int all_const = 0;
2334
2335 memset (&val, 0, sizeof (val));
2336
2337 switch (GET_CODE (x))
2338 {
2339 case MEM:
2340 {
2341 rtx base = NULL_RTX, offset = 0;
2342 rtx addr = XEXP (x, 0);
2343
2344 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2345 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2346 {
2347 val.base = XEXP (addr, 0);
2348 val.start = -GET_MODE_SIZE (GET_MODE (x));
2349 val.end = GET_MODE_SIZE (GET_MODE (x));
2350 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2351 return val;
2352 }
2353
2354 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2355 {
2356 if (GET_CODE (XEXP (addr, 1)) == PLUS
2357 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2358 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2359 {
2360 val.base = XEXP (addr, 0);
2361 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2362 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2363 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2364 return val;
2365 }
2366 }
2367
2368 if (GET_CODE (addr) == CONST)
2369 {
2370 addr = XEXP (addr, 0);
2371 all_const = 1;
2372 }
2373 if (GET_CODE (addr) == PLUS)
2374 {
2375 if (CONSTANT_P (XEXP (addr, 0)))
2376 {
2377 base = XEXP (addr, 1);
2378 offset = XEXP (addr, 0);
2379 }
2380 else if (CONSTANT_P (XEXP (addr, 1)))
2381 {
2382 base = XEXP (addr, 0);
2383 offset = XEXP (addr, 1);
2384 }
2385 }
2386
2387 if (offset == 0)
2388 {
2389 base = addr;
2390 offset = const0_rtx;
2391 }
2392 if (GET_CODE (offset) == CONST)
2393 offset = XEXP (offset, 0);
2394 if (GET_CODE (offset) == PLUS)
2395 {
2396 if (CONST_INT_P (XEXP (offset, 0)))
2397 {
2398 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2399 offset = XEXP (offset, 0);
2400 }
2401 else if (CONST_INT_P (XEXP (offset, 1)))
2402 {
2403 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2404 offset = XEXP (offset, 1);
2405 }
2406 else
2407 {
2408 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2409 offset = const0_rtx;
2410 }
2411 }
2412 else if (!CONST_INT_P (offset))
2413 {
2414 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2415 offset = const0_rtx;
2416 }
2417
2418 if (all_const && GET_CODE (base) == PLUS)
2419 base = gen_rtx_CONST (GET_MODE (base), base);
2420
2421 gcc_assert (CONST_INT_P (offset));
2422
2423 val.start = INTVAL (offset);
2424 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2425 val.base = base;
2426 }
2427 break;
2428
2429 case REG:
2430 val.reg_flag = 1;
2431 val.start = true_regnum (x);
2432 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2433 {
2434 /* A pseudo with no hard reg. */
2435 val.start = REGNO (x);
2436 val.end = val.start + 1;
2437 }
2438 else
2439 /* A hard reg. */
2440 val.end = end_hard_regno (GET_MODE (x), val.start);
2441 break;
2442
2443 case SUBREG:
2444 if (!REG_P (SUBREG_REG (x)))
2445 /* This could be more precise, but it's good enough. */
2446 return decompose (SUBREG_REG (x));
2447 val.reg_flag = 1;
2448 val.start = true_regnum (x);
2449 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2450 return decompose (SUBREG_REG (x));
2451 else
2452 /* A hard reg. */
2453 val.end = val.start + subreg_nregs (x);
2454 break;
2455
2456 case SCRATCH:
2457 /* This hasn't been assigned yet, so it can't conflict yet. */
2458 val.safe = 1;
2459 break;
2460
2461 default:
2462 gcc_assert (CONSTANT_P (x));
2463 val.safe = 1;
2464 break;
2465 }
2466 return val;
2467 }
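/* Illustrative sketch, kept under #if 0 so it is never compiled: the
   decomposition produced for a simple frame slot, with SImode being four
   bytes.  The helper name is hypothetical.  For
   (mem:SI (plus (reg fp) (const_int 8))) the base is the frame pointer
   and the byte range covered is [8, 12), so two slots conflict exactly
   when their ranges intersect and their bases are equal.  */
#if 0
static void
decompose_example (void)
{
  rtx slot = gen_rtx_MEM (SImode,
			  gen_rtx_PLUS (Pmode, frame_pointer_rtx,
					GEN_INT (8)));
  struct decomposition d = decompose (slot);
  gcc_assert (d.base == frame_pointer_rtx
	      && d.start == 8 && d.end == 12
	      && ! d.reg_flag && ! d.safe);
}
#endif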
2468
2469 /* Return 1 if altering Y will not modify the value of X.
2470 Y is also described by YDATA, which should be decompose (Y). */
2471
2472 static int
2473 immune_p (rtx x, rtx y, struct decomposition ydata)
2474 {
2475 struct decomposition xdata;
2476
2477 if (ydata.reg_flag)
2478 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2479 if (ydata.safe)
2480 return 1;
2481
2482 gcc_assert (MEM_P (y));
2483 /* If Y is memory and X is not, Y can't affect X. */
2484 if (!MEM_P (x))
2485 return 1;
2486
2487 xdata = decompose (x);
2488
2489 if (! rtx_equal_p (xdata.base, ydata.base))
2490 {
2491 /* If bases are distinct symbolic constants, there is no overlap. */
2492 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2493 return 1;
2494 /* Constants and stack slots never overlap. */
2495 if (CONSTANT_P (xdata.base)
2496 && (ydata.base == frame_pointer_rtx
2497 || ydata.base == hard_frame_pointer_rtx
2498 || ydata.base == stack_pointer_rtx))
2499 return 1;
2500 if (CONSTANT_P (ydata.base)
2501 && (xdata.base == frame_pointer_rtx
2502 || xdata.base == hard_frame_pointer_rtx
2503 || xdata.base == stack_pointer_rtx))
2504 return 1;
2505 /* If either base is variable, we don't know anything. */
2506 return 0;
2507 }
2508
2509 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2510 }
2511
2512 /* Similar, but calls decompose. */
2513
2514 int
2515 safe_from_earlyclobber (rtx op, rtx clobber)
2516 {
2517 struct decomposition early_data;
2518
2519 early_data = decompose (clobber);
2520 return immune_p (op, clobber, early_data);
2521 }
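/* Illustrative sketch, kept under #if 0 so it is never compiled: two
   frame slots at offsets 0 and 8 never overlap, so an early clobber of
   one cannot change the other.  The helper name is hypothetical.  */
#if 0
static void
safe_from_earlyclobber_example (void)
{
  rtx slot0 = gen_rtx_MEM (SImode, frame_pointer_rtx);
  rtx slot8 = gen_rtx_MEM (SImode,
			   gen_rtx_PLUS (Pmode, frame_pointer_rtx,
					 GEN_INT (8)));
  gcc_assert (safe_from_earlyclobber (slot0, slot8));
}
#endif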
2522 \f
2523 /* Main entry point of this file: search the body of INSN
2524 for values that need reloading and record them with push_reload.
2525 REPLACE nonzero means record also where the values occur
2526 so that subst_reloads can be used.
2527
2528 IND_LEVELS says how many levels of indirection are supported by this
2529 machine; a value of zero means that a memory reference is not a valid
2530 memory address.
2531
2532 LIVE_KNOWN says we have valid information about which hard
2533 regs are live at each point in the program; this is true when
2534 we are called from global_alloc but false when stupid register
2535 allocation has been done.
2536
2537 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2538 which is nonnegative if the reg has been commandeered for reloading into.
2539 It is copied into STATIC_RELOAD_REG_P and referenced from there
2540 by various subroutines.
2541
2542 Return TRUE if some operands need to be changed, because of swapping
2543 commutative operands, reg_equiv_address substitution, or whatever. */
2544
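/* Illustrative sketch, kept under #if 0 so it is never compiled: the two
   ways reload1.c drives this entry point, first to count and describe
   the reloads, then with REPLACE nonzero so subst_reloads can patch the
   recorded locations.  The helper name and the indirection/liveness
   arguments are placeholders, not the exact calls made by reload1.c.  */
#if 0
static void
find_reloads_usage_example (rtx insn, short *reload_reg_p)
{
  /* First pass: describe the reloads needed for INSN.  */
  find_reloads (insn, 0, 0, 0, reload_reg_p);
  /* Final pass: also record where to substitute the reload registers.  */
  find_reloads (insn, 1, 0, 1, reload_reg_p);
}
#endif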
2545 int
2546 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2547 short *reload_reg_p)
2548 {
2549 int insn_code_number;
2550 int i, j;
2551 int noperands;
2552 /* These start out as the constraints for the insn
2553 and they are chewed up as we consider alternatives. */
2554 const char *constraints[MAX_RECOG_OPERANDS];
2555 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2556 a register. */
2557 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2558 char pref_or_nothing[MAX_RECOG_OPERANDS];
2559 /* Nonzero for a MEM operand whose entire address needs a reload.
2560 May be -1 to indicate the entire address may or may not need a reload. */
2561 int address_reloaded[MAX_RECOG_OPERANDS];
2562 /* Nonzero for an address operand that needs to be completely reloaded.
2563 May be -1 to indicate the entire operand may or may not need a reload. */
2564 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2565 /* Value of enum reload_type to use for operand. */
2566 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2567 /* Value of enum reload_type to use within address of operand. */
2568 enum reload_type address_type[MAX_RECOG_OPERANDS];
2569 /* Save the usage of each operand. */
2570 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2571 int no_input_reloads = 0, no_output_reloads = 0;
2572 int n_alternatives;
2573 enum reg_class this_alternative[MAX_RECOG_OPERANDS];
2574 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2575 char this_alternative_win[MAX_RECOG_OPERANDS];
2576 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2577 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2578 int this_alternative_matches[MAX_RECOG_OPERANDS];
2579 int swapped;
2580 int goal_alternative[MAX_RECOG_OPERANDS];
2581 int this_alternative_number;
2582 int goal_alternative_number = 0;
2583 int operand_reloadnum[MAX_RECOG_OPERANDS];
2584 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2585 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2586 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2587 char goal_alternative_win[MAX_RECOG_OPERANDS];
2588 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2589 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2590 int goal_alternative_swapped;
2591 int best;
2592 int commutative;
2593 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2594 rtx substed_operand[MAX_RECOG_OPERANDS];
2595 rtx body = PATTERN (insn);
2596 rtx set = single_set (insn);
2597 int goal_earlyclobber = 0, this_earlyclobber;
2598 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2599 int retval = 0;
2600
2601 this_insn = insn;
2602 n_reloads = 0;
2603 n_replacements = 0;
2604 n_earlyclobbers = 0;
2605 replace_reloads = replace;
2606 hard_regs_live_known = live_known;
2607 static_reload_reg_p = reload_reg_p;
2608
2609 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2610 neither are insns that SET cc0. Insns that use CC0 are not allowed
2611 to have any input reloads. */
2612 if (JUMP_P (insn) || CALL_P (insn))
2613 no_output_reloads = 1;
2614
2615 #ifdef HAVE_cc0
2616 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2617 no_input_reloads = 1;
2618 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2619 no_output_reloads = 1;
2620 #endif
2621
2622 #ifdef SECONDARY_MEMORY_NEEDED
2623 /* The eliminated forms of any secondary memory locations are per-insn, so
2624 clear them out here. */
2625
2626 if (secondary_memlocs_elim_used)
2627 {
2628 memset (secondary_memlocs_elim, 0,
2629 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2630 secondary_memlocs_elim_used = 0;
2631 }
2632 #endif
2633
2634 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2635 is cheap to move between them. If it is not, there may not be an insn
2636 to do the copy, so we may need a reload. */
2637 if (GET_CODE (body) == SET
2638 && REG_P (SET_DEST (body))
2639 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2640 && REG_P (SET_SRC (body))
2641 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2642 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2643 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2644 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2645 return 0;
2646
2647 extract_insn (insn);
2648
2649 noperands = reload_n_operands = recog_data.n_operands;
2650 n_alternatives = recog_data.n_alternatives;
2651
2652 /* Just return "no reloads" if insn has no operands with constraints. */
2653 if (noperands == 0 || n_alternatives == 0)
2654 return 0;
2655
2656 insn_code_number = INSN_CODE (insn);
2657 this_insn_is_asm = insn_code_number < 0;
2658
2659 memcpy (operand_mode, recog_data.operand_mode,
2660 noperands * sizeof (enum machine_mode));
2661 memcpy (constraints, recog_data.constraints,
2662 noperands * sizeof (const char *));
2663
2664 commutative = -1;
2665
2666 /* If we will need to know, later, whether some pair of operands
2667 are the same, we must compare them now and save the result.
2668 Reloading the base and index registers will clobber them
2669 and afterward they will fail to match. */
2670
2671 for (i = 0; i < noperands; i++)
2672 {
2673 const char *p;
2674 int c;
2675 char *end;
2676
2677 substed_operand[i] = recog_data.operand[i];
2678 p = constraints[i];
2679
2680 modified[i] = RELOAD_READ;
2681
2682 /* Scan this operand's constraint to see if it is an output operand,
2683 an in-out operand, is commutative, or should match another. */
2684
2685 while ((c = *p))
2686 {
2687 p += CONSTRAINT_LEN (c, p);
2688 switch (c)
2689 {
2690 case '=':
2691 modified[i] = RELOAD_WRITE;
2692 break;
2693 case '+':
2694 modified[i] = RELOAD_READ_WRITE;
2695 break;
2696 case '%':
2697 {
2698 /* The last operand should not be marked commutative. */
2699 gcc_assert (i != noperands - 1);
2700
2701 /* We currently only support one commutative pair of
2702 operands. Some existing asm code currently uses more
2703 than one pair. Previously, that would usually work,
2704 but sometimes it would crash the compiler. We
2705 continue supporting that case as well as we can by
2706 silently ignoring all but the first pair. In the
2707 future we may handle it correctly. */
2708 if (commutative < 0)
2709 commutative = i;
2710 else
2711 gcc_assert (this_insn_is_asm);
2712 }
2713 break;
2714 /* Use of ISDIGIT is tempting here, but it may get expensive because
2715 of locale support we don't want. */
2716 case '0': case '1': case '2': case '3': case '4':
2717 case '5': case '6': case '7': case '8': case '9':
2718 {
2719 c = strtoul (p - 1, &end, 10);
2720 p = end;
2721
2722 operands_match[c][i]
2723 = operands_match_p (recog_data.operand[c],
2724 recog_data.operand[i]);
2725
2726 /* An operand may not match itself. */
2727 gcc_assert (c != i);
2728
2729 /* If C can be commuted with C+1, and C might need to match I,
2730 then C+1 might also need to match I. */
2731 if (commutative >= 0)
2732 {
2733 if (c == commutative || c == commutative + 1)
2734 {
2735 int other = c + (c == commutative ? 1 : -1);
2736 operands_match[other][i]
2737 = operands_match_p (recog_data.operand[other],
2738 recog_data.operand[i]);
2739 }
2740 if (i == commutative || i == commutative + 1)
2741 {
2742 int other = i + (i == commutative ? 1 : -1);
2743 operands_match[c][other]
2744 = operands_match_p (recog_data.operand[c],
2745 recog_data.operand[other]);
2746 }
2747 /* Note that C is supposed to be less than I.
2748 No need to consider altering both C and I because in
2749 that case we would alter one into the other. */
2750 }
2751 }
2752 }
2753 }
2754 }
2755
2756 /* Examine each operand that is a memory reference or memory address
2757 and reload parts of the addresses into index registers.
2758 Also here any references to pseudo regs that didn't get hard regs
2759 but are equivalent to constants get replaced in the insn itself
2760 with those constants. Nobody will ever see them again.
2761
2762 Finally, set up the preferred classes of each operand. */
2763
2764 for (i = 0; i < noperands; i++)
2765 {
2766 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2767
2768 address_reloaded[i] = 0;
2769 address_operand_reloaded[i] = 0;
2770 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2771 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2772 : RELOAD_OTHER);
2773 address_type[i]
2774 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2775 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2776 : RELOAD_OTHER);
2777
2778 if (*constraints[i] == 0)
2779 /* Ignore things like match_operator operands. */
2780 ;
2781 else if (constraints[i][0] == 'p'
2782 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2783 {
2784 address_operand_reloaded[i]
2785 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2786 recog_data.operand[i],
2787 recog_data.operand_loc[i],
2788 i, operand_type[i], ind_levels, insn);
2789
2790 /* If we now have a simple operand where we used to have a
2791 PLUS or MULT, re-recognize and try again. */
2792 if ((OBJECT_P (*recog_data.operand_loc[i])
2793 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2794 && (GET_CODE (recog_data.operand[i]) == MULT
2795 || GET_CODE (recog_data.operand[i]) == PLUS))
2796 {
2797 INSN_CODE (insn) = -1;
2798 retval = find_reloads (insn, replace, ind_levels, live_known,
2799 reload_reg_p);
2800 return retval;
2801 }
2802
2803 recog_data.operand[i] = *recog_data.operand_loc[i];
2804 substed_operand[i] = recog_data.operand[i];
2805
2806 /* Address operands are reloaded in their existing mode,
2807 no matter what is specified in the machine description. */
2808 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2809 }
2810 else if (code == MEM)
2811 {
2812 address_reloaded[i]
2813 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2814 recog_data.operand_loc[i],
2815 XEXP (recog_data.operand[i], 0),
2816 &XEXP (recog_data.operand[i], 0),
2817 i, address_type[i], ind_levels, insn);
2818 recog_data.operand[i] = *recog_data.operand_loc[i];
2819 substed_operand[i] = recog_data.operand[i];
2820 }
2821 else if (code == SUBREG)
2822 {
2823 rtx reg = SUBREG_REG (recog_data.operand[i]);
2824 rtx op
2825 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2826 ind_levels,
2827 set != 0
2828 && &SET_DEST (set) == recog_data.operand_loc[i],
2829 insn,
2830 &address_reloaded[i]);
2831
2832 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2833 that didn't get a hard register, emit a USE with a REG_EQUAL
2834 note in front so that we might inherit a previous, possibly
2835 wider reload. */
2836
2837 if (replace
2838 && MEM_P (op)
2839 && REG_P (reg)
2840 && (GET_MODE_SIZE (GET_MODE (reg))
2841 >= GET_MODE_SIZE (GET_MODE (op)))
2842 && reg_equiv_constant[REGNO (reg)] == 0)
2843 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2844 insn),
2845 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2846
2847 substed_operand[i] = recog_data.operand[i] = op;
2848 }
2849 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2850 /* We can get a PLUS as an "operand" as a result of register
2851 elimination. See eliminate_regs and gen_reload. We handle
2852 a unary operator by reloading the operand. */
2853 substed_operand[i] = recog_data.operand[i]
2854 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2855 ind_levels, 0, insn,
2856 &address_reloaded[i]);
2857 else if (code == REG)
2858 {
2859 /* This is equivalent to calling find_reloads_toplev.
2860 The code is duplicated for speed.
2861 When we find a pseudo always equivalent to a constant,
2862 we replace it by the constant. We must be sure, however,
2863 that we don't try to replace it in the insn in which it
2864 is being set. */
2865 int regno = REGNO (recog_data.operand[i]);
2866 if (reg_equiv_constant[regno] != 0
2867 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2868 {
2869 /* Record the existing mode so that the check if constants are
2870 allowed will work when operand_mode isn't specified. */
2871
2872 if (operand_mode[i] == VOIDmode)
2873 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2874
2875 substed_operand[i] = recog_data.operand[i]
2876 = reg_equiv_constant[regno];
2877 }
2878 if (reg_equiv_memory_loc[regno] != 0
2879 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2880 /* We need not give a valid is_set_dest argument since the case
2881 of a constant equivalence was checked above. */
2882 substed_operand[i] = recog_data.operand[i]
2883 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2884 ind_levels, 0, insn,
2885 &address_reloaded[i]);
2886 }
2887 /* If the operand is still a register (we didn't replace it with an
2888 equivalent), get the preferred class to reload it into. */
2889 code = GET_CODE (recog_data.operand[i]);
2890 preferred_class[i]
2891 = ((code == REG && REGNO (recog_data.operand[i])
2892 >= FIRST_PSEUDO_REGISTER)
2893 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2894 : NO_REGS);
2895 pref_or_nothing[i]
2896 = (code == REG
2897 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2898 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2899 }
2900
2901 /* If this is simply a copy from operand 1 to operand 0, merge the
2902 preferred classes for the operands. */
2903 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2904 && recog_data.operand[1] == SET_SRC (set))
2905 {
2906 preferred_class[0] = preferred_class[1]
2907 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2908 pref_or_nothing[0] |= pref_or_nothing[1];
2909 pref_or_nothing[1] |= pref_or_nothing[0];
2910 }
2911
2912 /* Now see what we need for pseudo-regs that didn't get hard regs
2913 or got the wrong kind of hard reg. For this, we must consider
2914 all the operands together against the register constraints. */
2915
2916 best = MAX_RECOG_OPERANDS * 2 + 600;
2917
2918 swapped = 0;
2919 goal_alternative_swapped = 0;
2920 try_swapped:
2921
2922 /* The constraints are made of several alternatives.
2923 Each operand's constraint looks like foo,bar,... with commas
2924 separating the alternatives. The first alternatives for all
2925 operands go together, the second alternatives go together, etc.
2926
2927 First loop over alternatives. */
2928
2929 for (this_alternative_number = 0;
2930 this_alternative_number < n_alternatives;
2931 this_alternative_number++)
2932 {
2933 /* Loop over operands for one constraint alternative. */
2934 /* LOSERS counts those that don't fit this alternative
2935 and would require loading. */
2936 int losers = 0;
2937 /* BAD is set to 1 if some operand can't fit this alternative
2938 even after reloading. */
2939 int bad = 0;
2940 /* REJECT is a count of how undesirable this alternative says it is
2941 if any reloading is required. If the alternative matches exactly
2942 then REJECT is ignored, but otherwise it gets this much
2943 counted against it in addition to the reloading needed. Each
2944 ? counts three times here since we want the disparaging caused by
2945 a bad register class to only count 1/3 as much. */
2946 int reject = 0;
2947
2948 if (!recog_data.alternative_enabled_p[this_alternative_number])
2949 {
2950 int i;
2951
2952 for (i = 0; i < recog_data.n_operands; i++)
2953 constraints[i] = skip_alternative (constraints[i]);
2954
2955 continue;
2956 }
2957
2958 this_earlyclobber = 0;
2959
2960 for (i = 0; i < noperands; i++)
2961 {
2962 const char *p = constraints[i];
2963 char *end;
2964 int len;
2965 int win = 0;
2966 int did_match = 0;
2967 /* 0 => this operand can be reloaded somehow for this alternative. */
2968 int badop = 1;
2969 /* 0 => this operand can be reloaded if the alternative allows regs. */
2970 int winreg = 0;
2971 int c;
2972 int m;
2973 rtx operand = recog_data.operand[i];
2974 int offset = 0;
2975 /* Nonzero means this is a MEM that must be reloaded into a reg
2976 regardless of what the constraint says. */
2977 int force_reload = 0;
2978 int offmemok = 0;
2979 /* Nonzero if a constant forced into memory would be OK for this
2980 operand. */
2981 int constmemok = 0;
2982 int earlyclobber = 0;
2983
2984 /* If the predicate accepts a unary operator, it means that
2985 we need to reload the operand, but do not do this for
2986 match_operator and friends. */
2987 if (UNARY_P (operand) && *p != 0)
2988 operand = XEXP (operand, 0);
2989
2990 /* If the operand is a SUBREG, extract
2991 the REG or MEM (or maybe even a constant) within.
2992 (Constants can occur as a result of reg_equiv_constant.) */
2993
2994 while (GET_CODE (operand) == SUBREG)
2995 {
2996 /* Offset only matters when operand is a REG and
2997 it is a hard reg. This is because it is passed
2998 to reg_fits_class_p if it is a REG and all pseudos
2999 return 0 from that function. */
3000 if (REG_P (SUBREG_REG (operand))
3001 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3002 {
3003 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3004 GET_MODE (SUBREG_REG (operand)),
3005 SUBREG_BYTE (operand),
3006 GET_MODE (operand)) < 0)
3007 force_reload = 1;
3008 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3009 GET_MODE (SUBREG_REG (operand)),
3010 SUBREG_BYTE (operand),
3011 GET_MODE (operand));
3012 }
3013 operand = SUBREG_REG (operand);
3014 /* Force reload if this is a constant or PLUS or if there may
3015 be a problem accessing OPERAND in the outer mode. */
3016 if (CONSTANT_P (operand)
3017 || GET_CODE (operand) == PLUS
3018 /* We must force a reload of paradoxical SUBREGs
3019 of a MEM because the alignment of the inner value
3020 may not be enough to do the outer reference. On
3021 big-endian machines, it may also reference outside
3022 the object.
3023
3024 On machines that extend byte operations and we have a
3025 SUBREG where both the inner and outer modes are no wider
3026 than a word and the inner mode is narrower, is integral,
3027 and gets extended when loaded from memory, combine.c has
3028 made assumptions about the behavior of the machine in such
3029 register access. If the data is, in fact, in memory we
3030 must always load using the size assumed to be in the
3031 register and let the insn do the different-sized
3032 accesses.
3033
3034 This is doubly true if WORD_REGISTER_OPERATIONS. In
3035 this case eliminate_regs has left non-paradoxical
3036 subregs for push_reload to see. Make sure it does
3037 by forcing the reload.
3038
3039 ??? When is it right at this stage to have a subreg
3040 of a mem that is _not_ to be handled specially? IMO
3041 those should have been reduced to just a mem. */
3042 || ((MEM_P (operand)
3043 || (REG_P (operand)
3044 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3045 #ifndef WORD_REGISTER_OPERATIONS
3046 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3047 < BIGGEST_ALIGNMENT)
3048 && (GET_MODE_SIZE (operand_mode[i])
3049 > GET_MODE_SIZE (GET_MODE (operand))))
3050 || BYTES_BIG_ENDIAN
3051 #ifdef LOAD_EXTEND_OP
3052 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3053 && (GET_MODE_SIZE (GET_MODE (operand))
3054 <= UNITS_PER_WORD)
3055 && (GET_MODE_SIZE (operand_mode[i])
3056 > GET_MODE_SIZE (GET_MODE (operand)))
3057 && INTEGRAL_MODE_P (GET_MODE (operand))
3058 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3059 #endif
3060 )
3061 #endif
3062 )
3063 )
3064 force_reload = 1;
3065 }
3066
3067 this_alternative[i] = NO_REGS;
3068 this_alternative_win[i] = 0;
3069 this_alternative_match_win[i] = 0;
3070 this_alternative_offmemok[i] = 0;
3071 this_alternative_earlyclobber[i] = 0;
3072 this_alternative_matches[i] = -1;
3073
3074 /* An empty constraint or empty alternative
3075 allows anything which matched the pattern. */
3076 if (*p == 0 || *p == ',')
3077 win = 1, badop = 0;
3078
3079 /* Scan this alternative's specs for this operand;
3080 set WIN if the operand fits any letter in this alternative.
3081 Otherwise, clear BADOP if this operand could
3082 fit some letter after reloads,
3083 or set WINREG if this operand could fit after reloads
3084 provided the constraint allows some registers. */
3085
3086 do
3087 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3088 {
3089 case '\0':
3090 len = 0;
3091 break;
3092 case ',':
3093 c = '\0';
3094 break;
3095
3096 case '=': case '+': case '*':
3097 break;
3098
3099 case '%':
3100 /* We only support one commutative marker, the first
3101 one. We already set commutative above. */
3102 break;
3103
3104 case '?':
3105 reject += 6;
3106 break;
3107
3108 case '!':
3109 reject = 600;
3110 break;
3111
3112 case '#':
3113 /* Ignore rest of this alternative as far as
3114 reloading is concerned. */
3115 do
3116 p++;
3117 while (*p && *p != ',');
3118 len = 0;
3119 break;
3120
3121 case '0': case '1': case '2': case '3': case '4':
3122 case '5': case '6': case '7': case '8': case '9':
3123 m = strtoul (p, &end, 10);
3124 p = end;
3125 len = 0;
3126
3127 this_alternative_matches[i] = m;
3128 /* We are supposed to match a previous operand.
3129 If we do, we win if that one did.
3130 If we do not, count both of the operands as losers.
3131 (This is too conservative, since most of the time
3132 only a single reload insn will be needed to make
3133 the two operands win. As a result, this alternative
3134 may be rejected when it is actually desirable.) */
3135 if ((swapped && (m != commutative || i != commutative + 1))
3136 /* If we are matching as if two operands were swapped,
3137 also pretend that operands_match had been computed
3138 with swapped.
3139 But if I is the second of those and C is the first,
3140 don't exchange them, because operands_match is valid
3141 only on one side of its diagonal. */
3142 ? (operands_match
3143 [(m == commutative || m == commutative + 1)
3144 ? 2 * commutative + 1 - m : m]
3145 [(i == commutative || i == commutative + 1)
3146 ? 2 * commutative + 1 - i : i])
3147 : operands_match[m][i])
3148 {
3149 /* If we are matching a non-offsettable address where an
3150 offsettable address was expected, then we must reject
3151 this combination, because we can't reload it. */
3152 if (this_alternative_offmemok[m]
3153 && MEM_P (recog_data.operand[m])
3154 && this_alternative[m] == NO_REGS
3155 && ! this_alternative_win[m])
3156 bad = 1;
3157
3158 did_match = this_alternative_win[m];
3159 }
3160 else
3161 {
3162 /* Operands don't match. */
3163 rtx value;
3164 int loc1, loc2;
3165 /* Retroactively mark the operand we had to match
3166 as a loser, if it wasn't already. */
3167 if (this_alternative_win[m])
3168 losers++;
3169 this_alternative_win[m] = 0;
3170 if (this_alternative[m] == NO_REGS)
3171 bad = 1;
3172 /* But count the pair only once in the total badness of
3173 this alternative, if the pair can be a dummy reload.
3174 The pointers in operand_loc are not swapped; swap
3175 them by hand if necessary. */
3176 if (swapped && i == commutative)
3177 loc1 = commutative + 1;
3178 else if (swapped && i == commutative + 1)
3179 loc1 = commutative;
3180 else
3181 loc1 = i;
3182 if (swapped && m == commutative)
3183 loc2 = commutative + 1;
3184 else if (swapped && m == commutative + 1)
3185 loc2 = commutative;
3186 else
3187 loc2 = m;
3188 value
3189 = find_dummy_reload (recog_data.operand[i],
3190 recog_data.operand[m],
3191 recog_data.operand_loc[loc1],
3192 recog_data.operand_loc[loc2],
3193 operand_mode[i], operand_mode[m],
3194 this_alternative[m], -1,
3195 this_alternative_earlyclobber[m]);
3196
3197 if (value != 0)
3198 losers--;
3199 }
3200 /* This can be fixed with reloads if the operand
3201 we are supposed to match can be fixed with reloads. */
3202 badop = 0;
3203 this_alternative[i] = this_alternative[m];
3204
3205 /* If we have to reload this operand and some previous
3206 operand also had to match the same thing as this
3207 operand, we don't know how to do that. So reject this
3208 alternative. */
3209 if (! did_match || force_reload)
3210 for (j = 0; j < i; j++)
3211 if (this_alternative_matches[j]
3212 == this_alternative_matches[i])
3213 badop = 1;
3214 break;
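	    /* Illustration (a hypothetical two-address insn):  with
	       operand 0 constrained "=r" and operand 1 constrained "0",
	       operand 1 must be the same rtx as operand 0.  When
	       operands_match confirms this, operand 1 wins exactly when
	       operand 0 did; otherwise both are counted as losers unless
	       find_dummy_reload shows that one reload register can serve
	       the pair.  */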
3215
3216 case 'p':
3217 /* All necessary reloads for an address_operand
3218 were handled in find_reloads_address. */
3219 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3220 SCRATCH);
3221 win = 1;
3222 badop = 0;
3223 break;
3224
3225 case TARGET_MEM_CONSTRAINT:
3226 if (force_reload)
3227 break;
3228 if (MEM_P (operand)
3229 || (REG_P (operand)
3230 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3231 && reg_renumber[REGNO (operand)] < 0))
3232 win = 1;
3233 if (CONST_POOL_OK_P (operand))
3234 badop = 0;
3235 constmemok = 1;
3236 break;
3237
3238 case '<':
3239 if (MEM_P (operand)
3240 && ! address_reloaded[i]
3241 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3242 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3243 win = 1;
3244 break;
3245
3246 case '>':
3247 if (MEM_P (operand)
3248 && ! address_reloaded[i]
3249 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3250 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3251 win = 1;
3252 break;
3253
3254 /* Memory operand whose address is not offsettable. */
3255 case 'V':
3256 if (force_reload)
3257 break;
3258 if (MEM_P (operand)
3259 && ! (ind_levels ? offsettable_memref_p (operand)
3260 : offsettable_nonstrict_memref_p (operand))
3261 /* Certain mem addresses will become offsettable
3262 after they themselves are reloaded. This is important;
3263 		   we don't want our own handling of non-offsettable
3264 		   addresses to override the handling of reg_equiv_address.  */
3265 && !(REG_P (XEXP (operand, 0))
3266 && (ind_levels == 0
3267 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3268 win = 1;
3269 break;
3270
3271 /* Memory operand whose address is offsettable. */
3272 case 'o':
3273 if (force_reload)
3274 break;
3275 if ((MEM_P (operand)
3276 /* If IND_LEVELS, find_reloads_address won't reload a
3277 pseudo that didn't get a hard reg, so we have to
3278 reject that case. */
3279 && ((ind_levels ? offsettable_memref_p (operand)
3280 : offsettable_nonstrict_memref_p (operand))
3281 /* A reloaded address is offsettable because it is now
3282 just a simple register indirect. */
3283 || address_reloaded[i] == 1))
3284 || (REG_P (operand)
3285 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3286 && reg_renumber[REGNO (operand)] < 0
3287 /* If reg_equiv_address is nonzero, we will be
3288 loading it into a register; hence it will be
3289 offsettable, but we cannot say that reg_equiv_mem
3290 is offsettable without checking. */
3291 && ((reg_equiv_mem[REGNO (operand)] != 0
3292 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3293 || (reg_equiv_address[REGNO (operand)] != 0))))
3294 win = 1;
3295 if (CONST_POOL_OK_P (operand)
3296 || MEM_P (operand))
3297 badop = 0;
3298 constmemok = 1;
3299 offmemok = 1;
3300 break;
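	    /* For example, (mem (plus (reg X) (const_int 4))) is normally
	       offsettable and satisfies 'o', whereas an autoincrement
	       address such as (mem (post_inc (reg X))) is not and instead
	       satisfies 'V' above, since adding a displacement to it
	       would change its meaning.  */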
3301
3302 case '&':
3303 /* Output operand that is stored before the need for the
3304 input operands (and their index registers) is over. */
3305 earlyclobber = 1, this_earlyclobber = 1;
3306 break;
3307
3308 case 'E':
3309 case 'F':
3310 if (GET_CODE (operand) == CONST_DOUBLE
3311 || (GET_CODE (operand) == CONST_VECTOR
3312 && (GET_MODE_CLASS (GET_MODE (operand))
3313 == MODE_VECTOR_FLOAT)))
3314 win = 1;
3315 break;
3316
3317 case 'G':
3318 case 'H':
3319 if (GET_CODE (operand) == CONST_DOUBLE
3320 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3321 win = 1;
3322 break;
3323
3324 case 's':
3325 if (CONST_INT_P (operand)
3326 || (GET_CODE (operand) == CONST_DOUBLE
3327 && GET_MODE (operand) == VOIDmode))
3328 break;
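	    /* Otherwise fall through to the 'i' test below.  */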
3329 case 'i':
3330 if (CONSTANT_P (operand)
3331 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3332 win = 1;
3333 break;
3334
3335 case 'n':
3336 if (CONST_INT_P (operand)
3337 || (GET_CODE (operand) == CONST_DOUBLE
3338 && GET_MODE (operand) == VOIDmode))
3339 win = 1;
3340 break;
3341
3342 case 'I':
3343 case 'J':
3344 case 'K':
3345 case 'L':
3346 case 'M':
3347 case 'N':
3348 case 'O':
3349 case 'P':
3350 if (CONST_INT_P (operand)
3351 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3352 win = 1;
3353 break;
3354
3355 case 'X':
3356 force_reload = 0;
3357 win = 1;
3358 break;
3359
3360 case 'g':
3361 if (! force_reload
3362 /* A PLUS is never a valid operand, but reload can make
3363 it from a register when eliminating registers. */
3364 && GET_CODE (operand) != PLUS
3365 /* A SCRATCH is not a valid operand. */
3366 && GET_CODE (operand) != SCRATCH
3367 && (! CONSTANT_P (operand)
3368 || ! flag_pic
3369 || LEGITIMATE_PIC_OPERAND_P (operand))
3370 && (GENERAL_REGS == ALL_REGS
3371 || !REG_P (operand)
3372 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3373 && reg_renumber[REGNO (operand)] < 0)))
3374 win = 1;
3375 /* Drop through into 'r' case. */
3376
3377 case 'r':
3378 this_alternative[i]
3379 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3380 goto reg;
3381
3382 default:
3383 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3384 {
3385 #ifdef EXTRA_CONSTRAINT_STR
3386 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3387 {
3388 if (force_reload)
3389 break;
3390 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3391 win = 1;
3392 /* If the address was already reloaded,
3393 we win as well. */
3394 else if (MEM_P (operand)
3395 && address_reloaded[i] == 1)
3396 win = 1;
3397 /* Likewise if the address will be reloaded because
3398 reg_equiv_address is nonzero. For reg_equiv_mem
3399 we have to check. */
3400 else if (REG_P (operand)
3401 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3402 && reg_renumber[REGNO (operand)] < 0
3403 && ((reg_equiv_mem[REGNO (operand)] != 0
3404 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3405 || (reg_equiv_address[REGNO (operand)] != 0)))
3406 win = 1;
3407
3408 /* If we didn't already win, we can reload
3409 constants via force_const_mem, and other
3410 MEMs by reloading the address like for 'o'. */
3411 if (CONST_POOL_OK_P (operand)
3412 || MEM_P (operand))
3413 badop = 0;
3414 constmemok = 1;
3415 offmemok = 1;
3416 break;
3417 }
3418 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3419 {
3420 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3421 win = 1;
3422
3423 /* If we didn't already win, we can reload
3424 the address into a base register. */
3425 this_alternative[i] = base_reg_class (VOIDmode,
3426 ADDRESS,
3427 SCRATCH);
3428 badop = 0;
3429 break;
3430 }
3431
3432 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3433 win = 1;
3434 #endif
3435 break;
3436 }
3437
3438 this_alternative[i]
3439 = (reg_class_subunion
3440 [this_alternative[i]]
3441 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3442 reg:
3443 if (GET_MODE (operand) == BLKmode)
3444 break;
3445 winreg = 1;
3446 if (REG_P (operand)
3447 && reg_fits_class_p (operand, this_alternative[i],
3448 offset, GET_MODE (recog_data.operand[i])))
3449 win = 1;
3450 break;
3451 }
3452 while ((p += len), c);
3453
3454 constraints[i] = p;
3455
3456 /* If this operand could be handled with a reg,
3457 and some reg is allowed, then this operand can be handled. */
3458 if (winreg && this_alternative[i] != NO_REGS)
3459 badop = 0;
3460
3461 /* Record which operands fit this alternative. */
3462 this_alternative_earlyclobber[i] = earlyclobber;
3463 if (win && ! force_reload)
3464 this_alternative_win[i] = 1;
3465 else if (did_match && ! force_reload)
3466 this_alternative_match_win[i] = 1;
3467 else
3468 {
3469 int const_to_mem = 0;
3470
3471 this_alternative_offmemok[i] = offmemok;
3472 losers++;
3473 if (badop)
3474 bad = 1;
3475 /* Alternative loses if it has no regs for a reg operand. */
3476 if (REG_P (operand)
3477 && this_alternative[i] == NO_REGS
3478 && this_alternative_matches[i] < 0)
3479 bad = 1;
3480
3481 /* If this is a constant that is reloaded into the desired
3482 class by copying it to memory first, count that as another
3483 reload. This is consistent with other code and is
3484 required to avoid choosing another alternative when
3485 the constant is moved into memory by this function on
3486 an early reload pass. Note that the test here is
3487 precisely the same as in the code below that calls
3488 force_const_mem. */
3489 if (CONST_POOL_OK_P (operand)
3490 && ((PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3491 == NO_REGS)
3492 || no_input_reloads)
3493 && operand_mode[i] != VOIDmode)
3494 {
3495 const_to_mem = 1;
3496 if (this_alternative[i] != NO_REGS)
3497 losers++;
3498 }
3499
3500 /* Alternative loses if it requires a type of reload not
3501 permitted for this insn. We can always reload SCRATCH
3502 and objects with a REG_UNUSED note. */
3503 if (GET_CODE (operand) != SCRATCH
3504 && modified[i] != RELOAD_READ && no_output_reloads
3505 && ! find_reg_note (insn, REG_UNUSED, operand))
3506 bad = 1;
3507 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3508 && ! const_to_mem)
3509 bad = 1;
3510
3511 /* If we can't reload this value at all, reject this
3512 alternative. Note that we could also lose due to
3513 LIMIT_RELOAD_CLASS, but we don't check that
3514 here. */
3515
3516 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3517 {
3518 if (PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3519 == NO_REGS)
3520 reject = 600;
3521
3522 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3523 if (operand_type[i] == RELOAD_FOR_OUTPUT
3524 && (PREFERRED_OUTPUT_RELOAD_CLASS (operand,
3525 this_alternative[i])
3526 == NO_REGS))
3527 reject = 600;
3528 #endif
3529 }
3530
3531 	    /* We prefer to reload pseudos over reloading other things,
3532 	       since such reloads may be eliminable later.  If we are
3533 	       reloading a SCRATCH, we won't generate any insns, just
3534 	       use a register, so that is also preferred.  So bump
3535 	       REJECT in the other cases.  Don't do this when we are
3536 	       forcing a constant into memory and it will then win,
3537 	       since we don't want a different alternative to match
3538 	       in that case.  */
3539 if (! (REG_P (operand)
3540 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3541 && GET_CODE (operand) != SCRATCH
3542 && ! (const_to_mem && constmemok))
3543 reject += 2;
3544
3545 /* Input reloads can be inherited more often than output
3546 reloads can be removed, so penalize output reloads. */
3547 if (operand_type[i] != RELOAD_FOR_INPUT
3548 && GET_CODE (operand) != SCRATCH)
3549 reject++;
3550 }
3551
3552 /* If this operand is a pseudo register that didn't get a hard
3553 reg and this alternative accepts some register, see if the
3554 class that we want is a subset of the preferred class for this
3555 register. If not, but it intersects that class, use the
3556 preferred class instead. If it does not intersect the preferred
3557 class, show that usage of this alternative should be discouraged;
3558 it will be discouraged more still if the register is `preferred
3559 or nothing'. We do this because it increases the chance of
3560 reusing our spill register in a later insn and avoiding a pair
3561 of memory stores and loads.
3562
3563 Don't bother with this if this alternative will accept this
3564 operand.
3565
3566 Don't do this for a multiword operand, since it is only a
3567 small win and has the risk of requiring more spill registers,
3568 which could cause a large loss.
3569
3570 Don't do this if the preferred class has only one register
3571 because we might otherwise exhaust the class. */
3572
3573 if (! win && ! did_match
3574 && this_alternative[i] != NO_REGS
3575 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3576 && reg_class_size [(int) preferred_class[i]] > 0
3577 && ! SMALL_REGISTER_CLASS_P (preferred_class[i]))
3578 {
3579 if (! reg_class_subset_p (this_alternative[i],
3580 preferred_class[i]))
3581 {
3582 /* Since we don't have a way of forming the intersection,
3583 we just do something special if the preferred class
3584 is a subset of the class we have; that's the most
3585 common case anyway. */
3586 if (reg_class_subset_p (preferred_class[i],
3587 this_alternative[i]))
3588 this_alternative[i] = preferred_class[i];
3589 else
3590 reject += (2 + 2 * pref_or_nothing[i]);
3591 }
3592 }
3593 }
3594
3595 /* Now see if any output operands that are marked "earlyclobber"
3596 in this alternative conflict with any input operands
3597 or any memory addresses. */
3598
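  /* For instance (hypothetical):  for an asm with an output constrained
     "=&r" and an input operand that overlaps it, the output is written
     while the input is still needed, so one of the two must be reloaded;
     the loop below prefers to reload the input when the output's class
     has only a few registers.  */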
3599 for (i = 0; i < noperands; i++)
3600 if (this_alternative_earlyclobber[i]
3601 && (this_alternative_win[i] || this_alternative_match_win[i]))
3602 {
3603 struct decomposition early_data;
3604
3605 early_data = decompose (recog_data.operand[i]);
3606
3607 gcc_assert (modified[i] != RELOAD_READ);
3608
3609 if (this_alternative[i] == NO_REGS)
3610 {
3611 this_alternative_earlyclobber[i] = 0;
3612 gcc_assert (this_insn_is_asm);
3613 error_for_asm (this_insn,
3614 "%<&%> constraint used with no register class");
3615 }
3616
3617 for (j = 0; j < noperands; j++)
3618 /* Is this an input operand or a memory ref? */
3619 if ((MEM_P (recog_data.operand[j])
3620 || modified[j] != RELOAD_WRITE)
3621 && j != i
3622 /* Ignore things like match_operator operands. */
3623 && *recog_data.constraints[j] != 0
3624 /* Don't count an input operand that is constrained to match
3625 the early clobber operand. */
3626 && ! (this_alternative_matches[j] == i
3627 && rtx_equal_p (recog_data.operand[i],
3628 recog_data.operand[j]))
3629 /* Is it altered by storing the earlyclobber operand? */
3630 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3631 early_data))
3632 {
3633 /* If the output is in a non-empty few-regs class,
3634 it's costly to reload it, so reload the input instead. */
3635 if (SMALL_REGISTER_CLASS_P (this_alternative[i])
3636 && (REG_P (recog_data.operand[j])
3637 || GET_CODE (recog_data.operand[j]) == SUBREG))
3638 {
3639 losers++;
3640 this_alternative_win[j] = 0;
3641 this_alternative_match_win[j] = 0;
3642 }
3643 else
3644 break;
3645 }
3646 /* If an earlyclobber operand conflicts with something,
3647 it must be reloaded, so request this and count the cost. */
3648 if (j != noperands)
3649 {
3650 losers++;
3651 this_alternative_win[i] = 0;
3652 this_alternative_match_win[j] = 0;
3653 for (j = 0; j < noperands; j++)
3654 if (this_alternative_matches[j] == i
3655 && this_alternative_match_win[j])
3656 {
3657 this_alternative_win[j] = 0;
3658 this_alternative_match_win[j] = 0;
3659 losers++;
3660 }
3661 }
3662 }
3663
3664       /* If one alternative accepts all the operands with no reloads
3665 	 required, choose that alternative; don't consider the remaining ones.  */
3666 if (losers == 0)
3667 {
3668 /* Unswap these so that they are never swapped at `finish'. */
3669 if (commutative >= 0)
3670 {
3671 recog_data.operand[commutative] = substed_operand[commutative];
3672 recog_data.operand[commutative + 1]
3673 = substed_operand[commutative + 1];
3674 }
3675 for (i = 0; i < noperands; i++)
3676 {
3677 goal_alternative_win[i] = this_alternative_win[i];
3678 goal_alternative_match_win[i] = this_alternative_match_win[i];
3679 goal_alternative[i] = this_alternative[i];
3680 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3681 goal_alternative_matches[i] = this_alternative_matches[i];
3682 goal_alternative_earlyclobber[i]
3683 = this_alternative_earlyclobber[i];
3684 }
3685 goal_alternative_number = this_alternative_number;
3686 goal_alternative_swapped = swapped;
3687 goal_earlyclobber = this_earlyclobber;
3688 goto finish;
3689 }
3690
3691 /* REJECT, set by the ! and ? constraint characters and when a register
3692 would be reloaded into a non-preferred class, discourages the use of
3693 this alternative for a reload goal. REJECT is incremented by six
3694 for each ? and two for each non-preferred class. */
3695 losers = losers * 6 + reject;
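      /* A worked example of this weighting:  an alternative needing one
	 reload but carrying a single '?' scores 1*6 + 6 = 12, the same as
	 an alternative needing two reloads and no '?' (2*6 + 0 = 12), so
	 each '?' costs as much as one extra reload when alternatives are
	 compared.  */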
3696
3697 /* If this alternative can be made to work by reloading,
3698 and it needs less reloading than the others checked so far,
3699 record it as the chosen goal for reloading. */
3700 if (! bad && best > losers)
3701 {
3702 for (i = 0; i < noperands; i++)
3703 {
3704 goal_alternative[i] = this_alternative[i];
3705 goal_alternative_win[i] = this_alternative_win[i];
3706 goal_alternative_match_win[i] = this_alternative_match_win[i];
3707 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3708 goal_alternative_matches[i] = this_alternative_matches[i];
3709 goal_alternative_earlyclobber[i]
3710 = this_alternative_earlyclobber[i];
3711 }
3712 goal_alternative_swapped = swapped;
3713 best = losers;
3714 goal_alternative_number = this_alternative_number;
3715 goal_earlyclobber = this_earlyclobber;
3716 }
3717 }
3718
3719 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3720 then we need to try each alternative twice,
3721 the second time matching those two operands
3722 as if we had exchanged them.
3723 To do this, really exchange them in operands.
3724
3725 If we have just tried the alternatives the second time,
3726 return operands to normal and drop through. */
3727
3728 if (commutative >= 0)
3729 {
3730 swapped = !swapped;
3731 if (swapped)
3732 {
3733 enum reg_class tclass;
3734 int t;
3735
3736 recog_data.operand[commutative] = substed_operand[commutative + 1];
3737 recog_data.operand[commutative + 1] = substed_operand[commutative];
3738 /* Swap the duplicates too. */
3739 for (i = 0; i < recog_data.n_dups; i++)
3740 if (recog_data.dup_num[i] == commutative
3741 || recog_data.dup_num[i] == commutative + 1)
3742 *recog_data.dup_loc[i]
3743 = recog_data.operand[(int) recog_data.dup_num[i]];
3744
3745 tclass = preferred_class[commutative];
3746 preferred_class[commutative] = preferred_class[commutative + 1];
3747 preferred_class[commutative + 1] = tclass;
3748
3749 t = pref_or_nothing[commutative];
3750 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3751 pref_or_nothing[commutative + 1] = t;
3752
3753 t = address_reloaded[commutative];
3754 address_reloaded[commutative] = address_reloaded[commutative + 1];
3755 address_reloaded[commutative + 1] = t;
3756
3757 memcpy (constraints, recog_data.constraints,
3758 noperands * sizeof (const char *));
3759 goto try_swapped;
3760 }
3761 else
3762 {
3763 recog_data.operand[commutative] = substed_operand[commutative];
3764 recog_data.operand[commutative + 1]
3765 = substed_operand[commutative + 1];
3766 /* Unswap the duplicates too. */
3767 for (i = 0; i < recog_data.n_dups; i++)
3768 if (recog_data.dup_num[i] == commutative
3769 || recog_data.dup_num[i] == commutative + 1)
3770 *recog_data.dup_loc[i]
3771 = recog_data.operand[(int) recog_data.dup_num[i]];
3772 }
3773 }
3774
3775 /* The operands don't meet the constraints.
3776 goal_alternative describes the alternative
3777 that we could reach by reloading the fewest operands.
3778 Reload so as to fit it. */
3779
3780 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3781 {
3782 /* No alternative works with reloads?? */
3783 if (insn_code_number >= 0)
3784 fatal_insn ("unable to generate reloads for:", insn);
3785 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3786 /* Avoid further trouble with this insn. */
3787 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3788 n_reloads = 0;
3789 return 0;
3790 }
3791
3792 /* Jump to `finish' from above if all operands are valid already.
3793 In that case, goal_alternative_win is all 1. */
3794 finish:
3795
3796 /* Right now, for any pair of operands I and J that are required to match,
3797 with I < J,
3798 goal_alternative_matches[J] is I.
3799 Set up goal_alternative_matched as the inverse function:
3800 goal_alternative_matched[I] = J. */
3801
3802 for (i = 0; i < noperands; i++)
3803 goal_alternative_matched[i] = -1;
3804
3805 for (i = 0; i < noperands; i++)
3806 if (! goal_alternative_win[i]
3807 && goal_alternative_matches[i] >= 0)
3808 goal_alternative_matched[goal_alternative_matches[i]] = i;
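  /* For instance, if operand 2 had to match operand 0 (so that
     goal_alternative_matches[2] == 0) and operand 2 did not win, the
     loop above records goal_alternative_matched[0] == 2.  */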
3809
3810 for (i = 0; i < noperands; i++)
3811 goal_alternative_win[i] |= goal_alternative_match_win[i];
3812
3813 /* If the best alternative is with operands 1 and 2 swapped,
3814 consider them swapped before reporting the reloads. Update the
3815 operand numbers of any reloads already pushed. */
3816
3817 if (goal_alternative_swapped)
3818 {
3819 rtx tem;
3820
3821 tem = substed_operand[commutative];
3822 substed_operand[commutative] = substed_operand[commutative + 1];
3823 substed_operand[commutative + 1] = tem;
3824 tem = recog_data.operand[commutative];
3825 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3826 recog_data.operand[commutative + 1] = tem;
3827 tem = *recog_data.operand_loc[commutative];
3828 *recog_data.operand_loc[commutative]
3829 = *recog_data.operand_loc[commutative + 1];
3830 *recog_data.operand_loc[commutative + 1] = tem;
3831
3832 for (i = 0; i < n_reloads; i++)
3833 {
3834 if (rld[i].opnum == commutative)
3835 rld[i].opnum = commutative + 1;
3836 else if (rld[i].opnum == commutative + 1)
3837 rld[i].opnum = commutative;
3838 }
3839 }
3840
3841 for (i = 0; i < noperands; i++)
3842 {
3843 operand_reloadnum[i] = -1;
3844
3845 /* If this is an earlyclobber operand, we need to widen the scope.
3846 The reload must remain valid from the start of the insn being
3847 reloaded until after the operand is stored into its destination.
3848 We approximate this with RELOAD_OTHER even though we know that we
3849 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3850
3851 	 One special case worth checking is when we have an output
3852 	 that is earlyclobber but isn't used past the insn (typically
3853 	 a SCRATCH).  In this case, we need the reload to be live only
3854 	 through the insn itself, not for any of our input or output
3855 	 reloads.
3856 	 But we must not accidentally narrow the scope of an existing
3857 	 RELOAD_OTHER reload - leave those alone.
3858 
3859 	 In any case, anything needed to address this operand can remain
3860 	 categorized as it was before.  */
3861
3862 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3863 operand_type[i]
3864 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3865 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3866 }
3867
3868 /* Any constants that aren't allowed and can't be reloaded
3869 into registers are here changed into memory references. */
3870 for (i = 0; i < noperands; i++)
3871 if (! goal_alternative_win[i])
3872 {
3873 rtx op = recog_data.operand[i];
3874 rtx subreg = NULL_RTX;
3875 rtx plus = NULL_RTX;
3876 enum machine_mode mode = operand_mode[i];
3877
3878 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3879 push_reload so we have to let them pass here. */
3880 if (GET_CODE (op) == SUBREG)
3881 {
3882 subreg = op;
3883 op = SUBREG_REG (op);
3884 mode = GET_MODE (op);
3885 }
3886
3887 if (GET_CODE (op) == PLUS)
3888 {
3889 plus = op;
3890 op = XEXP (op, 1);
3891 }
3892
3893 if (CONST_POOL_OK_P (op)
3894 && ((PREFERRED_RELOAD_CLASS (op,
3895 (enum reg_class) goal_alternative[i])
3896 == NO_REGS)
3897 || no_input_reloads)
3898 && mode != VOIDmode)
3899 {
3900 int this_address_reloaded;
3901 rtx tem = force_const_mem (mode, op);
3902
3903 /* If we stripped a SUBREG or a PLUS above add it back. */
3904 if (plus != NULL_RTX)
3905 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3906
3907 if (subreg != NULL_RTX)
3908 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3909
3910 this_address_reloaded = 0;
3911 substed_operand[i] = recog_data.operand[i]
3912 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3913 0, insn, &this_address_reloaded);
3914
3915 /* If the alternative accepts constant pool refs directly
3916 there will be no reload needed at all. */
3917 if (plus == NULL_RTX
3918 && subreg == NULL_RTX
3919 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3920 ? substed_operand[i]
3921 : NULL,
3922 recog_data.constraints[i],
3923 goal_alternative_number))
3924 goal_alternative_win[i] = 1;
3925 }
3926 }
3927
3928 /* Record the values of the earlyclobber operands for the caller. */
3929 if (goal_earlyclobber)
3930 for (i = 0; i < noperands; i++)
3931 if (goal_alternative_earlyclobber[i])
3932 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3933
3934 /* Now record reloads for all the operands that need them. */
3935 for (i = 0; i < noperands; i++)
3936 if (! goal_alternative_win[i])
3937 {
3938 /* Operands that match previous ones have already been handled. */
3939 if (goal_alternative_matches[i] >= 0)
3940 ;
3941 /* Handle an operand with a nonoffsettable address
3942 appearing where an offsettable address will do
3943 by reloading the address into a base register.
3944
3945 ??? We can also do this when the operand is a register and
3946 reg_equiv_mem is not offsettable, but this is a bit tricky,
3947 so we don't bother with it. It may not be worth doing. */
3948 else if (goal_alternative_matched[i] == -1
3949 && goal_alternative_offmemok[i]
3950 && MEM_P (recog_data.operand[i]))
3951 {
3952 /* If the address to be reloaded is a VOIDmode constant,
3953 use Pmode as mode of the reload register, as would have
3954 been done by find_reloads_address. */
3955 enum machine_mode address_mode;
3956 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3957 if (address_mode == VOIDmode)
3958 address_mode = Pmode;
3959
3960 operand_reloadnum[i]
3961 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3962 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3963 base_reg_class (VOIDmode, MEM, SCRATCH),
3964 address_mode,
3965 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3966 rld[operand_reloadnum[i]].inc
3967 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3968
3969 /* If this operand is an output, we will have made any
3970 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3971 now we are treating part of the operand as an input, so
3972 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3973
3974 if (modified[i] == RELOAD_WRITE)
3975 {
3976 for (j = 0; j < n_reloads; j++)
3977 {
3978 if (rld[j].opnum == i)
3979 {
3980 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3981 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
3982 else if (rld[j].when_needed
3983 == RELOAD_FOR_OUTADDR_ADDRESS)
3984 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
3985 }
3986 }
3987 }
3988 }
3989 else if (goal_alternative_matched[i] == -1)
3990 {
3991 operand_reloadnum[i]
3992 = push_reload ((modified[i] != RELOAD_WRITE
3993 ? recog_data.operand[i] : 0),
3994 (modified[i] != RELOAD_READ
3995 ? recog_data.operand[i] : 0),
3996 (modified[i] != RELOAD_WRITE
3997 ? recog_data.operand_loc[i] : 0),
3998 (modified[i] != RELOAD_READ
3999 ? recog_data.operand_loc[i] : 0),
4000 (enum reg_class) goal_alternative[i],
4001 (modified[i] == RELOAD_WRITE
4002 ? VOIDmode : operand_mode[i]),
4003 (modified[i] == RELOAD_READ
4004 ? VOIDmode : operand_mode[i]),
4005 (insn_code_number < 0 ? 0
4006 : insn_data[insn_code_number].operand[i].strict_low),
4007 0, i, operand_type[i]);
4008 }
4009 /* In a matching pair of operands, one must be input only
4010 and the other must be output only.
4011 Pass the input operand as IN and the other as OUT. */
4012 else if (modified[i] == RELOAD_READ
4013 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4014 {
4015 operand_reloadnum[i]
4016 = push_reload (recog_data.operand[i],
4017 recog_data.operand[goal_alternative_matched[i]],
4018 recog_data.operand_loc[i],
4019 recog_data.operand_loc[goal_alternative_matched[i]],
4020 (enum reg_class) goal_alternative[i],
4021 operand_mode[i],
4022 operand_mode[goal_alternative_matched[i]],
4023 0, 0, i, RELOAD_OTHER);
4024 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4025 }
4026 else if (modified[i] == RELOAD_WRITE
4027 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4028 {
4029 operand_reloadnum[goal_alternative_matched[i]]
4030 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4031 recog_data.operand[i],
4032 recog_data.operand_loc[goal_alternative_matched[i]],
4033 recog_data.operand_loc[i],
4034 (enum reg_class) goal_alternative[i],
4035 operand_mode[goal_alternative_matched[i]],
4036 operand_mode[i],
4037 0, 0, i, RELOAD_OTHER);
4038 operand_reloadnum[i] = output_reloadnum;
4039 }
4040 else
4041 {
4042 gcc_assert (insn_code_number < 0);
4043 error_for_asm (insn, "inconsistent operand constraints "
4044 "in an %<asm%>");
4045 /* Avoid further trouble with this insn. */
4046 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4047 n_reloads = 0;
4048 return 0;
4049 }
4050 }
4051 else if (goal_alternative_matched[i] < 0
4052 && goal_alternative_matches[i] < 0
4053 && address_operand_reloaded[i] != 1
4054 && optimize)
4055 {
4056 /* For each non-matching operand that's a MEM or a pseudo-register
4057 that didn't get a hard register, make an optional reload.
4058 This may get done even if the insn needs no reloads otherwise. */
4059
4060 rtx operand = recog_data.operand[i];
4061
4062 while (GET_CODE (operand) == SUBREG)
4063 operand = SUBREG_REG (operand);
4064 if ((MEM_P (operand)
4065 || (REG_P (operand)
4066 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4067 /* If this is only for an output, the optional reload would not
4068 actually cause us to use a register now, just note that
4069 something is stored here. */
4070 && ((enum reg_class) goal_alternative[i] != NO_REGS
4071 || modified[i] == RELOAD_WRITE)
4072 && ! no_input_reloads
4073 	     An optional output reload might allow us to delete INSN later.
4074 	     We must not make in-out reloads on insns that are not permitted
4075 	     to have output reloads.
4076 	     If this is an asm, we can't delete it; we must not even call
4077 	     push_reload for an optional output reload in this case,
4078 	     because we can't be sure that the constraint allows a register,
4079 	     and push_reload verifies the constraints for asms.
4080 && (modified[i] == RELOAD_READ
4081 || (! no_output_reloads && ! this_insn_is_asm)))
4082 operand_reloadnum[i]
4083 = push_reload ((modified[i] != RELOAD_WRITE
4084 ? recog_data.operand[i] : 0),
4085 (modified[i] != RELOAD_READ
4086 ? recog_data.operand[i] : 0),
4087 (modified[i] != RELOAD_WRITE
4088 ? recog_data.operand_loc[i] : 0),
4089 (modified[i] != RELOAD_READ
4090 ? recog_data.operand_loc[i] : 0),
4091 (enum reg_class) goal_alternative[i],
4092 (modified[i] == RELOAD_WRITE
4093 ? VOIDmode : operand_mode[i]),
4094 (modified[i] == RELOAD_READ
4095 ? VOIDmode : operand_mode[i]),
4096 (insn_code_number < 0 ? 0
4097 : insn_data[insn_code_number].operand[i].strict_low),
4098 1, i, operand_type[i]);
4099 /* If a memory reference remains (either as a MEM or a pseudo that
4100 did not get a hard register), yet we can't make an optional
4101 reload, check if this is actually a pseudo register reference;
4102 we then need to emit a USE and/or a CLOBBER so that reload
4103 inheritance will do the right thing. */
4104 else if (replace
4105 && (MEM_P (operand)
4106 || (REG_P (operand)
4107 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4108 && reg_renumber [REGNO (operand)] < 0)))
4109 {
4110 operand = *recog_data.operand_loc[i];
4111
4112 while (GET_CODE (operand) == SUBREG)
4113 operand = SUBREG_REG (operand);
4114 if (REG_P (operand))
4115 {
4116 if (modified[i] != RELOAD_WRITE)
4117 /* We mark the USE with QImode so that we recognize
4118 it as one that can be safely deleted at the end
4119 of reload. */
4120 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4121 insn), QImode);
4122 if (modified[i] != RELOAD_READ)
4123 emit_insn_after (gen_clobber (operand), insn);
4124 }
4125 }
4126 }
4127 else if (goal_alternative_matches[i] >= 0
4128 && goal_alternative_win[goal_alternative_matches[i]]
4129 && modified[i] == RELOAD_READ
4130 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4131 && ! no_input_reloads && ! no_output_reloads
4132 && optimize)
4133 {
4134 /* Similarly, make an optional reload for a pair of matching
4135 objects that are in MEM or a pseudo that didn't get a hard reg. */
4136
4137 rtx operand = recog_data.operand[i];
4138
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if ((MEM_P (operand)
4142 || (REG_P (operand)
4143 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4144 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4145 != NO_REGS))
4146 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4147 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4148 recog_data.operand[i],
4149 recog_data.operand_loc[goal_alternative_matches[i]],
4150 recog_data.operand_loc[i],
4151 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4152 operand_mode[goal_alternative_matches[i]],
4153 operand_mode[i],
4154 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4155 }
4156
4157 /* Perform whatever substitutions on the operands we are supposed
4158 to make due to commutativity or replacement of registers
4159 with equivalent constants or memory slots. */
4160
4161 for (i = 0; i < noperands; i++)
4162 {
4163 /* We only do this on the last pass through reload, because it is
4164 possible for some data (like reg_equiv_address) to be changed during
4165 later passes. Moreover, we lose the opportunity to get a useful
4166 reload_{in,out}_reg when we do these replacements. */
4167
4168 if (replace)
4169 {
4170 rtx substitution = substed_operand[i];
4171
4172 *recog_data.operand_loc[i] = substitution;
4173
4174 /* If we're replacing an operand with a LABEL_REF, we need to
4175 make sure that there's a REG_LABEL_OPERAND note attached to
4176 this instruction. */
4177 if (GET_CODE (substitution) == LABEL_REF
4178 && !find_reg_note (insn, REG_LABEL_OPERAND,
4179 XEXP (substitution, 0))
4180 /* For a JUMP_P, if it was a branch target it must have
4181 already been recorded as such. */
4182 && (!JUMP_P (insn)
4183 || !label_is_jump_target_p (XEXP (substitution, 0),
4184 insn)))
4185 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4186 }
4187 else
4188 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4189 }
4190
4191 /* If this insn pattern contains any MATCH_DUP's, make sure that
4192 they will be substituted if the operands they match are substituted.
4193 Also do now any substitutions we already did on the operands.
4194
4195 Don't do this if we aren't making replacements because we might be
4196 propagating things allocated by frame pointer elimination into places
4197 it doesn't expect. */
4198
4199 if (insn_code_number >= 0 && replace)
4200 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4201 {
4202 int opno = recog_data.dup_num[i];
4203 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4204 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4205 }
4206
4207 #if 0
4208 /* This loses because reloading of prior insns can invalidate the equivalence
4209 (or at least find_equiv_reg isn't smart enough to find it any more),
4210 causing this insn to need more reload regs than it needed before.
4211 It may be too late to make the reload regs available.
4212 Now this optimization is done safely in choose_reload_regs. */
4213
4214 /* For each reload of a reg into some other class of reg,
4215 search for an existing equivalent reg (same value now) in the right class.
4216 We can use it as long as we don't need to change its contents. */
4217 for (i = 0; i < n_reloads; i++)
4218 if (rld[i].reg_rtx == 0
4219 && rld[i].in != 0
4220 && REG_P (rld[i].in)
4221 && rld[i].out == 0)
4222 {
4223 rld[i].reg_rtx
4224 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4225 static_reload_reg_p, 0, rld[i].inmode);
4226 /* Prevent generation of insn to load the value
4227 because the one we found already has the value. */
4228 if (rld[i].reg_rtx)
4229 rld[i].in = rld[i].reg_rtx;
4230 }
4231 #endif
4232
4233   /* If we detected an error and replaced the asm instruction with a USE,
4234      forget about the reloads.  */
4235 if (GET_CODE (PATTERN (insn)) == USE
4236 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4237 n_reloads = 0;
4238
4239 /* Perhaps an output reload can be combined with another
4240 to reduce needs by one. */
4241 if (!goal_earlyclobber)
4242 combine_reloads ();
4243
4244   /* If we have a pair of reloads for parts of an address that reload
4245      the same object, where the operands themselves were not reloaded and
4246      the two operands are supposed to match, merge the reloads and change
4247      the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS.  */
4248
4249 for (i = 0; i < n_reloads; i++)
4250 {
4251 int k;
4252
4253 for (j = i + 1; j < n_reloads; j++)
4254 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4255 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4256 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4257 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4258 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4259 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4260 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4261 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4262 && rtx_equal_p (rld[i].in, rld[j].in)
4263 && (operand_reloadnum[rld[i].opnum] < 0
4264 || rld[operand_reloadnum[rld[i].opnum]].optional)
4265 && (operand_reloadnum[rld[j].opnum] < 0
4266 || rld[operand_reloadnum[rld[j].opnum]].optional)
4267 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4268 || (goal_alternative_matches[rld[j].opnum]
4269 == rld[i].opnum)))
4270 {
4271 for (k = 0; k < n_replacements; k++)
4272 if (replacements[k].what == j)
4273 replacements[k].what = i;
4274
4275 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4276 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4277 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4278 else
4279 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4280 rld[j].in = 0;
4281 }
4282 }
4283
4284 /* Scan all the reloads and update their type.
4285 If a reload is for the address of an operand and we didn't reload
4286 that operand, change the type. Similarly, change the operand number
4287 of a reload when two operands match. If a reload is optional, treat it
4288 as though the operand isn't reloaded.
4289
4290 ??? This latter case is somewhat odd because if we do the optional
4291 reload, it means the object is hanging around. Thus we need only
4292 do the address reload if the optional reload was NOT done.
4293
4294 Change secondary reloads to be the address type of their operand, not
4295 the normal type.
4296
4297 If an operand's reload is now RELOAD_OTHER, change any
4298 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4299 RELOAD_FOR_OTHER_ADDRESS. */
4300
4301 for (i = 0; i < n_reloads; i++)
4302 {
4303 if (rld[i].secondary_p
4304 && rld[i].when_needed == operand_type[rld[i].opnum])
4305 rld[i].when_needed = address_type[rld[i].opnum];
4306
4307 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4308 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4309 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4310 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4311 && (operand_reloadnum[rld[i].opnum] < 0
4312 || rld[operand_reloadnum[rld[i].opnum]].optional))
4313 {
4314 /* If we have a secondary reload to go along with this reload,
4315 change its type to RELOAD_FOR_OPADDR_ADDR. */
4316
4317 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4318 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4319 && rld[i].secondary_in_reload != -1)
4320 {
4321 int secondary_in_reload = rld[i].secondary_in_reload;
4322
4323 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4324
4325 /* If there's a tertiary reload we have to change it also. */
4326 if (secondary_in_reload > 0
4327 && rld[secondary_in_reload].secondary_in_reload != -1)
4328 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4329 = RELOAD_FOR_OPADDR_ADDR;
4330 }
4331
4332 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4334 && rld[i].secondary_out_reload != -1)
4335 {
4336 int secondary_out_reload = rld[i].secondary_out_reload;
4337
4338 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4339
4340 /* If there's a tertiary reload we have to change it also. */
4341 if (secondary_out_reload
4342 && rld[secondary_out_reload].secondary_out_reload != -1)
4343 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4344 = RELOAD_FOR_OPADDR_ADDR;
4345 }
4346
4347 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4348 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4349 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4350 else
4351 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4352 }
4353
4354 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4355 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4356 && operand_reloadnum[rld[i].opnum] >= 0
4357 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4358 == RELOAD_OTHER))
4359 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4360
4361 if (goal_alternative_matches[rld[i].opnum] >= 0)
4362 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4363 }
4364
4365 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4366 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4367 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4368
4369 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4370 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4371 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4372 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4373 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4374 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4375 This is complicated by the fact that a single operand can have more
4376 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4377 choose_reload_regs without affecting code quality, and cases that
4378 actually fail are extremely rare, so it turns out to be better to fix
4379 the problem here by not generating cases that choose_reload_regs will
4380 fail for. */
4381 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4382 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4383 a single operand.
4384 We can reduce the register pressure by exploiting that a
4385 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4386 does not conflict with any of them, if it is only used for the first of
4387 the RELOAD_FOR_X_ADDRESS reloads. */
4388 {
4389 int first_op_addr_num = -2;
4390 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4391 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4392 int need_change = 0;
4393     /* We use first_op_addr_num and the contents of the above arrays
4394 first as flags - -2 means no instance encountered, -1 means exactly
4395 one instance encountered.
4396 If more than one instance has been encountered, we store the reload
4397 number of the first reload of the kind in question; reload numbers
4398 are known to be non-negative. */
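    /* Concretely:  ++(-2) yields -1, so the first instance of a kind
       leaves its flag at -1; ++(-1) yields 0 >= 0, so a second instance
       stores the current reload number and sets NEED_CHANGE.  */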
4399 for (i = 0; i < noperands; i++)
4400 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4401 for (i = n_reloads - 1; i >= 0; i--)
4402 {
4403 switch (rld[i].when_needed)
4404 {
4405 case RELOAD_FOR_OPERAND_ADDRESS:
4406 if (++first_op_addr_num >= 0)
4407 {
4408 first_op_addr_num = i;
4409 need_change = 1;
4410 }
4411 break;
4412 case RELOAD_FOR_INPUT_ADDRESS:
4413 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4414 {
4415 first_inpaddr_num[rld[i].opnum] = i;
4416 need_change = 1;
4417 }
4418 break;
4419 case RELOAD_FOR_OUTPUT_ADDRESS:
4420 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4421 {
4422 first_outpaddr_num[rld[i].opnum] = i;
4423 need_change = 1;
4424 }
4425 break;
4426 default:
4427 break;
4428 }
4429 }
4430
4431 if (need_change)
4432 {
4433 for (i = 0; i < n_reloads; i++)
4434 {
4435 int first_num;
4436 enum reload_type type;
4437
4438 switch (rld[i].when_needed)
4439 {
4440 case RELOAD_FOR_OPADDR_ADDR:
4441 first_num = first_op_addr_num;
4442 type = RELOAD_FOR_OPERAND_ADDRESS;
4443 break;
4444 case RELOAD_FOR_INPADDR_ADDRESS:
4445 first_num = first_inpaddr_num[rld[i].opnum];
4446 type = RELOAD_FOR_INPUT_ADDRESS;
4447 break;
4448 case RELOAD_FOR_OUTADDR_ADDRESS:
4449 first_num = first_outpaddr_num[rld[i].opnum];
4450 type = RELOAD_FOR_OUTPUT_ADDRESS;
4451 break;
4452 default:
4453 continue;
4454 }
4455 if (first_num < 0)
4456 continue;
4457 else if (i > first_num)
4458 rld[i].when_needed = type;
4459 else
4460 {
4461 /* Check if the only TYPE reload that uses reload I is
4462 reload FIRST_NUM. */
4463 for (j = n_reloads - 1; j > first_num; j--)
4464 {
4465 if (rld[j].when_needed == type
4466 && (rld[i].secondary_p
4467 ? rld[j].secondary_in_reload == i
4468 : reg_mentioned_p (rld[i].in, rld[j].in)))
4469 {
4470 rld[i].when_needed = type;
4471 break;
4472 }
4473 }
4474 }
4475 }
4476 }
4477 }
4478
4479 /* See if we have any reloads that are now allowed to be merged
4480 because we've changed when the reload is needed to
4481 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4482 check for the most common cases. */
4483
4484 for (i = 0; i < n_reloads; i++)
4485 if (rld[i].in != 0 && rld[i].out == 0
4486 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4487 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4488 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4489 for (j = 0; j < n_reloads; j++)
4490 if (i != j && rld[j].in != 0 && rld[j].out == 0
4491 && rld[j].when_needed == rld[i].when_needed
4492 && MATCHES (rld[i].in, rld[j].in)
4493 && rld[i].rclass == rld[j].rclass
4494 && !rld[i].nocombine && !rld[j].nocombine
4495 && rld[i].reg_rtx == rld[j].reg_rtx)
4496 {
4497 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4498 transfer_replacements (i, j);
4499 rld[j].in = 0;
4500 }
4501
4502 #ifdef HAVE_cc0
4503 /* If we made any reloads for addresses, see if they violate a
4504 "no input reloads" requirement for this insn. But loads that we
4505 do after the insn (such as for output addresses) are fine. */
4506 if (no_input_reloads)
4507 for (i = 0; i < n_reloads; i++)
4508 gcc_assert (rld[i].in == 0
4509 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4510 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4511 #endif
4512
4513 /* Compute reload_mode and reload_nregs. */
4514 for (i = 0; i < n_reloads; i++)
4515 {
4516 rld[i].mode
4517 = (rld[i].inmode == VOIDmode
4518 || (GET_MODE_SIZE (rld[i].outmode)
4519 > GET_MODE_SIZE (rld[i].inmode)))
4520 ? rld[i].outmode : rld[i].inmode;
4521
4522 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4523 }
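  /* For example, an in-out reload with inmode SImode and outmode DImode
     is given DImode here; on a 32-bit target CLASS_MAX_NREGS would then
     typically report two hard registers for a GENERAL_REGS reload of
     that mode.  */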
4524
4525   /* Special case a simple move with an input reload and a
4526      destination that is a hard reg; if the hard reg is OK, use it.  */
4527 for (i = 0; i < n_reloads; i++)
4528 if (rld[i].when_needed == RELOAD_FOR_INPUT
4529 && GET_CODE (PATTERN (insn)) == SET
4530 && REG_P (SET_DEST (PATTERN (insn)))
4531 && (SET_SRC (PATTERN (insn)) == rld[i].in
4532 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4533 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4534 {
4535 rtx dest = SET_DEST (PATTERN (insn));
4536 unsigned int regno = REGNO (dest);
4537
4538 if (regno < FIRST_PSEUDO_REGISTER
4539 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4540 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4541 {
4542 int nr = hard_regno_nregs[regno][rld[i].mode];
4543 int ok = 1, nri;
4544
4545 for (nri = 1; nri < nr; nri ++)
4546 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4547 ok = 0;
4548
4549 if (ok)
4550 rld[i].reg_rtx = dest;
4551 }
4552 }
4553
4554 return retval;
4555 }
4556
4557 /* Return true if alternative number ALTNUM in constraint-string
4558 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4559 MEM gives the reference if it didn't need any reloads, otherwise it
4560 is null. */
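/* For instance (hypothetical):  with the constraint string "=r,o" and
   ALTNUM 1, the scan below skips past the first ',' and then finds 'o',
   so the alternative is reported as accepting a reloaded constant-pool
   reference.  */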
4561
4562 static bool
4563 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4564 const char *constraint, int altnum)
4565 {
4566 int c;
4567
4568 /* Skip alternatives before the one requested. */
4569 while (altnum > 0)
4570 {
4571 while (*constraint++ != ',');
4572 altnum--;
4573 }
4574 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4575 If one of them is present, this alternative accepts the result of
4576 passing a constant-pool reference through find_reloads_toplev.
4577
4578 The same is true of extra memory constraints if the address
4579 was reloaded into a register. However, the target may elect
4580 to disallow the original constant address, forcing it to be
4581 reloaded into a register instead. */
4582 for (; (c = *constraint) && c != ',' && c != '#';
4583 constraint += CONSTRAINT_LEN (c, constraint))
4584 {
4585 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4586 return true;
4587 #ifdef EXTRA_CONSTRAINT_STR
4588 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4589 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4590 return true;
4591 #endif
4592 }
4593 return false;
4594 }
4595 \f
4596 /* Scan X for memory references and scan the addresses for reloading.
4597 Also checks for references to "constant" regs that we want to eliminate
4598 and replaces them with the values they stand for.
4599 We may alter X destructively if it contains a reference to such.
4600 If X is just a constant reg, we return the equivalent value
4601 instead of X.
4602
4603 IND_LEVELS says how many levels of indirect addressing this machine
4604 supports.
4605
4606 OPNUM and TYPE identify the purpose of the reload.
4607
4608 IS_SET_DEST is true if X is the destination of a SET, which is not
4609 appropriate to be replaced by a constant.
4610
4611 INSN, if nonzero, is the insn in which we do the reload. It is used
4612 to determine if we may generate output reloads, and where to put USEs
4613 for pseudos that we have to replace with stack slots.
4614
4615    ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4616    result of find_reloads_address.  */
4617
4618 static rtx
4619 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4620 int ind_levels, int is_set_dest, rtx insn,
4621 int *address_reloaded)
4622 {
4623 RTX_CODE code = GET_CODE (x);
4624
4625 const char *fmt = GET_RTX_FORMAT (code);
4626 int i;
4627 int copied;
4628
4629 if (code == REG)
4630 {
4631 /* This code is duplicated for speed in find_reloads. */
4632 int regno = REGNO (x);
4633 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4634 x = reg_equiv_constant[regno];
4635 #if 0
4636 /* This creates (subreg (mem...)) which would cause an unnecessary
4637 reload of the mem. */
4638 else if (reg_equiv_mem[regno] != 0)
4639 x = reg_equiv_mem[regno];
4640 #endif
4641 else if (reg_equiv_memory_loc[regno]
4642 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4643 {
4644 rtx mem = make_memloc (x, regno);
4645 if (reg_equiv_address[regno]
4646 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4647 {
4648 /* If this is not a toplevel operand, find_reloads doesn't see
4649 this substitution. We have to emit a USE of the pseudo so
4650 that delete_output_reload can see it. */
4651 if (replace_reloads && recog_data.operand[opnum] != x)
4652 /* We mark the USE with QImode so that we recognize it
4653 as one that can be safely deleted at the end of
4654 reload. */
4655 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4656 QImode);
4657 x = mem;
4658 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4659 opnum, type, ind_levels, insn);
4660 if (!rtx_equal_p (x, mem))
4661 push_reg_equiv_alt_mem (regno, x);
4662 if (address_reloaded)
4663 *address_reloaded = i;
4664 }
4665 }
4666 return x;
4667 }
4668 if (code == MEM)
4669 {
4670 rtx tem = x;
4671
4672 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4673 opnum, type, ind_levels, insn);
4674 if (address_reloaded)
4675 *address_reloaded = i;
4676
4677 return tem;
4678 }
4679
4680 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4681 {
4682 /* Check for SUBREG containing a REG that's equivalent to a
4683 constant. If the constant has a known value, truncate it
4684 right now. Similarly if we are extracting a single-word of a
4685 multi-word constant. If the constant is symbolic, allow it
4686 to be substituted normally. push_reload will strip the
4687 subreg later. The constant must not be VOIDmode, because we
4688 will lose the mode of the register (this should never happen
4689 because one of the cases above should handle it). */
4690
4691 int regno = REGNO (SUBREG_REG (x));
4692 rtx tem;
4693
4694 if (regno >= FIRST_PSEUDO_REGISTER
4695 && reg_renumber[regno] < 0
4696 && reg_equiv_constant[regno] != 0)
4697 {
4698 tem =
4699 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4700 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4701 gcc_assert (tem);
4702 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4703 {
4704 tem = force_const_mem (GET_MODE (x), tem);
4705 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4706 &XEXP (tem, 0), opnum, type,
4707 ind_levels, insn);
4708 if (address_reloaded)
4709 *address_reloaded = i;
4710 }
4711 return tem;
4712 }
4713
4714 /* If the subreg contains a reg that will be converted to a mem,
4715 convert the subreg to a narrower memref now.
4716 Otherwise, we would get (subreg (mem ...) ...),
4717 which would force reload of the mem.
4718
4719 We also need to do this if there is an equivalent MEM that is
4720 not offsettable. In that case, alter_subreg would produce an
4721 invalid address on big-endian machines.
4722
4723 For machines that extend byte loads, we must not reload using
4724 a wider mode if we have a paradoxical SUBREG. find_reloads will
4725 force a reload in that case. So we should not do anything here. */
4726
4727 if (regno >= FIRST_PSEUDO_REGISTER
4728 #ifdef LOAD_EXTEND_OP
4729 && (GET_MODE_SIZE (GET_MODE (x))
4730 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4731 #endif
4732 && (reg_equiv_address[regno] != 0
4733 || (reg_equiv_mem[regno] != 0
4734 && (! strict_memory_address_p (GET_MODE (x),
4735 XEXP (reg_equiv_mem[regno], 0))
4736 || ! offsettable_memref_p (reg_equiv_mem[regno])
4737 || num_not_at_initial_offset))))
4738 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4739 insn);
4740 }
4741
4742 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4743 {
4744 if (fmt[i] == 'e')
4745 {
4746 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4747 ind_levels, is_set_dest, insn,
4748 address_reloaded);
4749 /* If we have replaced a reg with its equivalent memory loc -
4750 that can still be handled here e.g. if it's in a paradoxical
4751 subreg - we must make the change in a copy, rather than using
4752 a destructive change. This way, find_reloads can still elect
4753 not to do the change. */
4754 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4755 {
4756 x = shallow_copy_rtx (x);
4757 copied = 1;
4758 }
4759 XEXP (x, i) = new_part;
4760 }
4761 }
4762 return x;
4763 }
4764
4765 /* Return a mem ref for the memory equivalent of reg REGNO.
4766 This mem ref is not shared with anything. */
4767
4768 static rtx
4769 make_memloc (rtx ad, int regno)
4770 {
4771 /* We must rerun eliminate_regs, in case the elimination
4772 offsets have changed. */
4773 rtx tem
4774 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], VOIDmode, NULL_RTX),
4775 0);
4776
4777 /* If TEM might contain a pseudo, we must copy it to avoid
4778 modifying it when we do the substitution for the reload. */
4779 if (rtx_varies_p (tem, 0))
4780 tem = copy_rtx (tem);
4781
4782 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4783 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4784
4785 /* Copy the result if it's still the same as the equivalence, to avoid
4786 modifying it when we do the substitution for the reload. */
4787 if (tem == reg_equiv_memory_loc[regno])
4788 tem = copy_rtx (tem);
4789 return tem;
4790 }
4791
4792 /* Returns true if AD could be turned into a valid memory reference
4793 to mode MODE by reloading the part pointed to by PART into a
4794 register. */
4795
4796 static int
4797 maybe_memory_address_p (enum machine_mode mode, rtx ad, rtx *part)
4798 {
4799 int retv;
4800 rtx tem = *part;
4801 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4802
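/* Temporarily substitute a placeholder register, numbered just past every
register currently in use, for *PART and test the address non-strictly;
if it passes, reloading *PART into a suitable register would make AD a
valid address. *PART is restored before returning. */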
4803 *part = reg;
4804 retv = memory_address_p (mode, ad);
4805 *part = tem;
4806
4807 return retv;
4808 }
4809
4810 /* Record all reloads needed for handling memory address AD
4811 which appears in *LOC in a memory reference to mode MODE
4812 which itself is found in location *MEMREFLOC.
4813 Note that we take shortcuts assuming that no multi-reg machine mode
4814 occurs as part of an address.
4815
4816 OPNUM and TYPE specify the purpose of this reload.
4817
4818 IND_LEVELS says how many levels of indirect addressing this machine
4819 supports.
4820
4821 INSN, if nonzero, is the insn in which we do the reload. It is used
4822 to determine if we may generate output reloads, and where to put USEs
4823 for pseudos that we have to replace with stack slots.
4824
4825 Value is one if this address is reloaded or replaced as a whole; it is
4826 zero if the top level of this address was not reloaded or replaced, and
4827 it is -1 if it may or may not have been reloaded or replaced.
4828
4829 Note that there is no verification that the address will be valid after
4830 this routine does its work. Instead, we rely on the fact that the address
4831 was valid when reload started. So we need only undo things that reload
4832 could have broken. These are wrong register types, pseudos not allocated
4833 to a hard register, and frame pointer elimination. */
4834
4835 static int
4836 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4837 rtx *loc, int opnum, enum reload_type type,
4838 int ind_levels, rtx insn)
4839 {
4840 int regno;
4841 int removed_and = 0;
4842 int op_index;
4843 rtx tem;
4844
4845 /* If the address is a register, see if it is a legitimate address and
4846 reload if not. We first handle the cases where we need not reload
4847 or where we must reload in a non-standard way. */
4848
4849 if (REG_P (ad))
4850 {
4851 regno = REGNO (ad);
4852
4853 if (reg_equiv_constant[regno] != 0)
4854 {
4855 find_reloads_address_part (reg_equiv_constant[regno], loc,
4856 base_reg_class (mode, MEM, SCRATCH),
4857 GET_MODE (ad), opnum, type, ind_levels);
4858 return 1;
4859 }
4860
4861 tem = reg_equiv_memory_loc[regno];
4862 if (tem != 0)
4863 {
4864 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4865 {
4866 tem = make_memloc (ad, regno);
4867 if (! strict_memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
4868 {
4869 rtx orig = tem;
4870
4871 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4872 &XEXP (tem, 0), opnum,
4873 ADDR_TYPE (type), ind_levels, insn);
4874 if (!rtx_equal_p (tem, orig))
4875 push_reg_equiv_alt_mem (regno, tem);
4876 }
4877 /* We can avoid a reload if the register's equivalent memory
4878 expression is valid as an indirect memory address.
4879 But not all addresses are valid in a mem used as an indirect
4880 address: only reg or reg+constant. */
4881
4882 if (ind_levels > 0
4883 && strict_memory_address_p (mode, tem)
4884 && (REG_P (XEXP (tem, 0))
4885 || (GET_CODE (XEXP (tem, 0)) == PLUS
4886 && REG_P (XEXP (XEXP (tem, 0), 0))
4887 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4888 {
4889 /* If TEM is not the same as what we'll be replacing the
4890 pseudo with after reload, put a USE in front of INSN
4891 in the final reload pass. */
4892 if (replace_reloads
4893 && num_not_at_initial_offset
4894 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4895 {
4896 *loc = tem;
4897 /* We mark the USE with QImode so that we
4898 recognize it as one that can be safely
4899 deleted at the end of reload. */
4900 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4901 insn), QImode);
4902
4903 /* This doesn't really count as replacing the address
4904 as a whole, since it is still a memory access. */
4905 }
4906 return 0;
4907 }
4908 ad = tem;
4909 }
4910 }
4911
4912 /* The only remaining case where we can avoid a reload is if this is a
4913 hard register that is valid as a base register and which is not the
4914 subject of a CLOBBER in this insn. */
4915
4916 else if (regno < FIRST_PSEUDO_REGISTER
4917 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4918 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4919 return 0;
4920
4921 /* If we do not have one of the cases above, we must do the reload. */
4922 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4923 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4924 return 1;
4925 }
4926
4927 if (strict_memory_address_p (mode, ad))
4928 {
4929 /* The address appears valid, so reloads are not needed.
4930 But the address may contain an eliminable register.
4931 This can happen because a machine with indirect addressing
4932 may consider a pseudo register by itself a valid address even when
4933 it has failed to get a hard reg.
4934 So do a tree-walk to find and eliminate all such regs. */
4935
4936 /* But first quickly dispose of a common case. */
4937 if (GET_CODE (ad) == PLUS
4938 && CONST_INT_P (XEXP (ad, 1))
4939 && REG_P (XEXP (ad, 0))
4940 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4941 return 0;
4942
4943 subst_reg_equivs_changed = 0;
4944 *loc = subst_reg_equivs (ad, insn);
4945
4946 if (! subst_reg_equivs_changed)
4947 return 0;
4948
4949 /* Check result for validity after substitution. */
4950 if (strict_memory_address_p (mode, ad))
4951 return 0;
4952 }
4953
4954 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4955 do
4956 {
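/* If the target defines LEGITIMIZE_RELOAD_ADDRESS, it is expected to
jump to the WIN label below once it has pushed reloads that make AD
legitimate; otherwise we fall through and leave this dummy loop via
the break. */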
4957 if (memrefloc)
4958 {
4959 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4960 ind_levels, win);
4961 }
4962 break;
4963 win:
4964 *memrefloc = copy_rtx (*memrefloc);
4965 XEXP (*memrefloc, 0) = ad;
4966 move_replacements (&ad, &XEXP (*memrefloc, 0));
4967 return -1;
4968 }
4969 while (0);
4970 #endif
4971
4972 /* The address is not valid. We have to figure out why. First see if
4973 we have an outer AND and remove it if so. Then analyze what's inside. */
4974
4975 if (GET_CODE (ad) == AND)
4976 {
4977 removed_and = 1;
4978 loc = &XEXP (ad, 0);
4979 ad = *loc;
4980 }
4981
4982 /* One possibility for why the address is invalid is that it is itself
4983 a MEM. This can happen when the frame pointer is being eliminated, a
4984 pseudo is not allocated to a hard register, and the offset between the
4985 frame and stack pointers is not its initial value. In that case the
4986 pseudo will have been replaced by a MEM referring to the
4987 stack pointer. */
4988 if (MEM_P (ad))
4989 {
4990 /* First ensure that the address in this MEM is valid. Then, unless
4991 indirect addresses are valid, reload the MEM into a register. */
4992 tem = ad;
4993 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
4994 opnum, ADDR_TYPE (type),
4995 ind_levels == 0 ? 0 : ind_levels - 1, insn);
4996
4997 /* If tem was changed, then we must create a new memory reference to
4998 hold it and store it back into memrefloc. */
4999 if (tem != ad && memrefloc)
5000 {
5001 *memrefloc = copy_rtx (*memrefloc);
5002 copy_replacements (tem, XEXP (*memrefloc, 0));
5003 loc = &XEXP (*memrefloc, 0);
5004 if (removed_and)
5005 loc = &XEXP (*loc, 0);
5006 }
5007
5008 /* Check the same cases as for the indirect addresses above, except
5009 that we can allow pseudos and a MEM since they should have been
5010 taken care of above. */
5011
5012 if (ind_levels == 0
5013 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5014 || MEM_P (XEXP (tem, 0))
5015 || ! (REG_P (XEXP (tem, 0))
5016 || (GET_CODE (XEXP (tem, 0)) == PLUS
5017 && REG_P (XEXP (XEXP (tem, 0), 0))
5018 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5019 {
5020 /* Must use TEM here, not AD, since it is the one that will
5021 have any subexpressions reloaded, if needed. */
5022 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5023 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5024 VOIDmode, 0,
5025 0, opnum, type);
5026 return ! removed_and;
5027 }
5028 else
5029 return 0;
5030 }
5031
5032 /* If we have the address of a stack slot but it's not valid because the
5033 displacement is too large, compute the sum in a register.
5034 Handle all base registers here, not just fp/ap/sp, because on some
5035 targets (namely SH) we can also get too large displacements from
5036 big-endian corrections. */
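/* For example, if the target only accepts displacements in [-128, 127],
a frame-pointer-relative address with offset 1000 is handled here:
either the constant is reloaded into an index register (when
double_reg_address_ok) or the whole sum is reloaded into a base reg. */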
5037 else if (GET_CODE (ad) == PLUS
5038 && REG_P (XEXP (ad, 0))
5039 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5040 && CONST_INT_P (XEXP (ad, 1))
5041 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5042 CONST_INT))
5043
5044 {
5045 /* Unshare the MEM rtx so we can safely alter it. */
5046 if (memrefloc)
5047 {
5048 *memrefloc = copy_rtx (*memrefloc);
5049 loc = &XEXP (*memrefloc, 0);
5050 if (removed_and)
5051 loc = &XEXP (*loc, 0);
5052 }
5053
5054 if (double_reg_address_ok)
5055 {
5056 /* Unshare the sum as well. */
5057 *loc = ad = copy_rtx (ad);
5058
5059 /* Reload the displacement into an index reg.
5060 We assume the frame pointer or arg pointer is a base reg. */
5061 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5062 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5063 type, ind_levels);
5064 return 0;
5065 }
5066 else
5067 {
5068 /* If the sum of two regs is not necessarily valid,
5069 reload the sum into a base reg.
5070 That will at least work. */
5071 find_reloads_address_part (ad, loc,
5072 base_reg_class (mode, MEM, SCRATCH),
5073 Pmode, opnum, type, ind_levels);
5074 }
5075 return ! removed_and;
5076 }
5077
5078 /* If we have an indexed stack slot, there are three possible reasons why
5079 it might be invalid: The index might need to be reloaded, the address
5080 might have been made by frame pointer elimination and hence have a
5081 constant out of range, or both reasons might apply.
5082
5083 We can easily check for an index needing reload, but even if that is the
5084 case, we might also have an invalid constant. To avoid making the
5085 conservative assumption and requiring two reloads, we see if this address
5086 is valid when not interpreted strictly. If it is, the only problem is
5087 that the index needs a reload and find_reloads_address_1 will take care
5088 of it.
5089
5090 Handle all base registers here, not just fp/ap/sp, because on some
5091 targets (namely SPARC) we can also get invalid addresses from preventive
5092 subreg big-endian corrections made by find_reloads_toplev. We
5093 can also get expressions involving LO_SUM (rather than PLUS) from
5094 find_reloads_subreg_address.
5095
5096 If we decide to do something, it must be that `double_reg_address_ok'
5097 is true. We generate a reload of the base register + constant and
5098 rework the sum so that the reload register will be added to the index.
5099 This is safe because we know the address isn't shared.
5100
5101 We check for the base register as both the first and second operand of
5102 the innermost PLUS and/or LO_SUM. */
5103
5104 for (op_index = 0; op_index < 2; ++op_index)
5105 {
5106 rtx operand, addend;
5107 enum rtx_code inner_code;
5108
5109 if (GET_CODE (ad) != PLUS)
5110 continue;
5111
5112 inner_code = GET_CODE (XEXP (ad, 0));
5113 if (!(GET_CODE (ad) == PLUS
5114 && CONST_INT_P (XEXP (ad, 1))
5115 && (inner_code == PLUS || inner_code == LO_SUM)))
5116 continue;
5117
5118 operand = XEXP (XEXP (ad, 0), op_index);
5119 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5120 continue;
5121
5122 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5123
5124 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5125 GET_CODE (addend))
5126 || operand == frame_pointer_rtx
5127 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5128 || operand == hard_frame_pointer_rtx
5129 #endif
5130 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5131 || operand == arg_pointer_rtx
5132 #endif
5133 || operand == stack_pointer_rtx)
5134 && ! maybe_memory_address_p (mode, ad,
5135 &XEXP (XEXP (ad, 0), 1 - op_index)))
5136 {
5137 rtx offset_reg;
5138 enum reg_class cls;
5139
5140 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5141
5142 /* Form the adjusted address. */
5143 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5144 ad = gen_rtx_PLUS (GET_MODE (ad),
5145 op_index == 0 ? offset_reg : addend,
5146 op_index == 0 ? addend : offset_reg);
5147 else
5148 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5149 op_index == 0 ? offset_reg : addend,
5150 op_index == 0 ? addend : offset_reg);
5151 *loc = ad;
5152
5153 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5154 find_reloads_address_part (XEXP (ad, op_index),
5155 &XEXP (ad, op_index), cls,
5156 GET_MODE (ad), opnum, type, ind_levels);
5157 find_reloads_address_1 (mode,
5158 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5159 GET_CODE (XEXP (ad, op_index)),
5160 &XEXP (ad, 1 - op_index), opnum,
5161 type, 0, insn);
5162
5163 return 0;
5164 }
5165 }
5166
5167 /* See if address becomes valid when an eliminable register
5168 in a sum is replaced. */
5169
5170 tem = ad;
5171 if (GET_CODE (ad) == PLUS)
5172 tem = subst_indexed_address (ad);
5173 if (tem != ad && strict_memory_address_p (mode, tem))
5174 {
5175 /* Ok, we win that way. Replace any additional eliminable
5176 registers. */
5177
5178 subst_reg_equivs_changed = 0;
5179 tem = subst_reg_equivs (tem, insn);
5180
5181 /* Make sure that didn't make the address invalid again. */
5182
5183 if (! subst_reg_equivs_changed || strict_memory_address_p (mode, tem))
5184 {
5185 *loc = tem;
5186 return 0;
5187 }
5188 }
5189
5190 /* If constants aren't valid addresses, reload the constant address
5191 into a register. */
5192 if (CONSTANT_P (ad) && ! strict_memory_address_p (mode, ad))
5193 {
5194 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5195 Unshare it so we can safely alter it. */
5196 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5197 && CONSTANT_POOL_ADDRESS_P (ad))
5198 {
5199 *memrefloc = copy_rtx (*memrefloc);
5200 loc = &XEXP (*memrefloc, 0);
5201 if (removed_and)
5202 loc = &XEXP (*loc, 0);
5203 }
5204
5205 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5206 Pmode, opnum, type, ind_levels);
5207 return ! removed_and;
5208 }
5209
5210 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5211 ind_levels, insn);
5212 }
5213 \f
5214 /* Find all pseudo regs appearing in AD
5215 that are eliminable in favor of equivalent values
5216 and do not have hard regs; replace them by their equivalents.
5217 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5218 front of it for pseudos that we have to replace with stack slots. */
5219
5220 static rtx
5221 subst_reg_equivs (rtx ad, rtx insn)
5222 {
5223 RTX_CODE code = GET_CODE (ad);
5224 int i;
5225 const char *fmt;
5226
5227 switch (code)
5228 {
5229 case HIGH:
5230 case CONST_INT:
5231 case CONST:
5232 case CONST_DOUBLE:
5233 case CONST_FIXED:
5234 case CONST_VECTOR:
5235 case SYMBOL_REF:
5236 case LABEL_REF:
5237 case PC:
5238 case CC0:
5239 return ad;
5240
5241 case REG:
5242 {
5243 int regno = REGNO (ad);
5244
5245 if (reg_equiv_constant[regno] != 0)
5246 {
5247 subst_reg_equivs_changed = 1;
5248 return reg_equiv_constant[regno];
5249 }
5250 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5251 {
5252 rtx mem = make_memloc (ad, regno);
5253 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5254 {
5255 subst_reg_equivs_changed = 1;
5256 /* We mark the USE with QImode so that we recognize it
5257 as one that can be safely deleted at the end of
5258 reload. */
5259 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5260 QImode);
5261 return mem;
5262 }
5263 }
5264 }
5265 return ad;
5266
5267 case PLUS:
5268 /* Quickly dispose of a common case. */
5269 if (XEXP (ad, 0) == frame_pointer_rtx
5270 && CONST_INT_P (XEXP (ad, 1)))
5271 return ad;
5272 break;
5273
5274 default:
5275 break;
5276 }
5277
5278 fmt = GET_RTX_FORMAT (code);
5279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5280 if (fmt[i] == 'e')
5281 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5282 return ad;
5283 }
5284 \f
5285 /* Compute the sum of X and Y, making canonicalizations assumed in an
5286 address, namely: sum constant integers, surround the sum of two
5287 constants with a CONST, put the constant as the second operand, and
5288 group the constant on the outermost sum.
5289
5290 This routine assumes both inputs are already in canonical form. */
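/* For example, form_sum ((plus (reg 65) (const_int 4)), (const_int 8))
yields (plus (reg 65) (const_int 12)), while the sum of two symbolic
constants is wrapped in a CONST; the register number 65 is merely
illustrative. */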
5291
5292 rtx
5293 form_sum (rtx x, rtx y)
5294 {
5295 rtx tem;
5296 enum machine_mode mode = GET_MODE (x);
5297
5298 if (mode == VOIDmode)
5299 mode = GET_MODE (y);
5300
5301 if (mode == VOIDmode)
5302 mode = Pmode;
5303
5304 if (CONST_INT_P (x))
5305 return plus_constant (y, INTVAL (x));
5306 else if (CONST_INT_P (y))
5307 return plus_constant (x, INTVAL (y));
5308 else if (CONSTANT_P (x))
5309 tem = x, x = y, y = tem;
5310
5311 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5312 return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
5313
5314 /* Note that if the operands of Y are specified in the opposite
5315 order in the recursive calls below, infinite recursion will occur. */
5316 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5317 return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
5318
5319 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5320 constant will have been placed second. */
5321 if (CONSTANT_P (x) && CONSTANT_P (y))
5322 {
5323 if (GET_CODE (x) == CONST)
5324 x = XEXP (x, 0);
5325 if (GET_CODE (y) == CONST)
5326 y = XEXP (y, 0);
5327
5328 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5329 }
5330
5331 return gen_rtx_PLUS (mode, x, y);
5332 }
5333 \f
5334 /* If ADDR is a sum containing a pseudo register that should be
5335 replaced with a constant (from reg_equiv_constant),
5336 return the result of doing so, and also apply the associative
5337 law so that the result is more likely to be a valid address.
5338 (But it is not guaranteed to be one.)
5339
5340 Note that at most one register is replaced, even if more are
5341 replaceable. Also, we try to put the result into a canonical form
5342 so it is more likely to be a valid address.
5343
5344 In all other cases, return ADDR. */
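/* For example, if pseudo 66 is equivalent to (symbol_ref "x"), then
(plus (plus (reg 65) (reg 66)) (const_int 4)) becomes
(plus (reg 65) (const (plus (symbol_ref "x") (const_int 4)))).
The register numbers are merely illustrative. */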
5345
5346 static rtx
5347 subst_indexed_address (rtx addr)
5348 {
5349 rtx op0 = 0, op1 = 0, op2 = 0;
5350 rtx tem;
5351 int regno;
5352
5353 if (GET_CODE (addr) == PLUS)
5354 {
5355 /* Try to find a register to replace. */
5356 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5357 if (REG_P (op0)
5358 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5359 && reg_renumber[regno] < 0
5360 && reg_equiv_constant[regno] != 0)
5361 op0 = reg_equiv_constant[regno];
5362 else if (REG_P (op1)
5363 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5364 && reg_renumber[regno] < 0
5365 && reg_equiv_constant[regno] != 0)
5366 op1 = reg_equiv_constant[regno];
5367 else if (GET_CODE (op0) == PLUS
5368 && (tem = subst_indexed_address (op0)) != op0)
5369 op0 = tem;
5370 else if (GET_CODE (op1) == PLUS
5371 && (tem = subst_indexed_address (op1)) != op1)
5372 op1 = tem;
5373 else
5374 return addr;
5375
5376 /* Pick out up to three things to add. */
5377 if (GET_CODE (op1) == PLUS)
5378 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5379 else if (GET_CODE (op0) == PLUS)
5380 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5381
5382 /* Compute the sum. */
5383 if (op2 != 0)
5384 op1 = form_sum (op1, op2);
5385 if (op1 != 0)
5386 op0 = form_sum (op0, op1);
5387
5388 return op0;
5389 }
5390 return addr;
5391 }
5392 \f
5393 /* Update the REG_INC notes for an insn. It updates all REG_INC
5394 notes for the instruction which refer to REGNO, making them refer
5395 to the reload number instead.
5396
5397 INSN is the insn for which any REG_INC notes need updating.
5398
5399 REGNO is the register number which has been reloaded.
5400
5401 RELOADNUM is the reload number. */
5402
5403 static void
5404 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5405 int reloadnum ATTRIBUTE_UNUSED)
5406 {
5407 #ifdef AUTO_INC_DEC
5408 rtx link;
5409
5410 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5411 if (REG_NOTE_KIND (link) == REG_INC
5412 && (int) REGNO (XEXP (link, 0)) == regno)
5413 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5414 #endif
5415 }
5416 \f
5417 /* Record the pseudo registers we must reload into hard registers in a
5418 subexpression of a would-be memory address, X referring to a value
5419 in mode MODE. (This function is not called if the address we find
5420 is strictly valid.)
5421
5422 CONTEXT = 1 means we are considering regs as index regs,
5423 = 0 means we are considering them as base regs.
5424 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5425 or an autoinc code.
5426 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5427 is the code of the index part of the address. Otherwise, pass SCRATCH
5428 for this argument.
5429 OPNUM and TYPE specify the purpose of any reloads made.
5430
5431 IND_LEVELS says how many levels of indirect addressing are
5432 supported at this point in the address.
5433
5434 INSN, if nonzero, is the insn in which we do the reload. It is used
5435 to determine if we may generate output reloads.
5436
5437 We return nonzero if X, as a whole, is reloaded or replaced. */
5438
5439 /* Note that we take shortcuts assuming that no multi-reg machine mode
5440 occurs as part of an address.
5441 Also, this is not fully machine-customizable; it works for machines
5442 such as VAXen and 68000's and 32000's, but other possible machines
5443 could have addressing modes that this does not handle right.
5444 If you add push_reload calls here, you need to make sure gen_reload
5445 handles those cases gracefully. */
5446
5447 static int
5448 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5449 enum rtx_code outer_code, enum rtx_code index_code,
5450 rtx *loc, int opnum, enum reload_type type,
5451 int ind_levels, rtx insn)
5452 {
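/* Nonzero if register REGNO may be used in the current address CONTEXT:
as an index register when CONTEXT is 1, or as a base register (qualified
by the OUTER and INDEX codes) when CONTEXT is 0. */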
5453 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5454 ((CONTEXT) == 0 \
5455 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5456 : REGNO_OK_FOR_INDEX_P (REGNO))
5457
5458 enum reg_class context_reg_class;
5459 RTX_CODE code = GET_CODE (x);
5460
5461 if (context == 1)
5462 context_reg_class = INDEX_REG_CLASS;
5463 else
5464 context_reg_class = base_reg_class (mode, outer_code, index_code);
5465
5466 switch (code)
5467 {
5468 case PLUS:
5469 {
5470 rtx orig_op0 = XEXP (x, 0);
5471 rtx orig_op1 = XEXP (x, 1);
5472 RTX_CODE code0 = GET_CODE (orig_op0);
5473 RTX_CODE code1 = GET_CODE (orig_op1);
5474 rtx op0 = orig_op0;
5475 rtx op1 = orig_op1;
5476
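/* If either addend is a SUBREG of a hard register, classify it below as
the underlying hard register (adjusted by the subreg offset); the
original ORIG_OP0/ORIG_OP1 are still what get reloaded. */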
5477 if (GET_CODE (op0) == SUBREG)
5478 {
5479 op0 = SUBREG_REG (op0);
5480 code0 = GET_CODE (op0);
5481 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5482 op0 = gen_rtx_REG (word_mode,
5483 (REGNO (op0) +
5484 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5485 GET_MODE (SUBREG_REG (orig_op0)),
5486 SUBREG_BYTE (orig_op0),
5487 GET_MODE (orig_op0))));
5488 }
5489
5490 if (GET_CODE (op1) == SUBREG)
5491 {
5492 op1 = SUBREG_REG (op1);
5493 code1 = GET_CODE (op1);
5494 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5495 /* ??? Why is this given op1's mode and above for
5496 ??? op0 SUBREGs we use word_mode? */
5497 op1 = gen_rtx_REG (GET_MODE (op1),
5498 (REGNO (op1) +
5499 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5500 GET_MODE (SUBREG_REG (orig_op1)),
5501 SUBREG_BYTE (orig_op1),
5502 GET_MODE (orig_op1))));
5503 }
5504 /* A PLUS in the index register position can be created only as a result of
5505 register rematerialization for an expression like &localvar*4. Reload it.
5506 It may be possible to combine the displacement on the outer level,
5507 but it is probably not worthwhile to do so. */
5508 if (context == 1)
5509 {
5510 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5511 opnum, ADDR_TYPE (type), ind_levels, insn);
5512 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5513 context_reg_class,
5514 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5515 return 1;
5516 }
5517
5518 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5519 || code0 == ZERO_EXTEND || code1 == MEM)
5520 {
5521 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5522 &XEXP (x, 0), opnum, type, ind_levels,
5523 insn);
5524 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5525 &XEXP (x, 1), opnum, type, ind_levels,
5526 insn);
5527 }
5528
5529 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5530 || code1 == ZERO_EXTEND || code0 == MEM)
5531 {
5532 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5533 &XEXP (x, 0), opnum, type, ind_levels,
5534 insn);
5535 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5536 &XEXP (x, 1), opnum, type, ind_levels,
5537 insn);
5538 }
5539
5540 else if (code0 == CONST_INT || code0 == CONST
5541 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5542 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5543 &XEXP (x, 1), opnum, type, ind_levels,
5544 insn);
5545
5546 else if (code1 == CONST_INT || code1 == CONST
5547 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5548 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5549 &XEXP (x, 0), opnum, type, ind_levels,
5550 insn);
5551
5552 else if (code0 == REG && code1 == REG)
5553 {
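/* Both addends are registers. If either assignment of base and index
roles already satisfies the target, leave the address alone; otherwise
recurse on whichever operand does not fit its role (or on both), so
that it gets reloaded if necessary. */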
5554 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5555 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5556 return 0;
5557 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5558 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5559 return 0;
5560 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5561 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5562 &XEXP (x, 1), opnum, type, ind_levels,
5563 insn);
5564 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5565 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5566 &XEXP (x, 0), opnum, type, ind_levels,
5567 insn);
5568 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5569 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5570 &XEXP (x, 0), opnum, type, ind_levels,
5571 insn);
5572 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5573 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5576 else
5577 {
5578 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5579 &XEXP (x, 0), opnum, type, ind_levels,
5580 insn);
5581 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5582 &XEXP (x, 1), opnum, type, ind_levels,
5583 insn);
5584 }
5585 }
5586
5587 else if (code0 == REG)
5588 {
5589 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5590 &XEXP (x, 0), opnum, type, ind_levels,
5591 insn);
5592 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5593 &XEXP (x, 1), opnum, type, ind_levels,
5594 insn);
5595 }
5596
5597 else if (code1 == REG)
5598 {
5599 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5600 &XEXP (x, 1), opnum, type, ind_levels,
5601 insn);
5602 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5603 &XEXP (x, 0), opnum, type, ind_levels,
5604 insn);
5605 }
5606 }
5607
5608 return 0;
5609
5610 case POST_MODIFY:
5611 case PRE_MODIFY:
5612 {
5613 rtx op0 = XEXP (x, 0);
5614 rtx op1 = XEXP (x, 1);
5615 enum rtx_code index_code;
5616 int regno;
5617 int reloadnum;
5618
5619 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5620 return 0;
5621
5622 /* Currently, we only support {PRE,POST}_MODIFY constructs
5623 where a base register is {inc,dec}remented by the contents
5624 of another register or by a constant value. Thus, these
5625 operands must match. */
5626 gcc_assert (op0 == XEXP (op1, 0));
5627
5628 /* Require index register (or constant). Let's just handle the
5629 register case in the meantime... If the target allows
5630 auto-modify by a constant then we could try replacing a pseudo
5631 register with its equivalent constant where applicable.
5632
5633 We also handle the case where the register was eliminated
5634 resulting in a PLUS subexpression.
5635
5636 If we later decide to reload the whole PRE_MODIFY or
5637 POST_MODIFY, inc_for_reload might clobber the reload register
5638 before reading the index. The index register might therefore
5639 need to live longer than a TYPE reload normally would, so be
5640 conservative and class it as RELOAD_OTHER. */
5641 if ((REG_P (XEXP (op1, 1))
5642 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5643 || GET_CODE (XEXP (op1, 1)) == PLUS)
5644 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5645 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5646 ind_levels, insn);
5647
5648 gcc_assert (REG_P (XEXP (op1, 0)));
5649
5650 regno = REGNO (XEXP (op1, 0));
5651 index_code = GET_CODE (XEXP (op1, 1));
5652
5653 /* A register that is incremented cannot be constant! */
5654 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5655 || reg_equiv_constant[regno] == 0);
5656
5657 /* Handle a register that is equivalent to a memory location
5658 which cannot be addressed directly. */
5659 if (reg_equiv_memory_loc[regno] != 0
5660 && (reg_equiv_address[regno] != 0
5661 || num_not_at_initial_offset))
5662 {
5663 rtx tem = make_memloc (XEXP (x, 0), regno);
5664
5665 if (reg_equiv_address[regno]
5666 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5667 {
5668 rtx orig = tem;
5669
5670 /* First reload the memory location's address.
5671 We can't use ADDR_TYPE (type) here, because we need to
5672 write back the value after reading it, hence we actually
5673 need two registers. */
5674 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5675 &XEXP (tem, 0), opnum,
5676 RELOAD_OTHER,
5677 ind_levels, insn);
5678
5679 if (!rtx_equal_p (tem, orig))
5680 push_reg_equiv_alt_mem (regno, tem);
5681
5682 /* Then reload the memory location into a base
5683 register. */
5684 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5685 &XEXP (op1, 0),
5686 base_reg_class (mode, code,
5687 index_code),
5688 GET_MODE (x), GET_MODE (x), 0,
5689 0, opnum, RELOAD_OTHER);
5690
5691 update_auto_inc_notes (this_insn, regno, reloadnum);
5692 return 0;
5693 }
5694 }
5695
5696 if (reg_renumber[regno] >= 0)
5697 regno = reg_renumber[regno];
5698
5699 /* We require a base register here... */
5700 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5701 {
5702 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5703 &XEXP (op1, 0), &XEXP (x, 0),
5704 base_reg_class (mode, code, index_code),
5705 GET_MODE (x), GET_MODE (x), 0, 0,
5706 opnum, RELOAD_OTHER);
5707
5708 update_auto_inc_notes (this_insn, regno, reloadnum);
5709 return 0;
5710 }
5711 }
5712 return 0;
5713
5714 case POST_INC:
5715 case POST_DEC:
5716 case PRE_INC:
5717 case PRE_DEC:
5718 if (REG_P (XEXP (x, 0)))
5719 {
5720 int regno = REGNO (XEXP (x, 0));
5721 int value = 0;
5722 rtx x_orig = x;
5723
5724 /* A register that is incremented cannot be constant! */
5725 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5726 || reg_equiv_constant[regno] == 0);
5727
5728 /* Handle a register that is equivalent to a memory location
5729 which cannot be addressed directly. */
5730 if (reg_equiv_memory_loc[regno] != 0
5731 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5732 {
5733 rtx tem = make_memloc (XEXP (x, 0), regno);
5734 if (reg_equiv_address[regno]
5735 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5736 {
5737 rtx orig = tem;
5738
5739 /* First reload the memory location's address.
5740 We can't use ADDR_TYPE (type) here, because we need to
5741 write back the value after reading it, hence we actually
5742 need two registers. */
5743 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5744 &XEXP (tem, 0), opnum, type,
5745 ind_levels, insn);
5746 if (!rtx_equal_p (tem, orig))
5747 push_reg_equiv_alt_mem (regno, tem);
5748 /* Put this inside a new increment-expression. */
5749 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5750 /* Proceed to reload that, as if it contained a register. */
5751 }
5752 }
5753
5754 /* If we have a hard register that is ok in this incdec context,
5755 don't make a reload. If the register isn't nice enough for
5756 autoincdec, we can reload it. But if an autoincrement of a
5757 register that we here verified as playing nice still isn't "valid"
5758 in the surrounding context, it must be that no autoincrement is "valid".
5759 If that is true and something made an autoincrement anyway,
5760 this must be a special context where one is allowed.
5761 (For example, a "push" instruction.)
5762 We can't improve this address, so leave it alone. */
5763
5764 /* Otherwise, reload the autoincrement into a suitable hard reg
5765 and record how much to increment by. */
5766
5767 if (reg_renumber[regno] >= 0)
5768 regno = reg_renumber[regno];
5769 if (regno >= FIRST_PSEUDO_REGISTER
5770 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5771 index_code))
5772 {
5773 int reloadnum;
5774
5775 /* If we can output the register afterwards, do so; this
5776 saves the extra update.
5777 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5778 CALL_INSN - and it does not set CC0.
5779 But don't do this if we cannot directly address the
5780 memory location, since this will make it harder to
5781 reuse address reloads, and increases register pressure.
5782 Also don't do this if we can probably update x directly. */
5783 rtx equiv = (MEM_P (XEXP (x, 0))
5784 ? XEXP (x, 0)
5785 : reg_equiv_mem[regno]);
5786 int icode = (int) optab_handler (add_optab, Pmode)->insn_code;
5787 if (insn && NONJUMP_INSN_P (insn) && equiv
5788 && memory_operand (equiv, GET_MODE (equiv))
5789 #ifdef HAVE_cc0
5790 && ! sets_cc0_p (PATTERN (insn))
5791 #endif
5792 && ! (icode != CODE_FOR_nothing
5793 && ((*insn_data[icode].operand[0].predicate)
5794 (equiv, Pmode))
5795 && ((*insn_data[icode].operand[1].predicate)
5796 (equiv, Pmode))))
5797 {
5798 /* We use the original pseudo for loc, so that
5799 emit_reload_insns() knows which pseudo this
5800 reload refers to and updates the pseudo rtx, not
5801 its equivalent memory location, as well as the
5802 corresponding entry in reg_last_reload_reg. */
5803 loc = &XEXP (x_orig, 0);
5804 x = XEXP (x, 0);
5805 reloadnum
5806 = push_reload (x, x, loc, loc,
5807 context_reg_class,
5808 GET_MODE (x), GET_MODE (x), 0, 0,
5809 opnum, RELOAD_OTHER);
5810 }
5811 else
5812 {
5813 reloadnum
5814 = push_reload (x, x, loc, (rtx*) 0,
5815 context_reg_class,
5816 GET_MODE (x), GET_MODE (x), 0, 0,
5817 opnum, type);
5818 rld[reloadnum].inc
5819 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5820
5821 value = 1;
5822 }
5823
5824 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5825 reloadnum);
5826 }
5827 return value;
5828 }
5829 return 0;
5830
5831 case TRUNCATE:
5832 case SIGN_EXTEND:
5833 case ZERO_EXTEND:
5834 /* Look for parts to reload in the inner expression and reload them
5835 too, in addition to this operation. Reloading all inner parts in
5836 addition to this one shouldn't be necessary, but at this point,
5837 we don't know if we can possibly omit any part that *can* be
5838 reloaded. Targets that are better off reloading just either part
5839 (or perhaps even a different part of an outer expression), should
5840 define LEGITIMIZE_RELOAD_ADDRESS. */
5841 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5842 context, code, SCRATCH, &XEXP (x, 0), opnum,
5843 type, ind_levels, insn);
5844 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5845 context_reg_class,
5846 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5847 return 1;
5848
5849 case MEM:
5850 /* This is probably the result of a substitution, by eliminate_regs, of
5851 an equivalent address for a pseudo that was not allocated to a hard
5852 register. Verify that the specified address is valid and reload it
5853 into a register.
5854
5855 Since we know we are going to reload this item, don't decrement for
5856 the indirection level.
5857
5858 Note that this is actually conservative: it would be slightly more
5859 efficient to use the value of SPILL_INDIRECT_LEVELS from
5860 reload1.c here. */
5861
5862 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5863 opnum, ADDR_TYPE (type), ind_levels, insn);
5864 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5865 context_reg_class,
5866 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5867 return 1;
5868
5869 case REG:
5870 {
5871 int regno = REGNO (x);
5872
5873 if (reg_equiv_constant[regno] != 0)
5874 {
5875 find_reloads_address_part (reg_equiv_constant[regno], loc,
5876 context_reg_class,
5877 GET_MODE (x), opnum, type, ind_levels);
5878 return 1;
5879 }
5880
5881 #if 0 /* This might cause code in reload1.c to delete a prior output-reload
5882 that feeds this insn. */
5883 if (reg_equiv_mem[regno] != 0)
5884 {
5885 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5886 context_reg_class,
5887 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5888 return 1;
5889 }
5890 #endif
5891
5892 if (reg_equiv_memory_loc[regno]
5893 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5894 {
5895 rtx tem = make_memloc (x, regno);
5896 if (reg_equiv_address[regno] != 0
5897 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5898 {
5899 x = tem;
5900 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5901 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5902 ind_levels, insn);
5903 if (!rtx_equal_p (x, tem))
5904 push_reg_equiv_alt_mem (regno, x);
5905 }
5906 }
5907
5908 if (reg_renumber[regno] >= 0)
5909 regno = reg_renumber[regno];
5910
5911 if (regno >= FIRST_PSEUDO_REGISTER
5912 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5913 index_code))
5914 {
5915 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5916 context_reg_class,
5917 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5918 return 1;
5919 }
5920
5921 /* If a register appearing in an address is the subject of a CLOBBER
5922 in this insn, reload it into some other register to be safe.
5923 The CLOBBER is supposed to make the register unavailable
5924 from before this insn to after it. */
5925 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5926 {
5927 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5928 context_reg_class,
5929 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5930 return 1;
5931 }
5932 }
5933 return 0;
5934
5935 case SUBREG:
5936 if (REG_P (SUBREG_REG (x)))
5937 {
5938 /* If this is a SUBREG of a hard register and the resulting register
5939 is of the wrong class, reload the whole SUBREG. This avoids
5940 needless copies if SUBREG_REG is multi-word. */
5941 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5942 {
5943 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5944
5945 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5946 index_code))
5947 {
5948 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5949 context_reg_class,
5950 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5951 return 1;
5952 }
5953 }
5954 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5955 is larger than the class size, then reload the whole SUBREG. */
5956 else
5957 {
5958 enum reg_class rclass = context_reg_class;
5959 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5960 > reg_class_size[rclass])
5961 {
5962 x = find_reloads_subreg_address (x, 0, opnum,
5963 ADDR_TYPE (type),
5964 ind_levels, insn);
5965 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
5966 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5967 return 1;
5968 }
5969 }
5970 }
5971 break;
5972
5973 default:
5974 break;
5975 }
5976
5977 {
5978 const char *fmt = GET_RTX_FORMAT (code);
5979 int i;
5980
5981 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5982 {
5983 if (fmt[i] == 'e')
5984 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
5985 we get here. */
5986 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
5987 &XEXP (x, i), opnum, type, ind_levels, insn);
5988 }
5989 }
5990
5991 #undef REG_OK_FOR_CONTEXT
5992 return 0;
5993 }
5994 \f
5995 /* X, which is found at *LOC, is a part of an address that needs to be
5996 reloaded into a register of class RCLASS. If X is a constant, or if
5997 X is a PLUS that contains a constant, check that the constant is a
5998 legitimate operand and that we are supposed to be able to load
5999 it into the register.
6000
6001 If not, force the constant into memory and reload the MEM instead.
6002
6003 MODE is the mode to use, in case X is an integer constant.
6004
6005 OPNUM and TYPE describe the purpose of any reloads made.
6006
6007 IND_LEVELS says how many levels of indirect addressing this machine
6008 supports. */
6009
6010 static void
6011 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6012 enum machine_mode mode, int opnum,
6013 enum reload_type type, int ind_levels)
6014 {
6015 if (CONSTANT_P (x)
6016 && (! LEGITIMATE_CONSTANT_P (x)
6017 || PREFERRED_RELOAD_CLASS (x, rclass) == NO_REGS))
6018 {
6019 x = force_const_mem (mode, x);
6020 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6021 opnum, type, ind_levels, 0);
6022 }
6023
6024 else if (GET_CODE (x) == PLUS
6025 && CONSTANT_P (XEXP (x, 1))
6026 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6027 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), rclass) == NO_REGS))
6028 {
6029 rtx tem;
6030
6031 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6032 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6033 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6034 opnum, type, ind_levels, 0);
6035 }
6036
6037 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6038 mode, VOIDmode, 0, 0, opnum, type);
6039 }
6040 \f
6041 /* X, a subreg of a pseudo, is a part of an address that needs to be
6042 reloaded.
6043
6044 If the pseudo is equivalent to a memory location that cannot be directly
6045 addressed, make the necessary address reloads.
6046
6047 If address reloads have been necessary, or if the address is changed
6048 by register elimination, return the rtx of the memory location;
6049 otherwise, return X.
6050
6051 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6052 memory location.
6053
6054 OPNUM and TYPE identify the purpose of the reload.
6055
6056 IND_LEVELS says how many levels of indirect addressing are
6057 supported at this point in the address.
6058
6059 INSN, if nonzero, is the insn in which we do the reload. It is used
6060 to determine where to put USEs for pseudos that we have to replace with
6061 stack slots. */
6062
6063 static rtx
6064 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6065 enum reload_type type, int ind_levels, rtx insn)
6066 {
6067 int regno = REGNO (SUBREG_REG (x));
6068
6069 if (reg_equiv_memory_loc[regno])
6070 {
6071 /* If the address is not directly addressable, or if the address is not
6072 offsettable, then it must be replaced. */
6073 if (! force_replace
6074 && (reg_equiv_address[regno]
6075 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6076 force_replace = 1;
6077
6078 if (force_replace || num_not_at_initial_offset)
6079 {
6080 rtx tem = make_memloc (SUBREG_REG (x), regno);
6081
6082 /* If the address changes because of register elimination, then
6083 it must be replaced. */
6084 if (force_replace
6085 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6086 {
6087 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6088 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6089 int offset;
6090 rtx orig = tem;
6091 int reloaded;
6092
6093 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6094 hold the correct (negative) byte offset. */
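/* For example, a paradoxical (subreg:DI (reg:SI P) 0) on a big-endian
target uses offset -4, so that the widened access keeps the original
word in its low-order half. */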
6095 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6096 offset = inner_size - outer_size;
6097 else
6098 offset = SUBREG_BYTE (x);
6099
6100 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6101 PUT_MODE (tem, GET_MODE (x));
6102 if (MEM_OFFSET (tem))
6103 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6104
6105 /* If this was a paradoxical subreg that we replaced, the
6106 resulting memory must be sufficiently aligned to allow
6107 us to widen the mode of the memory. */
6108 if (outer_size > inner_size)
6109 {
6110 rtx base;
6111
6112 base = XEXP (tem, 0);
6113 if (GET_CODE (base) == PLUS)
6114 {
6115 if (CONST_INT_P (XEXP (base, 1))
6116 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6117 return x;
6118 base = XEXP (base, 0);
6119 }
6120 if (!REG_P (base)
6121 || (REGNO_POINTER_ALIGN (REGNO (base))
6122 < outer_size * BITS_PER_UNIT))
6123 return x;
6124 }
6125
6126 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6127 XEXP (tem, 0), &XEXP (tem, 0),
6128 opnum, type, ind_levels, insn);
6129 /* ??? Do we need to handle nonzero offsets somehow? */
6130 if (!offset && !rtx_equal_p (tem, orig))
6131 push_reg_equiv_alt_mem (regno, tem);
6132
6133 /* For some processors an address may be valid in the
6134 original mode but not in a smaller mode. For
6135 example, ARM accepts a scaled index register in
6136 SImode but not in HImode. Note that this is only
6137 a problem if the address in reg_equiv_mem is already
6138 invalid in the new mode; other cases would be fixed
6139 by find_reloads_address as usual.
6140
6141 ??? We attempt to handle such cases here by doing an
6142 additional reload of the full address after the
6143 usual processing by find_reloads_address. Note that
6144 this may not work in the general case, but it seems
6145 to cover the cases where this situation currently
6146 occurs. A more general fix might be to reload the
6147 *value* instead of the address, but this would not
6148 be expected by the callers of this routine as-is.
6149
6150 If find_reloads_address already completely replaced
6151 the address, there is nothing further to do. */
6152 if (reloaded == 0
6153 && reg_equiv_mem[regno] != 0
6154 && !strict_memory_address_p (GET_MODE (x),
6155 XEXP (reg_equiv_mem[regno], 0)))
6156 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6157 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6158 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6159 opnum, type);
6160
6161 /* If this is not a toplevel operand, find_reloads doesn't see
6162 this substitution. We have to emit a USE of the pseudo so
6163 that delete_output_reload can see it. */
6164 if (replace_reloads && recog_data.operand[opnum] != x)
6165 /* We mark the USE with QImode so that we recognize it
6166 as one that can be safely deleted at the end of
6167 reload. */
6168 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6169 SUBREG_REG (x)),
6170 insn), QImode);
6171 x = tem;
6172 }
6173 }
6174 }
6175 return x;
6176 }
6177 \f
6178 /* Substitute into the current INSN the registers into which we have reloaded
6179 the things that need reloading. The array `replacements'
6180 contains the locations of all pointers that must be changed
6181 and says what to replace them with.
6182
6183 INSN is the insn being processed. */
6184
6185 void
6186 subst_reloads (rtx insn)
6187 {
6188 int i;
6189
6190 for (i = 0; i < n_replacements; i++)
6191 {
6192 struct replacement *r = &replacements[i];
6193 rtx reloadreg = rld[r->what].reg_rtx;
6194 if (reloadreg)
6195 {
6196 #ifdef DEBUG_RELOAD
6197 /* This checking takes a very long time on some platforms
6198 causing the gcc.c-torture/compile/limits-fnargs.c test
6199 to time out during testing. See PR 31850.
6200
6201 Internal consistency test. Check that we don't modify
6202 anything in the equivalence arrays. Whenever something from
6203 those arrays needs to be reloaded, it must be unshared before
6204 being substituted into; the equivalence must not be modified.
6205 Otherwise, if the equivalence is used after that, it will
6206 have been modified, and the thing substituted (probably a
6207 register) is likely overwritten and not a usable equivalence. */
6208 int check_regno;
6209
6210 for (check_regno = 0; check_regno < max_regno; check_regno++)
6211 {
6212 #define CHECK_MODF(ARRAY) \
6213 gcc_assert (!ARRAY[check_regno] \
6214 || !loc_mentioned_in_p (r->where, \
6215 ARRAY[check_regno]))
6216
6217 CHECK_MODF (reg_equiv_constant);
6218 CHECK_MODF (reg_equiv_memory_loc);
6219 CHECK_MODF (reg_equiv_address);
6220 CHECK_MODF (reg_equiv_mem);
6221 #undef CHECK_MODF
6222 }
6223 #endif /* DEBUG_RELOAD */
6224
6225 /* If we're replacing a LABEL_REF with a register, there must
6226 already be an indication (to e.g. flow) which label this
6227 register refers to. */
6228 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6229 || !JUMP_P (insn)
6230 || find_reg_note (insn,
6231 REG_LABEL_OPERAND,
6232 XEXP (*r->where, 0))
6233 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6234
6235 /* Encapsulate RELOADREG so its machine mode matches what
6236 used to be there. Note that gen_lowpart_common will
6237 do the wrong thing if RELOADREG is multi-word. RELOADREG
6238 will always be a REG here. */
6239 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6240 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6241
6242 /* If we are putting this into a SUBREG and RELOADREG is a
6243 SUBREG, we would be making nested SUBREGs, so we have to fix
6244 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6245
6246 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6247 {
6248 if (GET_MODE (*r->subreg_loc)
6249 == GET_MODE (SUBREG_REG (reloadreg)))
6250 *r->subreg_loc = SUBREG_REG (reloadreg);
6251 else
6252 {
6253 int final_offset =
6254 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6255
6256 /* When working with SUBREGs the rule is that the byte
6257 offset must be a multiple of the SUBREG's mode. */
6258 final_offset = (final_offset /
6259 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6260 final_offset = (final_offset *
6261 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6262
6263 *r->where = SUBREG_REG (reloadreg);
6264 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6265 }
6266 }
6267 else
6268 *r->where = reloadreg;
6269 }
6270 /* If reload got no reg and isn't optional, something's wrong. */
6271 else
6272 gcc_assert (rld[r->what].optional);
6273 }
6274 }
6275 \f
6276 /* Make a copy of any replacements being done into X and move those
6277 copies to locations in Y, a copy of X. */
6278
6279 void
6280 copy_replacements (rtx x, rtx y)
6281 {
6282 /* We can't support X being a SUBREG because we might then need to know its
6283 location if something inside it was replaced. */
6284 gcc_assert (GET_CODE (x) != SUBREG);
6285
6286 copy_replacements_1 (&x, &y, n_replacements);
6287 }
6288
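/* Recursive worker for copy_replacements. Duplicate, for the copy at
*PY, any of the first ORIG_REPLACEMENTS replacements that apply to
locations within *PX, then recurse over the corresponding operands. */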
6289 static void
6290 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6291 {
6292 int i, j;
6293 rtx x, y;
6294 struct replacement *r;
6295 enum rtx_code code;
6296 const char *fmt;
6297
6298 for (j = 0; j < orig_replacements; j++)
6299 {
6300 if (replacements[j].subreg_loc == px)
6301 {
6302 r = &replacements[n_replacements++];
6303 r->where = replacements[j].where;
6304 r->subreg_loc = py;
6305 r->what = replacements[j].what;
6306 r->mode = replacements[j].mode;
6307 }
6308 else if (replacements[j].where == px)
6309 {
6310 r = &replacements[n_replacements++];
6311 r->where = py;
6312 r->subreg_loc = 0;
6313 r->what = replacements[j].what;
6314 r->mode = replacements[j].mode;
6315 }
6316 }
6317
6318 x = *px;
6319 y = *py;
6320 code = GET_CODE (x);
6321 fmt = GET_RTX_FORMAT (code);
6322
6323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6324 {
6325 if (fmt[i] == 'e')
6326 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6327 else if (fmt[i] == 'E')
6328 for (j = XVECLEN (x, i); --j >= 0; )
6329 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6330 orig_replacements);
6331 }
6332 }
6333
6334 /* Change any replacements being done to *X to be done to *Y. */
6335
6336 void
6337 move_replacements (rtx *x, rtx *y)
6338 {
6339 int i;
6340
6341 for (i = 0; i < n_replacements; i++)
6342 if (replacements[i].subreg_loc == x)
6343 replacements[i].subreg_loc = y;
6344 else if (replacements[i].where == x)
6345 {
6346 replacements[i].where = y;
6347 replacements[i].subreg_loc = 0;
6348 }
6349 }
6350 \f
6351 /* If LOC was scheduled to be replaced by something, return the replacement.
6352 Otherwise, return *LOC. */
6353
6354 rtx
6355 find_replacement (rtx *loc)
6356 {
6357 struct replacement *r;
6358
6359 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6360 {
6361 rtx reloadreg = rld[r->what].reg_rtx;
6362
6363 if (reloadreg && r->where == loc)
6364 {
6365 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6366 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6367
6368 return reloadreg;
6369 }
6370 else if (reloadreg && r->subreg_loc == loc)
6371 {
6372 /* RELOADREG must be either a REG or a SUBREG.
6373
6374 ??? Is it actually still ever a SUBREG? If so, why? */
6375
6376 if (REG_P (reloadreg))
6377 return gen_rtx_REG (GET_MODE (*loc),
6378 (REGNO (reloadreg) +
6379 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6380 GET_MODE (SUBREG_REG (*loc)),
6381 SUBREG_BYTE (*loc),
6382 GET_MODE (*loc))));
6383 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6384 return reloadreg;
6385 else
6386 {
6387 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6388
6389 /* When working with SUBREGs the rule is that the byte
6390 offset must be a multiple of the SUBREG's mode. */
6391 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6392 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6393 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6394 final_offset);
6395 }
6396 }
6397 }
6398
6399 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6400 what's inside and make a new rtl if so. */
6401 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6402 || GET_CODE (*loc) == MULT)
6403 {
6404 rtx x = find_replacement (&XEXP (*loc, 0));
6405 rtx y = find_replacement (&XEXP (*loc, 1));
6406
6407 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6408 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6409 }
6410
6411 return *loc;
6412 }
6413 \f
6414 /* Return nonzero if register in range [REGNO, ENDREGNO)
6415 appears either explicitly or implicitly in X
6416 other than being stored into (except for earlyclobber operands).
6417
6418 References contained within the substructure at LOC do not count.
6419 LOC may be zero, meaning don't ignore anything.
6420
6421 This is similar to refers_to_regno_p in rtlanal.c except that we
6422 look at equivalences for pseudos that didn't get hard registers. */
6423
6424 static int
6425 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6426 rtx x, rtx *loc)
6427 {
6428 int i;
6429 unsigned int r;
6430 RTX_CODE code;
6431 const char *fmt;
6432
6433 if (x == 0)
6434 return 0;
6435
6436 repeat:
6437 code = GET_CODE (x);
6438
6439 switch (code)
6440 {
6441 case REG:
6442 r = REGNO (x);
6443
6444 /* If this is a pseudo, a hard register must not have been allocated.
6445 X must therefore either be a constant or be in memory. */
6446 if (r >= FIRST_PSEUDO_REGISTER)
6447 {
6448 if (reg_equiv_memory_loc[r])
6449 return refers_to_regno_for_reload_p (regno, endregno,
6450 reg_equiv_memory_loc[r],
6451 (rtx*) 0);
6452
6453 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6454 return 0;
6455 }
6456
6457 return (endregno > r
6458 && regno < r + (r < FIRST_PSEUDO_REGISTER
6459 ? hard_regno_nregs[r][GET_MODE (x)]
6460 : 1));
6461
6462 case SUBREG:
6463 /* If this is a SUBREG of a hard reg, we can see exactly which
6464 registers are being modified. Otherwise, handle normally. */
6465 if (REG_P (SUBREG_REG (x))
6466 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6467 {
6468 unsigned int inner_regno = subreg_regno (x);
6469 unsigned int inner_endregno
6470 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6471 ? subreg_nregs (x) : 1);
6472
6473 return endregno > inner_regno && regno < inner_endregno;
6474 }
6475 break;
6476
6477 case CLOBBER:
6478 case SET:
6479 if (&SET_DEST (x) != loc
6480 /* Note that setting a SUBREG counts as referring to the REG it is in
6481 when that REG is a pseudo, but not when it is a hard register, since
6482 we can treat each word of a hard register individually. */
6483 && ((GET_CODE (SET_DEST (x)) == SUBREG
6484 && loc != &SUBREG_REG (SET_DEST (x))
6485 && REG_P (SUBREG_REG (SET_DEST (x)))
6486 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6487 && refers_to_regno_for_reload_p (regno, endregno,
6488 SUBREG_REG (SET_DEST (x)),
6489 loc))
6490 /* If the output is an earlyclobber operand, this is
6491 a conflict. */
6492 || ((!REG_P (SET_DEST (x))
6493 || earlyclobber_operand_p (SET_DEST (x)))
6494 && refers_to_regno_for_reload_p (regno, endregno,
6495 SET_DEST (x), loc))))
6496 return 1;
6497
6498 if (code == CLOBBER || loc == &SET_SRC (x))
6499 return 0;
6500 x = SET_SRC (x);
6501 goto repeat;
6502
6503 default:
6504 break;
6505 }
6506
6507 /* X does not match, so try its subexpressions. */
6508
6509 fmt = GET_RTX_FORMAT (code);
6510 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6511 {
6512 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6513 {
6514 if (i == 0)
6515 {
6516 x = XEXP (x, 0);
6517 goto repeat;
6518 }
6519 else
6520 if (refers_to_regno_for_reload_p (regno, endregno,
6521 XEXP (x, i), loc))
6522 return 1;
6523 }
6524 else if (fmt[i] == 'E')
6525 {
6526 int j;
6527 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6528 if (loc != &XVECEXP (x, i, j)
6529 && refers_to_regno_for_reload_p (regno, endregno,
6530 XVECEXP (x, i, j), loc))
6531 return 1;
6532 }
6533 }
6534 return 0;
6535 }
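/* Illustrative note (not part of the original source): the [REGNO, ENDREGNO)
   range above is half-open, so for a value occupying hard registers 2 and 3
   a caller would pass REGNO = 2 and ENDREGNO = 4; the REG case then reports
   a conflict whenever that range overlaps the registers covered by X.
   The register numbers are of course just an example.  */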
6536
6537 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6538 we check if any register number in X conflicts with the relevant register
6539 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6540 contains a MEM (we don't bother checking for memory addresses that can't
6541 conflict, because we expect that to be a rare case).
6542
6543 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6544 that we look at equivalences for pseudos that didn't get hard registers. */
6545
6546 int
6547 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6548 {
6549 int regno, endregno;
6550
6551 /* Overly conservative. */
6552 if (GET_CODE (x) == STRICT_LOW_PART
6553 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6554 x = XEXP (x, 0);
6555
6556 /* If either argument is a constant, then modifying X cannot affect IN. */
6557 if (CONSTANT_P (x) || CONSTANT_P (in))
6558 return 0;
6559 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6560 return refers_to_mem_for_reload_p (in);
6561 else if (GET_CODE (x) == SUBREG)
6562 {
6563 regno = REGNO (SUBREG_REG (x));
6564 if (regno < FIRST_PSEUDO_REGISTER)
6565 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6566 GET_MODE (SUBREG_REG (x)),
6567 SUBREG_BYTE (x),
6568 GET_MODE (x));
6569 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6570 ? subreg_nregs (x) : 1);
6571
6572 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6573 }
6574 else if (REG_P (x))
6575 {
6576 regno = REGNO (x);
6577
6578 /* If this is a pseudo, it must not have been assigned a hard register.
6579 Therefore, it must either be in memory or be a constant. */
6580
6581 if (regno >= FIRST_PSEUDO_REGISTER)
6582 {
6583 if (reg_equiv_memory_loc[regno])
6584 return refers_to_mem_for_reload_p (in);
6585 gcc_assert (reg_equiv_constant[regno]);
6586 return 0;
6587 }
6588
6589 endregno = END_HARD_REGNO (x);
6590
6591 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6592 }
6593 else if (MEM_P (x))
6594 return refers_to_mem_for_reload_p (in);
6595 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6596 || GET_CODE (x) == CC0)
6597 return reg_mentioned_p (x, in);
6598 else
6599 {
6600 gcc_assert (GET_CODE (x) == PLUS);
6601
6602 /* We actually want to know if X is mentioned somewhere inside IN.
6603 We must not say that (plus (sp) (const_int 124)) is in
6604 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6605 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6606 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6607 while (MEM_P (in))
6608 in = XEXP (in, 0);
6609 if (REG_P (in))
6610 return 0;
6611 else if (GET_CODE (in) == PLUS)
6612 return (rtx_equal_p (x, in)
6613 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6614 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6615 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6616 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6617 }
6618
6619 gcc_unreachable ();
6620 }
6621
6622 /* Return nonzero if anything in X contains a MEM. Also count pseudo
6623 registers whose equivalent is a memory location. */
6624
6625 static int
6626 refers_to_mem_for_reload_p (rtx x)
6627 {
6628 const char *fmt;
6629 int i;
6630
6631 if (MEM_P (x))
6632 return 1;
6633
6634 if (REG_P (x))
6635 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6636 && reg_equiv_memory_loc[REGNO (x)]);
6637
6638 fmt = GET_RTX_FORMAT (GET_CODE (x));
6639 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6640 if (fmt[i] == 'e'
6641 && (MEM_P (XEXP (x, i))
6642 || refers_to_mem_for_reload_p (XEXP (x, i))))
6643 return 1;
6644
6645 return 0;
6646 }
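/* Illustrative note (not part of the original source): besides an explicit
   (mem ...), refers_to_mem_for_reload_p also answers 1 for a pseudo such as
   (reg:SI 123) whose reg_equiv_memory_loc entry is set, since after reload
   that pseudo will live in memory.  The register number 123 is only an
   example.  */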
6647 \f
6648 /* Check the insns before INSN to see if there is a suitable register
6649 containing the same value as GOAL.
6650 If OTHER is -1, look for a register in class RCLASS.
6651 Otherwise, just see if register number OTHER shares GOAL's value.
6652
6653 Return an rtx for the register found, or zero if none is found.
6654
6655 If RELOAD_REG_P is (short *)1,
6656 we reject any hard reg that appears in reload_reg_rtx
6657 because such a hard reg is also needed coming into this insn.
6658
6659 If RELOAD_REG_P is any other nonzero value,
6660 it is a vector indexed by hard reg number
6661 and we reject any hard reg whose element in the vector is nonnegative
6662 as well as any that appears in reload_reg_rtx.
6663
6664 If GOAL is zero, then GOALREG is a register number; we look
6665 for an equivalent for that register.
6666
6667 MODE is the machine mode of the value we want an equivalence for.
6668 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6669
6670 This function is used by jump.c as well as in the reload pass.
6671
6672 If GOAL is the sum of the stack pointer and a constant, we treat it
6673 as if it were a constant except that sp is required to be unchanging. */
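/* Illustrative sketch (not part of the original source): a caller looking
   for a register that already holds the SImode contents of GOAL, while
   avoiding the registers already chosen in reload_reg_rtx, might write

     rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
                                 (short *) (HOST_WIDE_INT) 1, 0, SImode);

   The variables `goal', `insn' and `equiv' are hypothetical; the argument
   order and the (short *) 1 sentinel for RELOAD_REG_P come from the
   comment above.  */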
6674
6675 rtx
6676 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6677 short *reload_reg_p, int goalreg, enum machine_mode mode)
6678 {
6679 rtx p = insn;
6680 rtx goaltry, valtry, value, where;
6681 rtx pat;
6682 int regno = -1;
6683 int valueno;
6684 int goal_mem = 0;
6685 int goal_const = 0;
6686 int goal_mem_addr_varies = 0;
6687 int need_stable_sp = 0;
6688 int nregs;
6689 int valuenregs;
6690 int num = 0;
6691
6692 if (goal == 0)
6693 regno = goalreg;
6694 else if (REG_P (goal))
6695 regno = REGNO (goal);
6696 else if (MEM_P (goal))
6697 {
6698 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6699 if (MEM_VOLATILE_P (goal))
6700 return 0;
6701 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6702 return 0;
6703 /* An address with side effects must be reexecuted. */
6704 switch (code)
6705 {
6706 case POST_INC:
6707 case PRE_INC:
6708 case POST_DEC:
6709 case PRE_DEC:
6710 case POST_MODIFY:
6711 case PRE_MODIFY:
6712 return 0;
6713 default:
6714 break;
6715 }
6716 goal_mem = 1;
6717 }
6718 else if (CONSTANT_P (goal))
6719 goal_const = 1;
6720 else if (GET_CODE (goal) == PLUS
6721 && XEXP (goal, 0) == stack_pointer_rtx
6722 && CONSTANT_P (XEXP (goal, 1)))
6723 goal_const = need_stable_sp = 1;
6724 else if (GET_CODE (goal) == PLUS
6725 && XEXP (goal, 0) == frame_pointer_rtx
6726 && CONSTANT_P (XEXP (goal, 1)))
6727 goal_const = 1;
6728 else
6729 return 0;
6730
6731 num = 0;
6732 /* Scan insns back from INSN, looking for one that copies
6733 a value into or out of GOAL.
6734 Stop and give up if we reach a label. */
6735
6736 while (1)
6737 {
6738 p = PREV_INSN (p);
6739 if (p && DEBUG_INSN_P (p))
6740 continue;
6741 num++;
6742 if (p == 0 || LABEL_P (p)
6743 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6744 return 0;
6745
6746 if (NONJUMP_INSN_P (p)
6747 /* If we don't want spill regs ... */
6748 && (! (reload_reg_p != 0
6749 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6750 /* ... then ignore insns introduced by reload; they aren't
6751 useful and can cause results in reload_as_needed to be
6752 different from what they were when calculating the need for
6753 spills. If we notice an input-reload insn here, we will
6754 reject it below, but it might hide a usable equivalent.
6755 That makes bad code. It may even fail: perhaps no reg was
6756 spilled for this insn because it was assumed we would find
6757 that equivalent. */
6758 || INSN_UID (p) < reload_first_uid))
6759 {
6760 rtx tem;
6761 pat = single_set (p);
6762
6763 /* First check for something that sets some reg equal to GOAL. */
6764 if (pat != 0
6765 && ((regno >= 0
6766 && true_regnum (SET_SRC (pat)) == regno
6767 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6768 ||
6769 (regno >= 0
6770 && true_regnum (SET_DEST (pat)) == regno
6771 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6772 ||
6773 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6774 /* When looking for stack pointer + const,
6775 make sure we don't use a stack adjust. */
6776 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6777 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6778 || (goal_mem
6779 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6780 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6781 || (goal_mem
6782 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6783 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6784 /* If we are looking for a constant,
6785 and something equivalent to that constant was copied
6786 into a reg, we can use that reg. */
6787 || (goal_const && REG_NOTES (p) != 0
6788 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6789 && ((rtx_equal_p (XEXP (tem, 0), goal)
6790 && (valueno
6791 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6792 || (REG_P (SET_DEST (pat))
6793 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6794 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6795 && CONST_INT_P (goal)
6796 && 0 != (goaltry
6797 = operand_subword (XEXP (tem, 0), 0, 0,
6798 VOIDmode))
6799 && rtx_equal_p (goal, goaltry)
6800 && (valtry
6801 = operand_subword (SET_DEST (pat), 0, 0,
6802 VOIDmode))
6803 && (valueno = true_regnum (valtry)) >= 0)))
6804 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6805 NULL_RTX))
6806 && REG_P (SET_DEST (pat))
6807 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6808 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6809 && CONST_INT_P (goal)
6810 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6811 VOIDmode))
6812 && rtx_equal_p (goal, goaltry)
6813 && (valtry
6814 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6815 && (valueno = true_regnum (valtry)) >= 0)))
6816 {
6817 if (other >= 0)
6818 {
6819 if (valueno != other)
6820 continue;
6821 }
6822 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6823 continue;
6824 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6825 mode, valueno))
6826 continue;
6827 value = valtry;
6828 where = p;
6829 break;
6830 }
6831 }
6832 }
6833
6834 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6835 (or copying VALUE into GOAL, if GOAL is also a register).
6836 Now verify that VALUE is really valid. */
6837
6838 /* VALUENO is the register number of VALUE; a hard register. */
6839
6840 /* Don't try to re-use something that is killed in this insn. We want
6841 to be able to trust REG_UNUSED notes. */
6842 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6843 return 0;
6844
6845 /* If we propose to get the value from the stack pointer or if GOAL is
6846 a MEM based on the stack pointer, we need a stable SP. */
6847 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6848 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6849 goal)))
6850 need_stable_sp = 1;
6851
6852 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6853 if (GET_MODE (value) != mode)
6854 return 0;
6855
6856 /* Reject VALUE if it was loaded from GOAL
6857 and is also a register that appears in the address of GOAL. */
6858
6859 if (goal_mem && value == SET_DEST (single_set (where))
6860 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6861 goal, (rtx*) 0))
6862 return 0;
6863
6864 /* Reject registers that overlap GOAL. */
6865
6866 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6867 nregs = hard_regno_nregs[regno][mode];
6868 else
6869 nregs = 1;
6870 valuenregs = hard_regno_nregs[valueno][mode];
6871
6872 if (!goal_mem && !goal_const
6873 && regno + nregs > valueno && regno < valueno + valuenregs)
6874 return 0;
6875
6876 /* Reject VALUE if it is one of the regs reserved for reloads.
6877 Reload1 knows how to reuse them anyway, and it would get
6878 confused if we allocated one without its knowledge.
6879 (Now that insns introduced by reload are ignored above,
6880 this case shouldn't happen, but I'm not positive.) */
6881
6882 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6883 {
6884 int i;
6885 for (i = 0; i < valuenregs; ++i)
6886 if (reload_reg_p[valueno + i] >= 0)
6887 return 0;
6888 }
6889
6890 /* Reject VALUE if it is a register being used for an input reload
6891 even if it is not one of those reserved. */
6892
6893 if (reload_reg_p != 0)
6894 {
6895 int i;
6896 for (i = 0; i < n_reloads; i++)
6897 if (rld[i].reg_rtx != 0 && rld[i].in)
6898 {
6899 int regno1 = REGNO (rld[i].reg_rtx);
6900 int nregs1 = hard_regno_nregs[regno1]
6901 [GET_MODE (rld[i].reg_rtx)];
6902 if (regno1 < valueno + valuenregs
6903 && regno1 + nregs1 > valueno)
6904 return 0;
6905 }
6906 }
6907
6908 if (goal_mem)
6909 /* We must treat frame pointer as varying here,
6910 since it can vary--in a nonlocal goto as generated by expand_goto. */
6911 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6912
6913 /* Now verify that the values of GOAL and VALUE remain unaltered
6914 until INSN is reached. */
6915
6916 p = insn;
6917 while (1)
6918 {
6919 p = PREV_INSN (p);
6920 if (p == where)
6921 return value;
6922
6923 /* Don't trust the conversion past a function call
6924 if either of the two is in a call-clobbered register, or memory. */
6925 if (CALL_P (p))
6926 {
6927 int i;
6928
6929 if (goal_mem || need_stable_sp)
6930 return 0;
6931
6932 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6933 for (i = 0; i < nregs; ++i)
6934 if (call_used_regs[regno + i]
6935 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6936 return 0;
6937
6938 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6939 for (i = 0; i < valuenregs; ++i)
6940 if (call_used_regs[valueno + i]
6941 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6942 return 0;
6943 }
6944
6945 if (INSN_P (p))
6946 {
6947 pat = PATTERN (p);
6948
6949 /* Watch out for unspec_volatile, and volatile asms. */
6950 if (volatile_insn_p (pat))
6951 return 0;
6952
6953 /* If this insn P stores in either GOAL or VALUE, return 0.
6954 If GOAL is a memory ref and this insn writes memory, return 0.
6955 If GOAL is a memory ref and its address is not constant,
6956 and this insn P changes a register used in GOAL, return 0. */
6957
6958 if (GET_CODE (pat) == COND_EXEC)
6959 pat = COND_EXEC_CODE (pat);
6960 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6961 {
6962 rtx dest = SET_DEST (pat);
6963 while (GET_CODE (dest) == SUBREG
6964 || GET_CODE (dest) == ZERO_EXTRACT
6965 || GET_CODE (dest) == STRICT_LOW_PART)
6966 dest = XEXP (dest, 0);
6967 if (REG_P (dest))
6968 {
6969 int xregno = REGNO (dest);
6970 int xnregs;
6971 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6972 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6973 else
6974 xnregs = 1;
6975 if (xregno < regno + nregs && xregno + xnregs > regno)
6976 return 0;
6977 if (xregno < valueno + valuenregs
6978 && xregno + xnregs > valueno)
6979 return 0;
6980 if (goal_mem_addr_varies
6981 && reg_overlap_mentioned_for_reload_p (dest, goal))
6982 return 0;
6983 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6984 return 0;
6985 }
6986 else if (goal_mem && MEM_P (dest)
6987 && ! push_operand (dest, GET_MODE (dest)))
6988 return 0;
6989 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6990 && reg_equiv_memory_loc[regno] != 0)
6991 return 0;
6992 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6993 return 0;
6994 }
6995 else if (GET_CODE (pat) == PARALLEL)
6996 {
6997 int i;
6998 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6999 {
7000 rtx v1 = XVECEXP (pat, 0, i);
7001 if (GET_CODE (v1) == COND_EXEC)
7002 v1 = COND_EXEC_CODE (v1);
7003 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7004 {
7005 rtx dest = SET_DEST (v1);
7006 while (GET_CODE (dest) == SUBREG
7007 || GET_CODE (dest) == ZERO_EXTRACT
7008 || GET_CODE (dest) == STRICT_LOW_PART)
7009 dest = XEXP (dest, 0);
7010 if (REG_P (dest))
7011 {
7012 int xregno = REGNO (dest);
7013 int xnregs;
7014 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7015 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7016 else
7017 xnregs = 1;
7018 if (xregno < regno + nregs
7019 && xregno + xnregs > regno)
7020 return 0;
7021 if (xregno < valueno + valuenregs
7022 && xregno + xnregs > valueno)
7023 return 0;
7024 if (goal_mem_addr_varies
7025 && reg_overlap_mentioned_for_reload_p (dest,
7026 goal))
7027 return 0;
7028 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7029 return 0;
7030 }
7031 else if (goal_mem && MEM_P (dest)
7032 && ! push_operand (dest, GET_MODE (dest)))
7033 return 0;
7034 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7035 && reg_equiv_memory_loc[regno] != 0)
7036 return 0;
7037 else if (need_stable_sp
7038 && push_operand (dest, GET_MODE (dest)))
7039 return 0;
7040 }
7041 }
7042 }
7043
7044 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7045 {
7046 rtx link;
7047
7048 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7049 link = XEXP (link, 1))
7050 {
7051 pat = XEXP (link, 0);
7052 if (GET_CODE (pat) == CLOBBER)
7053 {
7054 rtx dest = SET_DEST (pat);
7055
7056 if (REG_P (dest))
7057 {
7058 int xregno = REGNO (dest);
7059 int xnregs
7060 = hard_regno_nregs[xregno][GET_MODE (dest)];
7061
7062 if (xregno < regno + nregs
7063 && xregno + xnregs > regno)
7064 return 0;
7065 else if (xregno < valueno + valuenregs
7066 && xregno + xnregs > valueno)
7067 return 0;
7068 else if (goal_mem_addr_varies
7069 && reg_overlap_mentioned_for_reload_p (dest,
7070 goal))
7071 return 0;
7072 }
7073
7074 else if (goal_mem && MEM_P (dest)
7075 && ! push_operand (dest, GET_MODE (dest)))
7076 return 0;
7077 else if (need_stable_sp
7078 && push_operand (dest, GET_MODE (dest)))
7079 return 0;
7080 }
7081 }
7082 }
7083
7084 #ifdef AUTO_INC_DEC
7085 /* If this insn auto-increments or auto-decrements
7086 either regno or valueno, return 0 now.
7087 If GOAL is a memory ref and its address is not constant,
7088 and this insn P increments a register used in GOAL, return 0. */
7089 {
7090 rtx link;
7091
7092 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7093 if (REG_NOTE_KIND (link) == REG_INC
7094 && REG_P (XEXP (link, 0)))
7095 {
7096 int incno = REGNO (XEXP (link, 0));
7097 if (incno < regno + nregs && incno >= regno)
7098 return 0;
7099 if (incno < valueno + valuenregs && incno >= valueno)
7100 return 0;
7101 if (goal_mem_addr_varies
7102 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7103 goal))
7104 return 0;
7105 }
7106 }
7107 #endif
7108 }
7109 }
7110 }
7111 \f
7112 /* Find a place where INCED appears in an increment or decrement operator
7113 within X, and return the amount INCED is incremented or decremented by.
7114 The value is always positive. */
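/* Illustrative note (not part of the original source): for
   (mem:SI (post_inc (reg X))) with INCED == (reg X) the function below
   returns the size of SImode, i.e. 4, while for a {PRE,POST}_MODIFY whose
   adjustment is (plus (reg X) (const_int -12)) it returns 12, the absolute
   value of the constant.  */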
7115
7116 static int
7117 find_inc_amount (rtx x, rtx inced)
7118 {
7119 enum rtx_code code = GET_CODE (x);
7120 const char *fmt;
7121 int i;
7122
7123 if (code == MEM)
7124 {
7125 rtx addr = XEXP (x, 0);
7126 if ((GET_CODE (addr) == PRE_DEC
7127 || GET_CODE (addr) == POST_DEC
7128 || GET_CODE (addr) == PRE_INC
7129 || GET_CODE (addr) == POST_INC)
7130 && XEXP (addr, 0) == inced)
7131 return GET_MODE_SIZE (GET_MODE (x));
7132 else if ((GET_CODE (addr) == PRE_MODIFY
7133 || GET_CODE (addr) == POST_MODIFY)
7134 && GET_CODE (XEXP (addr, 1)) == PLUS
7135 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7136 && XEXP (addr, 0) == inced
7137 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7138 {
7139 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7140 return i < 0 ? -i : i;
7141 }
7142 }
7143
7144 fmt = GET_RTX_FORMAT (code);
7145 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7146 {
7147 if (fmt[i] == 'e')
7148 {
7149 int tem = find_inc_amount (XEXP (x, i), inced);
7150 if (tem != 0)
7151 return tem;
7152 }
7153 if (fmt[i] == 'E')
7154 {
7155 int j;
7156 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7157 {
7158 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7159 if (tem != 0)
7160 return tem;
7161 }
7162 }
7163 }
7164
7165 return 0;
7166 }
7167 \f
7168 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7169 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7170
7171 #ifdef AUTO_INC_DEC
7172 static int
7173 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7174 rtx insn)
7175 {
7176 rtx link;
7177
7178 gcc_assert (insn);
7179
7180 if (! INSN_P (insn))
7181 return 0;
7182
7183 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7184 if (REG_NOTE_KIND (link) == REG_INC)
7185 {
7186 unsigned int test = (int) REGNO (XEXP (link, 0));
7187 if (test >= regno && test < endregno)
7188 return 1;
7189 }
7190 return 0;
7191 }
7192 #else
7193
7194 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7195
7196 #endif
7197
7198 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7199 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7200 REG_INC. REGNO must refer to a hard register. */
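/* Illustrative note (not part of the original source): with SETS == 0 only
   CLOBBERs of the hard registers covered by REGNO and MODE are reported;
   SETS == 1 additionally treats a SET of such a register as a clobber, and
   SETS == 2 also looks at REG_INC notes via reg_inc_found_and_valid_p,
   which is a no-op unless AUTO_INC_DEC is defined.  */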
7201
7202 int
7203 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7204 int sets)
7205 {
7206 unsigned int nregs, endregno;
7207
7208 /* regno must be a hard register. */
7209 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7210
7211 nregs = hard_regno_nregs[regno][mode];
7212 endregno = regno + nregs;
7213
7214 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7215 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7216 && REG_P (XEXP (PATTERN (insn), 0)))
7217 {
7218 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7219
7220 return test >= regno && test < endregno;
7221 }
7222
7223 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7224 return 1;
7225
7226 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7227 {
7228 int i = XVECLEN (PATTERN (insn), 0) - 1;
7229
7230 for (; i >= 0; i--)
7231 {
7232 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7233 if ((GET_CODE (elt) == CLOBBER
7234 || (sets == 1 && GET_CODE (elt) == SET))
7235 && REG_P (XEXP (elt, 0)))
7236 {
7237 unsigned int test = REGNO (XEXP (elt, 0));
7238
7239 if (test >= regno && test < endregno)
7240 return 1;
7241 }
7242 if (sets == 2
7243 && reg_inc_found_and_valid_p (regno, endregno, elt))
7244 return 1;
7245 }
7246 }
7247
7248 return 0;
7249 }
7250
7251 /* Find the low part, with mode MODE, of hard register RELOADREG. */
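/* Illustrative example (assumed register numbers, not from the original
   source): on a WORDS_BIG_ENDIAN target, if RELOADREG is (reg:DI 4)
   occupying hard registers 4 and 5, asking for SImode yields (reg:SI 5),
   because the least significant word lives in the higher-numbered
   register; on a little-endian-word target the result is (reg:SI 4).  */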
7252 rtx
7253 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7254 {
7255 int regno;
7256
7257 if (GET_MODE (reloadreg) == mode)
7258 return reloadreg;
7259
7260 regno = REGNO (reloadreg);
7261
7262 if (WORDS_BIG_ENDIAN)
7263 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7264 - (int) hard_regno_nregs[regno][mode];
7265
7266 return gen_rtx_REG (mode, regno);
7267 }
7268
7269 static const char *const reload_when_needed_name[] =
7270 {
7271 "RELOAD_FOR_INPUT",
7272 "RELOAD_FOR_OUTPUT",
7273 "RELOAD_FOR_INSN",
7274 "RELOAD_FOR_INPUT_ADDRESS",
7275 "RELOAD_FOR_INPADDR_ADDRESS",
7276 "RELOAD_FOR_OUTPUT_ADDRESS",
7277 "RELOAD_FOR_OUTADDR_ADDRESS",
7278 "RELOAD_FOR_OPERAND_ADDRESS",
7279 "RELOAD_FOR_OPADDR_ADDR",
7280 "RELOAD_OTHER",
7281 "RELOAD_FOR_OTHER_ADDRESS"
7282 };
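/* Note (not part of the original source): the strings above are indexed by
   rld[r].when_needed, so their order must match that of enum reload_type
   as declared in reload.h.  */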
7283
7284 /* These functions are used to print the variables set by `find_reloads'. */
7285
7286 void
7287 debug_reload_to_stream (FILE *f)
7288 {
7289 int r;
7290 const char *prefix;
7291
7292 if (! f)
7293 f = stderr;
7294 for (r = 0; r < n_reloads; r++)
7295 {
7296 fprintf (f, "Reload %d: ", r);
7297
7298 if (rld[r].in != 0)
7299 {
7300 fprintf (f, "reload_in (%s) = ",
7301 GET_MODE_NAME (rld[r].inmode));
7302 print_inline_rtx (f, rld[r].in, 24);
7303 fprintf (f, "\n\t");
7304 }
7305
7306 if (rld[r].out != 0)
7307 {
7308 fprintf (f, "reload_out (%s) = ",
7309 GET_MODE_NAME (rld[r].outmode));
7310 print_inline_rtx (f, rld[r].out, 24);
7311 fprintf (f, "\n\t");
7312 }
7313
7314 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7315
7316 fprintf (f, "%s (opnum = %d)",
7317 reload_when_needed_name[(int) rld[r].when_needed],
7318 rld[r].opnum);
7319
7320 if (rld[r].optional)
7321 fprintf (f, ", optional");
7322
7323 if (rld[r].nongroup)
7324 fprintf (f, ", nongroup");
7325
7326 if (rld[r].inc != 0)
7327 fprintf (f, ", inc by %d", rld[r].inc);
7328
7329 if (rld[r].nocombine)
7330 fprintf (f, ", can't combine");
7331
7332 if (rld[r].secondary_p)
7333 fprintf (f, ", secondary_reload_p");
7334
7335 if (rld[r].in_reg != 0)
7336 {
7337 fprintf (f, "\n\treload_in_reg: ");
7338 print_inline_rtx (f, rld[r].in_reg, 24);
7339 }
7340
7341 if (rld[r].out_reg != 0)
7342 {
7343 fprintf (f, "\n\treload_out_reg: ");
7344 print_inline_rtx (f, rld[r].out_reg, 24);
7345 }
7346
7347 if (rld[r].reg_rtx != 0)
7348 {
7349 fprintf (f, "\n\treload_reg_rtx: ");
7350 print_inline_rtx (f, rld[r].reg_rtx, 24);
7351 }
7352
7353 prefix = "\n\t";
7354 if (rld[r].secondary_in_reload != -1)
7355 {
7356 fprintf (f, "%ssecondary_in_reload = %d",
7357 prefix, rld[r].secondary_in_reload);
7358 prefix = ", ";
7359 }
7360
7361 if (rld[r].secondary_out_reload != -1)
7362 fprintf (f, "%ssecondary_out_reload = %d\n",
7363 prefix, rld[r].secondary_out_reload);
7364
7365 prefix = "\n\t";
7366 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7367 {
7368 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7369 insn_data[rld[r].secondary_in_icode].name);
7370 prefix = ", ";
7371 }
7372
7373 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7374 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7375 insn_data[rld[r].secondary_out_icode].name);
7376
7377 fprintf (f, "\n");
7378 }
7379 }
7380
7381 void
7382 debug_reload (void)
7383 {
7384 debug_reload_to_stream (stderr);
7385 }
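/* Illustrative note (not part of the original source): debug_reload and
   debug_reload_to_stream are intended to be called by hand from a debugger,
   e.g. "call debug_reload ()" at a breakpoint inside find_reloads, to dump
   the rld[] array in the format produced above.  */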