1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
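/* A minimal sketch of the calling sequence described above, roughly as the
   caller in reload1.c uses it (the argument names here are placeholders
   keyed to the descriptions above, not quotations of that file; see
   reload.h for the actual declarations):

       init_reload ();
       ...
       for each insn needing reloads:
	 find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	 ... choose a hard reg for each rld[j], record it in rld[j].reg_rtx,
	     and emit the needed load/store insns around the insn ...
	 subst_reloads (insn);
*/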
88 #define REG_OK_STRICT
89
90 #include "config.h"
91 #include "system.h"
92 #include "coretypes.h"
93 #include "tm.h"
94 #include "rtl.h"
95 #include "tm_p.h"
96 #include "insn-config.h"
97 #include "expr.h"
98 #include "optabs.h"
99 #include "recog.h"
100 #include "reload.h"
101 #include "regs.h"
102 #include "hard-reg-set.h"
103 #include "flags.h"
104 #include "real.h"
105 #include "output.h"
106 #include "function.h"
107 #include "toplev.h"
108 #include "params.h"
109 #include "target.h"
110
111 /* True if X is a constant that can be forced into the constant pool. */
112 #define CONST_POOL_OK_P(X) \
113 (CONSTANT_P (X) \
114 && GET_CODE (X) != HIGH \
115 && !targetm.cannot_force_const_mem (X))
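/* For example, a CONST_DOUBLE or a plain SYMBOL_REF normally satisfies
   CONST_POOL_OK_P, while a HIGH expression never does, and the target can
   reject further constants through targetm.cannot_force_const_mem.  */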
116 \f
117 /* All reloads of the current insn are recorded here. See reload.h for
118 comments. */
119 int n_reloads;
120 struct reload rld[MAX_RELOADS];
121
122 /* All the "earlyclobber" operands of the current insn
123 are recorded here. */
124 int n_earlyclobbers;
125 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
126
127 int reload_n_operands;
128
129 /* Replacing reloads.
130
131 If `replace_reloads' is nonzero, then as each reload is recorded
132 an entry is made for it in the table `replacements'.
133 Then later `subst_reloads' can look through that table and
134 perform all the replacements needed. */
135
136 /* Nonzero means record the places to replace. */
137 static int replace_reloads;
138
139 /* Each replacement is recorded with a structure like this. */
140 struct replacement
141 {
142 rtx *where; /* Location to store in */
143 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
144 a SUBREG; 0 otherwise. */
145 int what; /* which reload this is for */
146 enum machine_mode mode; /* mode it must have */
147 };
148
149 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
150
151 /* Number of replacements currently recorded. */
152 static int n_replacements;
153
154 /* Used to track what is modified by an operand. */
155 struct decomposition
156 {
157 int reg_flag; /* Nonzero if referencing a register. */
158 int safe; /* Nonzero if this can't conflict with anything. */
159 rtx base; /* Base address for MEM. */
160 HOST_WIDE_INT start; /* Starting offset or register number. */
161 HOST_WIDE_INT end; /* Ending offset or register number. */
162 };
163
164 #ifdef SECONDARY_MEMORY_NEEDED
165
166 /* Save MEMs needed to copy from one class of registers to another. One MEM
167 is used per mode, but normally only one or two modes are ever used.
168
169 We keep two versions, before and after register elimination. The one
170 after register elimination is recorded separately for each operand. This
171 is done in case the address is not valid, to be sure that we separately
172 reload each. */
173
174 static rtx secondary_memlocs[NUM_MACHINE_MODES];
175 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
176 static int secondary_memlocs_elim_used = 0;
177 #endif
178
179 /* The instruction we are doing reloads for;
180 so we can test whether a register dies in it. */
181 static rtx this_insn;
182
183 /* Nonzero if this instruction is a user-specified asm with operands. */
184 static int this_insn_is_asm;
185
186 /* If hard_regs_live_known is nonzero,
187 we can tell which hard regs are currently live,
188 at least enough to succeed in choosing dummy reloads. */
189 static int hard_regs_live_known;
190
191 /* Indexed by hard reg number,
192 element is nonnegative if hard reg has been spilled.
193 This vector is passed to `find_reloads' as an argument
194 and is not changed here. */
195 static short *static_reload_reg_p;
196
197 /* Set to 1 in subst_reg_equivs if it changes anything. */
198 static int subst_reg_equivs_changed;
199
200 /* On return from push_reload, holds the reload-number for the OUT
201 operand, which can be different from that for the input operand. */
202 static int output_reloadnum;
203
204 /* Compare two RTX's. */
205 #define MATCHES(x, y) \
206 (x == y || (x != 0 && (REG_P (x) \
207 ? REG_P (y) && REGNO (x) == REGNO (y) \
208 : rtx_equal_p (x, y) && ! side_effects_p (x))))
209
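/* As an illustration of MATCHES (derived from the definition above): two
   distinct REG rtx's for the same hard register compare equal, e.g.

       rtx a = gen_rtx_REG (SImode, 3);
       rtx b = gen_rtx_REG (SImode, 3);
       MATCHES (a, b)   -> nonzero, since both are REGs with REGNO 3

   whereas a MEM matches a structurally identical MEM only if it has no
   side effects, so a MEM with an autoincrement address matches only the
   very same rtx object.  */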
210 /* Indicates if two reload purposes are for similar enough things that we
211 can merge their reloads. */
212 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
213 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
214 || ((when1) == (when2) && (op1) == (op2)) \
215 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
216 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
217 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
218 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
219 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
220
221 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
222 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
223 ((when1) != (when2) \
224 || ! ((op1) == (op2) \
225 || (when1) == RELOAD_FOR_INPUT \
226 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
227 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
228
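/* A rough illustration of the two macros above (derived from their
   definitions, not an exhaustive table): two RELOAD_FOR_INPUT reloads may
   be merged even when they are for different operands, and the merged
   reload stays RELOAD_FOR_INPUT; merging anything with a RELOAD_OTHER
   reload is allowed, but the result is RELOAD_OTHER; two
   RELOAD_FOR_INPUT_ADDRESS reloads may be merged only when they are for
   the same operand.  */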
229 /* If we are going to reload an address, compute the reload type to
230 use. */
231 #define ADDR_TYPE(type) \
232 ((type) == RELOAD_FOR_INPUT_ADDRESS \
233 ? RELOAD_FOR_INPADDR_ADDRESS \
234 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
235 ? RELOAD_FOR_OUTADDR_ADDRESS \
236 : (type)))
237
238 #ifdef HAVE_SECONDARY_RELOADS
239 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
240 enum machine_mode, enum reload_type,
241 enum insn_code *);
242 #endif
243 static enum reg_class find_valid_class (enum machine_mode, int, unsigned int);
244 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
245 static void push_replacement (rtx *, int, enum machine_mode);
246 static void dup_replacements (rtx *, rtx *);
247 static void combine_reloads (void);
248 static int find_reusable_reload (rtx *, rtx, enum reg_class,
249 enum reload_type, int, int);
250 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
251 enum machine_mode, enum reg_class, int, int);
252 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
253 static struct decomposition decompose (rtx);
254 static int immune_p (rtx, rtx, struct decomposition);
255 static int alternative_allows_memconst (const char *, int);
256 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
257 int *);
258 static rtx make_memloc (rtx, int);
259 static int maybe_memory_address_p (enum machine_mode, rtx, rtx *);
260 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
261 int, enum reload_type, int, rtx);
262 static rtx subst_reg_equivs (rtx, rtx);
263 static rtx subst_indexed_address (rtx);
264 static void update_auto_inc_notes (rtx, int, int);
265 static int find_reloads_address_1 (enum machine_mode, rtx, int, rtx *,
266 int, enum reload_type, int, rtx);
267 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
268 enum machine_mode, int,
269 enum reload_type, int);
270 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
271 int, rtx);
272 static void copy_replacements_1 (rtx *, rtx *, int);
273 static int find_inc_amount (rtx, rtx);
274 \f
275 #ifdef HAVE_SECONDARY_RELOADS
276
277 /* Determine if any secondary reloads are needed for loading (if IN_P is
278 nonzero) or storing (if IN_P is zero) X to or from a reload register of
279 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
280 are needed, push them.
281
282 Return the reload number of the secondary reload we made, or -1 if
283 we didn't need one. *PICODE is set to the insn_code to use if we do
284 need a secondary reload. */
285
286 static int
287 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
288 enum reg_class reload_class,
289 enum machine_mode reload_mode, enum reload_type type,
290 enum insn_code *picode)
291 {
292 enum reg_class class = NO_REGS;
293 enum machine_mode mode = reload_mode;
294 enum insn_code icode = CODE_FOR_nothing;
295 enum reg_class t_class = NO_REGS;
296 enum machine_mode t_mode = VOIDmode;
297 enum insn_code t_icode = CODE_FOR_nothing;
298 enum reload_type secondary_type;
299 int s_reload, t_reload = -1;
300
301 if (type == RELOAD_FOR_INPUT_ADDRESS
302 || type == RELOAD_FOR_OUTPUT_ADDRESS
303 || type == RELOAD_FOR_INPADDR_ADDRESS
304 || type == RELOAD_FOR_OUTADDR_ADDRESS)
305 secondary_type = type;
306 else
307 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
308
309 *picode = CODE_FOR_nothing;
310
311 /* If X is a paradoxical SUBREG, use the inner value to determine both the
312 mode and object being reloaded. */
313 if (GET_CODE (x) == SUBREG
314 && (GET_MODE_SIZE (GET_MODE (x))
315 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
316 {
317 x = SUBREG_REG (x);
318 reload_mode = GET_MODE (x);
319 }
320
321 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
322 is still a pseudo-register by now, it *must* have an equivalent MEM
323 but we don't want to assume that), use that equivalent when seeing if
324 a secondary reload is needed since whether or not a reload is needed
325 might be sensitive to the form of the MEM. */
326
327 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
328 && reg_equiv_mem[REGNO (x)] != 0)
329 x = reg_equiv_mem[REGNO (x)];
330
331 #ifdef SECONDARY_INPUT_RELOAD_CLASS
332 if (in_p)
333 class = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
334 #endif
335
336 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
337 if (! in_p)
338 class = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
339 #endif
340
341 /* If we don't need any secondary registers, done. */
342 if (class == NO_REGS)
343 return -1;
344
345 /* Get a possible insn to use. If the predicate doesn't accept X, don't
346 use the insn. */
347
348 icode = (in_p ? reload_in_optab[(int) reload_mode]
349 : reload_out_optab[(int) reload_mode]);
350
351 if (icode != CODE_FOR_nothing
352 && insn_data[(int) icode].operand[in_p].predicate
353 && (! (insn_data[(int) icode].operand[in_p].predicate) (x, reload_mode)))
354 icode = CODE_FOR_nothing;
355
356 /* If we will be using an insn, see if it can directly handle the reload
357 register we will be using. If it can, the secondary reload is for a
358 scratch register. If it can't, we will use the secondary reload for
359 an intermediate register and require a tertiary reload for the scratch
360 register. */
361
362 if (icode != CODE_FOR_nothing)
363 {
364 /* If IN_P is nonzero, the reload register will be the output in
365 operand 0. If IN_P is zero, the reload register will be the input
366 in operand 1. Outputs should have an initial "=", which we must
367 skip. */
368
369 enum reg_class insn_class;
370
371 if (insn_data[(int) icode].operand[!in_p].constraint[0] == 0)
372 insn_class = ALL_REGS;
373 else
374 {
375 const char *insn_constraint
376 = &insn_data[(int) icode].operand[!in_p].constraint[in_p];
377 char insn_letter = *insn_constraint;
378 insn_class
379 = (insn_letter == 'r' ? GENERAL_REGS
380 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
381 insn_constraint));
382
383 gcc_assert (insn_class != NO_REGS);
384 gcc_assert (!in_p
385 || insn_data[(int) icode].operand[!in_p].constraint[0]
386 == '=');
387 }
388
389 /* The scratch register's constraint must start with "=&". */
390 gcc_assert (insn_data[(int) icode].operand[2].constraint[0] == '='
391 && insn_data[(int) icode].operand[2].constraint[1] == '&');
392
393 if (reg_class_subset_p (reload_class, insn_class))
394 mode = insn_data[(int) icode].operand[2].mode;
395 else
396 {
397 const char *t_constraint
398 = &insn_data[(int) icode].operand[2].constraint[2];
399 char t_letter = *t_constraint;
400 class = insn_class;
401 t_mode = insn_data[(int) icode].operand[2].mode;
402 t_class = (t_letter == 'r' ? GENERAL_REGS
403 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) t_letter,
404 t_constraint));
405 t_icode = icode;
406 icode = CODE_FOR_nothing;
407 }
408 }
409
410 /* This case isn't valid, so fail. Reload is allowed to use the same
411 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
412 in the case of a secondary register, we actually need two different
413 registers for correct code. We fail here to prevent the possibility of
414 silently generating incorrect code later.
415
416 The convention is that secondary input reloads are valid only if the
417 secondary_class is different from class. If you have such a case, you
418 cannot use secondary reloads; you must work around the problem some
419 other way.
420
421 Allow this when a reload_in/out pattern is being used. I.e. assume
422 that the generated code handles this case. */
423
424 gcc_assert (!in_p || class != reload_class || icode != CODE_FOR_nothing
425 || t_icode != CODE_FOR_nothing);
426
427 /* If we need a tertiary reload, see if we have one we can reuse or else
428 make a new one. */
429
430 if (t_class != NO_REGS)
431 {
432 for (t_reload = 0; t_reload < n_reloads; t_reload++)
433 if (rld[t_reload].secondary_p
434 && (reg_class_subset_p (t_class, rld[t_reload].class)
435 || reg_class_subset_p (rld[t_reload].class, t_class))
436 && ((in_p && rld[t_reload].inmode == t_mode)
437 || (! in_p && rld[t_reload].outmode == t_mode))
438 && ((in_p && (rld[t_reload].secondary_in_icode
439 == CODE_FOR_nothing))
440 || (! in_p && (rld[t_reload].secondary_out_icode
441 == CODE_FOR_nothing)))
442 && (reg_class_size[(int) t_class] == 1 || SMALL_REGISTER_CLASSES)
443 && MERGABLE_RELOADS (secondary_type,
444 rld[t_reload].when_needed,
445 opnum, rld[t_reload].opnum))
446 {
447 if (in_p)
448 rld[t_reload].inmode = t_mode;
449 if (! in_p)
450 rld[t_reload].outmode = t_mode;
451
452 if (reg_class_subset_p (t_class, rld[t_reload].class))
453 rld[t_reload].class = t_class;
454
455 rld[t_reload].opnum = MIN (rld[t_reload].opnum, opnum);
456 rld[t_reload].optional &= optional;
457 rld[t_reload].secondary_p = 1;
458 if (MERGE_TO_OTHER (secondary_type, rld[t_reload].when_needed,
459 opnum, rld[t_reload].opnum))
460 rld[t_reload].when_needed = RELOAD_OTHER;
461 }
462
463 if (t_reload == n_reloads)
464 {
465 /* We need to make a new tertiary reload for this register class. */
466 rld[t_reload].in = rld[t_reload].out = 0;
467 rld[t_reload].class = t_class;
468 rld[t_reload].inmode = in_p ? t_mode : VOIDmode;
469 rld[t_reload].outmode = ! in_p ? t_mode : VOIDmode;
470 rld[t_reload].reg_rtx = 0;
471 rld[t_reload].optional = optional;
472 rld[t_reload].inc = 0;
473 /* Maybe we could combine these, but it seems too tricky. */
474 rld[t_reload].nocombine = 1;
475 rld[t_reload].in_reg = 0;
476 rld[t_reload].out_reg = 0;
477 rld[t_reload].opnum = opnum;
478 rld[t_reload].when_needed = secondary_type;
479 rld[t_reload].secondary_in_reload = -1;
480 rld[t_reload].secondary_out_reload = -1;
481 rld[t_reload].secondary_in_icode = CODE_FOR_nothing;
482 rld[t_reload].secondary_out_icode = CODE_FOR_nothing;
483 rld[t_reload].secondary_p = 1;
484
485 n_reloads++;
486 }
487 }
488
489 /* See if we can reuse an existing secondary reload. */
490 for (s_reload = 0; s_reload < n_reloads; s_reload++)
491 if (rld[s_reload].secondary_p
492 && (reg_class_subset_p (class, rld[s_reload].class)
493 || reg_class_subset_p (rld[s_reload].class, class))
494 && ((in_p && rld[s_reload].inmode == mode)
495 || (! in_p && rld[s_reload].outmode == mode))
496 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
497 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
498 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
499 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
500 && (reg_class_size[(int) class] == 1 || SMALL_REGISTER_CLASSES)
501 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
502 opnum, rld[s_reload].opnum))
503 {
504 if (in_p)
505 rld[s_reload].inmode = mode;
506 if (! in_p)
507 rld[s_reload].outmode = mode;
508
509 if (reg_class_subset_p (class, rld[s_reload].class))
510 rld[s_reload].class = class;
511
512 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
513 rld[s_reload].optional &= optional;
514 rld[s_reload].secondary_p = 1;
515 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
516 opnum, rld[s_reload].opnum))
517 rld[s_reload].when_needed = RELOAD_OTHER;
518 }
519
520 if (s_reload == n_reloads)
521 {
522 #ifdef SECONDARY_MEMORY_NEEDED
523 /* If we need a memory location to copy between the two reload regs,
524 set it up now. Note that we do the input case before making
525 the reload and the output case after. This is due to the
526 way reloads are output. */
527
528 if (in_p && icode == CODE_FOR_nothing
529 && SECONDARY_MEMORY_NEEDED (class, reload_class, mode))
530 {
531 get_secondary_mem (x, reload_mode, opnum, type);
532
533 /* We may have just added new reloads. Make sure we add
534 the new reload at the end. */
535 s_reload = n_reloads;
536 }
537 #endif
538
539 /* We need to make a new secondary reload for this register class. */
540 rld[s_reload].in = rld[s_reload].out = 0;
541 rld[s_reload].class = class;
542
543 rld[s_reload].inmode = in_p ? mode : VOIDmode;
544 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
545 rld[s_reload].reg_rtx = 0;
546 rld[s_reload].optional = optional;
547 rld[s_reload].inc = 0;
548 /* Maybe we could combine these, but it seems too tricky. */
549 rld[s_reload].nocombine = 1;
550 rld[s_reload].in_reg = 0;
551 rld[s_reload].out_reg = 0;
552 rld[s_reload].opnum = opnum;
553 rld[s_reload].when_needed = secondary_type;
554 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
555 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
556 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
557 rld[s_reload].secondary_out_icode
558 = ! in_p ? t_icode : CODE_FOR_nothing;
559 rld[s_reload].secondary_p = 1;
560
561 n_reloads++;
562
563 #ifdef SECONDARY_MEMORY_NEEDED
564 if (! in_p && icode == CODE_FOR_nothing
565 && SECONDARY_MEMORY_NEEDED (reload_class, class, mode))
566 get_secondary_mem (x, mode, opnum, type);
567 #endif
568 }
569
570 *picode = icode;
571 return s_reload;
572 }
573 #endif /* HAVE_SECONDARY_RELOADS */
574 \f
575 #ifdef SECONDARY_MEMORY_NEEDED
576
577 /* Return a memory location that will be used to copy X in mode MODE.
578 If we haven't already made a location for this mode in this insn,
579 call find_reloads_address on the location being returned. */
580
581 rtx
582 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
583 int opnum, enum reload_type type)
584 {
585 rtx loc;
586 int mem_valid;
587
588 /* By default, if MODE is narrower than a word, widen it to a word.
589 This is required because most machines that require these memory
590 locations do not support short loads and stores from all registers
591 (e.g., FP registers). */
592
593 #ifdef SECONDARY_MEMORY_NEEDED_MODE
594 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
595 #else
596 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
597 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
598 #endif
599
600 /* If we already have made a MEM for this operand in MODE, return it. */
601 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
602 return secondary_memlocs_elim[(int) mode][opnum];
603
604 /* If this is the first time we've tried to get a MEM for this mode,
605 allocate a new one. `something_changed' in reload will get set
606 by noticing that the frame size has changed. */
607
608 if (secondary_memlocs[(int) mode] == 0)
609 {
610 #ifdef SECONDARY_MEMORY_NEEDED_RTX
611 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
612 #else
613 secondary_memlocs[(int) mode]
614 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
615 #endif
616 }
617
618 /* Get a version of the address doing any eliminations needed. If that
619 didn't give us a new MEM, make a new one if it isn't valid. */
620
621 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
622 mem_valid = strict_memory_address_p (mode, XEXP (loc, 0));
623
624 if (! mem_valid && loc == secondary_memlocs[(int) mode])
625 loc = copy_rtx (loc);
626
627 /* The only time the call below will do anything is if the stack
628 offset is too large. In that case IND_LEVELS doesn't matter, so we
629 can just pass a zero. Adjust the type to be the address of the
630 corresponding object. If the address was valid, save the eliminated
631 address. If it wasn't valid, we need to make a reload each time, so
632 don't save it. */
633
634 if (! mem_valid)
635 {
636 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
637 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
638 : RELOAD_OTHER);
639
640 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
641 opnum, type, 0, 0);
642 }
643
644 secondary_memlocs_elim[(int) mode][opnum] = loc;
645 if (secondary_memlocs_elim_used <= (int)mode)
646 secondary_memlocs_elim_used = (int)mode + 1;
647 return loc;
648 }
649
650 /* Clear any secondary memory locations we've made. */
651
652 void
653 clear_secondary_mem (void)
654 {
655 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
656 }
657 #endif /* SECONDARY_MEMORY_NEEDED */
658 \f
659 /* Find the largest class for which every register number plus N is valid in
660 M1 (if in range) and is cheap to move into REGNO.
661 Abort if no such class exists. */
662
663 static enum reg_class
664 find_valid_class (enum machine_mode m1 ATTRIBUTE_UNUSED, int n,
665 unsigned int dest_regno ATTRIBUTE_UNUSED)
666 {
667 int best_cost = -1;
668 int class;
669 int regno;
670 enum reg_class best_class = NO_REGS;
671 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
672 unsigned int best_size = 0;
673 int cost;
674
675 for (class = 1; class < N_REG_CLASSES; class++)
676 {
677 int bad = 0;
678 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && ! bad; regno++)
679 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
680 && TEST_HARD_REG_BIT (reg_class_contents[class], regno + n)
681 && ! HARD_REGNO_MODE_OK (regno + n, m1))
682 bad = 1;
683
684 if (bad)
685 continue;
686 cost = REGISTER_MOVE_COST (m1, class, dest_class);
687
688 if ((reg_class_size[class] > best_size
689 && (best_cost < 0 || best_cost >= cost))
690 || best_cost > cost)
691 {
692 best_class = class;
693 best_size = reg_class_size[class];
694 best_cost = REGISTER_MOVE_COST (m1, class, dest_class);
695 }
696 }
697
698 gcc_assert (best_size != 0);
699
700 return best_class;
701 }
702 \f
703 /* Return the number of a previously made reload that can be combined with
704 a new one, or n_reloads if none of the existing reloads can be used.
705 OUT, CLASS, TYPE and OPNUM are the same arguments as passed to
706 push_reload; they determine the kind of the new reload that we try to
707 combine. P_IN points to the corresponding value of IN, which can be
708 modified by this function.
709 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
710
711 static int
712 find_reusable_reload (rtx *p_in, rtx out, enum reg_class class,
713 enum reload_type type, int opnum, int dont_share)
714 {
715 rtx in = *p_in;
716 int i;
717 /* We can't merge two reloads if the output of either one is
718 earlyclobbered. */
719
720 if (earlyclobber_operand_p (out))
721 return n_reloads;
722
723 /* We can use an existing reload if the class is right
724 and at least one of IN and OUT is a match
725 and the other is at worst neutral.
726 (A zero compared against anything is neutral.)
727
728 If SMALL_REGISTER_CLASSES, don't use existing reloads unless they are
729 for the same thing since that can cause us to need more reload registers
730 than we otherwise would. */
731
732 for (i = 0; i < n_reloads; i++)
733 if ((reg_class_subset_p (class, rld[i].class)
734 || reg_class_subset_p (rld[i].class, class))
735 /* If the existing reload has a register, it must fit our class. */
736 && (rld[i].reg_rtx == 0
737 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
738 true_regnum (rld[i].reg_rtx)))
739 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
740 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
741 || (out != 0 && MATCHES (rld[i].out, out)
742 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
743 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
744 && (reg_class_size[(int) class] == 1 || SMALL_REGISTER_CLASSES)
745 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
746 return i;
747
748 /* Reloading a plain reg for input can match a reload to postincrement
749 that reg, since the postincrement's value is the right value.
750 Likewise, it can match a preincrement reload, since we regard
751 the preincrementation as happening before any ref in this insn
752 to that register. */
753 for (i = 0; i < n_reloads; i++)
754 if ((reg_class_subset_p (class, rld[i].class)
755 || reg_class_subset_p (rld[i].class, class))
756 /* If the existing reload has a register, it must fit our
757 class. */
758 && (rld[i].reg_rtx == 0
759 || TEST_HARD_REG_BIT (reg_class_contents[(int) class],
760 true_regnum (rld[i].reg_rtx)))
761 && out == 0 && rld[i].out == 0 && rld[i].in != 0
762 && ((REG_P (in)
763 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
764 && MATCHES (XEXP (rld[i].in, 0), in))
765 || (REG_P (rld[i].in)
766 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
767 && MATCHES (XEXP (in, 0), rld[i].in)))
768 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
769 && (reg_class_size[(int) class] == 1 || SMALL_REGISTER_CLASSES)
770 && MERGABLE_RELOADS (type, rld[i].when_needed,
771 opnum, rld[i].opnum))
772 {
773 /* Make sure reload_in ultimately has the increment,
774 not the plain register. */
775 if (REG_P (in))
776 *p_in = rld[i].in;
777 return i;
778 }
779 return n_reloads;
780 }
781
782 /* Return nonzero if X is a SUBREG which will require reloading of its
783 SUBREG_REG expression. */
784
785 static int
786 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
787 {
788 rtx inner;
789
790 /* Only SUBREGs are problematical. */
791 if (GET_CODE (x) != SUBREG)
792 return 0;
793
794 inner = SUBREG_REG (x);
795
796 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
797 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
798 return 1;
799
800 /* If INNER is not a hard register, then INNER will not need to
801 be reloaded. */
802 if (!REG_P (inner)
803 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
804 return 0;
805
806 /* If INNER is not ok for MODE, then INNER will need reloading. */
807 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
808 return 1;
809
810 /* If the outer part is a word or smaller, INNER is larger than a
811 word, and the number of regs for INNER is not the same as the
812 number of words in INNER, then INNER will need reloading. */
813 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
814 && output
815 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
816 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
817 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
818 }
819
820 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
821 requiring an extra reload register. The caller has already found that
822 IN contains some reference to REGNO, so check that we can produce the
823 new value in a single step. E.g. if we have
824 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
825 instruction that adds one to a register, this should succeed.
826 However, if we have something like
827 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
828 needs to be loaded into a register first, we need a separate reload
829 register.
830 Such PLUS reloads are generated by find_reloads_address_part.
831 The out-of-range PLUS expressions are usually introduced in the instruction
832 patterns by register elimination and substituting pseudos without a home
833 by their function-invariant equivalents. */
834 static int
835 can_reload_into (rtx in, int regno, enum machine_mode mode)
836 {
837 rtx dst, test_insn;
838 int r = 0;
839 struct recog_data save_recog_data;
840
841 /* For matching constraints, we often get notional input reloads where
842 we want to use the original register as the reload register. I.e.
843 technically this is a non-optional input-output reload, but IN is
844 already a valid register, and has been chosen as the reload register.
845 Speed this up, since it trivially works. */
846 if (REG_P (in))
847 return 1;
848
849 /* To test MEMs properly, we'd have to take into account all the reloads
850 that are already scheduled, which can become quite complicated.
851 And since we've already handled address reloads for this MEM, it
852 should always succeed anyway. */
853 if (MEM_P (in))
854 return 1;
855
856 /* If we can make a simple SET insn that does the job, everything should
857 be fine. */
858 dst = gen_rtx_REG (mode, regno);
859 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
860 save_recog_data = recog_data;
861 if (recog_memoized (test_insn) >= 0)
862 {
863 extract_insn (test_insn);
864 r = constrain_operands (1);
865 }
866 recog_data = save_recog_data;
867 return r;
868 }
869
870 /* Record one reload that needs to be performed.
871 IN is an rtx saying where the data are to be found before this instruction.
872 OUT says where they must be stored after the instruction.
873 (IN is zero for data not read, and OUT is zero for data not written.)
874 INLOC and OUTLOC point to the places in the instructions where
875 IN and OUT were found.
876 If IN and OUT are both nonzero, it means the same register must be used
877 to reload both IN and OUT.
878
879 CLASS is a register class required for the reloaded data.
880 INMODE is the machine mode that the instruction requires
881 for the reg that replaces IN and OUTMODE is likewise for OUT.
882
883 If IN is zero, then OUT's location and mode should be passed as
884 INLOC and INMODE.
885
886 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
887
888 OPTIONAL nonzero means this reload does not need to be performed:
889 it can be discarded if that is more convenient.
890
891 OPNUM and TYPE say what the purpose of this reload is.
892
893 The return value is the reload-number for this reload.
894
895 If both IN and OUT are nonzero, in some rare cases we might
896 want to make two separate reloads. (Actually we never do this now.)
897 Therefore, the reload-number for OUT is stored in
898 output_reloadnum when we return; the return value applies to IN.
899 Usually (presently always), when IN and OUT are nonzero,
900 the two reload-numbers are equal, but the caller should be careful to
901 distinguish them. */
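/* For instance, for an operand OP that is both read and written and wants
   a general register, a caller would record a single in-out reload along
   these lines (schematic only; OP, LOC and N stand for the operand rtx,
   the location of the operand within the insn, and the operand number):

       push_reload (op, op, loc, loc, GENERAL_REGS,
		    GET_MODE (op), GET_MODE (op), 0, 0, n, RELOAD_OTHER);

   Both IN and OUT then share one reload register, and the reload number
   for the OUT side is left in output_reloadnum.  */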
902
903 int
904 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
905 enum reg_class class, enum machine_mode inmode,
906 enum machine_mode outmode, int strict_low, int optional,
907 int opnum, enum reload_type type)
908 {
909 int i;
910 int dont_share = 0;
911 int dont_remove_subreg = 0;
912 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
913 int secondary_in_reload = -1, secondary_out_reload = -1;
914 enum insn_code secondary_in_icode = CODE_FOR_nothing;
915 enum insn_code secondary_out_icode = CODE_FOR_nothing;
916
917 /* INMODE and/or OUTMODE could be VOIDmode if no mode
918 has been specified for the operand. In that case,
919 use the operand's mode as the mode to reload. */
920 if (inmode == VOIDmode && in != 0)
921 inmode = GET_MODE (in);
922 if (outmode == VOIDmode && out != 0)
923 outmode = GET_MODE (out);
924
925 /* If IN is a pseudo register everywhere-equivalent to a constant, and
926 it is not in a hard register, reload straight from the constant,
927 since we want to get rid of such pseudo registers.
928 Often this is done earlier, but not always in find_reloads_address. */
929 if (in != 0 && REG_P (in))
930 {
931 int regno = REGNO (in);
932
933 if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] < 0
934 && reg_equiv_constant[regno] != 0)
935 in = reg_equiv_constant[regno];
936 }
937
938 /* Likewise for OUT. Of course, OUT will never be equivalent to
939 an actual constant, but it might be equivalent to a memory location
940 (in the case of a parameter). */
941 if (out != 0 && REG_P (out))
942 {
943 int regno = REGNO (out);
944
945 if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] < 0
946 && reg_equiv_constant[regno] != 0)
947 out = reg_equiv_constant[regno];
948 }
949
950 /* If we have a read-write operand with an address side-effect,
951 change either IN or OUT so the side-effect happens only once. */
952 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
953 switch (GET_CODE (XEXP (in, 0)))
954 {
955 case POST_INC: case POST_DEC: case POST_MODIFY:
956 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
957 break;
958
959 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
960 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
961 break;
962
963 default:
964 break;
965 }
966
967 /* If we are reloading a (SUBREG constant ...), really reload just the
968 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
969 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
970 a pseudo and hence will become a MEM) with M1 wider than M2 and the
971 register is a pseudo, also reload the inside expression.
972 For machines that extend byte loads, do this for any SUBREG of a pseudo
973 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
974 M2 is an integral mode that gets extended when loaded.
975 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
976 either M1 is not valid for R or M2 is wider than a word but we only
977 need one word to store an M2-sized quantity in R.
978 (However, if OUT is nonzero, we need to reload the reg *and*
979 the subreg, so do nothing here, and let following statement handle it.)
980
981 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
982 we can't handle it here because CONST_INT does not indicate a mode.
983
984 Similarly, we must reload the inside expression if we have a
985 STRICT_LOW_PART (presumably, in == out in this case).
986
987 Also reload the inner expression if it does not require a secondary
988 reload but the SUBREG does.
989
990 Finally, reload the inner expression if it is a register that is in
991 the class whose registers cannot be referenced in a different size
992 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
993 cannot reload just the inside since we might end up with the wrong
994 register class. But if it is inside a STRICT_LOW_PART, we have
995 no choice, so we hope we do get the right register class there. */
996
997 if (in != 0 && GET_CODE (in) == SUBREG
998 && (subreg_lowpart_p (in) || strict_low)
999 #ifdef CANNOT_CHANGE_MODE_CLASS
1000 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, class)
1001 #endif
1002 && (CONSTANT_P (SUBREG_REG (in))
1003 || GET_CODE (SUBREG_REG (in)) == PLUS
1004 || strict_low
1005 || (((REG_P (SUBREG_REG (in))
1006 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1007 || MEM_P (SUBREG_REG (in)))
1008 && ((GET_MODE_SIZE (inmode)
1009 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1010 #ifdef LOAD_EXTEND_OP
1011 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1012 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1013 <= UNITS_PER_WORD)
1014 && (GET_MODE_SIZE (inmode)
1015 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1016 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1017 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1018 #endif
1019 #ifdef WORD_REGISTER_OPERATIONS
1020 || ((GET_MODE_SIZE (inmode)
1021 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1022 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1023 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1024 / UNITS_PER_WORD)))
1025 #endif
1026 ))
1027 || (REG_P (SUBREG_REG (in))
1028 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1029 /* The case where out is nonzero
1030 is handled differently in the following statement. */
1031 && (out == 0 || subreg_lowpart_p (in))
1032 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1033 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1034 > UNITS_PER_WORD)
1035 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1036 / UNITS_PER_WORD)
1037 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1038 [GET_MODE (SUBREG_REG (in))]))
1039 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1040 #ifdef SECONDARY_INPUT_RELOAD_CLASS
1041 || (SECONDARY_INPUT_RELOAD_CLASS (class, inmode, in) != NO_REGS
1042 && (SECONDARY_INPUT_RELOAD_CLASS (class,
1043 GET_MODE (SUBREG_REG (in)),
1044 SUBREG_REG (in))
1045 == NO_REGS))
1046 #endif
1047 #ifdef CANNOT_CHANGE_MODE_CLASS
1048 || (REG_P (SUBREG_REG (in))
1049 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1050 && REG_CANNOT_CHANGE_MODE_P
1051 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1052 #endif
1053 ))
1054 {
1055 in_subreg_loc = inloc;
1056 inloc = &SUBREG_REG (in);
1057 in = *inloc;
1058 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1059 if (MEM_P (in))
1060 /* This is supposed to happen only for paradoxical subregs made by
1061 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1062 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1063 #endif
1064 inmode = GET_MODE (in);
1065 }
1066
1067 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1068 either M1 is not valid for R or M2 is wider than a word but we only
1069 need one word to store an M2-sized quantity in R.
1070
1071 However, we must reload the inner reg *as well as* the subreg in
1072 that case. */
1073
1074 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1075 code above. This can happen if SUBREG_BYTE != 0. */
1076
1077 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1078 {
1079 enum reg_class in_class = class;
1080
1081 if (REG_P (SUBREG_REG (in)))
1082 in_class
1083 = find_valid_class (inmode,
1084 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1085 GET_MODE (SUBREG_REG (in)),
1086 SUBREG_BYTE (in),
1087 GET_MODE (in)),
1088 REGNO (SUBREG_REG (in)));
1089
1090 /* This relies on the fact that emit_reload_insns outputs the
1091 instructions for input reloads of type RELOAD_OTHER in the same
1092 order as the reloads. Thus if the outer reload is also of type
1093 RELOAD_OTHER, we are guaranteed that this inner reload will be
1094 output before the outer reload. */
1095 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1096 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1097 dont_remove_subreg = 1;
1098 }
1099
1100 /* Similarly for paradoxical and problematical SUBREGs on the output.
1101 Note that there is no reason we need worry about the previous value
1102 of SUBREG_REG (out); even if wider than out,
1103 storing in a subreg is entitled to clobber it all
1104 (except in the case of STRICT_LOW_PART,
1105 and in that case the constraint should label it input-output.) */
1106 if (out != 0 && GET_CODE (out) == SUBREG
1107 && (subreg_lowpart_p (out) || strict_low)
1108 #ifdef CANNOT_CHANGE_MODE_CLASS
1109 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, class)
1110 #endif
1111 && (CONSTANT_P (SUBREG_REG (out))
1112 || strict_low
1113 || (((REG_P (SUBREG_REG (out))
1114 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1115 || MEM_P (SUBREG_REG (out)))
1116 && ((GET_MODE_SIZE (outmode)
1117 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1118 #ifdef WORD_REGISTER_OPERATIONS
1119 || ((GET_MODE_SIZE (outmode)
1120 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1121 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1122 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1123 / UNITS_PER_WORD)))
1124 #endif
1125 ))
1126 || (REG_P (SUBREG_REG (out))
1127 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1128 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1129 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1130 > UNITS_PER_WORD)
1131 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1132 / UNITS_PER_WORD)
1133 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1134 [GET_MODE (SUBREG_REG (out))]))
1135 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1136 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1137 || (SECONDARY_OUTPUT_RELOAD_CLASS (class, outmode, out) != NO_REGS
1138 && (SECONDARY_OUTPUT_RELOAD_CLASS (class,
1139 GET_MODE (SUBREG_REG (out)),
1140 SUBREG_REG (out))
1141 == NO_REGS))
1142 #endif
1143 #ifdef CANNOT_CHANGE_MODE_CLASS
1144 || (REG_P (SUBREG_REG (out))
1145 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1146 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1147 GET_MODE (SUBREG_REG (out)),
1148 outmode))
1149 #endif
1150 ))
1151 {
1152 out_subreg_loc = outloc;
1153 outloc = &SUBREG_REG (out);
1154 out = *outloc;
1155 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1156 gcc_assert (!MEM_P (out)
1157 || GET_MODE_SIZE (GET_MODE (out))
1158 <= GET_MODE_SIZE (outmode));
1159 #endif
1160 outmode = GET_MODE (out);
1161 }
1162
1163 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1164 either M1 is not valid for R or M2 is wider than a word but we only
1165 need one word to store an M2-sized quantity in R.
1166
1167 However, we must reload the inner reg *as well as* the subreg in
1168 that case. In this case, the inner reg is an in-out reload. */
1169
1170 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1171 {
1172 /* This relies on the fact that emit_reload_insns outputs the
1173 instructions for output reloads of type RELOAD_OTHER in reverse
1174 order of the reloads. Thus if the outer reload is also of type
1175 RELOAD_OTHER, we are guaranteed that this inner reload will be
1176 output after the outer reload. */
1177 dont_remove_subreg = 1;
1178 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1179 &SUBREG_REG (out),
1180 find_valid_class (outmode,
1181 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1182 GET_MODE (SUBREG_REG (out)),
1183 SUBREG_BYTE (out),
1184 GET_MODE (out)),
1185 REGNO (SUBREG_REG (out))),
1186 VOIDmode, VOIDmode, 0, 0,
1187 opnum, RELOAD_OTHER);
1188 }
1189
1190 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1191 if (in != 0 && out != 0 && MEM_P (out)
1192 && (REG_P (in) || MEM_P (in))
1193 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1194 dont_share = 1;
1195
1196 /* If IN is a SUBREG of a hard register, make a new REG. This
1197 simplifies some of the cases below. */
1198
1199 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1200 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1201 && ! dont_remove_subreg)
1202 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1203
1204 /* Similarly for OUT. */
1205 if (out != 0 && GET_CODE (out) == SUBREG
1206 && REG_P (SUBREG_REG (out))
1207 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1208 && ! dont_remove_subreg)
1209 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1210
1211 /* Narrow down the class of register wanted if that is
1212 desirable on this machine for efficiency. */
1213 if (in != 0)
1214 class = PREFERRED_RELOAD_CLASS (in, class);
1215
1216 /* Output reloads may need analogous treatment, different in detail. */
1217 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1218 if (out != 0)
1219 class = PREFERRED_OUTPUT_RELOAD_CLASS (out, class);
1220 #endif
1221
1222 /* Make sure we use a class that can handle the actual pseudo
1223 inside any subreg. For example, on the 386, QImode regs
1224 can appear within SImode subregs. Although GENERAL_REGS
1225 can handle SImode, QImode needs a smaller class. */
1226 #ifdef LIMIT_RELOAD_CLASS
1227 if (in_subreg_loc)
1228 class = LIMIT_RELOAD_CLASS (inmode, class);
1229 else if (in != 0 && GET_CODE (in) == SUBREG)
1230 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), class);
1231
1232 if (out_subreg_loc)
1233 class = LIMIT_RELOAD_CLASS (outmode, class);
1234 if (out != 0 && GET_CODE (out) == SUBREG)
1235 class = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), class);
1236 #endif
1237
1238 /* Verify that this class is at least possible for the mode that
1239 is specified. */
1240 if (this_insn_is_asm)
1241 {
1242 enum machine_mode mode;
1243 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1244 mode = inmode;
1245 else
1246 mode = outmode;
1247 if (mode == VOIDmode)
1248 {
1249 error_for_asm (this_insn, "cannot reload integer constant "
1250 "operand in %<asm%>");
1251 mode = word_mode;
1252 if (in != 0)
1253 inmode = word_mode;
1254 if (out != 0)
1255 outmode = word_mode;
1256 }
1257 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1258 if (HARD_REGNO_MODE_OK (i, mode)
1259 && TEST_HARD_REG_BIT (reg_class_contents[(int) class], i))
1260 {
1261 int nregs = hard_regno_nregs[i][mode];
1262
1263 int j;
1264 for (j = 1; j < nregs; j++)
1265 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class], i + j))
1266 break;
1267 if (j == nregs)
1268 break;
1269 }
1270 if (i == FIRST_PSEUDO_REGISTER)
1271 {
1272 error_for_asm (this_insn, "impossible register constraint "
1273 "in %<asm%>");
1274 class = ALL_REGS;
1275 }
1276 }
1277
1278 /* Optional output reloads are always OK even if we have no register class,
1279 since the function of these reloads is only to have spill_reg_store etc.
1280 set, so that the storing insn can be deleted later. */
1281 gcc_assert (class != NO_REGS
1282 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1283
1284 i = find_reusable_reload (&in, out, class, type, opnum, dont_share);
1285
1286 if (i == n_reloads)
1287 {
1288 /* See if we need a secondary reload register to move between CLASS
1289 and IN or CLASS and OUT. Get the icode and push any required reloads
1290 needed for each of them if so. */
1291
1292 #ifdef SECONDARY_INPUT_RELOAD_CLASS
1293 if (in != 0)
1294 secondary_in_reload
1295 = push_secondary_reload (1, in, opnum, optional, class, inmode, type,
1296 &secondary_in_icode);
1297 #endif
1298
1299 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
1300 if (out != 0 && GET_CODE (out) != SCRATCH)
1301 secondary_out_reload
1302 = push_secondary_reload (0, out, opnum, optional, class, outmode,
1303 type, &secondary_out_icode);
1304 #endif
1305
1306 /* We found no existing reload suitable for re-use.
1307 So add an additional reload. */
1308
1309 #ifdef SECONDARY_MEMORY_NEEDED
1310 /* If a memory location is needed for the copy, make one. */
1311 if (in != 0 && (REG_P (in) || GET_CODE (in) == SUBREG)
1312 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1313 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1314 class, inmode))
1315 get_secondary_mem (in, inmode, opnum, type);
1316 #endif
1317
1318 i = n_reloads;
1319 rld[i].in = in;
1320 rld[i].out = out;
1321 rld[i].class = class;
1322 rld[i].inmode = inmode;
1323 rld[i].outmode = outmode;
1324 rld[i].reg_rtx = 0;
1325 rld[i].optional = optional;
1326 rld[i].inc = 0;
1327 rld[i].nocombine = 0;
1328 rld[i].in_reg = inloc ? *inloc : 0;
1329 rld[i].out_reg = outloc ? *outloc : 0;
1330 rld[i].opnum = opnum;
1331 rld[i].when_needed = type;
1332 rld[i].secondary_in_reload = secondary_in_reload;
1333 rld[i].secondary_out_reload = secondary_out_reload;
1334 rld[i].secondary_in_icode = secondary_in_icode;
1335 rld[i].secondary_out_icode = secondary_out_icode;
1336 rld[i].secondary_p = 0;
1337
1338 n_reloads++;
1339
1340 #ifdef SECONDARY_MEMORY_NEEDED
1341 if (out != 0 && (REG_P (out) || GET_CODE (out) == SUBREG)
1342 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1343 && SECONDARY_MEMORY_NEEDED (class,
1344 REGNO_REG_CLASS (reg_or_subregno (out)),
1345 outmode))
1346 get_secondary_mem (out, outmode, opnum, type);
1347 #endif
1348 }
1349 else
1350 {
1351 /* We are reusing an existing reload,
1352 but we may have additional information for it.
1353 For example, we may now have both IN and OUT
1354 while the old one may have just one of them. */
1355
1356 /* The modes can be different. If they are, we want to reload in
1357 the larger mode, so that the value is valid for both modes. */
1358 if (inmode != VOIDmode
1359 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1360 rld[i].inmode = inmode;
1361 if (outmode != VOIDmode
1362 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1363 rld[i].outmode = outmode;
1364 if (in != 0)
1365 {
1366 rtx in_reg = inloc ? *inloc : 0;
1367 /* If we merge reloads for two distinct rtl expressions that
1368 are identical in content, there might be duplicate address
1369 reloads. Remove the extra set now, so that if we later find
1370 that we can inherit this reload, we can get rid of the
1371 address reloads altogether.
1372
1373 Do not do this if both reloads are optional since the result
1374 would be an optional reload which could potentially leave
1375 unresolved address replacements.
1376
1377 It is not sufficient to call transfer_replacements since
1378 choose_reload_regs will remove the replacements for address
1379 reloads of inherited reloads which results in the same
1380 problem. */
1381 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1382 && ! (rld[i].optional && optional))
1383 {
1384 /* We must keep the address reload with the lower operand
1385 number alive. */
1386 if (opnum > rld[i].opnum)
1387 {
1388 remove_address_replacements (in);
1389 in = rld[i].in;
1390 in_reg = rld[i].in_reg;
1391 }
1392 else
1393 remove_address_replacements (rld[i].in);
1394 }
1395 rld[i].in = in;
1396 rld[i].in_reg = in_reg;
1397 }
1398 if (out != 0)
1399 {
1400 rld[i].out = out;
1401 rld[i].out_reg = outloc ? *outloc : 0;
1402 }
1403 if (reg_class_subset_p (class, rld[i].class))
1404 rld[i].class = class;
1405 rld[i].optional &= optional;
1406 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1407 opnum, rld[i].opnum))
1408 rld[i].when_needed = RELOAD_OTHER;
1409 rld[i].opnum = MIN (rld[i].opnum, opnum);
1410 }
1411
1412 /* If the ostensible rtx being reloaded differs from the rtx found
1413 in the location to substitute, this reload is not safe to combine
1414 because we cannot reliably tell whether it appears in the insn. */
1415
1416 if (in != 0 && in != *inloc)
1417 rld[i].nocombine = 1;
1418
1419 #if 0
1420 /* This was replaced by changes in find_reloads_address_1 and the new
1421 function inc_for_reload, which go with a new meaning of reload_inc. */
1422
1423 /* If this is an IN/OUT reload in an insn that sets the CC,
1424 it must be for an autoincrement. It doesn't work to store
1425 the incremented value after the insn because that would clobber the CC.
1426 So we must do the increment of the value reloaded from,
1427 increment it, store it back, then decrement again. */
1428 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1429 {
1430 out = 0;
1431 rld[i].out = 0;
1432 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1433 /* If we did not find a nonzero amount-to-increment-by,
1434 that contradicts the belief that IN is being incremented
1435 in an address in this insn. */
1436 gcc_assert (rld[i].inc != 0);
1437 }
1438 #endif
1439
1440 /* If we will replace IN and OUT with the reload-reg,
1441 record where they are located so that substitution need
1442 not do a tree walk. */
1443
1444 if (replace_reloads)
1445 {
1446 if (inloc != 0)
1447 {
1448 struct replacement *r = &replacements[n_replacements++];
1449 r->what = i;
1450 r->subreg_loc = in_subreg_loc;
1451 r->where = inloc;
1452 r->mode = inmode;
1453 }
1454 if (outloc != 0 && outloc != inloc)
1455 {
1456 struct replacement *r = &replacements[n_replacements++];
1457 r->what = i;
1458 r->where = outloc;
1459 r->subreg_loc = out_subreg_loc;
1460 r->mode = outmode;
1461 }
1462 }
1463
1464 /* If this reload is just being introduced and it has both
1465 an incoming quantity and an outgoing quantity that are
1466 supposed to be made to match, see if either one of the two
1467 can serve as the place to reload into.
1468
1469 If one of them is acceptable, set rld[i].reg_rtx
1470 to that one. */
1471
1472 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1473 {
1474 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1475 inmode, outmode,
1476 rld[i].class, i,
1477 earlyclobber_operand_p (out));
1478
1479 /* If the outgoing register already contains the same value
1480 as the incoming one, we can dispense with loading it.
1481 The easiest way to tell the caller that is to give a phony
1482 value for the incoming operand (same as outgoing one). */
1483 if (rld[i].reg_rtx == out
1484 && (REG_P (in) || CONSTANT_P (in))
1485 && 0 != find_equiv_reg (in, this_insn, 0, REGNO (out),
1486 static_reload_reg_p, i, inmode))
1487 rld[i].in = out;
1488 }
1489
1490 /* If this is an input reload and the operand contains a register that
1491 dies in this insn and is used nowhere else, see if it is the right class
1492 to be used for this reload. Use it if so. (This occurs most commonly
1493 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1494 this if it is also an output reload that mentions the register unless
1495 the output is a SUBREG that clobbers an entire register.
1496
1497 Note that the operand might be one of the spill regs, if it is a
1498 pseudo reg and we are in a block where spilling has not taken place.
1499 But if there is no spilling in this block, that is OK.
1500 An explicitly used hard reg cannot be a spill reg. */
1501
1502 if (rld[i].reg_rtx == 0 && in != 0)
1503 {
1504 rtx note;
1505 int regno;
1506 enum machine_mode rel_mode = inmode;
1507
1508 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1509 rel_mode = outmode;
1510
1511 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1512 if (REG_NOTE_KIND (note) == REG_DEAD
1513 && REG_P (XEXP (note, 0))
1514 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1515 && reg_mentioned_p (XEXP (note, 0), in)
1516 && ! refers_to_regno_for_reload_p (regno,
1517 (regno
1518 + hard_regno_nregs[regno]
1519 [rel_mode]),
1520 PATTERN (this_insn), inloc)
1521 /* If this is also an output reload, IN cannot be used as
1522 the reload register if it is set in this insn unless IN
1523 is also OUT. */
1524 && (out == 0 || in == out
1525 || ! hard_reg_set_here_p (regno,
1526 (regno
1527 + hard_regno_nregs[regno]
1528 [rel_mode]),
1529 PATTERN (this_insn)))
1530 /* ??? Why is this code so different from the previous?
1531 Is there any simple coherent way to describe the two together?
1532 	     What's going on here?  */
1533 && (in != out
1534 || (GET_CODE (in) == SUBREG
1535 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1536 / UNITS_PER_WORD)
1537 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1538 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1539 /* Make sure the operand fits in the reg that dies. */
1540 && (GET_MODE_SIZE (rel_mode)
1541 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1542 && HARD_REGNO_MODE_OK (regno, inmode)
1543 && HARD_REGNO_MODE_OK (regno, outmode))
1544 {
1545 unsigned int offs;
1546 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1547 hard_regno_nregs[regno][outmode]);
1548
1549 for (offs = 0; offs < nregs; offs++)
1550 if (fixed_regs[regno + offs]
1551 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1552 regno + offs))
1553 break;
1554
1555 if (offs == nregs
1556 && (! (refers_to_regno_for_reload_p
1557 (regno, (regno + hard_regno_nregs[regno][inmode]),
1558 in, (rtx *)0))
1559 || can_reload_into (in, regno, inmode)))
1560 {
1561 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1562 break;
1563 }
1564 }
1565 }
1566
1567 if (out)
1568 output_reloadnum = i;
1569
1570 return i;
1571 }
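
/* A minimal standalone sketch of the class-narrowing rule used when an
   existing reload is reused above: the merged reload keeps whichever
   register class is a subset of the other.  Register classes are modelled
   here as plain bitmasks instead of the real reg_class/HARD_REG_SET
   machinery, and all names below are invented for illustration; compile
   the block separately to run the asserts.  */
#if 0
#include <assert.h>

typedef unsigned int class_mask;	/* one bit per hard register */

static int
class_subset_p (class_mask a, class_mask b)
{
  return (a & ~b) == 0;			/* every reg of A is also in B */
}

static class_mask
merge_class (class_mask existing, class_mask incoming)
{
  /* Mirrors: if (reg_class_subset_p (class, rld[i].class))
		rld[i].class = class;  */
  return class_subset_p (incoming, existing) ? incoming : existing;
}

int
main (void)
{
  class_mask all = 0xff, low = 0x0f;
  assert (merge_class (all, low) == low);	/* narrowed to the subset */
  assert (merge_class (low, all) == low);	/* superset incoming: keep old */
  return 0;
}
#endif
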
1572
1573 /* Record an additional place we must replace a value
1574 for which we have already recorded a reload.
1575 RELOADNUM is the value returned by push_reload
1576 when the reload was recorded.
1577 This is used in insn patterns that use match_dup. */
1578
1579 static void
1580 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1581 {
1582 if (replace_reloads)
1583 {
1584 struct replacement *r = &replacements[n_replacements++];
1585 r->what = reloadnum;
1586 r->where = loc;
1587 r->subreg_loc = 0;
1588 r->mode = mode;
1589 }
1590 }
1591
1592 /* Duplicate any replacement we have recorded to apply at
1593 location ORIG_LOC to also be performed at DUP_LOC.
1594 This is used in insn patterns that use match_dup. */
1595
1596 static void
1597 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1598 {
1599 int i, n = n_replacements;
1600
1601 for (i = 0; i < n; i++)
1602 {
1603 struct replacement *r = &replacements[i];
1604 if (r->where == orig_loc)
1605 push_replacement (dup_loc, r->what, r->mode);
1606 }
1607 }
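
/* A minimal standalone sketch of the record-locations-now, substitute-later
   scheme behind push_replacement and dup_replacements: locations are only
   remembered here, and a later single pass (subst_reloads in reload1.c)
   rewrites them all.  The struct and names below are invented for
   illustration, not the real struct replacement; compile separately to
   run the asserts.  */
#if 0
#include <assert.h>

struct repl { int *where; int what; };	/* location and reload number */

static struct repl recs[8];
static int n_recs;
static int reload_value[4] = { 10, 20, 30, 40 };

static void
record (int *loc, int reloadnum)
{
  recs[n_recs].where = loc;
  recs[n_recs].what = reloadnum;
  n_recs++;
}

static void
substitute_all (void)
{
  int i;
  for (i = 0; i < n_recs; i++)
    *recs[i].where = reload_value[recs[i].what];
}

int
main (void)
{
  int op0 = 0, op1 = 0, dup_of_op1 = 0;
  record (&op0, 2);
  record (&op1, 1);
  record (&dup_of_op1, 1);	/* like dup_replacements for a match_dup */
  substitute_all ();
  assert (op0 == 30 && op1 == 20 && dup_of_op1 == 20);
  return 0;
}
#endif
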
1608 \f
1609 /* Transfer all replacements that used to be in reload FROM to be in
1610 reload TO. */
1611
1612 void
1613 transfer_replacements (int to, int from)
1614 {
1615 int i;
1616
1617 for (i = 0; i < n_replacements; i++)
1618 if (replacements[i].what == from)
1619 replacements[i].what = to;
1620 }
1621 \f
1622 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1623 or a subpart of it. If we have any replacements registered for IN_RTX,
1624 cancel the reloads that were supposed to load them.
1625 Return nonzero if we canceled any reloads. */
1626 int
1627 remove_address_replacements (rtx in_rtx)
1628 {
1629 int i, j;
1630 char reload_flags[MAX_RELOADS];
1631 int something_changed = 0;
1632
1633 memset (reload_flags, 0, sizeof reload_flags);
1634 for (i = 0, j = 0; i < n_replacements; i++)
1635 {
1636 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1637 reload_flags[replacements[i].what] |= 1;
1638 else
1639 {
1640 replacements[j++] = replacements[i];
1641 reload_flags[replacements[i].what] |= 2;
1642 }
1643 }
1644 /* Note that the following store must be done before the recursive calls. */
1645 n_replacements = j;
1646
1647 for (i = n_reloads - 1; i >= 0; i--)
1648 {
1649 if (reload_flags[i] == 1)
1650 {
1651 deallocate_reload_reg (i);
1652 remove_address_replacements (rld[i].in);
1653 rld[i].in = 0;
1654 something_changed = 1;
1655 }
1656 }
1657 return something_changed;
1658 }
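
/* A minimal standalone sketch of the filter-in-place idiom used in
   remove_address_replacements above: surviving elements are copied down
   to a write index J while I scans the whole array.  The function and
   data below are invented for illustration; compile separately to run.  */
#if 0
#include <assert.h>

static int
keep_if_not_matching (int *a, int n, int unwanted)
{
  int i, j = 0;
  for (i = 0; i < n; i++)
    if (a[i] != unwanted)
      a[j++] = a[i];
  return j;			/* new element count */
}

int
main (void)
{
  int a[] = { 1, 7, 2, 7, 3 };
  int n = keep_if_not_matching (a, 5, 7);
  assert (n == 3 && a[0] == 1 && a[1] == 2 && a[2] == 3);
  return 0;
}
#endif
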
1659 \f
1660 /* If there is only one output reload, and it is not for an earlyclobber
1661 operand, try to combine it with a (logically unrelated) input reload
1662 to reduce the number of reload registers needed.
1663
1664 This is safe if the input reload does not appear in
1665 the value being output-reloaded, because this implies
1666 it is not needed any more once the original insn completes.
1667
1668    If that doesn't work, see if we can use any of the registers that
1669 die in this insn as a reload register. We can if it is of the right
1670 class and does not appear in the value being output-reloaded. */
1671
1672 static void
1673 combine_reloads (void)
1674 {
1675 int i;
1676 int output_reload = -1;
1677 int secondary_out = -1;
1678 rtx note;
1679
1680 /* Find the output reload; return unless there is exactly one
1681 and that one is mandatory. */
1682
1683 for (i = 0; i < n_reloads; i++)
1684 if (rld[i].out != 0)
1685 {
1686 if (output_reload >= 0)
1687 return;
1688 output_reload = i;
1689 }
1690
1691 if (output_reload < 0 || rld[output_reload].optional)
1692 return;
1693
1694 /* An input-output reload isn't combinable. */
1695
1696 if (rld[output_reload].in != 0)
1697 return;
1698
1699 /* If this reload is for an earlyclobber operand, we can't do anything. */
1700 if (earlyclobber_operand_p (rld[output_reload].out))
1701 return;
1702
1703 /* If there is a reload for part of the address of this operand, we would
1704      need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
1705 its life to the point where doing this combine would not lower the
1706 number of spill registers needed. */
1707 for (i = 0; i < n_reloads; i++)
1708 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1709 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1710 && rld[i].opnum == rld[output_reload].opnum)
1711 return;
1712
1713 /* Check each input reload; can we combine it? */
1714
1715 for (i = 0; i < n_reloads; i++)
1716 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1717 /* Life span of this reload must not extend past main insn. */
1718 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1719 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1720 && rld[i].when_needed != RELOAD_OTHER
1721 && (CLASS_MAX_NREGS (rld[i].class, rld[i].inmode)
1722 == CLASS_MAX_NREGS (rld[output_reload].class,
1723 rld[output_reload].outmode))
1724 && rld[i].inc == 0
1725 && rld[i].reg_rtx == 0
1726 #ifdef SECONDARY_MEMORY_NEEDED
1727 /* Don't combine two reloads with different secondary
1728 memory locations. */
1729 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1730 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1731 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1732 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1733 #endif
1734 && (SMALL_REGISTER_CLASSES
1735 ? (rld[i].class == rld[output_reload].class)
1736 : (reg_class_subset_p (rld[i].class,
1737 rld[output_reload].class)
1738 || reg_class_subset_p (rld[output_reload].class,
1739 rld[i].class)))
1740 && (MATCHES (rld[i].in, rld[output_reload].out)
1741 /* Args reversed because the first arg seems to be
1742 the one that we imagine being modified
1743 while the second is the one that might be affected. */
1744 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1745 rld[i].in)
1746 /* However, if the input is a register that appears inside
1747 the output, then we also can't share.
1748 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1749 If the same reload reg is used for both reg 69 and the
1750 result to be stored in memory, then that result
1751 will clobber the address of the memory ref. */
1752 && ! (REG_P (rld[i].in)
1753 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1754 rld[output_reload].out))))
1755 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1756 rld[i].when_needed != RELOAD_FOR_INPUT)
1757 && (reg_class_size[(int) rld[i].class]
1758 || SMALL_REGISTER_CLASSES)
1759 /* We will allow making things slightly worse by combining an
1760 input and an output, but no worse than that. */
1761 && (rld[i].when_needed == RELOAD_FOR_INPUT
1762 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1763 {
1764 int j;
1765
1766 /* We have found a reload to combine with! */
1767 rld[i].out = rld[output_reload].out;
1768 rld[i].out_reg = rld[output_reload].out_reg;
1769 rld[i].outmode = rld[output_reload].outmode;
1770 /* Mark the old output reload as inoperative. */
1771 rld[output_reload].out = 0;
1772 /* The combined reload is needed for the entire insn. */
1773 rld[i].when_needed = RELOAD_OTHER;
1774 /* If the output reload had a secondary reload, copy it. */
1775 if (rld[output_reload].secondary_out_reload != -1)
1776 {
1777 rld[i].secondary_out_reload
1778 = rld[output_reload].secondary_out_reload;
1779 rld[i].secondary_out_icode
1780 = rld[output_reload].secondary_out_icode;
1781 }
1782
1783 #ifdef SECONDARY_MEMORY_NEEDED
1784 /* Copy any secondary MEM. */
1785 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1786 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1787 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1788 #endif
1789 /* If required, minimize the register class. */
1790 if (reg_class_subset_p (rld[output_reload].class,
1791 rld[i].class))
1792 rld[i].class = rld[output_reload].class;
1793
1794 /* Transfer all replacements from the old reload to the combined. */
1795 for (j = 0; j < n_replacements; j++)
1796 if (replacements[j].what == output_reload)
1797 replacements[j].what = i;
1798
1799 return;
1800 }
1801
1802 /* If this insn has only one operand that is modified or written (assumed
1803 to be the first), it must be the one corresponding to this reload. It
1804 is safe to use anything that dies in this insn for that output provided
1805 that it does not occur in the output (we already know it isn't an
1806      earlyclobber).  If this is an asm insn, give up.  */
1807
1808 if (INSN_CODE (this_insn) == -1)
1809 return;
1810
1811 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1812 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1813 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1814 return;
1815
1816 /* See if some hard register that dies in this insn and is not used in
1817      the output is of the right class.  Only works if the register we pick
1818 up can fully hold our output reload. */
1819 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1820 if (REG_NOTE_KIND (note) == REG_DEAD
1821 && REG_P (XEXP (note, 0))
1822 && ! reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1823 rld[output_reload].out)
1824 && REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1825 && HARD_REGNO_MODE_OK (REGNO (XEXP (note, 0)), rld[output_reload].outmode)
1826 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].class],
1827 REGNO (XEXP (note, 0)))
1828 && (hard_regno_nregs[REGNO (XEXP (note, 0))][rld[output_reload].outmode]
1829 <= hard_regno_nregs[REGNO (XEXP (note, 0))][GET_MODE (XEXP (note, 0))])
1830 /* Ensure that a secondary or tertiary reload for this output
1831 won't want this register. */
1832 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1833 || (! (TEST_HARD_REG_BIT
1834 (reg_class_contents[(int) rld[secondary_out].class],
1835 REGNO (XEXP (note, 0))))
1836 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1837 || ! (TEST_HARD_REG_BIT
1838 (reg_class_contents[(int) rld[secondary_out].class],
1839 REGNO (XEXP (note, 0)))))))
1840 && ! fixed_regs[REGNO (XEXP (note, 0))])
1841 {
1842 rld[output_reload].reg_rtx
1843 = gen_rtx_REG (rld[output_reload].outmode,
1844 REGNO (XEXP (note, 0)));
1845 return;
1846 }
1847 }
1848 \f
1849 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1850 See if one of IN and OUT is a register that may be used;
1851 this is desirable since a spill-register won't be needed.
1852 If so, return the register rtx that proves acceptable.
1853
1854 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1855 CLASS is the register class required for the reload.
1856
1857 If FOR_REAL is >= 0, it is the number of the reload,
1858 and in some cases when it can be discovered that OUT doesn't need
1859 to be computed, clear out rld[FOR_REAL].out.
1860
1861 If FOR_REAL is -1, this should not be done, because this call
1862 is just to see if a register can be found, not to find and install it.
1863
1864 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1865 puts an additional constraint on being able to use IN for OUT since
1866 IN must not appear elsewhere in the insn (it is assumed that IN itself
1867 is safe from the earlyclobber). */
1868
1869 static rtx
1870 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1871 enum machine_mode inmode, enum machine_mode outmode,
1872 enum reg_class class, int for_real, int earlyclobber)
1873 {
1874 rtx in = real_in;
1875 rtx out = real_out;
1876 int in_offset = 0;
1877 int out_offset = 0;
1878 rtx value = 0;
1879
1880 /* If operands exceed a word, we can't use either of them
1881 unless they have the same size. */
1882 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1883 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1884 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1885 return 0;
1886
1887 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1888 respectively refers to a hard register. */
1889
1890 /* Find the inside of any subregs. */
1891 while (GET_CODE (out) == SUBREG)
1892 {
1893 if (REG_P (SUBREG_REG (out))
1894 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1895 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1896 GET_MODE (SUBREG_REG (out)),
1897 SUBREG_BYTE (out),
1898 GET_MODE (out));
1899 out = SUBREG_REG (out);
1900 }
1901 while (GET_CODE (in) == SUBREG)
1902 {
1903 if (REG_P (SUBREG_REG (in))
1904 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1905 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1906 GET_MODE (SUBREG_REG (in)),
1907 SUBREG_BYTE (in),
1908 GET_MODE (in));
1909 in = SUBREG_REG (in);
1910 }
1911
1912 /* Narrow down the reg class, the same way push_reload will;
1913 otherwise we might find a dummy now, but push_reload won't. */
1914 class = PREFERRED_RELOAD_CLASS (in, class);
1915
1916 /* See if OUT will do. */
1917 if (REG_P (out)
1918 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1919 {
1920 unsigned int regno = REGNO (out) + out_offset;
1921 unsigned int nwords = hard_regno_nregs[regno][outmode];
1922 rtx saved_rtx;
1923
1924 /* When we consider whether the insn uses OUT,
1925 ignore references within IN. They don't prevent us
1926 from copying IN into OUT, because those refs would
1927 move into the insn that reloads IN.
1928
1929 However, we only ignore IN in its role as this reload.
1930 If the insn uses IN elsewhere and it contains OUT,
1931 that counts. We can't be sure it's the "same" operand
1932 so it might not go through this reload. */
1933 saved_rtx = *inloc;
1934 *inloc = const0_rtx;
1935
1936 if (regno < FIRST_PSEUDO_REGISTER
1937 && HARD_REGNO_MODE_OK (regno, outmode)
1938 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1939 PATTERN (this_insn), outloc))
1940 {
1941 unsigned int i;
1942
1943 for (i = 0; i < nwords; i++)
1944 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1945 regno + i))
1946 break;
1947
1948 if (i == nwords)
1949 {
1950 if (REG_P (real_out))
1951 value = real_out;
1952 else
1953 value = gen_rtx_REG (outmode, regno);
1954 }
1955 }
1956
1957 *inloc = saved_rtx;
1958 }
1959
1960 /* Consider using IN if OUT was not acceptable
1961 or if OUT dies in this insn (like the quotient in a divmod insn).
1962      We can't use IN unless it dies in this insn,
1963 which means we must know accurately which hard regs are live.
1964 Also, the result can't go in IN if IN is used within OUT,
1965 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
1966 if (hard_regs_live_known
1967 && REG_P (in)
1968 && REGNO (in) < FIRST_PSEUDO_REGISTER
1969 && (value == 0
1970 || find_reg_note (this_insn, REG_UNUSED, real_out))
1971 && find_reg_note (this_insn, REG_DEAD, real_in)
1972 && !fixed_regs[REGNO (in)]
1973 && HARD_REGNO_MODE_OK (REGNO (in),
1974 /* The only case where out and real_out might
1975 have different modes is where real_out
1976 is a subreg, and in that case, out
1977 has a real mode. */
1978 (GET_MODE (out) != VOIDmode
1979 ? GET_MODE (out) : outmode)))
1980 {
1981 unsigned int regno = REGNO (in) + in_offset;
1982 unsigned int nwords = hard_regno_nregs[regno][inmode];
1983
1984 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
1985 && ! hard_reg_set_here_p (regno, regno + nwords,
1986 PATTERN (this_insn))
1987 && (! earlyclobber
1988 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
1989 PATTERN (this_insn), inloc)))
1990 {
1991 unsigned int i;
1992
1993 for (i = 0; i < nwords; i++)
1994 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1995 regno + i))
1996 break;
1997
1998 if (i == nwords)
1999 {
2000 /* If we were going to use OUT as the reload reg
2001 and changed our mind, it means OUT is a dummy that
2002 dies here. So don't bother copying value to it. */
2003 if (for_real >= 0 && value == real_out)
2004 rld[for_real].out = 0;
2005 if (REG_P (real_in))
2006 value = real_in;
2007 else
2008 value = gen_rtx_REG (inmode, regno);
2009 }
2010 }
2011 }
2012
2013 return value;
2014 }
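
/* A minimal standalone sketch of the "whole group must be in the class"
   loops used in both legs of find_dummy_reload above.  A register class
   is modelled as a 32-bit mask here instead of the real HARD_REG_SET /
   TEST_HARD_REG_BIT representation, and the names and register numbers
   below are invented for illustration; compile separately to run.  */
#if 0
#include <assert.h>

static int
group_in_class_p (unsigned int class_mask, unsigned int regno,
		  unsigned int nregs)
{
  unsigned int i;
  for (i = 0; i < nregs; i++)
    if (!(class_mask & (1u << (regno + i))))
      return 0;
  return 1;
}

int
main (void)
{
  unsigned int gp_regs = 0x0000fffe;		/* regs 1..15 */
  assert (group_in_class_p (gp_regs, 2, 2));	/* regs 2 and 3: OK */
  assert (!group_in_class_p (gp_regs, 15, 2));	/* reg 16 is not in the class */
  return 0;
}
#endif
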
2015 \f
2016 /* This page contains subroutines used mainly for determining
2017 whether the IN or an OUT of a reload can serve as the
2018 reload register. */
2019
2020 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2021
2022 int
2023 earlyclobber_operand_p (rtx x)
2024 {
2025 int i;
2026
2027 for (i = 0; i < n_earlyclobbers; i++)
2028 if (reload_earlyclobbers[i] == x)
2029 return 1;
2030
2031 return 0;
2032 }
2033
2034 /* Return 1 if expression X alters a hard reg in the range
2035 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2036 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2037 X should be the body of an instruction. */
2038
2039 static int
2040 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2041 {
2042 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2043 {
2044 rtx op0 = SET_DEST (x);
2045
2046 while (GET_CODE (op0) == SUBREG)
2047 op0 = SUBREG_REG (op0);
2048 if (REG_P (op0))
2049 {
2050 unsigned int r = REGNO (op0);
2051
2052 	  /* See if this reg overlaps the range under consideration.  */
2053 if (r < end_regno
2054 && r + hard_regno_nregs[r][GET_MODE (op0)] > beg_regno)
2055 return 1;
2056 }
2057 }
2058 else if (GET_CODE (x) == PARALLEL)
2059 {
2060 int i = XVECLEN (x, 0) - 1;
2061
2062 for (; i >= 0; i--)
2063 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2064 return 1;
2065 }
2066
2067 return 0;
2068 }
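
/* A minimal standalone restatement of the overlap test used by
   hard_reg_set_here_p above: a hard register group is the half-open range
   [r, r + nregs), and it intersects [beg_regno, end_regno) exactly when
   r < end_regno and r + nregs > beg_regno.  Names below are invented for
   illustration; compile separately to run.  */
#if 0
#include <assert.h>

static int
ranges_overlap_p (unsigned int beg, unsigned int end,	/* [beg, end) */
		  unsigned int r, unsigned int nregs)	/* [r, r+nregs) */
{
  return r < end && r + nregs > beg;
}

int
main (void)
{
  assert (ranges_overlap_p (4, 6, 5, 2));	/* regs 5..6 meet 4..5 */
  assert (!ranges_overlap_p (4, 6, 6, 2));	/* regs 6..7 are disjoint */
  assert (ranges_overlap_p (4, 6, 2, 3));	/* regs 2..4 touch reg 4 */
  return 0;
}
#endif
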
2069
2070 /* Return 1 if ADDR is a valid memory address for mode MODE,
2071 and check that each pseudo reg has the proper kind of
2072 hard reg. */
2073
2074 int
2075 strict_memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
2076 {
2077 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2078 return 0;
2079
2080 win:
2081 return 1;
2082 }
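
/* strict_memory_address_p above relies on GO_IF_LEGITIMATE_ADDRESS, which
   does not return a value but jumps to the label given as its last argument
   when the address is valid.  The block below is a standalone sketch of
   that goto-label protocol only; the macro here is a made-up stand-in
   (word-aligned addresses "win"), not any target's real definition.
   Compile separately to run.  */
#if 0
#include <assert.h>

#define GO_IF_ALIGNED_ADDRESS(ADDR, LABEL) \
  do { if (((ADDR) & 3) == 0) goto LABEL; } while (0)

static int
aligned_address_p (unsigned long addr)
{
  GO_IF_ALIGNED_ADDRESS (addr, win);
  return 0;

 win:
  return 1;
}

int
main (void)
{
  assert (aligned_address_p (0x1000));
  assert (!aligned_address_p (0x1002));
  return 0;
}
#endif
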
2083 \f
2084 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2085 if they are the same hard reg, and has special hacks for
2086 autoincrement and autodecrement.
2087 This is specifically intended for find_reloads to use
2088 in determining whether two operands match.
2089 X is the operand whose number is the lower of the two.
2090
2091 The value is 2 if Y contains a pre-increment that matches
2092 a non-incrementing address in X. */
2093
2094 /* ??? To be completely correct, we should arrange to pass
2095 for X the output operand and for Y the input operand.
2096 For now, we assume that the output operand has the lower number
2097 because that is natural in (SET output (... input ...)). */
2098
2099 int
2100 operands_match_p (rtx x, rtx y)
2101 {
2102 int i;
2103 RTX_CODE code = GET_CODE (x);
2104 const char *fmt;
2105 int success_2;
2106
2107 if (x == y)
2108 return 1;
2109 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2110 && (REG_P (y) || (GET_CODE (y) == SUBREG
2111 && REG_P (SUBREG_REG (y)))))
2112 {
2113 int j;
2114
2115 if (code == SUBREG)
2116 {
2117 i = REGNO (SUBREG_REG (x));
2118 if (i >= FIRST_PSEUDO_REGISTER)
2119 goto slow;
2120 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2121 GET_MODE (SUBREG_REG (x)),
2122 SUBREG_BYTE (x),
2123 GET_MODE (x));
2124 }
2125 else
2126 i = REGNO (x);
2127
2128 if (GET_CODE (y) == SUBREG)
2129 {
2130 j = REGNO (SUBREG_REG (y));
2131 if (j >= FIRST_PSEUDO_REGISTER)
2132 goto slow;
2133 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2134 GET_MODE (SUBREG_REG (y)),
2135 SUBREG_BYTE (y),
2136 GET_MODE (y));
2137 }
2138 else
2139 j = REGNO (y);
2140
2141 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2142 multiple hard register group, so that for example (reg:DI 0) and
2143 (reg:SI 1) will be considered the same register. */
2144 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2145 && i < FIRST_PSEUDO_REGISTER)
2146 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2147 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2148 && j < FIRST_PSEUDO_REGISTER)
2149 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2150
2151 return i == j;
2152 }
2153 /* If two operands must match, because they are really a single
2154 operand of an assembler insn, then two postincrements are invalid
2155 because the assembler insn would increment only once.
2156 On the other hand, a postincrement matches ordinary indexing
2157 if the postincrement is the output operand. */
2158 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2159 return operands_match_p (XEXP (x, 0), y);
2160 /* Two preincrements are invalid
2161 because the assembler insn would increment only once.
2162 On the other hand, a preincrement matches ordinary indexing
2163 if the preincrement is the input operand.
2164 In this case, return 2, since some callers need to do special
2165 things when this happens. */
2166 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2167 || GET_CODE (y) == PRE_MODIFY)
2168 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2169
2170 slow:
2171
2172 /* Now we have disposed of all the cases
2173 in which different rtx codes can match. */
2174 if (code != GET_CODE (y))
2175 return 0;
2176 if (code == LABEL_REF)
2177 return XEXP (x, 0) == XEXP (y, 0);
2178 if (code == SYMBOL_REF)
2179 return XSTR (x, 0) == XSTR (y, 0);
2180
2181 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2182
2183 if (GET_MODE (x) != GET_MODE (y))
2184 return 0;
2185
2186 /* Compare the elements. If any pair of corresponding elements
2187    fail to match, return 0 for the whole thing.  */
2188
2189 success_2 = 0;
2190 fmt = GET_RTX_FORMAT (code);
2191 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2192 {
2193 int val, j;
2194 switch (fmt[i])
2195 {
2196 case 'w':
2197 if (XWINT (x, i) != XWINT (y, i))
2198 return 0;
2199 break;
2200
2201 case 'i':
2202 if (XINT (x, i) != XINT (y, i))
2203 return 0;
2204 break;
2205
2206 case 'e':
2207 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2208 if (val == 0)
2209 return 0;
2210 /* If any subexpression returns 2,
2211 we should return 2 if we are successful. */
2212 if (val == 2)
2213 success_2 = 1;
2214 break;
2215
2216 case '0':
2217 break;
2218
2219 case 'E':
2220 if (XVECLEN (x, i) != XVECLEN (y, i))
2221 return 0;
2222 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2223 {
2224 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2225 if (val == 0)
2226 return 0;
2227 if (val == 2)
2228 success_2 = 1;
2229 }
2230 break;
2231
2232 /* It is believed that rtx's at this level will never
2233 contain anything but integers and other rtx's,
2234 except for within LABEL_REFs and SYMBOL_REFs. */
2235 default:
2236 gcc_unreachable ();
2237 }
2238 }
2239 return 1 + success_2;
2240 }
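
/* A minimal standalone restatement of the WORDS_BIG_ENDIAN adjustment in
   operands_match_p above: a multi-word hard register group is identified
   by its last register number, so that for example (reg:DI 0) and
   (reg:SI 1) compare equal on a 32-bit big-endian target.  The register
   counts are supplied by hand here rather than by hard_regno_nregs, and
   the function name is invented; compile separately to run.  */
#if 0
#include <assert.h>

static unsigned int
match_regno_big_endian (unsigned int regno, unsigned int nregs)
{
  return regno + nregs - 1;	/* point at the last register of the group */
}

int
main (void)
{
  /* (reg:DI 0) occupies regs 0 and 1; (reg:SI 1) occupies reg 1.  */
  assert (match_regno_big_endian (0, 2) == match_regno_big_endian (1, 1));
  /* (reg:DI 0) and (reg:SI 0) do not match on such a target.  */
  assert (match_regno_big_endian (0, 2) != match_regno_big_endian (0, 1));
  return 0;
}
#endif
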
2241 \f
2242 /* Describe the range of registers or memory referenced by X.
2243 If X is a register, set REG_FLAG and put the first register
2244 number into START and the last plus one into END.
2245 If X is a memory reference, put a base address into BASE
2246 and a range of integer offsets into START and END.
2247 If X is pushing on the stack, we can assume it causes no trouble,
2248 so we set the SAFE field. */
2249
2250 static struct decomposition
2251 decompose (rtx x)
2252 {
2253 struct decomposition val;
2254 int all_const = 0;
2255
2256 memset (&val, 0, sizeof (val));
2257
2258 switch (GET_CODE (x))
2259 {
2260 case MEM:
2261 {
2262 rtx base = NULL_RTX, offset = 0;
2263 rtx addr = XEXP (x, 0);
2264
2265 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2266 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2267 {
2268 val.base = XEXP (addr, 0);
2269 val.start = -GET_MODE_SIZE (GET_MODE (x));
2270 val.end = GET_MODE_SIZE (GET_MODE (x));
2271 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2272 return val;
2273 }
2274
2275 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2276 {
2277 if (GET_CODE (XEXP (addr, 1)) == PLUS
2278 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2279 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2280 {
2281 val.base = XEXP (addr, 0);
2282 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2283 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2284 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2285 return val;
2286 }
2287 }
2288
2289 if (GET_CODE (addr) == CONST)
2290 {
2291 addr = XEXP (addr, 0);
2292 all_const = 1;
2293 }
2294 if (GET_CODE (addr) == PLUS)
2295 {
2296 if (CONSTANT_P (XEXP (addr, 0)))
2297 {
2298 base = XEXP (addr, 1);
2299 offset = XEXP (addr, 0);
2300 }
2301 else if (CONSTANT_P (XEXP (addr, 1)))
2302 {
2303 base = XEXP (addr, 0);
2304 offset = XEXP (addr, 1);
2305 }
2306 }
2307
2308 if (offset == 0)
2309 {
2310 base = addr;
2311 offset = const0_rtx;
2312 }
2313 if (GET_CODE (offset) == CONST)
2314 offset = XEXP (offset, 0);
2315 if (GET_CODE (offset) == PLUS)
2316 {
2317 if (GET_CODE (XEXP (offset, 0)) == CONST_INT)
2318 {
2319 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2320 offset = XEXP (offset, 0);
2321 }
2322 else if (GET_CODE (XEXP (offset, 1)) == CONST_INT)
2323 {
2324 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2325 offset = XEXP (offset, 1);
2326 }
2327 else
2328 {
2329 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2330 offset = const0_rtx;
2331 }
2332 }
2333 else if (GET_CODE (offset) != CONST_INT)
2334 {
2335 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2336 offset = const0_rtx;
2337 }
2338
2339 if (all_const && GET_CODE (base) == PLUS)
2340 base = gen_rtx_CONST (GET_MODE (base), base);
2341
2342 gcc_assert (GET_CODE (offset) == CONST_INT);
2343
2344 val.start = INTVAL (offset);
2345 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2346 val.base = base;
2347 }
2348 break;
2349
2350 case REG:
2351 val.reg_flag = 1;
2352 val.start = true_regnum (x);
2353 if (val.start < 0)
2354 {
2355 /* A pseudo with no hard reg. */
2356 val.start = REGNO (x);
2357 val.end = val.start + 1;
2358 }
2359 else
2360 /* A hard reg. */
2361 val.end = val.start + hard_regno_nregs[val.start][GET_MODE (x)];
2362 break;
2363
2364 case SUBREG:
2365 if (!REG_P (SUBREG_REG (x)))
2366 /* This could be more precise, but it's good enough. */
2367 return decompose (SUBREG_REG (x));
2368 val.reg_flag = 1;
2369 val.start = true_regnum (x);
2370 if (val.start < 0)
2371 return decompose (SUBREG_REG (x));
2372 else
2373 /* A hard reg. */
2374 val.end = val.start + hard_regno_nregs[val.start][GET_MODE (x)];
2375 break;
2376
2377 case SCRATCH:
2378 /* This hasn't been assigned yet, so it can't conflict yet. */
2379 val.safe = 1;
2380 break;
2381
2382 default:
2383 gcc_assert (CONSTANT_P (x));
2384 val.safe = 1;
2385 break;
2386 }
2387 return val;
2388 }
2389
2390 /* Return 1 if altering Y will not modify the value of X.
2391 Y is also described by YDATA, which should be decompose (Y). */
2392
2393 static int
2394 immune_p (rtx x, rtx y, struct decomposition ydata)
2395 {
2396 struct decomposition xdata;
2397
2398 if (ydata.reg_flag)
2399 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2400 if (ydata.safe)
2401 return 1;
2402
2403 gcc_assert (MEM_P (y));
2404 /* If Y is memory and X is not, Y can't affect X. */
2405 if (!MEM_P (x))
2406 return 1;
2407
2408 xdata = decompose (x);
2409
2410 if (! rtx_equal_p (xdata.base, ydata.base))
2411 {
2412 /* If bases are distinct symbolic constants, there is no overlap. */
2413 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2414 return 1;
2415 /* Constants and stack slots never overlap. */
2416 if (CONSTANT_P (xdata.base)
2417 && (ydata.base == frame_pointer_rtx
2418 || ydata.base == hard_frame_pointer_rtx
2419 || ydata.base == stack_pointer_rtx))
2420 return 1;
2421 if (CONSTANT_P (ydata.base)
2422 && (xdata.base == frame_pointer_rtx
2423 || xdata.base == hard_frame_pointer_rtx
2424 || xdata.base == stack_pointer_rtx))
2425 return 1;
2426 /* If either base is variable, we don't know anything. */
2427 return 0;
2428 }
2429
2430 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2431 }
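
/* When both references share the same base, immune_p above reduces to a
   byte-interval test: the accesses cannot overlap when one range ends at
   or before the point where the other begins.  The block below restates
   just that final test on plain integers, with invented names; compile
   separately to run.  */
#if 0
#include <assert.h>

static int
immune_ranges_p (int xstart, int xend, int ystart, int yend)
{
  return xstart >= yend || ystart >= xend;
}

int
main (void)
{
  assert (immune_ranges_p (0, 4, 4, 8));	/* adjacent words: independent */
  assert (!immune_ranges_p (0, 8, 4, 12));	/* 4-byte overlap: not immune */
  return 0;
}
#endif
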
2432
2433 /* Similar, but calls decompose. */
2434
2435 int
2436 safe_from_earlyclobber (rtx op, rtx clobber)
2437 {
2438 struct decomposition early_data;
2439
2440 early_data = decompose (clobber);
2441 return immune_p (op, clobber, early_data);
2442 }
2443 \f
2444 /* Main entry point of this file: search the body of INSN
2445 for values that need reloading and record them with push_reload.
2446 REPLACE nonzero means record also where the values occur
2447 so that subst_reloads can be used.
2448
2449 IND_LEVELS says how many levels of indirection are supported by this
2450 machine; a value of zero means that a memory reference is not a valid
2451 memory address.
2452
2453 LIVE_KNOWN says we have valid information about which hard
2454 regs are live at each point in the program; this is true when
2455 we are called from global_alloc but false when stupid register
2456 allocation has been done.
2457
2458 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2459 which is nonnegative if the reg has been commandeered for reloading into.
2460 It is copied into STATIC_RELOAD_REG_P and referenced from there
2461 by various subroutines.
2462
2463 Return TRUE if some operands need to be changed, because of swapping
2464 commutative operands, reg_equiv_address substitution, or whatever. */
2465
2466 int
2467 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2468 short *reload_reg_p)
2469 {
2470 int insn_code_number;
2471 int i, j;
2472 int noperands;
2473 /* These start out as the constraints for the insn
2474 and they are chewed up as we consider alternatives. */
2475 char *constraints[MAX_RECOG_OPERANDS];
2476 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2477 a register. */
2478 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2479 char pref_or_nothing[MAX_RECOG_OPERANDS];
2480 /* Nonzero for a MEM operand whose entire address needs a reload.
2481 May be -1 to indicate the entire address may or may not need a reload. */
2482 int address_reloaded[MAX_RECOG_OPERANDS];
2483 /* Nonzero for an address operand that needs to be completely reloaded.
2484 May be -1 to indicate the entire operand may or may not need a reload. */
2485 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2486 /* Value of enum reload_type to use for operand. */
2487 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2488 /* Value of enum reload_type to use within address of operand. */
2489 enum reload_type address_type[MAX_RECOG_OPERANDS];
2490 /* Save the usage of each operand. */
2491 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2492 int no_input_reloads = 0, no_output_reloads = 0;
2493 int n_alternatives;
2494 int this_alternative[MAX_RECOG_OPERANDS];
2495 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2496 char this_alternative_win[MAX_RECOG_OPERANDS];
2497 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2498 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2499 int this_alternative_matches[MAX_RECOG_OPERANDS];
2500 int swapped;
2501 int goal_alternative[MAX_RECOG_OPERANDS];
2502 int this_alternative_number;
2503 int goal_alternative_number = 0;
2504 int operand_reloadnum[MAX_RECOG_OPERANDS];
2505 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2506 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2507 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2508 char goal_alternative_win[MAX_RECOG_OPERANDS];
2509 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2510 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2511 int goal_alternative_swapped;
2512 int best;
2513 int commutative;
2514 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2515 rtx substed_operand[MAX_RECOG_OPERANDS];
2516 rtx body = PATTERN (insn);
2517 rtx set = single_set (insn);
2518 int goal_earlyclobber = 0, this_earlyclobber;
2519 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2520 int retval = 0;
2521
2522 this_insn = insn;
2523 n_reloads = 0;
2524 n_replacements = 0;
2525 n_earlyclobbers = 0;
2526 replace_reloads = replace;
2527 hard_regs_live_known = live_known;
2528 static_reload_reg_p = reload_reg_p;
2529
2530 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2531 neither are insns that SET cc0. Insns that use CC0 are not allowed
2532 to have any input reloads. */
2533 if (JUMP_P (insn) || CALL_P (insn))
2534 no_output_reloads = 1;
2535
2536 #ifdef HAVE_cc0
2537 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2538 no_input_reloads = 1;
2539 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2540 no_output_reloads = 1;
2541 #endif
2542
2543 #ifdef SECONDARY_MEMORY_NEEDED
2544 /* The eliminated forms of any secondary memory locations are per-insn, so
2545 clear them out here. */
2546
2547 if (secondary_memlocs_elim_used)
2548 {
2549 memset (secondary_memlocs_elim, 0,
2550 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2551 secondary_memlocs_elim_used = 0;
2552 }
2553 #endif
2554
2555 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2556 is cheap to move between them. If it is not, there may not be an insn
2557 to do the copy, so we may need a reload. */
2558 if (GET_CODE (body) == SET
2559 && REG_P (SET_DEST (body))
2560 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2561 && REG_P (SET_SRC (body))
2562 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2563 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2564 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2565 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2566 return 0;
2567
2568 extract_insn (insn);
2569
2570 noperands = reload_n_operands = recog_data.n_operands;
2571 n_alternatives = recog_data.n_alternatives;
2572
2573 /* Just return "no reloads" if insn has no operands with constraints. */
2574 if (noperands == 0 || n_alternatives == 0)
2575 return 0;
2576
2577 insn_code_number = INSN_CODE (insn);
2578 this_insn_is_asm = insn_code_number < 0;
2579
2580 memcpy (operand_mode, recog_data.operand_mode,
2581 noperands * sizeof (enum machine_mode));
2582 memcpy (constraints, recog_data.constraints, noperands * sizeof (char *));
2583
2584 commutative = -1;
2585
2586 /* If we will need to know, later, whether some pair of operands
2587 are the same, we must compare them now and save the result.
2588 Reloading the base and index registers will clobber them
2589 and afterward they will fail to match. */
2590
2591 for (i = 0; i < noperands; i++)
2592 {
2593 char *p;
2594 int c;
2595
2596 substed_operand[i] = recog_data.operand[i];
2597 p = constraints[i];
2598
2599 modified[i] = RELOAD_READ;
2600
2601 /* Scan this operand's constraint to see if it is an output operand,
2602 an in-out operand, is commutative, or should match another. */
2603
2604 while ((c = *p))
2605 {
2606 p += CONSTRAINT_LEN (c, p);
2607 switch (c)
2608 {
2609 case '=':
2610 modified[i] = RELOAD_WRITE;
2611 break;
2612 case '+':
2613 modified[i] = RELOAD_READ_WRITE;
2614 break;
2615 case '%':
2616 {
2617 /* The last operand should not be marked commutative. */
2618 gcc_assert (i != noperands - 1);
2619
2620 /* We currently only support one commutative pair of
2621 operands. Some existing asm code currently uses more
2622 than one pair. Previously, that would usually work,
2623 but sometimes it would crash the compiler. We
2624 continue supporting that case as well as we can by
2625 silently ignoring all but the first pair. In the
2626 future we may handle it correctly. */
2627 if (commutative < 0)
2628 commutative = i;
2629 else
2630 gcc_assert (this_insn_is_asm);
2631 }
2632 break;
2633 /* Use of ISDIGIT is tempting here, but it may get expensive because
2634 of locale support we don't want. */
2635 case '0': case '1': case '2': case '3': case '4':
2636 case '5': case '6': case '7': case '8': case '9':
2637 {
2638 c = strtoul (p - 1, &p, 10);
2639
2640 operands_match[c][i]
2641 = operands_match_p (recog_data.operand[c],
2642 recog_data.operand[i]);
2643
2644 /* An operand may not match itself. */
2645 gcc_assert (c != i);
2646
2647 /* If C can be commuted with C+1, and C might need to match I,
2648 then C+1 might also need to match I. */
2649 if (commutative >= 0)
2650 {
2651 if (c == commutative || c == commutative + 1)
2652 {
2653 int other = c + (c == commutative ? 1 : -1);
2654 operands_match[other][i]
2655 = operands_match_p (recog_data.operand[other],
2656 recog_data.operand[i]);
2657 }
2658 if (i == commutative || i == commutative + 1)
2659 {
2660 int other = i + (i == commutative ? 1 : -1);
2661 operands_match[c][other]
2662 = operands_match_p (recog_data.operand[c],
2663 recog_data.operand[other]);
2664 }
2665 /* Note that C is supposed to be less than I.
2666 No need to consider altering both C and I because in
2667 that case we would alter one into the other. */
2668 }
2669 }
2670 }
2671 }
2672 }
2673
2674 /* Examine each operand that is a memory reference or memory address
2675 and reload parts of the addresses into index registers.
2676 Also here any references to pseudo regs that didn't get hard regs
2677 but are equivalent to constants get replaced in the insn itself
2678 with those constants. Nobody will ever see them again.
2679
2680 Finally, set up the preferred classes of each operand. */
2681
2682 for (i = 0; i < noperands; i++)
2683 {
2684 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2685
2686 address_reloaded[i] = 0;
2687 address_operand_reloaded[i] = 0;
2688 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2689 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2690 : RELOAD_OTHER);
2691 address_type[i]
2692 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2693 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2694 : RELOAD_OTHER);
2695
2696 if (*constraints[i] == 0)
2697 /* Ignore things like match_operator operands. */
2698 ;
2699 else if (constraints[i][0] == 'p'
2700 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2701 {
2702 address_operand_reloaded[i]
2703 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2704 recog_data.operand[i],
2705 recog_data.operand_loc[i],
2706 i, operand_type[i], ind_levels, insn);
2707
2708 /* If we now have a simple operand where we used to have a
2709 PLUS or MULT, re-recognize and try again. */
2710 if ((OBJECT_P (*recog_data.operand_loc[i])
2711 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2712 && (GET_CODE (recog_data.operand[i]) == MULT
2713 || GET_CODE (recog_data.operand[i]) == PLUS))
2714 {
2715 INSN_CODE (insn) = -1;
2716 retval = find_reloads (insn, replace, ind_levels, live_known,
2717 reload_reg_p);
2718 return retval;
2719 }
2720
2721 recog_data.operand[i] = *recog_data.operand_loc[i];
2722 substed_operand[i] = recog_data.operand[i];
2723
2724 /* Address operands are reloaded in their existing mode,
2725 no matter what is specified in the machine description. */
2726 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2727 }
2728 else if (code == MEM)
2729 {
2730 address_reloaded[i]
2731 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2732 recog_data.operand_loc[i],
2733 XEXP (recog_data.operand[i], 0),
2734 &XEXP (recog_data.operand[i], 0),
2735 i, address_type[i], ind_levels, insn);
2736 recog_data.operand[i] = *recog_data.operand_loc[i];
2737 substed_operand[i] = recog_data.operand[i];
2738 }
2739 else if (code == SUBREG)
2740 {
2741 rtx reg = SUBREG_REG (recog_data.operand[i]);
2742 rtx op
2743 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2744 ind_levels,
2745 set != 0
2746 && &SET_DEST (set) == recog_data.operand_loc[i],
2747 insn,
2748 &address_reloaded[i]);
2749
2750 	  /* If we made a MEM to load (a part of) the stack slot of a pseudo
2751 that didn't get a hard register, emit a USE with a REG_EQUAL
2752 note in front so that we might inherit a previous, possibly
2753 wider reload. */
2754
2755 if (replace
2756 && MEM_P (op)
2757 && REG_P (reg)
2758 && (GET_MODE_SIZE (GET_MODE (reg))
2759 >= GET_MODE_SIZE (GET_MODE (op))))
2760 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2761 insn),
2762 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2763
2764 substed_operand[i] = recog_data.operand[i] = op;
2765 }
2766 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2767 /* We can get a PLUS as an "operand" as a result of register
2768 elimination. See eliminate_regs and gen_reload. We handle
2769 a unary operator by reloading the operand. */
2770 substed_operand[i] = recog_data.operand[i]
2771 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2772 ind_levels, 0, insn,
2773 &address_reloaded[i]);
2774 else if (code == REG)
2775 {
2776 /* This is equivalent to calling find_reloads_toplev.
2777 The code is duplicated for speed.
2778 When we find a pseudo always equivalent to a constant,
2779 we replace it by the constant. We must be sure, however,
2780 that we don't try to replace it in the insn in which it
2781 is being set. */
2782 int regno = REGNO (recog_data.operand[i]);
2783 if (reg_equiv_constant[regno] != 0
2784 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2785 {
2786 /* Record the existing mode so that the check if constants are
2787 allowed will work when operand_mode isn't specified. */
2788
2789 if (operand_mode[i] == VOIDmode)
2790 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2791
2792 substed_operand[i] = recog_data.operand[i]
2793 = reg_equiv_constant[regno];
2794 }
2795 if (reg_equiv_memory_loc[regno] != 0
2796 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2797 /* We need not give a valid is_set_dest argument since the case
2798 of a constant equivalence was checked above. */
2799 substed_operand[i] = recog_data.operand[i]
2800 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2801 ind_levels, 0, insn,
2802 &address_reloaded[i]);
2803 }
2804 /* If the operand is still a register (we didn't replace it with an
2805 equivalent), get the preferred class to reload it into. */
2806 code = GET_CODE (recog_data.operand[i]);
2807 preferred_class[i]
2808 = ((code == REG && REGNO (recog_data.operand[i])
2809 >= FIRST_PSEUDO_REGISTER)
2810 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2811 : NO_REGS);
2812 pref_or_nothing[i]
2813 = (code == REG
2814 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2815 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2816 }
2817
2818 /* If this is simply a copy from operand 1 to operand 0, merge the
2819 preferred classes for the operands. */
2820 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2821 && recog_data.operand[1] == SET_SRC (set))
2822 {
2823 preferred_class[0] = preferred_class[1]
2824 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2825 pref_or_nothing[0] |= pref_or_nothing[1];
2826 pref_or_nothing[1] |= pref_or_nothing[0];
2827 }
2828
2829 /* Now see what we need for pseudo-regs that didn't get hard regs
2830 or got the wrong kind of hard reg. For this, we must consider
2831 all the operands together against the register constraints. */
2832
2833 best = MAX_RECOG_OPERANDS * 2 + 600;
2834
2835 swapped = 0;
2836 goal_alternative_swapped = 0;
2837 try_swapped:
2838
2839 /* The constraints are made of several alternatives.
2840 Each operand's constraint looks like foo,bar,... with commas
2841 separating the alternatives. The first alternatives for all
2842 operands go together, the second alternatives go together, etc.
2843
2844 First loop over alternatives. */
2845
2846 for (this_alternative_number = 0;
2847 this_alternative_number < n_alternatives;
2848 this_alternative_number++)
2849 {
2850 /* Loop over operands for one constraint alternative. */
2851 /* LOSERS counts those that don't fit this alternative
2852 and would require loading. */
2853 int losers = 0;
2854 	 /* BAD is set to 1 if some operand can't fit this alternative
2855 even after reloading. */
2856 int bad = 0;
2857 /* REJECT is a count of how undesirable this alternative says it is
2858 if any reloading is required. If the alternative matches exactly
2859 then REJECT is ignored, but otherwise it gets this much
2860 counted against it in addition to the reloading needed. Each
2861 	 ? counts three times here since we want the disparagement caused by
2862 	 a bad register class to count only 1/3 as much.  */
2863 int reject = 0;
2864
2865 this_earlyclobber = 0;
2866
2867 for (i = 0; i < noperands; i++)
2868 {
2869 char *p = constraints[i];
2870 char *end;
2871 int len;
2872 int win = 0;
2873 int did_match = 0;
2874 /* 0 => this operand can be reloaded somehow for this alternative. */
2875 int badop = 1;
2876 /* 0 => this operand can be reloaded if the alternative allows regs. */
2877 int winreg = 0;
2878 int c;
2879 int m;
2880 rtx operand = recog_data.operand[i];
2881 int offset = 0;
2882 /* Nonzero means this is a MEM that must be reloaded into a reg
2883 regardless of what the constraint says. */
2884 int force_reload = 0;
2885 int offmemok = 0;
2886 /* Nonzero if a constant forced into memory would be OK for this
2887 operand. */
2888 int constmemok = 0;
2889 int earlyclobber = 0;
2890
2891 /* If the predicate accepts a unary operator, it means that
2892 we need to reload the operand, but do not do this for
2893 match_operator and friends. */
2894 if (UNARY_P (operand) && *p != 0)
2895 operand = XEXP (operand, 0);
2896
2897 /* If the operand is a SUBREG, extract
2898 the REG or MEM (or maybe even a constant) within.
2899 (Constants can occur as a result of reg_equiv_constant.) */
2900
2901 while (GET_CODE (operand) == SUBREG)
2902 {
2903 /* Offset only matters when operand is a REG and
2904 it is a hard reg. This is because it is passed
2905 	     to reg_fits_class_p if it is a REG, and reg_fits_class_p
2906 	     returns 0 for all pseudos.  */
2907 if (REG_P (SUBREG_REG (operand))
2908 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
2909 {
2910 if (!subreg_offset_representable_p
2911 (REGNO (SUBREG_REG (operand)),
2912 GET_MODE (SUBREG_REG (operand)),
2913 SUBREG_BYTE (operand),
2914 GET_MODE (operand)))
2915 force_reload = 1;
2916 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
2917 GET_MODE (SUBREG_REG (operand)),
2918 SUBREG_BYTE (operand),
2919 GET_MODE (operand));
2920 }
2921 operand = SUBREG_REG (operand);
2922 /* Force reload if this is a constant or PLUS or if there may
2923 be a problem accessing OPERAND in the outer mode. */
2924 if (CONSTANT_P (operand)
2925 || GET_CODE (operand) == PLUS
2926 /* We must force a reload of paradoxical SUBREGs
2927 of a MEM because the alignment of the inner value
2928 may not be enough to do the outer reference. On
2929 big-endian machines, it may also reference outside
2930 the object.
2931
2932 On machines that extend byte operations and we have a
2933 SUBREG where both the inner and outer modes are no wider
2934 than a word and the inner mode is narrower, is integral,
2935 and gets extended when loaded from memory, combine.c has
2936 made assumptions about the behavior of the machine in such
2937 register access. If the data is, in fact, in memory we
2938 must always load using the size assumed to be in the
2939 register and let the insn do the different-sized
2940 accesses.
2941
2942 This is doubly true if WORD_REGISTER_OPERATIONS. In
2943 this case eliminate_regs has left non-paradoxical
2944 subregs for push_reload to see. Make sure it does
2945 by forcing the reload.
2946
2947 ??? When is it right at this stage to have a subreg
2948 of a mem that is _not_ to be handled specially? IMO
2949 those should have been reduced to just a mem. */
2950 || ((MEM_P (operand)
2951 || (REG_P (operand)
2952 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
2953 #ifndef WORD_REGISTER_OPERATIONS
2954 && (((GET_MODE_BITSIZE (GET_MODE (operand))
2955 < BIGGEST_ALIGNMENT)
2956 && (GET_MODE_SIZE (operand_mode[i])
2957 > GET_MODE_SIZE (GET_MODE (operand))))
2958 || BYTES_BIG_ENDIAN
2959 #ifdef LOAD_EXTEND_OP
2960 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
2961 && (GET_MODE_SIZE (GET_MODE (operand))
2962 <= UNITS_PER_WORD)
2963 && (GET_MODE_SIZE (operand_mode[i])
2964 > GET_MODE_SIZE (GET_MODE (operand)))
2965 && INTEGRAL_MODE_P (GET_MODE (operand))
2966 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
2967 #endif
2968 )
2969 #endif
2970 )
2971 )
2972 force_reload = 1;
2973 }
2974
2975 this_alternative[i] = (int) NO_REGS;
2976 this_alternative_win[i] = 0;
2977 this_alternative_match_win[i] = 0;
2978 this_alternative_offmemok[i] = 0;
2979 this_alternative_earlyclobber[i] = 0;
2980 this_alternative_matches[i] = -1;
2981
2982 /* An empty constraint or empty alternative
2983 allows anything which matched the pattern. */
2984 if (*p == 0 || *p == ',')
2985 win = 1, badop = 0;
2986
2987 /* Scan this alternative's specs for this operand;
2988 set WIN if the operand fits any letter in this alternative.
2989 Otherwise, clear BADOP if this operand could
2990 fit some letter after reloads,
2991 or set WINREG if this operand could fit after reloads
2992 provided the constraint allows some registers. */
2993
2994 do
2995 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2996 {
2997 case '\0':
2998 len = 0;
2999 break;
3000 case ',':
3001 c = '\0';
3002 break;
3003
3004 case '=': case '+': case '*':
3005 break;
3006
3007 case '%':
3008 /* We only support one commutative marker, the first
3009 one. We already set commutative above. */
3010 break;
3011
3012 case '?':
3013 reject += 6;
3014 break;
3015
3016 case '!':
3017 reject = 600;
3018 break;
3019
3020 case '#':
3021 /* Ignore rest of this alternative as far as
3022 reloading is concerned. */
3023 do
3024 p++;
3025 while (*p && *p != ',');
3026 len = 0;
3027 break;
3028
3029 case '0': case '1': case '2': case '3': case '4':
3030 case '5': case '6': case '7': case '8': case '9':
3031 m = strtoul (p, &end, 10);
3032 p = end;
3033 len = 0;
3034
3035 this_alternative_matches[i] = m;
3036 /* We are supposed to match a previous operand.
3037 If we do, we win if that one did.
3038 If we do not, count both of the operands as losers.
3039 (This is too conservative, since most of the time
3040 only a single reload insn will be needed to make
3041 the two operands win. As a result, this alternative
3042 may be rejected when it is actually desirable.) */
3043 if ((swapped && (m != commutative || i != commutative + 1))
3044 /* If we are matching as if two operands were swapped,
3045 also pretend that operands_match had been computed
3046 with swapped.
3047 But if I is the second of those and C is the first,
3048 don't exchange them, because operands_match is valid
3049 only on one side of its diagonal. */
3050 ? (operands_match
3051 [(m == commutative || m == commutative + 1)
3052 ? 2 * commutative + 1 - m : m]
3053 [(i == commutative || i == commutative + 1)
3054 ? 2 * commutative + 1 - i : i])
3055 : operands_match[m][i])
3056 {
3057 /* If we are matching a non-offsettable address where an
3058 offsettable address was expected, then we must reject
3059 this combination, because we can't reload it. */
3060 if (this_alternative_offmemok[m]
3061 && MEM_P (recog_data.operand[m])
3062 && this_alternative[m] == (int) NO_REGS
3063 && ! this_alternative_win[m])
3064 bad = 1;
3065
3066 did_match = this_alternative_win[m];
3067 }
3068 else
3069 {
3070 /* Operands don't match. */
3071 rtx value;
3072 int loc1, loc2;
3073 /* Retroactively mark the operand we had to match
3074 as a loser, if it wasn't already. */
3075 if (this_alternative_win[m])
3076 losers++;
3077 this_alternative_win[m] = 0;
3078 if (this_alternative[m] == (int) NO_REGS)
3079 bad = 1;
3080 /* But count the pair only once in the total badness of
3081 this alternative, if the pair can be a dummy reload.
3082 The pointers in operand_loc are not swapped; swap
3083 them by hand if necessary. */
3084 if (swapped && i == commutative)
3085 loc1 = commutative + 1;
3086 else if (swapped && i == commutative + 1)
3087 loc1 = commutative;
3088 else
3089 loc1 = i;
3090 if (swapped && m == commutative)
3091 loc2 = commutative + 1;
3092 else if (swapped && m == commutative + 1)
3093 loc2 = commutative;
3094 else
3095 loc2 = m;
3096 value
3097 = find_dummy_reload (recog_data.operand[i],
3098 recog_data.operand[m],
3099 recog_data.operand_loc[loc1],
3100 recog_data.operand_loc[loc2],
3101 operand_mode[i], operand_mode[m],
3102 this_alternative[m], -1,
3103 this_alternative_earlyclobber[m]);
3104
3105 if (value != 0)
3106 losers--;
3107 }
3108 /* This can be fixed with reloads if the operand
3109 we are supposed to match can be fixed with reloads. */
3110 badop = 0;
3111 this_alternative[i] = this_alternative[m];
3112
3113 /* If we have to reload this operand and some previous
3114 operand also had to match the same thing as this
3115 operand, we don't know how to do that. So reject this
3116 alternative. */
3117 if (! did_match || force_reload)
3118 for (j = 0; j < i; j++)
3119 if (this_alternative_matches[j]
3120 == this_alternative_matches[i])
3121 badop = 1;
3122 break;
3123
3124 case 'p':
3125 /* All necessary reloads for an address_operand
3126 were handled in find_reloads_address. */
3127 this_alternative[i] = (int) MODE_BASE_REG_CLASS (VOIDmode);
3128 win = 1;
3129 badop = 0;
3130 break;
3131
3132 case 'm':
3133 if (force_reload)
3134 break;
3135 if (MEM_P (operand)
3136 || (REG_P (operand)
3137 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3138 && reg_renumber[REGNO (operand)] < 0))
3139 win = 1;
3140 if (CONST_POOL_OK_P (operand))
3141 badop = 0;
3142 constmemok = 1;
3143 break;
3144
3145 case '<':
3146 if (MEM_P (operand)
3147 && ! address_reloaded[i]
3148 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3149 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3150 win = 1;
3151 break;
3152
3153 case '>':
3154 if (MEM_P (operand)
3155 && ! address_reloaded[i]
3156 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3157 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3158 win = 1;
3159 break;
3160
3161 /* Memory operand whose address is not offsettable. */
3162 case 'V':
3163 if (force_reload)
3164 break;
3165 if (MEM_P (operand)
3166 && ! (ind_levels ? offsettable_memref_p (operand)
3167 : offsettable_nonstrict_memref_p (operand))
3168 /* Certain mem addresses will become offsettable
3169 after they themselves are reloaded. This is important;
3170 we don't want our own handling of unoffsettables
3171 to override the handling of reg_equiv_address. */
3172 && !(REG_P (XEXP (operand, 0))
3173 && (ind_levels == 0
3174 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3175 win = 1;
3176 break;
3177
3178 /* Memory operand whose address is offsettable. */
3179 case 'o':
3180 if (force_reload)
3181 break;
3182 if ((MEM_P (operand)
3183 /* If IND_LEVELS, find_reloads_address won't reload a
3184 pseudo that didn't get a hard reg, so we have to
3185 reject that case. */
3186 && ((ind_levels ? offsettable_memref_p (operand)
3187 : offsettable_nonstrict_memref_p (operand))
3188 /* A reloaded address is offsettable because it is now
3189 just a simple register indirect. */
3190 || address_reloaded[i] == 1))
3191 || (REG_P (operand)
3192 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3193 && reg_renumber[REGNO (operand)] < 0
3194 /* If reg_equiv_address is nonzero, we will be
3195 loading it into a register; hence it will be
3196 offsettable, but we cannot say that reg_equiv_mem
3197 is offsettable without checking. */
3198 && ((reg_equiv_mem[REGNO (operand)] != 0
3199 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3200 || (reg_equiv_address[REGNO (operand)] != 0))))
3201 win = 1;
3202 if (CONST_POOL_OK_P (operand)
3203 || MEM_P (operand))
3204 badop = 0;
3205 constmemok = 1;
3206 offmemok = 1;
3207 break;
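/* Illustrative note, not from the original source; offsettability is
   mode- and target-dependent: an address such as
   (plus (reg) (const_int 8)) is normally offsettable, since adding a
   small constant still yields a valid address, whereas an
   autoincrement address like (post_inc (reg)) is not, so a MEM using
   the latter satisfies 'V' above but not 'o'.  */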
3208
3209 case '&':
3210 /* Output operand that is stored before the need for the
3211 input operands (and their index registers) is over. */
3212 earlyclobber = 1, this_earlyclobber = 1;
3213 break;
3214
3215 case 'E':
3216 case 'F':
3217 if (GET_CODE (operand) == CONST_DOUBLE
3218 || (GET_CODE (operand) == CONST_VECTOR
3219 && (GET_MODE_CLASS (GET_MODE (operand))
3220 == MODE_VECTOR_FLOAT)))
3221 win = 1;
3222 break;
3223
3224 case 'G':
3225 case 'H':
3226 if (GET_CODE (operand) == CONST_DOUBLE
3227 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3228 win = 1;
3229 break;
3230
3231 case 's':
3232 if (GET_CODE (operand) == CONST_INT
3233 || (GET_CODE (operand) == CONST_DOUBLE
3234 && GET_MODE (operand) == VOIDmode))
3235 break;
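/* Fall through to the 'i' test for constants that are not
   explicit integers.  */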
3236 case 'i':
3237 if (CONSTANT_P (operand)
3238 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3239 win = 1;
3240 break;
3241
3242 case 'n':
3243 if (GET_CODE (operand) == CONST_INT
3244 || (GET_CODE (operand) == CONST_DOUBLE
3245 && GET_MODE (operand) == VOIDmode))
3246 win = 1;
3247 break;
3248
3249 case 'I':
3250 case 'J':
3251 case 'K':
3252 case 'L':
3253 case 'M':
3254 case 'N':
3255 case 'O':
3256 case 'P':
3257 if (GET_CODE (operand) == CONST_INT
3258 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3259 win = 1;
3260 break;
3261
3262 case 'X':
3263 win = 1;
3264 break;
3265
3266 case 'g':
3267 if (! force_reload
3268 /* A PLUS is never a valid operand, but reload can make
3269 it from a register when eliminating registers. */
3270 && GET_CODE (operand) != PLUS
3271 /* A SCRATCH is not a valid operand. */
3272 && GET_CODE (operand) != SCRATCH
3273 && (! CONSTANT_P (operand)
3274 || ! flag_pic
3275 || LEGITIMATE_PIC_OPERAND_P (operand))
3276 && (GENERAL_REGS == ALL_REGS
3277 || !REG_P (operand)
3278 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3279 && reg_renumber[REGNO (operand)] < 0)))
3280 win = 1;
3281 /* Drop through into 'r' case. */
3282
3283 case 'r':
3284 this_alternative[i]
3285 = (int) reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3286 goto reg;
3287
3288 default:
3289 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3290 {
3291 #ifdef EXTRA_CONSTRAINT_STR
3292 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3293 {
3294 if (force_reload)
3295 break;
3296 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3297 win = 1;
3298 /* If the address was already reloaded,
3299 we win as well. */
3300 else if (MEM_P (operand)
3301 && address_reloaded[i] == 1)
3302 win = 1;
3303 /* Likewise if the address will be reloaded because
3304 reg_equiv_address is nonzero. For reg_equiv_mem
3305 we have to check. */
3306 else if (REG_P (operand)
3307 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3308 && reg_renumber[REGNO (operand)] < 0
3309 && ((reg_equiv_mem[REGNO (operand)] != 0
3310 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3311 || (reg_equiv_address[REGNO (operand)] != 0)))
3312 win = 1;
3313
3314 /* If we didn't already win, we can reload
3315 constants via force_const_mem, and other
3316 MEMs by reloading the address like for 'o'. */
3317 if (CONST_POOL_OK_P (operand)
3318 || MEM_P (operand))
3319 badop = 0;
3320 constmemok = 1;
3321 offmemok = 1;
3322 break;
3323 }
3324 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3325 {
3326 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3327 win = 1;
3328
3329 /* If we didn't already win, we can reload
3330 the address into a base register. */
3331 this_alternative[i] = (int) MODE_BASE_REG_CLASS (VOIDmode);
3332 badop = 0;
3333 break;
3334 }
3335
3336 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3337 win = 1;
3338 #endif
3339 break;
3340 }
3341
3342 this_alternative[i]
3343 = (int) (reg_class_subunion
3344 [this_alternative[i]]
3345 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3346 reg:
3347 if (GET_MODE (operand) == BLKmode)
3348 break;
3349 winreg = 1;
3350 if (REG_P (operand)
3351 && reg_fits_class_p (operand, this_alternative[i],
3352 offset, GET_MODE (recog_data.operand[i])))
3353 win = 1;
3354 break;
3355 }
3356 while ((p += len), c);
3357
3358 constraints[i] = p;
3359
3360 /* If this operand could be handled with a reg,
3361 and some reg is allowed, then this operand can be handled. */
3362 if (winreg && this_alternative[i] != (int) NO_REGS)
3363 badop = 0;
3364
3365 /* Record which operands fit this alternative. */
3366 this_alternative_earlyclobber[i] = earlyclobber;
3367 if (win && ! force_reload)
3368 this_alternative_win[i] = 1;
3369 else if (did_match && ! force_reload)
3370 this_alternative_match_win[i] = 1;
3371 else
3372 {
3373 int const_to_mem = 0;
3374
3375 this_alternative_offmemok[i] = offmemok;
3376 losers++;
3377 if (badop)
3378 bad = 1;
3379 /* Alternative loses if it has no regs for a reg operand. */
3380 if (REG_P (operand)
3381 && this_alternative[i] == (int) NO_REGS
3382 && this_alternative_matches[i] < 0)
3383 bad = 1;
3384
3385 /* If this is a constant that is reloaded into the desired
3386 class by copying it to memory first, count that as another
3387 reload. This is consistent with other code and is
3388 required to avoid choosing another alternative when
3389 the constant is moved into memory by this function on
3390 an early reload pass. Note that the test here is
3391 precisely the same as in the code below that calls
3392 force_const_mem. */
3393 if (CONST_POOL_OK_P (operand)
3394 && ((PREFERRED_RELOAD_CLASS (operand,
3395 (enum reg_class) this_alternative[i])
3396 == NO_REGS)
3397 || no_input_reloads)
3398 && operand_mode[i] != VOIDmode)
3399 {
3400 const_to_mem = 1;
3401 if (this_alternative[i] != (int) NO_REGS)
3402 losers++;
3403 }
3404
3405 /* If we can't reload this value at all, reject this
3406 alternative. Note that we could also lose due to
3407 LIMIT_RELOAD_CLASS, but we don't check that
3408 here. */
3409
3410 if (! CONSTANT_P (operand)
3411 && (enum reg_class) this_alternative[i] != NO_REGS
3412 && (PREFERRED_RELOAD_CLASS (operand,
3413 (enum reg_class) this_alternative[i])
3414 == NO_REGS))
3415 bad = 1;
3416
3417 /* Alternative loses if it requires a type of reload not
3418 permitted for this insn. We can always reload SCRATCH
3419 and objects with a REG_UNUSED note. */
3420 else if (GET_CODE (operand) != SCRATCH
3421 && modified[i] != RELOAD_READ && no_output_reloads
3422 && ! find_reg_note (insn, REG_UNUSED, operand))
3423 bad = 1;
3424 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3425 && ! const_to_mem)
3426 bad = 1;
3427
3428 /* We prefer to reload pseudos over reloading other things,
3429 since such reloads may be able to be eliminated later.
3430 If we are reloading a SCRATCH, we won't be generating any
3431 insns, just using a register, so it is also preferred.
3432 So bump REJECT in other cases. Don't do this when we
3433 are forcing a constant into memory and it will then
3434 win, since we don't want a different alternative to
3435 match in that case. */
3436 if (! (REG_P (operand)
3437 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3438 && GET_CODE (operand) != SCRATCH
3439 && ! (const_to_mem && constmemok))
3440 reject += 2;
3441
3442 /* Input reloads can be inherited more often than output
3443 reloads can be removed, so penalize output reloads. */
3444 if (operand_type[i] != RELOAD_FOR_INPUT
3445 && GET_CODE (operand) != SCRATCH)
3446 reject++;
3447 }
3448
3449 /* If this operand is a pseudo register that didn't get a hard
3450 reg and this alternative accepts some register, see if the
3451 class that we want is a subset of the preferred class for this
3452 register. If not, but it intersects that class, use the
3453 preferred class instead. If it does not intersect the preferred
3454 class, show that usage of this alternative should be discouraged;
3455 it will be discouraged more still if the register is `preferred
3456 or nothing'. We do this because it increases the chance of
3457 reusing our spill register in a later insn and avoiding a pair
3458 of memory stores and loads.
3459
3460 Don't bother with this if this alternative will accept this
3461 operand.
3462
3463 Don't do this for a multiword operand, since it is only a
3464 small win and has the risk of requiring more spill registers,
3465 which could cause a large loss.
3466
3467 Don't do this if the preferred class has only one register
3468 because we might otherwise exhaust the class. */
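/* Illustrative example, not from the original source (class names are
   target-dependent): if this_alternative[i] is GENERAL_REGS and the
   pseudo's preferred class is a smaller class contained in it, the
   code below narrows this_alternative[i] to that preferred class;
   if neither class contains the other, the alternative is instead
   penalized by 2, or by 4 when the register is `preferred or
   nothing'.  */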
3469
3470 if (! win && ! did_match
3471 && this_alternative[i] != (int) NO_REGS
3472 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3473 && reg_class_size[(int) preferred_class[i]] > 1)
3474 {
3475 if (! reg_class_subset_p (this_alternative[i],
3476 preferred_class[i]))
3477 {
3478 /* Since we don't have a way of forming the intersection,
3479 we just do something special if the preferred class
3480 is a subset of the class we have; that's the most
3481 common case anyway. */
3482 if (reg_class_subset_p (preferred_class[i],
3483 this_alternative[i]))
3484 this_alternative[i] = (int) preferred_class[i];
3485 else
3486 reject += (2 + 2 * pref_or_nothing[i]);
3487 }
3488 }
3489 }
3490
3491 /* Now see if any output operands that are marked "earlyclobber"
3492 in this alternative conflict with any input operands
3493 or any memory addresses. */
3494
3495 for (i = 0; i < noperands; i++)
3496 if (this_alternative_earlyclobber[i]
3497 && (this_alternative_win[i] || this_alternative_match_win[i]))
3498 {
3499 struct decomposition early_data;
3500
3501 early_data = decompose (recog_data.operand[i]);
3502
3503 gcc_assert (modified[i] != RELOAD_READ);
3504
3505 if (this_alternative[i] == NO_REGS)
3506 {
3507 this_alternative_earlyclobber[i] = 0;
3508 gcc_assert (this_insn_is_asm);
3509 error_for_asm (this_insn,
3510 "%<&%> constraint used with no register class");
3511 }
3512
3513 for (j = 0; j < noperands; j++)
3514 /* Is this an input operand or a memory ref? */
3515 if ((MEM_P (recog_data.operand[j])
3516 || modified[j] != RELOAD_WRITE)
3517 && j != i
3518 /* Ignore things like match_operator operands. */
3519 && *recog_data.constraints[j] != 0
3520 /* Don't count an input operand that is constrained to match
3521 the early clobber operand. */
3522 && ! (this_alternative_matches[j] == i
3523 && rtx_equal_p (recog_data.operand[i],
3524 recog_data.operand[j]))
3525 /* Is it altered by storing the earlyclobber operand? */
3526 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3527 early_data))
3528 {
3529 /* If the output is in a single-reg class,
3530 it's costly to reload it, so reload the input instead. */
3531 if (reg_class_size[this_alternative[i]] == 1
3532 && (REG_P (recog_data.operand[j])
3533 || GET_CODE (recog_data.operand[j]) == SUBREG))
3534 {
3535 losers++;
3536 this_alternative_win[j] = 0;
3537 this_alternative_match_win[j] = 0;
3538 }
3539 else
3540 break;
3541 }
3542 /* If an earlyclobber operand conflicts with something,
3543 it must be reloaded, so request this and count the cost. */
3544 if (j != noperands)
3545 {
3546 losers++;
3547 this_alternative_win[i] = 0;
3548 this_alternative_match_win[j] = 0;
3549 for (j = 0; j < noperands; j++)
3550 if (this_alternative_matches[j] == i
3551 && this_alternative_match_win[j])
3552 {
3553 this_alternative_win[j] = 0;
3554 this_alternative_match_win[j] = 0;
3555 losers++;
3556 }
3557 }
3558 }
3559
3560 /* If one alternative accepts all the operands, no reload required,
3561 choose that alternative; don't consider the remaining ones. */
3562 if (losers == 0)
3563 {
3564 /* Unswap these so that they are never swapped at `finish'. */
3565 if (commutative >= 0)
3566 {
3567 recog_data.operand[commutative] = substed_operand[commutative];
3568 recog_data.operand[commutative + 1]
3569 = substed_operand[commutative + 1];
3570 }
3571 for (i = 0; i < noperands; i++)
3572 {
3573 goal_alternative_win[i] = this_alternative_win[i];
3574 goal_alternative_match_win[i] = this_alternative_match_win[i];
3575 goal_alternative[i] = this_alternative[i];
3576 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3577 goal_alternative_matches[i] = this_alternative_matches[i];
3578 goal_alternative_earlyclobber[i]
3579 = this_alternative_earlyclobber[i];
3580 }
3581 goal_alternative_number = this_alternative_number;
3582 goal_alternative_swapped = swapped;
3583 goal_earlyclobber = this_earlyclobber;
3584 goto finish;
3585 }
3586
3587 /* REJECT, set by the ! and ? constraint characters and when a register
3588 would be reloaded into a non-preferred class, discourages the use of
3589 this alternative for a reload goal. REJECT is incremented by six
3590 for each ? and two for each non-preferred class. */
3591 losers = losers * 6 + reject;
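/* Illustrative arithmetic, not from the original source: with this
   weighting an alternative needing one reload plus one '?' scores
   1*6 + 6 = 12, the same as an alternative needing two reloads and
   no '?' (2*6 + 0 = 12), so each '?' costs roughly one extra
   reload.  */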
3592
3593 /* If this alternative can be made to work by reloading,
3594 and it needs less reloading than the others checked so far,
3595 record it as the chosen goal for reloading. */
3596 if (! bad && best > losers)
3597 {
3598 for (i = 0; i < noperands; i++)
3599 {
3600 goal_alternative[i] = this_alternative[i];
3601 goal_alternative_win[i] = this_alternative_win[i];
3602 goal_alternative_match_win[i] = this_alternative_match_win[i];
3603 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3604 goal_alternative_matches[i] = this_alternative_matches[i];
3605 goal_alternative_earlyclobber[i]
3606 = this_alternative_earlyclobber[i];
3607 }
3608 goal_alternative_swapped = swapped;
3609 best = losers;
3610 goal_alternative_number = this_alternative_number;
3611 goal_earlyclobber = this_earlyclobber;
3612 }
3613 }
3614
3615 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3616 then we need to try each alternative twice,
3617 the second time matching those two operands
3618 as if we had exchanged them.
3619 To do this, really exchange them in operands.
3620
3621 If we have just tried the alternatives the second time,
3622 return operands to normal and drop through. */
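/* Illustrative example, not from the original source: for a pattern
   like (set (reg 0) (plus (reg 1) (reg 2))) whose constraints mark
   operands 1 and 2 as commutative, the first pass tries the operands
   as written and the second pass tries them with (reg 1) and (reg 2)
   exchanged, keeping whichever alternative needs the fewest
   reloads.  */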
3623
3624 if (commutative >= 0)
3625 {
3626 swapped = !swapped;
3627 if (swapped)
3628 {
3629 enum reg_class tclass;
3630 int t;
3631
3632 recog_data.operand[commutative] = substed_operand[commutative + 1];
3633 recog_data.operand[commutative + 1] = substed_operand[commutative];
3634 /* Swap the duplicates too. */
3635 for (i = 0; i < recog_data.n_dups; i++)
3636 if (recog_data.dup_num[i] == commutative
3637 || recog_data.dup_num[i] == commutative + 1)
3638 *recog_data.dup_loc[i]
3639 = recog_data.operand[(int) recog_data.dup_num[i]];
3640
3641 tclass = preferred_class[commutative];
3642 preferred_class[commutative] = preferred_class[commutative + 1];
3643 preferred_class[commutative + 1] = tclass;
3644
3645 t = pref_or_nothing[commutative];
3646 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3647 pref_or_nothing[commutative + 1] = t;
3648
3649 memcpy (constraints, recog_data.constraints,
3650 noperands * sizeof (char *));
3651 goto try_swapped;
3652 }
3653 else
3654 {
3655 recog_data.operand[commutative] = substed_operand[commutative];
3656 recog_data.operand[commutative + 1]
3657 = substed_operand[commutative + 1];
3658 /* Unswap the duplicates too. */
3659 for (i = 0; i < recog_data.n_dups; i++)
3660 if (recog_data.dup_num[i] == commutative
3661 || recog_data.dup_num[i] == commutative + 1)
3662 *recog_data.dup_loc[i]
3663 = recog_data.operand[(int) recog_data.dup_num[i]];
3664 }
3665 }
3666
3667 /* The operands don't meet the constraints.
3668 goal_alternative describes the alternative
3669 that we could reach by reloading the fewest operands.
3670 Reload so as to fit it. */
3671
3672 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3673 {
3674 /* No alternative works with reloads?? */
3675 if (insn_code_number >= 0)
3676 fatal_insn ("unable to generate reloads for:", insn);
3677 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3678 /* Avoid further trouble with this insn. */
3679 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3680 n_reloads = 0;
3681 return 0;
3682 }
3683
3684 /* Jump to `finish' from above if all operands are valid already.
3685 In that case, goal_alternative_win is all 1. */
3686 finish:
3687
3688 /* Right now, for any pair of operands I and J that are required to match,
3689 with I < J,
3690 goal_alternative_matches[J] is I.
3691 Set up goal_alternative_matched as the inverse function:
3692 goal_alternative_matched[I] = J. */
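/* Illustrative example, not from the original source: if operand 2
   is constrained to match operand 0 and operand 2 did not win, then
   goal_alternative_matches[2] == 0 and the loop below records
   goal_alternative_matched[0] == 2.  */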
3693
3694 for (i = 0; i < noperands; i++)
3695 goal_alternative_matched[i] = -1;
3696
3697 for (i = 0; i < noperands; i++)
3698 if (! goal_alternative_win[i]
3699 && goal_alternative_matches[i] >= 0)
3700 goal_alternative_matched[goal_alternative_matches[i]] = i;
3701
3702 for (i = 0; i < noperands; i++)
3703 goal_alternative_win[i] |= goal_alternative_match_win[i];
3704
3705 /* If the best alternative has the commutative pair of operands swapped,
3706 consider them swapped before reporting the reloads. Update the
3707 operand numbers of any reloads already pushed. */
3708
3709 if (goal_alternative_swapped)
3710 {
3711 rtx tem;
3712
3713 tem = substed_operand[commutative];
3714 substed_operand[commutative] = substed_operand[commutative + 1];
3715 substed_operand[commutative + 1] = tem;
3716 tem = recog_data.operand[commutative];
3717 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3718 recog_data.operand[commutative + 1] = tem;
3719 tem = *recog_data.operand_loc[commutative];
3720 *recog_data.operand_loc[commutative]
3721 = *recog_data.operand_loc[commutative + 1];
3722 *recog_data.operand_loc[commutative + 1] = tem;
3723
3724 for (i = 0; i < n_reloads; i++)
3725 {
3726 if (rld[i].opnum == commutative)
3727 rld[i].opnum = commutative + 1;
3728 else if (rld[i].opnum == commutative + 1)
3729 rld[i].opnum = commutative;
3730 }
3731 }
3732
3733 for (i = 0; i < noperands; i++)
3734 {
3735 operand_reloadnum[i] = -1;
3736
3737 /* If this is an earlyclobber operand, we need to widen the scope.
3738 The reload must remain valid from the start of the insn being
3739 reloaded until after the operand is stored into its destination.
3740 We approximate this with RELOAD_OTHER even though we know that we
3741 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3742
3743 One special case that is worth checking is when we have an
3744 output that is earlyclobber but isn't used past the insn (typically
3745 a SCRATCH). In this case, we need only have the reload live
3746 through the insn itself, but not for any of our input or output
3747 reloads.
3748 But we must not accidentally narrow the scope of an existing
3749 RELOAD_OTHER reload - leave these alone.
3750
3751 In any case, anything needed to address this operand can remain
3752 categorized however it was before. */
3753
3754 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3755 operand_type[i]
3756 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3757 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3758 }
3759
3760 /* Any constants that aren't allowed and can't be reloaded
3761 into registers are here changed into memory references. */
3762 for (i = 0; i < noperands; i++)
3763 if (! goal_alternative_win[i]
3764 && CONST_POOL_OK_P (recog_data.operand[i])
3765 && ((PREFERRED_RELOAD_CLASS (recog_data.operand[i],
3766 (enum reg_class) goal_alternative[i])
3767 == NO_REGS)
3768 || no_input_reloads)
3769 && operand_mode[i] != VOIDmode)
3770 {
3771 substed_operand[i] = recog_data.operand[i]
3772 = find_reloads_toplev (force_const_mem (operand_mode[i],
3773 recog_data.operand[i]),
3774 i, address_type[i], ind_levels, 0, insn,
3775 NULL);
3776 if (alternative_allows_memconst (recog_data.constraints[i],
3777 goal_alternative_number))
3778 goal_alternative_win[i] = 1;
3779 }
3780
3781 /* Likewise any invalid constants appearing as operand of a PLUS
3782 that is to be reloaded. */
3783 for (i = 0; i < noperands; i++)
3784 if (! goal_alternative_win[i]
3785 && GET_CODE (recog_data.operand[i]) == PLUS
3786 && CONST_POOL_OK_P (XEXP (recog_data.operand[i], 1))
3787 && (PREFERRED_RELOAD_CLASS (XEXP (recog_data.operand[i], 1),
3788 (enum reg_class) goal_alternative[i])
3789 == NO_REGS)
3790 && operand_mode[i] != VOIDmode)
3791 {
3792 rtx tem = force_const_mem (operand_mode[i],
3793 XEXP (recog_data.operand[i], 1));
3794 tem = gen_rtx_PLUS (operand_mode[i],
3795 XEXP (recog_data.operand[i], 0), tem);
3796
3797 substed_operand[i] = recog_data.operand[i]
3798 = find_reloads_toplev (tem, i, address_type[i],
3799 ind_levels, 0, insn, NULL);
3800 }
3801
3802 /* Record the values of the earlyclobber operands for the caller. */
3803 if (goal_earlyclobber)
3804 for (i = 0; i < noperands; i++)
3805 if (goal_alternative_earlyclobber[i])
3806 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3807
3808 /* Now record reloads for all the operands that need them. */
3809 for (i = 0; i < noperands; i++)
3810 if (! goal_alternative_win[i])
3811 {
3812 /* Operands that match previous ones have already been handled. */
3813 if (goal_alternative_matches[i] >= 0)
3814 ;
3815 /* Handle an operand with a nonoffsettable address
3816 appearing where an offsettable address will do
3817 by reloading the address into a base register.
3818
3819 ??? We can also do this when the operand is a register and
3820 reg_equiv_mem is not offsettable, but this is a bit tricky,
3821 so we don't bother with it. It may not be worth doing. */
3822 else if (goal_alternative_matched[i] == -1
3823 && goal_alternative_offmemok[i]
3824 && MEM_P (recog_data.operand[i]))
3825 {
3826 operand_reloadnum[i]
3827 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3828 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3829 MODE_BASE_REG_CLASS (VOIDmode),
3830 GET_MODE (XEXP (recog_data.operand[i], 0)),
3831 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3832 rld[operand_reloadnum[i]].inc
3833 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3834
3835 /* If this operand is an output, we will have made any
3836 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3837 now we are treating part of the operand as an input, so
3838 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3839
3840 if (modified[i] == RELOAD_WRITE)
3841 {
3842 for (j = 0; j < n_reloads; j++)
3843 {
3844 if (rld[j].opnum == i)
3845 {
3846 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3847 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
3848 else if (rld[j].when_needed
3849 == RELOAD_FOR_OUTADDR_ADDRESS)
3850 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
3851 }
3852 }
3853 }
3854 }
3855 else if (goal_alternative_matched[i] == -1)
3856 {
3857 operand_reloadnum[i]
3858 = push_reload ((modified[i] != RELOAD_WRITE
3859 ? recog_data.operand[i] : 0),
3860 (modified[i] != RELOAD_READ
3861 ? recog_data.operand[i] : 0),
3862 (modified[i] != RELOAD_WRITE
3863 ? recog_data.operand_loc[i] : 0),
3864 (modified[i] != RELOAD_READ
3865 ? recog_data.operand_loc[i] : 0),
3866 (enum reg_class) goal_alternative[i],
3867 (modified[i] == RELOAD_WRITE
3868 ? VOIDmode : operand_mode[i]),
3869 (modified[i] == RELOAD_READ
3870 ? VOIDmode : operand_mode[i]),
3871 (insn_code_number < 0 ? 0
3872 : insn_data[insn_code_number].operand[i].strict_low),
3873 0, i, operand_type[i]);
3874 }
3875 /* In a matching pair of operands, one must be input only
3876 and the other must be output only.
3877 Pass the input operand as IN and the other as OUT. */
3878 else if (modified[i] == RELOAD_READ
3879 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
3880 {
3881 operand_reloadnum[i]
3882 = push_reload (recog_data.operand[i],
3883 recog_data.operand[goal_alternative_matched[i]],
3884 recog_data.operand_loc[i],
3885 recog_data.operand_loc[goal_alternative_matched[i]],
3886 (enum reg_class) goal_alternative[i],
3887 operand_mode[i],
3888 operand_mode[goal_alternative_matched[i]],
3889 0, 0, i, RELOAD_OTHER);
3890 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
3891 }
3892 else if (modified[i] == RELOAD_WRITE
3893 && modified[goal_alternative_matched[i]] == RELOAD_READ)
3894 {
3895 operand_reloadnum[goal_alternative_matched[i]]
3896 = push_reload (recog_data.operand[goal_alternative_matched[i]],
3897 recog_data.operand[i],
3898 recog_data.operand_loc[goal_alternative_matched[i]],
3899 recog_data.operand_loc[i],
3900 (enum reg_class) goal_alternative[i],
3901 operand_mode[goal_alternative_matched[i]],
3902 operand_mode[i],
3903 0, 0, i, RELOAD_OTHER);
3904 operand_reloadnum[i] = output_reloadnum;
3905 }
3906 else
3907 {
3908 gcc_assert (insn_code_number < 0);
3909 error_for_asm (insn, "inconsistent operand constraints "
3910 "in an %<asm%>");
3911 /* Avoid further trouble with this insn. */
3912 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3913 n_reloads = 0;
3914 return 0;
3915 }
3916 }
3917 else if (goal_alternative_matched[i] < 0
3918 && goal_alternative_matches[i] < 0
3919 && address_operand_reloaded[i] != 1
3920 && optimize)
3921 {
3922 /* For each non-matching operand that's a MEM or a pseudo-register
3923 that didn't get a hard register, make an optional reload.
3924 This may get done even if the insn needs no reloads otherwise. */
3925
3926 rtx operand = recog_data.operand[i];
3927
3928 while (GET_CODE (operand) == SUBREG)
3929 operand = SUBREG_REG (operand);
3930 if ((MEM_P (operand)
3931 || (REG_P (operand)
3932 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3933 /* If this is only for an output, the optional reload would not
3934 actually cause us to use a register now, just note that
3935 something is stored here. */
3936 && ((enum reg_class) goal_alternative[i] != NO_REGS
3937 || modified[i] == RELOAD_WRITE)
3938 && ! no_input_reloads
3939 /* An optional output reload might make it possible to delete INSN later.
3940 We mustn't make in-out reloads on insns that are not permitted
3941 to have output reloads.
3942 If this is an asm, we can't delete it; we must not even call
3943 push_reload for an optional output reload in this case,
3944 because we can't be sure that the constraint allows a register,
3945 and push_reload verifies the constraints for asms. */
3946 && (modified[i] == RELOAD_READ
3947 || (! no_output_reloads && ! this_insn_is_asm)))
3948 operand_reloadnum[i]
3949 = push_reload ((modified[i] != RELOAD_WRITE
3950 ? recog_data.operand[i] : 0),
3951 (modified[i] != RELOAD_READ
3952 ? recog_data.operand[i] : 0),
3953 (modified[i] != RELOAD_WRITE
3954 ? recog_data.operand_loc[i] : 0),
3955 (modified[i] != RELOAD_READ
3956 ? recog_data.operand_loc[i] : 0),
3957 (enum reg_class) goal_alternative[i],
3958 (modified[i] == RELOAD_WRITE
3959 ? VOIDmode : operand_mode[i]),
3960 (modified[i] == RELOAD_READ
3961 ? VOIDmode : operand_mode[i]),
3962 (insn_code_number < 0 ? 0
3963 : insn_data[insn_code_number].operand[i].strict_low),
3964 1, i, operand_type[i]);
3965 /* If a memory reference remains (either as a MEM or a pseudo that
3966 did not get a hard register), yet we can't make an optional
3967 reload, check if this is actually a pseudo register reference;
3968 we then need to emit a USE and/or a CLOBBER so that reload
3969 inheritance will do the right thing. */
3970 else if (replace
3971 && (MEM_P (operand)
3972 || (REG_P (operand)
3973 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3974 && reg_renumber [REGNO (operand)] < 0)))
3975 {
3976 operand = *recog_data.operand_loc[i];
3977
3978 while (GET_CODE (operand) == SUBREG)
3979 operand = SUBREG_REG (operand);
3980 if (REG_P (operand))
3981 {
3982 if (modified[i] != RELOAD_WRITE)
3983 /* We mark the USE with QImode so that we recognize
3984 it as one that can be safely deleted at the end
3985 of reload. */
3986 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
3987 insn), QImode);
3988 if (modified[i] != RELOAD_READ)
3989 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, operand), insn);
3990 }
3991 }
3992 }
3993 else if (goal_alternative_matches[i] >= 0
3994 && goal_alternative_win[goal_alternative_matches[i]]
3995 && modified[i] == RELOAD_READ
3996 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
3997 && ! no_input_reloads && ! no_output_reloads
3998 && optimize)
3999 {
4000 /* Similarly, make an optional reload for a pair of matching
4001 objects that are in MEM or a pseudo that didn't get a hard reg. */
4002
4003 rtx operand = recog_data.operand[i];
4004
4005 while (GET_CODE (operand) == SUBREG)
4006 operand = SUBREG_REG (operand);
4007 if ((MEM_P (operand)
4008 || (REG_P (operand)
4009 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4010 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4011 != NO_REGS))
4012 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4013 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4014 recog_data.operand[i],
4015 recog_data.operand_loc[goal_alternative_matches[i]],
4016 recog_data.operand_loc[i],
4017 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4018 operand_mode[goal_alternative_matches[i]],
4019 operand_mode[i],
4020 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4021 }
4022
4023 /* Perform whatever substitutions on the operands we are supposed
4024 to make due to commutativity or replacement of registers
4025 with equivalent constants or memory slots. */
4026
4027 for (i = 0; i < noperands; i++)
4028 {
4029 /* We only do this on the last pass through reload, because it is
4030 possible for some data (like reg_equiv_address) to be changed during
4031 later passes. Moreover, we lose the opportunity to get a useful
4032 reload_{in,out}_reg when we do these replacements. */
4033
4034 if (replace)
4035 {
4036 rtx substitution = substed_operand[i];
4037
4038 *recog_data.operand_loc[i] = substitution;
4039
4040 /* If we're replacing an operand with a LABEL_REF, we need
4041 to make sure that there's a REG_LABEL note attached to
4042 this instruction. */
4043 if (!JUMP_P (insn)
4044 && GET_CODE (substitution) == LABEL_REF
4045 && !find_reg_note (insn, REG_LABEL, XEXP (substitution, 0)))
4046 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
4047 XEXP (substitution, 0),
4048 REG_NOTES (insn));
4049 }
4050 else
4051 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4052 }
4053
4054 /* If this insn pattern contains any MATCH_DUP's, make sure that
4055 they will be substituted if the operands they match are substituted.
4056 Also do now any substitutions we already did on the operands.
4057
4058 Don't do this if we aren't making replacements because we might be
4059 propagating things allocated by frame pointer elimination into places
4060 it doesn't expect. */
4061
4062 if (insn_code_number >= 0 && replace)
4063 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4064 {
4065 int opno = recog_data.dup_num[i];
4066 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4067 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4068 }
4069
4070 #if 0
4071 /* This loses because reloading of prior insns can invalidate the equivalence
4072 (or at least find_equiv_reg isn't smart enough to find it any more),
4073 causing this insn to need more reload regs than it needed before.
4074 It may be too late to make the reload regs available.
4075 Now this optimization is done safely in choose_reload_regs. */
4076
4077 /* For each reload of a reg into some other class of reg,
4078 search for an existing equivalent reg (same value now) in the right class.
4079 We can use it as long as we don't need to change its contents. */
4080 for (i = 0; i < n_reloads; i++)
4081 if (rld[i].reg_rtx == 0
4082 && rld[i].in != 0
4083 && REG_P (rld[i].in)
4084 && rld[i].out == 0)
4085 {
4086 rld[i].reg_rtx
4087 = find_equiv_reg (rld[i].in, insn, rld[i].class, -1,
4088 static_reload_reg_p, 0, rld[i].inmode);
4089 /* Prevent generation of insn to load the value
4090 because the one we found already has the value. */
4091 if (rld[i].reg_rtx)
4092 rld[i].in = rld[i].reg_rtx;
4093 }
4094 #endif
4095
4096 /* Perhaps an output reload can be combined with another
4097 to reduce needs by one. */
4098 if (!goal_earlyclobber)
4099 combine_reloads ();
4100
4101 /* If we have a pair of reloads for parts of an address, they are reloading
4102 the same object, the operands themselves were not reloaded, and they
4103 are for two operands that are supposed to match, merge the reloads and
4104 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
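/* Illustrative example, not from the original source: if operands 0
   and 1 must match, neither operand itself was reloaded (or only
   optionally), and both are (mem (reg P)) for a pseudo P without a
   hard register, the address reloads made for the two copies of the
   address reload the same object; they are merged here and the
   survivor becomes RELOAD_FOR_OPERAND_ADDRESS or
   RELOAD_FOR_OPADDR_ADDR.  */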
4105
4106 for (i = 0; i < n_reloads; i++)
4107 {
4108 int k;
4109
4110 for (j = i + 1; j < n_reloads; j++)
4111 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4112 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4113 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4114 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4115 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4116 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4117 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4118 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4119 && rtx_equal_p (rld[i].in, rld[j].in)
4120 && (operand_reloadnum[rld[i].opnum] < 0
4121 || rld[operand_reloadnum[rld[i].opnum]].optional)
4122 && (operand_reloadnum[rld[j].opnum] < 0
4123 || rld[operand_reloadnum[rld[j].opnum]].optional)
4124 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4125 || (goal_alternative_matches[rld[j].opnum]
4126 == rld[i].opnum)))
4127 {
4128 for (k = 0; k < n_replacements; k++)
4129 if (replacements[k].what == j)
4130 replacements[k].what = i;
4131
4132 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4133 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4134 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4135 else
4136 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4137 rld[j].in = 0;
4138 }
4139 }
4140
4141 /* Scan all the reloads and update their type.
4142 If a reload is for the address of an operand and we didn't reload
4143 that operand, change the type. Similarly, change the operand number
4144 of a reload when two operands match. If a reload is optional, treat it
4145 as though the operand isn't reloaded.
4146
4147 ??? This latter case is somewhat odd because if we do the optional
4148 reload, it means the object is hanging around. Thus we need only
4149 do the address reload if the optional reload was NOT done.
4150
4151 Change secondary reloads to be the address type of their operand, not
4152 the normal type.
4153
4154 If an operand's reload is now RELOAD_OTHER, change any
4155 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4156 RELOAD_FOR_OTHER_ADDRESS. */
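/* Illustrative example, not from the original source: a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2 whose operand reload
   is absent or optional is retyped below as
   RELOAD_FOR_OPERAND_ADDRESS, while if operand 2's own reload ended
   up as RELOAD_OTHER its input-address reloads become
   RELOAD_FOR_OTHER_ADDRESS.  */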
4157
4158 for (i = 0; i < n_reloads; i++)
4159 {
4160 if (rld[i].secondary_p
4161 && rld[i].when_needed == operand_type[rld[i].opnum])
4162 rld[i].when_needed = address_type[rld[i].opnum];
4163
4164 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4165 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4166 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4167 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4168 && (operand_reloadnum[rld[i].opnum] < 0
4169 || rld[operand_reloadnum[rld[i].opnum]].optional))
4170 {
4171 /* If we have a secondary reload to go along with this reload,
4172 change its type to RELOAD_FOR_OPADDR_ADDR. */
4173
4174 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4175 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4176 && rld[i].secondary_in_reload != -1)
4177 {
4178 int secondary_in_reload = rld[i].secondary_in_reload;
4179
4180 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4181
4182 /* If there's a tertiary reload we have to change it also. */
4183 if (secondary_in_reload > 0
4184 && rld[secondary_in_reload].secondary_in_reload != -1)
4185 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4186 = RELOAD_FOR_OPADDR_ADDR;
4187 }
4188
4189 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4190 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4191 && rld[i].secondary_out_reload != -1)
4192 {
4193 int secondary_out_reload = rld[i].secondary_out_reload;
4194
4195 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4196
4197 /* If there's a tertiary reload we have to change it also. */
4198 if (secondary_out_reload
4199 && rld[secondary_out_reload].secondary_out_reload != -1)
4200 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4201 = RELOAD_FOR_OPADDR_ADDR;
4202 }
4203
4204 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4205 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4206 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4207 else
4208 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4209 }
4210
4211 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4212 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4213 && operand_reloadnum[rld[i].opnum] >= 0
4214 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4215 == RELOAD_OTHER))
4216 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4217
4218 if (goal_alternative_matches[rld[i].opnum] >= 0)
4219 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4220 }
4221
4222 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4223 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4224 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4225
4226 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4227 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4228 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4229 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4230 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4231 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4232 This is complicated by the fact that a single operand can have more
4233 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4234 choose_reload_regs without affecting code quality, and cases that
4235 actually fail are extremely rare, so it turns out to be better to fix
4236 the problem here by not generating cases that choose_reload_regs will
4237 fail for. */
4238 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4239 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4240 a single operand.
4241 We can reduce the register pressure by exploiting that a
4242 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4243 does not conflict with any of them, if it is only used for the first of
4244 the RELOAD_FOR_X_ADDRESS reloads. */
4245 {
4246 int first_op_addr_num = -2;
4247 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4248 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4249 int need_change = 0;
4250 /* We use first_op_addr_num and the contents of the above arrays
4251 first as flags - -2 means no instance encountered, -1 means exactly
4252 one instance encountered.
4253 If more than one instance has been encountered, we store the reload
4254 number of the first reload of the kind in question; reload numbers
4255 are known to be non-negative. */
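/* Illustrative trace, not from the original source: walking the
   reloads backwards, the first RELOAD_FOR_OPERAND_ADDRESS seen takes
   first_op_addr_num from -2 to -1, still a flag; a second occurrence
   takes it to 0, so it is overwritten with that reload's index and
   need_change is set.  */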
4256 for (i = 0; i < noperands; i++)
4257 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4258 for (i = n_reloads - 1; i >= 0; i--)
4259 {
4260 switch (rld[i].when_needed)
4261 {
4262 case RELOAD_FOR_OPERAND_ADDRESS:
4263 if (++first_op_addr_num >= 0)
4264 {
4265 first_op_addr_num = i;
4266 need_change = 1;
4267 }
4268 break;
4269 case RELOAD_FOR_INPUT_ADDRESS:
4270 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4271 {
4272 first_inpaddr_num[rld[i].opnum] = i;
4273 need_change = 1;
4274 }
4275 break;
4276 case RELOAD_FOR_OUTPUT_ADDRESS:
4277 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4278 {
4279 first_outpaddr_num[rld[i].opnum] = i;
4280 need_change = 1;
4281 }
4282 break;
4283 default:
4284 break;
4285 }
4286 }
4287
4288 if (need_change)
4289 {
4290 for (i = 0; i < n_reloads; i++)
4291 {
4292 int first_num;
4293 enum reload_type type;
4294
4295 switch (rld[i].when_needed)
4296 {
4297 case RELOAD_FOR_OPADDR_ADDR:
4298 first_num = first_op_addr_num;
4299 type = RELOAD_FOR_OPERAND_ADDRESS;
4300 break;
4301 case RELOAD_FOR_INPADDR_ADDRESS:
4302 first_num = first_inpaddr_num[rld[i].opnum];
4303 type = RELOAD_FOR_INPUT_ADDRESS;
4304 break;
4305 case RELOAD_FOR_OUTADDR_ADDRESS:
4306 first_num = first_outpaddr_num[rld[i].opnum];
4307 type = RELOAD_FOR_OUTPUT_ADDRESS;
4308 break;
4309 default:
4310 continue;
4311 }
4312 if (first_num < 0)
4313 continue;
4314 else if (i > first_num)
4315 rld[i].when_needed = type;
4316 else
4317 {
4318 /* Check if the only TYPE reload that uses reload I is
4319 reload FIRST_NUM. */
4320 for (j = n_reloads - 1; j > first_num; j--)
4321 {
4322 if (rld[j].when_needed == type
4323 && (rld[i].secondary_p
4324 ? rld[j].secondary_in_reload == i
4325 : reg_mentioned_p (rld[i].in, rld[j].in)))
4326 {
4327 rld[i].when_needed = type;
4328 break;
4329 }
4330 }
4331 }
4332 }
4333 }
4334 }
4335
4336 /* See if we have any reloads that are now allowed to be merged
4337 because we've changed when the reload is needed to
4338 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4339 check for the most common cases. */
4340
4341 for (i = 0; i < n_reloads; i++)
4342 if (rld[i].in != 0 && rld[i].out == 0
4343 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4344 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4345 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4346 for (j = 0; j < n_reloads; j++)
4347 if (i != j && rld[j].in != 0 && rld[j].out == 0
4348 && rld[j].when_needed == rld[i].when_needed
4349 && MATCHES (rld[i].in, rld[j].in)
4350 && rld[i].class == rld[j].class
4351 && !rld[i].nocombine && !rld[j].nocombine
4352 && rld[i].reg_rtx == rld[j].reg_rtx)
4353 {
4354 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4355 transfer_replacements (i, j);
4356 rld[j].in = 0;
4357 }
4358
4359 #ifdef HAVE_cc0
4360 /* If we made any reloads for addresses, see if they violate a
4361 "no input reloads" requirement for this insn. But loads that we
4362 do after the insn (such as for output addresses) are fine. */
4363 if (no_input_reloads)
4364 for (i = 0; i < n_reloads; i++)
4365 gcc_assert (rld[i].in == 0
4366 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4368 #endif
4369
4370 /* Compute reload_mode and reload_nregs. */
4371 for (i = 0; i < n_reloads; i++)
4372 {
4373 rld[i].mode
4374 = (rld[i].inmode == VOIDmode
4375 || (GET_MODE_SIZE (rld[i].outmode)
4376 > GET_MODE_SIZE (rld[i].inmode)))
4377 ? rld[i].outmode : rld[i].inmode;
4378
4379 rld[i].nregs = CLASS_MAX_NREGS (rld[i].class, rld[i].mode);
4380 }
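/* Illustrative example, not from the original source and
   target-dependent: for an in-out reload with inmode SImode and
   outmode DImode the wider DImode is chosen, and on a 32-bit-word
   target CLASS_MAX_NREGS would typically yield rld[i].nregs == 2 for
   a general-register class.  */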
4381
4382 /* Special case a simple move with an input reload and a
4383 destination of a hard reg; if the hard reg is ok, use it. */
4384 for (i = 0; i < n_reloads; i++)
4385 if (rld[i].when_needed == RELOAD_FOR_INPUT
4386 && GET_CODE (PATTERN (insn)) == SET
4387 && REG_P (SET_DEST (PATTERN (insn)))
4388 && SET_SRC (PATTERN (insn)) == rld[i].in)
4389 {
4390 rtx dest = SET_DEST (PATTERN (insn));
4391 unsigned int regno = REGNO (dest);
4392
4393 if (regno < FIRST_PSEUDO_REGISTER
4394 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno)
4395 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4396 {
4397 int nr = hard_regno_nregs[regno][rld[i].mode];
4398 int ok = 1, nri;
4399
4400 for (nri = 1; nri < nr; nri ++)
4401 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].class], regno + nri))
4402 ok = 0;
4403
4404 if (ok)
4405 rld[i].reg_rtx = dest;
4406 }
4407 }
4408
4409 return retval;
4410 }
4411
4412 /* Return 1 if alternative number ALTNUM in constraint-string CONSTRAINT
4413 accepts a memory operand with constant address. */
4414
4415 static int
4416 alternative_allows_memconst (const char *constraint, int altnum)
4417 {
4418 int c;
4419 /* Skip alternatives before the one requested. */
4420 while (altnum > 0)
4421 {
4422 while (*constraint++ != ',');
4423 altnum--;
4424 }
4425 /* Scan the requested alternative for 'm' or 'o'.
4426 If one of them is present, this alternative accepts memory constants. */
4427 for (; (c = *constraint) && c != ',' && c != '#';
4428 constraint += CONSTRAINT_LEN (c, constraint))
4429 if (c == 'm' || c == 'o' || EXTRA_MEMORY_CONSTRAINT (c, constraint))
4430 return 1;
4431 return 0;
4432 }
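/* Usage sketch, illustrative only and not part of the original file:
   for the constraint string "r,o",
       alternative_allows_memconst ("r,o", 1)
   skips past the first alternative, finds 'o' in the second and
   returns 1, while alternative 0 returns 0 because 'r' is neither
   'm' nor 'o' (and, on typical targets, not an
   EXTRA_MEMORY_CONSTRAINT either).  */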
4433 \f
4434 /* Scan X for memory references and scan the addresses for reloading.
4435 Also checks for references to "constant" regs that we want to eliminate
4436 and replaces them with the values they stand for.
4437 We may alter X destructively if it contains a reference to such.
4438 If X is just a constant reg, we return the equivalent value
4439 instead of X.
4440
4441 IND_LEVELS says how many levels of indirect addressing this machine
4442 supports.
4443
4444 OPNUM and TYPE identify the purpose of the reload.
4445
4446 IS_SET_DEST is true if X is the destination of a SET, which is not
4447 appropriate to be replaced by a constant.
4448
4449 INSN, if nonzero, is the insn in which we do the reload. It is used
4450 to determine if we may generate output reloads, and where to put USEs
4451 for pseudos that we have to replace with stack slots.
4452
4453 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4454 result of find_reloads_address. */
4455
4456 static rtx
4457 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4458 int ind_levels, int is_set_dest, rtx insn,
4459 int *address_reloaded)
4460 {
4461 RTX_CODE code = GET_CODE (x);
4462
4463 const char *fmt = GET_RTX_FORMAT (code);
4464 int i;
4465 int copied;
4466
4467 if (code == REG)
4468 {
4469 /* This code is duplicated for speed in find_reloads. */
4470 int regno = REGNO (x);
4471 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4472 x = reg_equiv_constant[regno];
4473 #if 0
4474 /* This creates (subreg (mem...)) which would cause an unnecessary
4475 reload of the mem. */
4476 else if (reg_equiv_mem[regno] != 0)
4477 x = reg_equiv_mem[regno];
4478 #endif
4479 else if (reg_equiv_memory_loc[regno]
4480 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4481 {
4482 rtx mem = make_memloc (x, regno);
4483 if (reg_equiv_address[regno]
4484 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4485 {
4486 /* If this is not a toplevel operand, find_reloads doesn't see
4487 this substitution. We have to emit a USE of the pseudo so
4488 that delete_output_reload can see it. */
4489 if (replace_reloads && recog_data.operand[opnum] != x)
4490 /* We mark the USE with QImode so that we recognize it
4491 as one that can be safely deleted at the end of
4492 reload. */
4493 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4494 QImode);
4495 x = mem;
4496 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4497 opnum, type, ind_levels, insn);
4498 if (address_reloaded)
4499 *address_reloaded = i;
4500 }
4501 }
4502 return x;
4503 }
4504 if (code == MEM)
4505 {
4506 rtx tem = x;
4507
4508 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4509 opnum, type, ind_levels, insn);
4510 if (address_reloaded)
4511 *address_reloaded = i;
4512
4513 return tem;
4514 }
4515
4516 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4517 {
4518 /* Check for SUBREG containing a REG that's equivalent to a constant.
4519 If the constant has a known value, truncate it right now.
4520 Similarly if we are extracting a single-word of a multi-word
4521 constant. If the constant is symbolic, allow it to be substituted
4522 normally. push_reload will strip the subreg later. If the
4523 constant is VOIDmode, abort because we will lose the mode of
4524 the register (this should never happen because one of the cases
4525 above should handle it). */
4526
4527 int regno = REGNO (SUBREG_REG (x));
4528 rtx tem;
4529
4530 if (subreg_lowpart_p (x)
4531 && regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] < 0
4532 && reg_equiv_constant[regno] != 0
4533 && (tem = gen_lowpart_common (GET_MODE (x),
4534 reg_equiv_constant[regno])) != 0)
4535 return tem;
4536
4537 if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] < 0
4538 && reg_equiv_constant[regno] != 0)
4539 {
4540 tem =
4541 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4542 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4543 gcc_assert (tem);
4544 return tem;
4545 }
4546
4547 /* If the subreg contains a reg that will be converted to a mem,
4548 convert the subreg to a narrower memref now.
4549 Otherwise, we would get (subreg (mem ...) ...),
4550 which would force reload of the mem.
4551
4552 We also need to do this if there is an equivalent MEM that is
4553 not offsettable. In that case, alter_subreg would produce an
4554 invalid address on big-endian machines.
4555
4556 For machines that extend byte loads, we must not reload using
4557 a wider mode if we have a paradoxical SUBREG. find_reloads will
4558 force a reload in that case. So we should not do anything here. */
4559
4560 else if (regno >= FIRST_PSEUDO_REGISTER
4561 #ifdef LOAD_EXTEND_OP
4562 && (GET_MODE_SIZE (GET_MODE (x))
4563 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4564 #endif
4565 && (reg_equiv_address[regno] != 0
4566 || (reg_equiv_mem[regno] != 0
4567 && (! strict_memory_address_p (GET_MODE (x),
4568 XEXP (reg_equiv_mem[regno], 0))
4569 || ! offsettable_memref_p (reg_equiv_mem[regno])
4570 || num_not_at_initial_offset))))
4571 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4572 insn);
4573 }
4574
4575 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4576 {
4577 if (fmt[i] == 'e')
4578 {
4579 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4580 ind_levels, is_set_dest, insn,
4581 address_reloaded);
4582 /* If we have replaced a reg with its equivalent memory loc -
4583 that can still be handled here e.g. if it's in a paradoxical
4584 subreg - we must make the change in a copy, rather than using
4585 a destructive change. This way, find_reloads can still elect
4586 not to do the change. */
4587 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4588 {
4589 x = shallow_copy_rtx (x);
4590 copied = 1;
4591 }
4592 XEXP (x, i) = new_part;
4593 }
4594 }
4595 return x;
4596 }
4597
4598 /* Return a mem ref for the memory equivalent of reg REGNO.
4599 This mem ref is not shared with anything. */
4600
4601 static rtx
4602 make_memloc (rtx ad, int regno)
4603 {
4604 /* We must rerun eliminate_regs, in case the elimination
4605 offsets have changed. */
4606 rtx tem
4607 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], 0, NULL_RTX), 0);
4608
4609 /* If TEM might contain a pseudo, we must copy it to avoid
4610 modifying it when we do the substitution for the reload. */
4611 if (rtx_varies_p (tem, 0))
4612 tem = copy_rtx (tem);
4613
4614 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4615 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4616
4617 /* Copy the result if it's still the same as the equivalence, to avoid
4618 modifying it when we do the substitution for the reload. */
4619 if (tem == reg_equiv_memory_loc[regno])
4620 tem = copy_rtx (tem);
4621 return tem;
4622 }
4623
4624 /* Returns true if AD could be turned into a valid memory reference
4625 to mode MODE by reloading the part pointed to by PART into a
4626 register. */
4627
4628 static int
4629 maybe_memory_address_p (enum machine_mode mode, rtx ad, rtx *part)
4630 {
4631 int retv;
4632 rtx tem = *part;
4633 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4634
4635 *part = reg;
4636 retv = memory_address_p (mode, ad);
4637 *part = tem;
4638
4639 return retv;
4640 }
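/* Illustrative example, not from the original source: if AD is
   (plus (reg A) (const_int 4)) and PART is &XEXP (ad, 0), the
   function temporarily replaces (reg A) with a fresh pseudo of the
   same mode, asks memory_address_p whether
   (plus (reg NEW) (const_int 4)) would be a valid address for MODE,
   and restores *PART before returning.  */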
4641
4642 /* Record all reloads needed for handling memory address AD
4643 which appears in *LOC in a memory reference to mode MODE
4644 which itself is found in location *MEMREFLOC.
4645 Note that we take shortcuts assuming that no multi-reg machine mode
4646 occurs as part of an address.
4647
4648 OPNUM and TYPE specify the purpose of this reload.
4649
4650 IND_LEVELS says how many levels of indirect addressing this machine
4651 supports.
4652
4653 INSN, if nonzero, is the insn in which we do the reload. It is used
4654 to determine if we may generate output reloads, and where to put USEs
4655 for pseudos that we have to replace with stack slots.
4656
4657 Value is one if this address is reloaded or replaced as a whole; it is
4658 zero if the top level of this address was not reloaded or replaced, and
4659 it is -1 if it may or may not have been reloaded or replaced.
4660
4661 Note that there is no verification that the address will be valid after
4662 this routine does its work. Instead, we rely on the fact that the address
4663 was valid when reload started. So we need only undo things that reload
4664 could have broken. These are wrong register types, pseudos not allocated
4665 to a hard register, and frame pointer elimination. */
4666
4667 static int
4668 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4669 rtx *loc, int opnum, enum reload_type type,
4670 int ind_levels, rtx insn)
4671 {
4672 int regno;
4673 int removed_and = 0;
4674 int op_index;
4675 rtx tem;
4676
4677 /* If the address is a register, see if it is a legitimate address and
4678 reload if not. We first handle the cases where we need not reload
4679 or where we must reload in a non-standard way. */
4680
4681 if (REG_P (ad))
4682 {
4683 regno = REGNO (ad);
4684
4685 /* If the register is equivalent to an invariant expression, substitute
4686 the invariant, and eliminate any eliminable register references. */
4687 tem = reg_equiv_constant[regno];
4688 if (tem != 0
4689 && (tem = eliminate_regs (tem, mode, insn))
4690 && strict_memory_address_p (mode, tem))
4691 {
4692 *loc = ad = tem;
4693 return 0;
4694 }
4695
4696 tem = reg_equiv_memory_loc[regno];
4697 if (tem != 0)
4698 {
4699 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4700 {
4701 tem = make_memloc (ad, regno);
4702 if (! strict_memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
4703 {
4704 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4705 &XEXP (tem, 0), opnum,
4706 ADDR_TYPE (type), ind_levels, insn);
4707 }
4708 /* We can avoid a reload if the register's equivalent memory
4709 expression is valid as an indirect memory address.
4710 But not all addresses are valid in a mem used as an indirect
4711 address: only reg or reg+constant. */
4712
4713 if (ind_levels > 0
4714 && strict_memory_address_p (mode, tem)
4715 && (REG_P (XEXP (tem, 0))
4716 || (GET_CODE (XEXP (tem, 0)) == PLUS
4717 && REG_P (XEXP (XEXP (tem, 0), 0))
4718 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4719 {
4720 /* TEM is not the same as what we'll be replacing the
4721 pseudo with after reload; put a USE in front of INSN
4722 in the final reload pass. */
4723 if (replace_reloads
4724 && num_not_at_initial_offset
4725 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4726 {
4727 *loc = tem;
4728 /* We mark the USE with QImode so that we
4729 recognize it as one that can be safely
4730 deleted at the end of reload. */
4731 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4732 insn), QImode);
4733
4734 /* This doesn't really count as replacing the address
4735 as a whole, since it is still a memory access. */
4736 }
4737 return 0;
4738 }
4739 ad = tem;
4740 }
4741 }
4742
4743 /* The only remaining case where we can avoid a reload is if this is a
4744 hard register that is valid as a base register and which is not the
4745 subject of a CLOBBER in this insn. */
4746
4747 else if (regno < FIRST_PSEUDO_REGISTER
4748 && REGNO_MODE_OK_FOR_BASE_P (regno, mode)
4749 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4750 return 0;
4751
4752 /* If we do not have one of the cases above, we must do the reload. */
4753 push_reload (ad, NULL_RTX, loc, (rtx*) 0, MODE_BASE_REG_CLASS (mode),
4754 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4755 return 1;
4756 }
4757
4758 if (strict_memory_address_p (mode, ad))
4759 {
4760 /* The address appears valid, so reloads are not needed.
4761 But the address may contain an eliminable register.
4762 This can happen because a machine with indirect addressing
4763 may consider a pseudo register by itself a valid address even when
4764 it has failed to get a hard reg.
4765 So do a tree-walk to find and eliminate all such regs. */
4766
4767 /* But first quickly dispose of a common case. */
4768 if (GET_CODE (ad) == PLUS
4769 && GET_CODE (XEXP (ad, 1)) == CONST_INT
4770 && REG_P (XEXP (ad, 0))
4771 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4772 return 0;
4773
4774 subst_reg_equivs_changed = 0;
4775 *loc = subst_reg_equivs (ad, insn);
4776
4777 if (! subst_reg_equivs_changed)
4778 return 0;
4779
4780 /* Check result for validity after substitution. */
4781 if (strict_memory_address_p (mode, ad))
4782 return 0;
4783 }
4784
4785 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4786 do
4787 {
4788 if (memrefloc)
4789 {
4790 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4791 ind_levels, win);
4792 }
4793 break;
4794 win:
4795 *memrefloc = copy_rtx (*memrefloc);
4796 XEXP (*memrefloc, 0) = ad;
4797 move_replacements (&ad, &XEXP (*memrefloc, 0));
4798 return -1;
4799 }
4800 while (0);
4801 #endif
4802
4803 /* The address is not valid. We have to figure out why. First see if
4804 we have an outer AND and remove it if so. Then analyze what's inside. */
4805
4806 if (GET_CODE (ad) == AND)
4807 {
4808 removed_and = 1;
4809 loc = &XEXP (ad, 0);
4810 ad = *loc;
4811 }
4812
4813 /* One possibility for why the address is invalid is that it is itself
4814 a MEM. This can happen when the frame pointer is being eliminated, a
4815 pseudo is not allocated to a hard register, and the offset between the
4816 frame and stack pointers is not its initial value. In that case the
4817 pseudo will have been replaced by a MEM referring to the
4818 stack pointer. */
4819 if (MEM_P (ad))
4820 {
4821 /* First ensure that the address in this MEM is valid. Then, unless
4822 indirect addresses are valid, reload the MEM into a register. */
4823 tem = ad;
4824 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
4825 opnum, ADDR_TYPE (type),
4826 ind_levels == 0 ? 0 : ind_levels - 1, insn);
4827
4828 /* If tem was changed, then we must create a new memory reference to
4829 hold it and store it back into memrefloc. */
4830 if (tem != ad && memrefloc)
4831 {
4832 *memrefloc = copy_rtx (*memrefloc);
4833 copy_replacements (tem, XEXP (*memrefloc, 0));
4834 loc = &XEXP (*memrefloc, 0);
4835 if (removed_and)
4836 loc = &XEXP (*loc, 0);
4837 }
4838
4839 /* Check for the same cases as for indirect addresses above, except
4840 that we can allow pseudos and a MEM since they should have been
4841 taken care of above. */
4842
4843 if (ind_levels == 0
4844 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
4845 || MEM_P (XEXP (tem, 0))
4846 || ! (REG_P (XEXP (tem, 0))
4847 || (GET_CODE (XEXP (tem, 0)) == PLUS
4848 && REG_P (XEXP (XEXP (tem, 0), 0))
4849 && GET_CODE (XEXP (XEXP (tem, 0), 1)) == CONST_INT)))
4850 {
4851 /* Must use TEM here, not AD, since it is the one that will
4852 have any subexpressions reloaded, if needed. */
4853 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
4854 MODE_BASE_REG_CLASS (mode), GET_MODE (tem),
4855 VOIDmode, 0,
4856 0, opnum, type);
4857 return ! removed_and;
4858 }
4859 else
4860 return 0;
4861 }
4862
4863 /* If we have the address of a stack slot but it's not valid because the
4864 displacement is too large, compute the sum in a register.
4865 Handle all base registers here, not just fp/ap/sp, because on some
4866 targets (namely SH) we can also get too large displacements from
4867 big-endian corrections. */
4868 else if (GET_CODE (ad) == PLUS
4869 && REG_P (XEXP (ad, 0))
4870 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
4871 && REG_MODE_OK_FOR_BASE_P (XEXP (ad, 0), mode)
4872 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
4873 {
4874 /* Unshare the MEM rtx so we can safely alter it. */
4875 if (memrefloc)
4876 {
4877 *memrefloc = copy_rtx (*memrefloc);
4878 loc = &XEXP (*memrefloc, 0);
4879 if (removed_and)
4880 loc = &XEXP (*loc, 0);
4881 }
4882
4883 if (double_reg_address_ok)
4884 {
4885 /* Unshare the sum as well. */
4886 *loc = ad = copy_rtx (ad);
4887
4888 /* Reload the displacement into an index reg.
4889 We assume the frame pointer or arg pointer is a base reg. */
4890 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
4891 INDEX_REG_CLASS, GET_MODE (ad), opnum,
4892 type, ind_levels);
4893 return 0;
4894 }
4895 else
4896 {
4897 /* If the sum of two regs is not necessarily valid,
4898 reload the sum into a base reg.
4899 That will at least work. */
4900 find_reloads_address_part (ad, loc, MODE_BASE_REG_CLASS (mode),
4901 Pmode, opnum, type, ind_levels);
4902 }
4903 return ! removed_and;
4904 }
4905
4906 /* If we have an indexed stack slot, there are three possible reasons why
4907 it might be invalid: The index might need to be reloaded, the address
4908 might have been made by frame pointer elimination and hence have a
4909 constant out of range, or both reasons might apply.
4910
4911 We can easily check for an index needing reload, but even if that is the
4912 case, we might also have an invalid constant. To avoid making the
4913 conservative assumption and requiring two reloads, we see if this address
4914 is valid when not interpreted strictly. If it is, the only problem is
4915 that the index needs a reload and find_reloads_address_1 will take care
4916 of it.
4917
4918 Handle all base registers here, not just fp/ap/sp, because on some
4919 targets (namely SPARC) we can also get invalid addresses from preventive
4920 subreg big-endian corrections made by find_reloads_toplev. We
4921 can also get expressions involving LO_SUM (rather than PLUS) from
4922 find_reloads_subreg_address.
4923
4924 If we decide to do something, it must be that `double_reg_address_ok'
4925 is true. We generate a reload of the base register + constant and
4926 rework the sum so that the reload register will be added to the index.
4927 This is safe because we know the address isn't shared.
4928
4929 We check for the base register as both the first and second operand of
4930 the innermost PLUS and/or LO_SUM. */
4931
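/* Added illustrative example (hypothetical registers): an address like
   (plus (plus (reg BASE) (reg INDEX)) (const_int BIG)) is rewritten here
   as (plus (plus (reg BASE) (const_int BIG)) (reg INDEX)), and the
   BASE+BIG subexpression is then reloaded into a base register, so the
   reload register ends up being added to the index.  */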
4932 for (op_index = 0; op_index < 2; ++op_index)
4933 {
4934 rtx operand;
4935
4936 if (!(GET_CODE (ad) == PLUS
4937 && GET_CODE (XEXP (ad, 1)) == CONST_INT
4938 && (GET_CODE (XEXP (ad, 0)) == PLUS
4939 || GET_CODE (XEXP (ad, 0)) == LO_SUM)))
4940 continue;
4941
4942 operand = XEXP (XEXP (ad, 0), op_index);
4943 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
4944 continue;
4945
4946 if ((REG_MODE_OK_FOR_BASE_P (operand, mode)
4947 || operand == frame_pointer_rtx
4948 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4949 || operand == hard_frame_pointer_rtx
4950 #endif
4951 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4952 || operand == arg_pointer_rtx
4953 #endif
4954 || operand == stack_pointer_rtx)
4955 && ! maybe_memory_address_p (mode, ad,
4956 &XEXP (XEXP (ad, 0), 1 - op_index)))
4957 {
4958 rtx offset_reg;
4959 rtx addend;
4960
4961 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
4962 addend = XEXP (XEXP (ad, 0), 1 - op_index);
4963
4964 /* Form the adjusted address. */
4965 if (GET_CODE (XEXP (ad, 0)) == PLUS)
4966 ad = gen_rtx_PLUS (GET_MODE (ad),
4967 op_index == 0 ? offset_reg : addend,
4968 op_index == 0 ? addend : offset_reg);
4969 else
4970 ad = gen_rtx_LO_SUM (GET_MODE (ad),
4971 op_index == 0 ? offset_reg : addend,
4972 op_index == 0 ? addend : offset_reg);
4973 *loc = ad;
4974
4975 find_reloads_address_part (XEXP (ad, op_index),
4976 &XEXP (ad, op_index),
4977 MODE_BASE_REG_CLASS (mode),
4978 GET_MODE (ad), opnum, type, ind_levels);
4979 find_reloads_address_1 (mode,
4980 XEXP (ad, 1 - op_index), 1,
4981 &XEXP (ad, 1 - op_index), opnum,
4982 type, 0, insn);
4983
4984 return 0;
4985 }
4986 }
4987
4988 /* See if address becomes valid when an eliminable register
4989 in a sum is replaced. */
4990
4991 tem = ad;
4992 if (GET_CODE (ad) == PLUS)
4993 tem = subst_indexed_address (ad);
4994 if (tem != ad && strict_memory_address_p (mode, tem))
4995 {
4996 /* Ok, we win that way. Replace any additional eliminable
4997 registers. */
4998
4999 subst_reg_equivs_changed = 0;
5000 tem = subst_reg_equivs (tem, insn);
5001
5002 /* Make sure that didn't make the address invalid again. */
5003
5004 if (! subst_reg_equivs_changed || strict_memory_address_p (mode, tem))
5005 {
5006 *loc = tem;
5007 return 0;
5008 }
5009 }
5010
5011 /* If constants aren't valid addresses, reload the constant address
5012 into a register. */
5013 if (CONSTANT_P (ad) && ! strict_memory_address_p (mode, ad))
5014 {
5015 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5016 Unshare it so we can safely alter it. */
5017 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5018 && CONSTANT_POOL_ADDRESS_P (ad))
5019 {
5020 *memrefloc = copy_rtx (*memrefloc);
5021 loc = &XEXP (*memrefloc, 0);
5022 if (removed_and)
5023 loc = &XEXP (*loc, 0);
5024 }
5025
5026 find_reloads_address_part (ad, loc, MODE_BASE_REG_CLASS (mode),
5027 Pmode, opnum, type, ind_levels);
5028 return ! removed_and;
5029 }
5030
5031 return find_reloads_address_1 (mode, ad, 0, loc, opnum, type, ind_levels,
5032 insn);
5033 }
5034 \f
5035 /* Find all pseudo regs appearing in AD
5036 that are eliminable in favor of equivalent values
5037 and do not have hard regs; replace them by their equivalents.
5038 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5039 front of it for pseudos that we have to replace with stack slots. */
5040
5041 static rtx
5042 subst_reg_equivs (rtx ad, rtx insn)
5043 {
5044 RTX_CODE code = GET_CODE (ad);
5045 int i;
5046 const char *fmt;
5047
5048 switch (code)
5049 {
5050 case HIGH:
5051 case CONST_INT:
5052 case CONST:
5053 case CONST_DOUBLE:
5054 case CONST_VECTOR:
5055 case SYMBOL_REF:
5056 case LABEL_REF:
5057 case PC:
5058 case CC0:
5059 return ad;
5060
5061 case REG:
5062 {
5063 int regno = REGNO (ad);
5064
5065 if (reg_equiv_constant[regno] != 0)
5066 {
5067 subst_reg_equivs_changed = 1;
5068 return reg_equiv_constant[regno];
5069 }
5070 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5071 {
5072 rtx mem = make_memloc (ad, regno);
5073 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5074 {
5075 subst_reg_equivs_changed = 1;
5076 /* We mark the USE with QImode so that we recognize it
5077 as one that can be safely deleted at the end of
5078 reload. */
5079 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5080 QImode);
5081 return mem;
5082 }
5083 }
5084 }
5085 return ad;
5086
5087 case PLUS:
5088 /* Quickly dispose of a common case. */
5089 if (XEXP (ad, 0) == frame_pointer_rtx
5090 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
5091 return ad;
5092 break;
5093
5094 default:
5095 break;
5096 }
5097
5098 fmt = GET_RTX_FORMAT (code);
5099 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5100 if (fmt[i] == 'e')
5101 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5102 return ad;
5103 }
5104 \f
5105 /* Compute the sum of X and Y, making canonicalizations assumed in an
5106 address, namely: sum constant integers, surround the sum of two
5107 constants with a CONST, put the constant as the second operand, and
5108 group the constant on the outermost sum.
5109
5110 This routine assumes both inputs are already in canonical form. */
5111
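/* Added example of the canonicalization (hypothetical register numbers):
   summing (plus (reg 100) (const_int 4)) with (const_int 8) yields
   (plus (reg 100) (const_int 12)); summing (plus (reg 100) (const_int 4))
   with (reg 101) groups the constant outermost, giving
   (plus (plus (reg 100) (reg 101)) (const_int 4)).  */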
5112 rtx
5113 form_sum (rtx x, rtx y)
5114 {
5115 rtx tem;
5116 enum machine_mode mode = GET_MODE (x);
5117
5118 if (mode == VOIDmode)
5119 mode = GET_MODE (y);
5120
5121 if (mode == VOIDmode)
5122 mode = Pmode;
5123
5124 if (GET_CODE (x) == CONST_INT)
5125 return plus_constant (y, INTVAL (x));
5126 else if (GET_CODE (y) == CONST_INT)
5127 return plus_constant (x, INTVAL (y));
5128 else if (CONSTANT_P (x))
5129 tem = x, x = y, y = tem;
5130
5131 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5132 return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
5133
5134 /* Note that if the operands of Y are specified in the opposite
5135 order in the recursive calls below, infinite recursion will occur. */
5136 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5137 return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
5138
5139 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5140 constant will have been placed second. */
5141 if (CONSTANT_P (x) && CONSTANT_P (y))
5142 {
5143 if (GET_CODE (x) == CONST)
5144 x = XEXP (x, 0);
5145 if (GET_CODE (y) == CONST)
5146 y = XEXP (y, 0);
5147
5148 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5149 }
5150
5151 return gen_rtx_PLUS (mode, x, y);
5152 }
5153 \f
5154 /* If ADDR is a sum containing a pseudo register that should be
5155 replaced with a constant (from reg_equiv_constant),
5156 return the result of doing so, and also apply the associative
5157 law so that the result is more likely to be a valid address.
5158 (But it is not guaranteed to be one.)
5159
5160 Note that at most one register is replaced, even if more are
5161 replaceable. Also, we try to put the result into a canonical form
5162 so it is more likely to be a valid address.
5163
5164 In all other cases, return ADDR. */
5165
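/* Added illustrative note (hypothetical registers): if pseudo (reg 100)
   got no hard register and reg_equiv_constant[100] is (symbol_ref "x"),
   then an address such as (plus (plus (reg 100) (reg 1)) (const_int 4))
   is rewritten with (symbol_ref "x") substituted for (reg 100) and the
   terms re-associated via form_sum, which may produce a valid address.  */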
5166 static rtx
5167 subst_indexed_address (rtx addr)
5168 {
5169 rtx op0 = 0, op1 = 0, op2 = 0;
5170 rtx tem;
5171 int regno;
5172
5173 if (GET_CODE (addr) == PLUS)
5174 {
5175 /* Try to find a register to replace. */
5176 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5177 if (REG_P (op0)
5178 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5179 && reg_renumber[regno] < 0
5180 && reg_equiv_constant[regno] != 0)
5181 op0 = reg_equiv_constant[regno];
5182 else if (REG_P (op1)
5183 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5184 && reg_renumber[regno] < 0
5185 && reg_equiv_constant[regno] != 0)
5186 op1 = reg_equiv_constant[regno];
5187 else if (GET_CODE (op0) == PLUS
5188 && (tem = subst_indexed_address (op0)) != op0)
5189 op0 = tem;
5190 else if (GET_CODE (op1) == PLUS
5191 && (tem = subst_indexed_address (op1)) != op1)
5192 op1 = tem;
5193 else
5194 return addr;
5195
5196 /* Pick out up to three things to add. */
5197 if (GET_CODE (op1) == PLUS)
5198 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5199 else if (GET_CODE (op0) == PLUS)
5200 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5201
5202 /* Compute the sum. */
5203 if (op2 != 0)
5204 op1 = form_sum (op1, op2);
5205 if (op1 != 0)
5206 op0 = form_sum (op0, op1);
5207
5208 return op0;
5209 }
5210 return addr;
5211 }
5212 \f
5213 /* Update the REG_INC notes for an insn. It updates all REG_INC
5214 notes for the instruction that refer to REGNO so that they refer
5215 to the reload number.
5216
5217 INSN is the insn for which any REG_INC notes need updating.
5218
5219 REGNO is the register number which has been reloaded.
5220
5221 RELOADNUM is the reload number. */
5222
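/* Added illustrative note (hypothetical register number): if this insn
   has a (REG_INC (reg 100)) note and register 100 is being reloaded by
   reload number RELOADNUM, the note's register field is queued via
   push_replacement to be rewritten to the reload register.  */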
5223 static void
5224 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5225 int reloadnum ATTRIBUTE_UNUSED)
5226 {
5227 #ifdef AUTO_INC_DEC
5228 rtx link;
5229
5230 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5231 if (REG_NOTE_KIND (link) == REG_INC
5232 && (int) REGNO (XEXP (link, 0)) == regno)
5233 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5234 #endif
5235 }
5236 \f
5237 /* Record the pseudo registers we must reload into hard registers in a
5238 subexpression of a would-be memory address, X referring to a value
5239 in mode MODE. (This function is not called if the address we find
5240 is strictly valid.)
5241
5242 CONTEXT = 1 means we are considering regs as index regs,
5243 = 0 means we are considering them as base regs, = 2 means we
5244 are considering them as base regs for REG + REG.
5245
5246 OPNUM and TYPE specify the purpose of any reloads made.
5247
5248 IND_LEVELS says how many levels of indirect addressing are
5249 supported at this point in the address.
5250
5251 INSN, if nonzero, is the insn in which we do the reload. It is used
5252 to determine if we may generate output reloads.
5253
5254 We return nonzero if X, as a whole, is reloaded or replaced. */
5255
5256 /* Note that we take shortcuts assuming that no multi-reg machine mode
5257 occurs as part of an address.
5258 Also, this is not fully machine-customizable; it works for machines
5259 such as VAXen and 68000's and 32000's, but other possible machines
5260 could have addressing modes that this does not handle right. */
5261
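/* Added illustrative note (hypothetical registers): for an address such
   as (plus (reg R1) (mult (reg R2) (const_int 4))), R1 is examined as a
   base register (CONTEXT 0) and R2, inside the MULT, as an index register
   (CONTEXT 1); in a (plus (reg R1) (reg R2)) address where neither
   register fits those roles on its own, CONTEXT 2 (base reg for
   REG + REG) is used.  */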
5262 static int
5263 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5264 rtx *loc, int opnum, enum reload_type type,
5265 int ind_levels, rtx insn)
5266 {
5267 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE) \
5268 ((CONTEXT) == 2 \
5269 ? REGNO_MODE_OK_FOR_REG_BASE_P (REGNO, MODE) \
5270 : (CONTEXT) == 1 \
5271 ? REGNO_OK_FOR_INDEX_P (REGNO) \
5272 : REGNO_MODE_OK_FOR_BASE_P (REGNO, MODE))
5273
5274 enum reg_class context_reg_class;
5275 RTX_CODE code = GET_CODE (x);
5276
5277 if (context == 2)
5278 context_reg_class = MODE_BASE_REG_REG_CLASS (mode);
5279 else if (context == 1)
5280 context_reg_class = INDEX_REG_CLASS;
5281 else
5282 context_reg_class = MODE_BASE_REG_CLASS (mode);
5283
5284 switch (code)
5285 {
5286 case PLUS:
5287 {
5288 rtx orig_op0 = XEXP (x, 0);
5289 rtx orig_op1 = XEXP (x, 1);
5290 RTX_CODE code0 = GET_CODE (orig_op0);
5291 RTX_CODE code1 = GET_CODE (orig_op1);
5292 rtx op0 = orig_op0;
5293 rtx op1 = orig_op1;
5294
5295 if (GET_CODE (op0) == SUBREG)
5296 {
5297 op0 = SUBREG_REG (op0);
5298 code0 = GET_CODE (op0);
5299 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5300 op0 = gen_rtx_REG (word_mode,
5301 (REGNO (op0) +
5302 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5303 GET_MODE (SUBREG_REG (orig_op0)),
5304 SUBREG_BYTE (orig_op0),
5305 GET_MODE (orig_op0))));
5306 }
5307
5308 if (GET_CODE (op1) == SUBREG)
5309 {
5310 op1 = SUBREG_REG (op1);
5311 code1 = GET_CODE (op1);
5312 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5313 /* ??? Why is this given op1's mode, when for op0
5314 ??? SUBREGs above we use word_mode? */
5315 op1 = gen_rtx_REG (GET_MODE (op1),
5316 (REGNO (op1) +
5317 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5318 GET_MODE (SUBREG_REG (orig_op1)),
5319 SUBREG_BYTE (orig_op1),
5320 GET_MODE (orig_op1))));
5321 }
5322 /* A PLUS in the index register may be created only as a result of
5323 register rematerialization for an expression like &localvar*4. Reload it.
5324 It may be possible to combine the displacement on the outer level,
5325 but it is probably not worthwhile to do so. */
5326 if (context == 1)
5327 {
5328 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5329 opnum, ADDR_TYPE (type), ind_levels, insn);
5330 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5331 context_reg_class,
5332 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5333 return 1;
5334 }
5335
5336 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5337 || code0 == ZERO_EXTEND || code1 == MEM)
5338 {
5339 find_reloads_address_1 (mode, orig_op0, 1, &XEXP (x, 0), opnum,
5340 type, ind_levels, insn);
5341 find_reloads_address_1 (mode, orig_op1, 0, &XEXP (x, 1), opnum,
5342 type, ind_levels, insn);
5343 }
5344
5345 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5346 || code1 == ZERO_EXTEND || code0 == MEM)
5347 {
5348 find_reloads_address_1 (mode, orig_op0, 0, &XEXP (x, 0), opnum,
5349 type, ind_levels, insn);
5350 find_reloads_address_1 (mode, orig_op1, 1, &XEXP (x, 1), opnum,
5351 type, ind_levels, insn);
5352 }
5353
5354 else if (code0 == CONST_INT || code0 == CONST
5355 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5356 find_reloads_address_1 (mode, orig_op1, 0, &XEXP (x, 1), opnum,
5357 type, ind_levels, insn);
5358
5359 else if (code1 == CONST_INT || code1 == CONST
5360 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5361 find_reloads_address_1 (mode, orig_op0, 0, &XEXP (x, 0), opnum,
5362 type, ind_levels, insn);
5363
5364 else if (code0 == REG && code1 == REG)
5365 {
5366 if (REG_OK_FOR_INDEX_P (op0)
5367 && REG_MODE_OK_FOR_REG_BASE_P (op1, mode))
5368 return 0;
5369 else if (REG_OK_FOR_INDEX_P (op1)
5370 && REG_MODE_OK_FOR_REG_BASE_P (op0, mode))
5371 return 0;
5372 else if (REG_MODE_OK_FOR_REG_BASE_P (op1, mode))
5373 find_reloads_address_1 (mode, orig_op0, 1, &XEXP (x, 0), opnum,
5374 type, ind_levels, insn);
5375 else if (REG_MODE_OK_FOR_REG_BASE_P (op0, mode))
5376 find_reloads_address_1 (mode, orig_op1, 1, &XEXP (x, 1), opnum,
5377 type, ind_levels, insn);
5378 else if (REG_OK_FOR_INDEX_P (op1))
5379 find_reloads_address_1 (mode, orig_op0, 2, &XEXP (x, 0), opnum,
5380 type, ind_levels, insn);
5381 else if (REG_OK_FOR_INDEX_P (op0))
5382 find_reloads_address_1 (mode, orig_op1, 2, &XEXP (x, 1), opnum,
5383 type, ind_levels, insn);
5384 else
5385 {
5386 find_reloads_address_1 (mode, orig_op0, 1, &XEXP (x, 0), opnum,
5387 type, ind_levels, insn);
5388 find_reloads_address_1 (mode, orig_op1, 0, &XEXP (x, 1), opnum,
5389 type, ind_levels, insn);
5390 }
5391 }
5392
5393 else if (code0 == REG)
5394 {
5395 find_reloads_address_1 (mode, orig_op0, 1, &XEXP (x, 0), opnum,
5396 type, ind_levels, insn);
5397 find_reloads_address_1 (mode, orig_op1, 0, &XEXP (x, 1), opnum,
5398 type, ind_levels, insn);
5399 }
5400
5401 else if (code1 == REG)
5402 {
5403 find_reloads_address_1 (mode, orig_op1, 1, &XEXP (x, 1), opnum,
5404 type, ind_levels, insn);
5405 find_reloads_address_1 (mode, orig_op0, 0, &XEXP (x, 0), opnum,
5406 type, ind_levels, insn);
5407 }
5408 }
5409
5410 return 0;
5411
5412 case POST_MODIFY:
5413 case PRE_MODIFY:
5414 {
5415 rtx op0 = XEXP (x, 0);
5416 rtx op1 = XEXP (x, 1);
5417 int regno;
5418 int reloadnum;
5419
5420 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5421 return 0;
5422
5423 /* Currently, we only support {PRE,POST}_MODIFY constructs
5424 where a base register is {inc,dec}remented by the contents
5425 of another register or by a constant value. Thus, these
5426 operands must match. */
5427 gcc_assert (op0 == XEXP (op1, 0));
5428
5429 /* Require index register (or constant). Let's just handle the
5430 register case in the meantime... If the target allows
5431 auto-modify by a constant then we could try replacing a pseudo
5432 register with its equivalent constant where applicable. */
5433 if (REG_P (XEXP (op1, 1)))
5434 if (!REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5435 find_reloads_address_1 (mode, XEXP (op1, 1), 1, &XEXP (op1, 1),
5436 opnum, type, ind_levels, insn);
5437
5438 gcc_assert (REG_P (XEXP (op1, 0)));
5439
5440 regno = REGNO (XEXP (op1, 0));
5441
5442 /* A register that is incremented cannot be constant! */
5443 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5444 || reg_equiv_constant[regno] == 0);
5445
5446 /* Handle a register that is equivalent to a memory location
5447 which cannot be addressed directly. */
5448 if (reg_equiv_memory_loc[regno] != 0
5449 && (reg_equiv_address[regno] != 0
5450 || num_not_at_initial_offset))
5451 {
5452 rtx tem = make_memloc (XEXP (x, 0), regno);
5453
5454 if (reg_equiv_address[regno]
5455 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5456 {
5457 /* First reload the memory location's address.
5458 We can't use ADDR_TYPE (type) here, because we need to
5459 write back the value after reading it, hence we actually
5460 need two registers. */
5461 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5462 &XEXP (tem, 0), opnum,
5463 RELOAD_OTHER,
5464 ind_levels, insn);
5465
5466 /* Then reload the memory location into a base
5467 register. */
5468 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5469 &XEXP (op1, 0),
5470 MODE_BASE_REG_CLASS (mode),
5471 GET_MODE (x), GET_MODE (x), 0,
5472 0, opnum, RELOAD_OTHER);
5473
5474 update_auto_inc_notes (this_insn, regno, reloadnum);
5475 return 0;
5476 }
5477 }
5478
5479 if (reg_renumber[regno] >= 0)
5480 regno = reg_renumber[regno];
5481
5482 /* We require a base register here... */
5483 if (!REGNO_MODE_OK_FOR_BASE_P (regno, GET_MODE (x)))
5484 {
5485 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5486 &XEXP (op1, 0), &XEXP (x, 0),
5487 MODE_BASE_REG_CLASS (mode),
5488 GET_MODE (x), GET_MODE (x), 0, 0,
5489 opnum, RELOAD_OTHER);
5490
5491 update_auto_inc_notes (this_insn, regno, reloadnum);
5492 return 0;
5493 }
5494 }
5495 return 0;
5496
5497 case POST_INC:
5498 case POST_DEC:
5499 case PRE_INC:
5500 case PRE_DEC:
5501 if (REG_P (XEXP (x, 0)))
5502 {
5503 int regno = REGNO (XEXP (x, 0));
5504 int value = 0;
5505 rtx x_orig = x;
5506
5507 /* A register that is incremented cannot be constant! */
5508 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5509 || reg_equiv_constant[regno] == 0);
5510
5511 /* Handle a register that is equivalent to a memory location
5512 which cannot be addressed directly. */
5513 if (reg_equiv_memory_loc[regno] != 0
5514 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5515 {
5516 rtx tem = make_memloc (XEXP (x, 0), regno);
5517 if (reg_equiv_address[regno]
5518 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5519 {
5520 /* First reload the memory location's address.
5521 We can't use ADDR_TYPE (type) here, because we need to
5522 write back the value after reading it, hence we actually
5523 need two registers. */
5524 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5525 &XEXP (tem, 0), opnum, type,
5526 ind_levels, insn);
5527 /* Put this inside a new increment-expression. */
5528 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5529 /* Proceed to reload that, as if it contained a register. */
5530 }
5531 }
5532
5533 /* If we have a hard register that is ok as an index,
5534 don't make a reload. If an autoincrement of a nice register
5535 isn't "valid", it must be that no autoincrement is "valid".
5536 If that is true and something made an autoincrement anyway,
5537 this must be a special context where one is allowed.
5538 (For example, a "push" instruction.)
5539 We can't improve this address, so leave it alone. */
5540
5541 /* Otherwise, reload the autoincrement into a suitable hard reg
5542 and record how much to increment by. */
5543
5544 if (reg_renumber[regno] >= 0)
5545 regno = reg_renumber[regno];
5546 if (regno >= FIRST_PSEUDO_REGISTER
5547 || !REG_OK_FOR_CONTEXT (context, regno, mode))
5548 {
5549 int reloadnum;
5550
5551 /* If we can output the register afterwards, do so, this
5552 saves the extra update.
5553 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5554 CALL_INSN - and it does not set CC0.
5555 But don't do this if we cannot directly address the
5556 memory location, since this will make it harder to
5557 reuse address reloads, and increases register pressure.
5558 Also don't do this if we can probably update x directly. */
5559 rtx equiv = (MEM_P (XEXP (x, 0))
5560 ? XEXP (x, 0)
5561 : reg_equiv_mem[regno]);
5562 int icode = (int) add_optab->handlers[(int) Pmode].insn_code;
5563 if (insn && NONJUMP_INSN_P (insn) && equiv
5564 && memory_operand (equiv, GET_MODE (equiv))
5565 #ifdef HAVE_cc0
5566 && ! sets_cc0_p (PATTERN (insn))
5567 #endif
5568 && ! (icode != CODE_FOR_nothing
5569 && ((*insn_data[icode].operand[0].predicate)
5570 (equiv, Pmode))
5571 && ((*insn_data[icode].operand[1].predicate)
5572 (equiv, Pmode))))
5573 {
5574 /* We use the original pseudo for loc, so that
5575 emit_reload_insns() knows which pseudo this
5576 reload refers to and updates the pseudo rtx, not
5577 its equivalent memory location, as well as the
5578 corresponding entry in reg_last_reload_reg. */
5579 loc = &XEXP (x_orig, 0);
5580 x = XEXP (x, 0);
5581 reloadnum
5582 = push_reload (x, x, loc, loc,
5583 context_reg_class,
5584 GET_MODE (x), GET_MODE (x), 0, 0,
5585 opnum, RELOAD_OTHER);
5586 }
5587 else
5588 {
5589 reloadnum
5590 = push_reload (x, NULL_RTX, loc, (rtx*) 0,
5591 context_reg_class,
5592 GET_MODE (x), GET_MODE (x), 0, 0,
5593 opnum, type);
5594 rld[reloadnum].inc
5595 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5596
5597 value = 1;
5598 }
5599
5600 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5601 reloadnum);
5602 }
5603 return value;
5604 }
5605
5606 else if (MEM_P (XEXP (x, 0)))
5607 {
5608 /* This is probably the result of a substitution, by eliminate_regs,
5609 of an equivalent address for a pseudo that was not allocated to a
5610 hard register. Verify that the specified address is valid and
5611 reload it into a register. */
5612 /* Variable `tem' might or might not be used in FIND_REG_INC_NOTE. */
5613 rtx tem ATTRIBUTE_UNUSED = XEXP (x, 0);
5614 rtx link;
5615 int reloadnum;
5616
5617 /* Since we know we are going to reload this item, don't decrement
5618 for the indirection level.
5619
5620 Note that this is actually conservative: it would be slightly
5621 more efficient to use the value of SPILL_INDIRECT_LEVELS from
5622 reload1.c here. */
5623 /* We can't use ADDR_TYPE (type) here, because we need to
5624 write back the value after reading it, hence we actually
5625 need two registers. */
5626 find_reloads_address (GET_MODE (x), &XEXP (x, 0),
5627 XEXP (XEXP (x, 0), 0), &XEXP (XEXP (x, 0), 0),
5628 opnum, type, ind_levels, insn);
5629
5630 reloadnum = push_reload (x, NULL_RTX, loc, (rtx*) 0,
5631 context_reg_class,
5632 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5633 rld[reloadnum].inc
5634 = find_inc_amount (PATTERN (this_insn), XEXP (x, 0));
5635
5636 link = FIND_REG_INC_NOTE (this_insn, tem);
5637 if (link != 0)
5638 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5639
5640 return 1;
5641 }
5642 return 0;
5643
5644 case MEM:
5645 /* This is probably the result of a substitution, by eliminate_regs, of
5646 an equivalent address for a pseudo that was not allocated to a hard
5647 register. Verify that the specified address is valid and reload it
5648 into a register.
5649
5650 Since we know we are going to reload this item, don't decrement for
5651 the indirection level.
5652
5653 Note that this is actually conservative: it would be slightly more
5654 efficient to use the value of SPILL_INDIRECT_LEVELS from
5655 reload1.c here. */
5656
5657 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5658 opnum, ADDR_TYPE (type), ind_levels, insn);
5659 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5660 context_reg_class,
5661 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5662 return 1;
5663
5664 case REG:
5665 {
5666 int regno = REGNO (x);
5667
5668 if (reg_equiv_constant[regno] != 0)
5669 {
5670 find_reloads_address_part (reg_equiv_constant[regno], loc,
5671 context_reg_class,
5672 GET_MODE (x), opnum, type, ind_levels);
5673 return 1;
5674 }
5675
5676 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5677 that feeds this insn. */
5678 if (reg_equiv_mem[regno] != 0)
5679 {
5680 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5681 context_reg_class,
5682 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5683 return 1;
5684 }
5685 #endif
5686
5687 if (reg_equiv_memory_loc[regno]
5688 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5689 {
5690 rtx tem = make_memloc (x, regno);
5691 if (reg_equiv_address[regno] != 0
5692 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5693 {
5694 x = tem;
5695 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5696 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5697 ind_levels, insn);
5698 }
5699 }
5700
5701 if (reg_renumber[regno] >= 0)
5702 regno = reg_renumber[regno];
5703
5704 if (regno >= FIRST_PSEUDO_REGISTER
5705 || !REG_OK_FOR_CONTEXT (context, regno, mode))
5706 {
5707 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5708 context_reg_class,
5709 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5710 return 1;
5711 }
5712
5713 /* If a register appearing in an address is the subject of a CLOBBER
5714 in this insn, reload it into some other register to be safe.
5715 The CLOBBER is supposed to make the register unavailable
5716 from before this insn to after it. */
5717 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5718 {
5719 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5720 context_reg_class,
5721 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5722 return 1;
5723 }
5724 }
5725 return 0;
5726
5727 case SUBREG:
5728 if (REG_P (SUBREG_REG (x)))
5729 {
5730 /* If this is a SUBREG of a hard register and the resulting register
5731 is of the wrong class, reload the whole SUBREG. This avoids
5732 needless copies if SUBREG_REG is multi-word. */
5733 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5734 {
5735 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5736
5737 if (! REG_OK_FOR_CONTEXT (context, regno, mode))
5738 {
5739 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5740 context_reg_class,
5741 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5742 return 1;
5743 }
5744 }
5745 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5746 is larger than the class size, then reload the whole SUBREG. */
5747 else
5748 {
5749 enum reg_class class = context_reg_class;
5750 if ((unsigned) CLASS_MAX_NREGS (class, GET_MODE (SUBREG_REG (x)))
5751 > reg_class_size[class])
5752 {
5753 x = find_reloads_subreg_address (x, 0, opnum, type,
5754 ind_levels, insn);
5755 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
5756 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5757 return 1;
5758 }
5759 }
5760 }
5761 break;
5762
5763 default:
5764 break;
5765 }
5766
5767 {
5768 const char *fmt = GET_RTX_FORMAT (code);
5769 int i;
5770
5771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5772 {
5773 if (fmt[i] == 'e')
5774 find_reloads_address_1 (mode, XEXP (x, i), context, &XEXP (x, i),
5775 opnum, type, ind_levels, insn);
5776 }
5777 }
5778
5779 #undef REG_OK_FOR_CONTEXT
5780 return 0;
5781 }
5782 \f
5783 /* X, which is found at *LOC, is a part of an address that needs to be
5784 reloaded into a register of class CLASS. If X is a constant, or if
5785 X is a PLUS that contains a constant, check that the constant is a
5786 legitimate operand and that we are supposed to be able to load
5787 it into the register.
5788
5789 If not, force the constant into memory and reload the MEM instead.
5790
5791 MODE is the mode to use, in case X is an integer constant.
5792
5793 OPNUM and TYPE describe the purpose of any reloads made.
5794
5795 IND_LEVELS says how many levels of indirect addressing this machine
5796 supports. */
5797
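/* Added illustrative note: e.g. if X is a CONST_DOUBLE that the target
   cannot load directly (LEGITIMATE_CONSTANT_P is false for it), it is
   forced into the constant pool here and the resulting MEM, rather than
   the constant itself, is reloaded into a register of class CLASS.  */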
5798 static void
5799 find_reloads_address_part (rtx x, rtx *loc, enum reg_class class,
5800 enum machine_mode mode, int opnum,
5801 enum reload_type type, int ind_levels)
5802 {
5803 if (CONSTANT_P (x)
5804 && (! LEGITIMATE_CONSTANT_P (x)
5805 || PREFERRED_RELOAD_CLASS (x, class) == NO_REGS))
5806 {
5807 rtx tem;
5808
5809 tem = x = force_const_mem (mode, x);
5810 find_reloads_address (mode, &tem, XEXP (tem, 0), &XEXP (tem, 0),
5811 opnum, type, ind_levels, 0);
5812 }
5813
5814 else if (GET_CODE (x) == PLUS
5815 && CONSTANT_P (XEXP (x, 1))
5816 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
5817 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), class) == NO_REGS))
5818 {
5819 rtx tem;
5820
5821 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
5822 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
5823 find_reloads_address (mode, &tem, XEXP (tem, 0), &XEXP (tem, 0),
5824 opnum, type, ind_levels, 0);
5825 }
5826
5827 push_reload (x, NULL_RTX, loc, (rtx*) 0, class,
5828 mode, VOIDmode, 0, 0, opnum, type);
5829 }
5830 \f
5831 /* X, a subreg of a pseudo, is a part of an address that needs to be
5832 reloaded.
5833
5834 If the pseudo is equivalent to a memory location that cannot be directly
5835 addressed, make the necessary address reloads.
5836
5837 If address reloads have been necessary, or if the address is changed
5838 by register elimination, return the rtx of the memory location;
5839 otherwise, return X.
5840
5841 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
5842 memory location.
5843
5844 OPNUM and TYPE identify the purpose of the reload.
5845
5846 IND_LEVELS says how many levels of indirect addressing are
5847 supported at this point in the address.
5848
5849 INSN, if nonzero, is the insn in which we do the reload. It is used
5850 to determine where to put USEs for pseudos that we have to replace with
5851 stack slots. */
5852
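/* Added illustrative note (hypothetical pseudo): for
   (subreg:HI (reg:SI 100) 0) where pseudo 100 lives in a stack slot, the
   subreg may be replaced by an HImode MEM at the slot's address, offset
   by SUBREG_BYTE (adjusted for big-endian paradoxical subregs), and that
   address is then reloaded if it is not directly usable.  */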
5853 static rtx
5854 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
5855 enum reload_type type, int ind_levels, rtx insn)
5856 {
5857 int regno = REGNO (SUBREG_REG (x));
5858
5859 if (reg_equiv_memory_loc[regno])
5860 {
5861 /* If the address is not directly addressable, or if the address is not
5862 offsettable, then it must be replaced. */
5863 if (! force_replace
5864 && (reg_equiv_address[regno]
5865 || ! offsettable_memref_p (reg_equiv_mem[regno])))
5866 force_replace = 1;
5867
5868 if (force_replace || num_not_at_initial_offset)
5869 {
5870 rtx tem = make_memloc (SUBREG_REG (x), regno);
5871
5872 /* If the address changes because of register elimination, then
5873 it must be replaced. */
5874 if (force_replace
5875 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5876 {
5877 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
5878 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
5879 int offset;
5880
5881 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
5882 hold the correct (negative) byte offset. */
5883 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
5884 offset = inner_size - outer_size;
5885 else
5886 offset = SUBREG_BYTE (x);
5887
5888 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
5889 PUT_MODE (tem, GET_MODE (x));
5890
5891 /* If this was a paradoxical subreg that we replaced, the
5892 resulting memory must be sufficiently aligned to allow
5893 us to widen the mode of the memory. */
5894 if (outer_size > inner_size && STRICT_ALIGNMENT)
5895 {
5896 rtx base;
5897
5898 base = XEXP (tem, 0);
5899 if (GET_CODE (base) == PLUS)
5900 {
5901 if (GET_CODE (XEXP (base, 1)) == CONST_INT
5902 && INTVAL (XEXP (base, 1)) % outer_size != 0)
5903 return x;
5904 base = XEXP (base, 0);
5905 }
5906 if (!REG_P (base)
5907 || (REGNO_POINTER_ALIGN (REGNO (base))
5908 < outer_size * BITS_PER_UNIT))
5909 return x;
5910 }
5911
5912 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5913 &XEXP (tem, 0), opnum, ADDR_TYPE (type),
5914 ind_levels, insn);
5915
5916 /* If this is not a toplevel operand, find_reloads doesn't see
5917 this substitution. We have to emit a USE of the pseudo so
5918 that delete_output_reload can see it. */
5919 if (replace_reloads && recog_data.operand[opnum] != x)
5920 /* We mark the USE with QImode so that we recognize it
5921 as one that can be safely deleted at the end of
5922 reload. */
5923 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
5924 SUBREG_REG (x)),
5925 insn), QImode);
5926 x = tem;
5927 }
5928 }
5929 }
5930 return x;
5931 }
5932 \f
5933 /* Substitute into the current INSN the registers into which we have reloaded
5934 the things that need reloading. The array `replacements'
5935 contains the locations of all pointers that must be changed
5936 and says what to replace them with.
5937
5938 INSN is the insn being processed; it is used when adding REG_LABEL notes. */
5939
5940 void
5941 subst_reloads (rtx insn)
5942 {
5943 int i;
5944
5945 for (i = 0; i < n_replacements; i++)
5946 {
5947 struct replacement *r = &replacements[i];
5948 rtx reloadreg = rld[r->what].reg_rtx;
5949 if (reloadreg)
5950 {
5951 #ifdef ENABLE_CHECKING
5952 /* Internal consistency test. Check that we don't modify
5953 anything in the equivalence arrays. Whenever something from
5954 those arrays needs to be reloaded, it must be unshared before
5955 being substituted into; the equivalence must not be modified.
5956 Otherwise, if the equivalence is used after that, it will
5957 have been modified, and the thing substituted (probably a
5958 register) is likely overwritten and not a usable equivalence. */
5959 int check_regno;
5960
5961 for (check_regno = 0; check_regno < max_regno; check_regno++)
5962 {
5963 #define CHECK_MODF(ARRAY) \
5964 gcc_assert (!ARRAY[check_regno] \
5965 || !loc_mentioned_in_p (r->where, \
5966 ARRAY[check_regno]))
5967
5968 CHECK_MODF (reg_equiv_constant);
5969 CHECK_MODF (reg_equiv_memory_loc);
5970 CHECK_MODF (reg_equiv_address);
5971 CHECK_MODF (reg_equiv_mem);
5972 #undef CHECK_MODF
5973 }
5974 #endif /* ENABLE_CHECKING */
5975
5976 /* If we're replacing a LABEL_REF with a register, add a
5977 REG_LABEL note to indicate to flow which label this
5978 register refers to. */
5979 if (GET_CODE (*r->where) == LABEL_REF
5980 && JUMP_P (insn))
5981 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
5982 XEXP (*r->where, 0),
5983 REG_NOTES (insn));
5984
5985 /* Encapsulate RELOADREG so its machine mode matches what
5986 used to be there. Note that gen_lowpart_common will
5987 do the wrong thing if RELOADREG is multi-word. RELOADREG
5988 will always be a REG here. */
5989 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
5990 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
5991
5992 /* If we are putting this into a SUBREG and RELOADREG is a
5993 SUBREG, we would be making nested SUBREGs, so we have to fix
5994 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
5995
5996 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
5997 {
5998 if (GET_MODE (*r->subreg_loc)
5999 == GET_MODE (SUBREG_REG (reloadreg)))
6000 *r->subreg_loc = SUBREG_REG (reloadreg);
6001 else
6002 {
6003 int final_offset =
6004 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6005
6006 /* When working with SUBREGs the rule is that the byte
6007 offset must be a multiple of the SUBREG's mode. */
6008 final_offset = (final_offset /
6009 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6010 final_offset = (final_offset *
6011 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6012
6013 *r->where = SUBREG_REG (reloadreg);
6014 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6015 }
6016 }
6017 else
6018 *r->where = reloadreg;
6019 }
6020 /* If reload got no reg and isn't optional, something's wrong. */
6021 else
6022 gcc_assert (rld[r->what].optional);
6023 }
6024 }
6025 \f
6026 /* Make a copy of any replacements being done into X and move those
6027 copies to locations in Y, a copy of X. */
6028
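/* Added usage note: this is used, for example, after a MEM has been
   unshared with copy_rtx in find_reloads_address, so that replacements
   recorded against locations inside the original address are also
   performed inside the fresh copy.  */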
6029 void
6030 copy_replacements (rtx x, rtx y)
6031 {
6032 /* We can't support X being a SUBREG because we might then need to know its
6033 location if something inside it was replaced. */
6034 gcc_assert (GET_CODE (x) != SUBREG);
6035
6036 copy_replacements_1 (&x, &y, n_replacements);
6037 }
6038
6039 static void
6040 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6041 {
6042 int i, j;
6043 rtx x, y;
6044 struct replacement *r;
6045 enum rtx_code code;
6046 const char *fmt;
6047
6048 for (j = 0; j < orig_replacements; j++)
6049 {
6050 if (replacements[j].subreg_loc == px)
6051 {
6052 r = &replacements[n_replacements++];
6053 r->where = replacements[j].where;
6054 r->subreg_loc = py;
6055 r->what = replacements[j].what;
6056 r->mode = replacements[j].mode;
6057 }
6058 else if (replacements[j].where == px)
6059 {
6060 r = &replacements[n_replacements++];
6061 r->where = py;
6062 r->subreg_loc = 0;
6063 r->what = replacements[j].what;
6064 r->mode = replacements[j].mode;
6065 }
6066 }
6067
6068 x = *px;
6069 y = *py;
6070 code = GET_CODE (x);
6071 fmt = GET_RTX_FORMAT (code);
6072
6073 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6074 {
6075 if (fmt[i] == 'e')
6076 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6077 else if (fmt[i] == 'E')
6078 for (j = XVECLEN (x, i); --j >= 0; )
6079 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6080 orig_replacements);
6081 }
6082 }
6083
6084 /* Change any replacements being done to *X to be done to *Y. */
6085
6086 void
6087 move_replacements (rtx *x, rtx *y)
6088 {
6089 int i;
6090
6091 for (i = 0; i < n_replacements; i++)
6092 if (replacements[i].subreg_loc == x)
6093 replacements[i].subreg_loc = y;
6094 else if (replacements[i].where == x)
6095 {
6096 replacements[i].where = y;
6097 replacements[i].subreg_loc = 0;
6098 }
6099 }
6100 \f
6101 /* If LOC was scheduled to be replaced by something, return the replacement.
6102 Otherwise, return *LOC. */
6103
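/* Added illustrative note (hypothetical registers): if *LOC is
   (plus (reg 100) (const_int 8)) and pseudo 100 is scheduled to be
   replaced by reload register (reg:SI 3), this returns a new
   (plus (reg:SI 3) (const_int 8)) rather than modifying *LOC.  */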
6104 rtx
6105 find_replacement (rtx *loc)
6106 {
6107 struct replacement *r;
6108
6109 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6110 {
6111 rtx reloadreg = rld[r->what].reg_rtx;
6112
6113 if (reloadreg && r->where == loc)
6114 {
6115 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6116 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6117
6118 return reloadreg;
6119 }
6120 else if (reloadreg && r->subreg_loc == loc)
6121 {
6122 /* RELOADREG must be either a REG or a SUBREG.
6123
6124 ??? Is it actually still ever a SUBREG? If so, why? */
6125
6126 if (REG_P (reloadreg))
6127 return gen_rtx_REG (GET_MODE (*loc),
6128 (REGNO (reloadreg) +
6129 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6130 GET_MODE (SUBREG_REG (*loc)),
6131 SUBREG_BYTE (*loc),
6132 GET_MODE (*loc))));
6133 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6134 return reloadreg;
6135 else
6136 {
6137 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6138
6139 /* When working with SUBREGs the rule is that the byte
6140 offset must be a multiple of the SUBREG's mode. */
6141 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6142 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6143 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6144 final_offset);
6145 }
6146 }
6147 }
6148
6149 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6150 what's inside and make a new rtl if so. */
6151 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6152 || GET_CODE (*loc) == MULT)
6153 {
6154 rtx x = find_replacement (&XEXP (*loc, 0));
6155 rtx y = find_replacement (&XEXP (*loc, 1));
6156
6157 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6158 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6159 }
6160
6161 return *loc;
6162 }
6163 \f
6164 /* Return nonzero if register in range [REGNO, ENDREGNO)
6165 appears either explicitly or implicitly in X
6166 other than being stored into (except for earlyclobber operands).
6167
6168 References contained within the substructure at LOC do not count.
6169 LOC may be zero, meaning don't ignore anything.
6170
6171 This is similar to refers_to_regno_p in rtlanal.c except that we
6172 look at equivalences for pseudos that didn't get hard registers. */
6173
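/* Added illustrative note: unlike refers_to_regno_p, a use of a pseudo
   that did not get a hard register is followed into its equivalence;
   e.g. a pseudo equivalent to a frame-pointer-relative stack slot counts
   as a reference to the frame pointer register.  */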
6174 int
6175 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6176 rtx x, rtx *loc)
6177 {
6178 int i;
6179 unsigned int r;
6180 RTX_CODE code;
6181 const char *fmt;
6182
6183 if (x == 0)
6184 return 0;
6185
6186 repeat:
6187 code = GET_CODE (x);
6188
6189 switch (code)
6190 {
6191 case REG:
6192 r = REGNO (x);
6193
6194 /* If this is a pseudo, a hard register must not have been allocated.
6195 X must therefore either be a constant or be in memory. */
6196 if (r >= FIRST_PSEUDO_REGISTER)
6197 {
6198 if (reg_equiv_memory_loc[r])
6199 return refers_to_regno_for_reload_p (regno, endregno,
6200 reg_equiv_memory_loc[r],
6201 (rtx*) 0);
6202
6203 gcc_assert (reg_equiv_constant[r]);
6204 return 0;
6205 }
6206
6207 return (endregno > r
6208 && regno < r + (r < FIRST_PSEUDO_REGISTER
6209 ? hard_regno_nregs[r][GET_MODE (x)]
6210 : 1));
6211
6212 case SUBREG:
6213 /* If this is a SUBREG of a hard reg, we can see exactly which
6214 registers are being modified. Otherwise, handle normally. */
6215 if (REG_P (SUBREG_REG (x))
6216 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6217 {
6218 unsigned int inner_regno = subreg_regno (x);
6219 unsigned int inner_endregno
6220 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6221 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
6222
6223 return endregno > inner_regno && regno < inner_endregno;
6224 }
6225 break;
6226
6227 case CLOBBER:
6228 case SET:
6229 if (&SET_DEST (x) != loc
6230 /* Note setting a SUBREG counts as referring to the REG it is in for
6231 a pseudo but not for hard registers since we can
6232 treat each word individually. */
6233 && ((GET_CODE (SET_DEST (x)) == SUBREG
6234 && loc != &SUBREG_REG (SET_DEST (x))
6235 && REG_P (SUBREG_REG (SET_DEST (x)))
6236 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6237 && refers_to_regno_for_reload_p (regno, endregno,
6238 SUBREG_REG (SET_DEST (x)),
6239 loc))
6240 /* If the output is an earlyclobber operand, this is
6241 a conflict. */
6242 || ((!REG_P (SET_DEST (x))
6243 || earlyclobber_operand_p (SET_DEST (x)))
6244 && refers_to_regno_for_reload_p (regno, endregno,
6245 SET_DEST (x), loc))))
6246 return 1;
6247
6248 if (code == CLOBBER || loc == &SET_SRC (x))
6249 return 0;
6250 x = SET_SRC (x);
6251 goto repeat;
6252
6253 default:
6254 break;
6255 }
6256
6257 /* X does not match, so try its subexpressions. */
6258
6259 fmt = GET_RTX_FORMAT (code);
6260 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6261 {
6262 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6263 {
6264 if (i == 0)
6265 {
6266 x = XEXP (x, 0);
6267 goto repeat;
6268 }
6269 else
6270 if (refers_to_regno_for_reload_p (regno, endregno,
6271 XEXP (x, i), loc))
6272 return 1;
6273 }
6274 else if (fmt[i] == 'E')
6275 {
6276 int j;
6277 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6278 if (loc != &XVECEXP (x, i, j)
6279 && refers_to_regno_for_reload_p (regno, endregno,
6280 XVECEXP (x, i, j), loc))
6281 return 1;
6282 }
6283 }
6284 return 0;
6285 }
6286
6287 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6288 we check if any register number in X conflicts with the relevant register
6289 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6290 contains a MEM (we don't bother checking for memory addresses that can't
6291 conflict because we expect this to be a rare case).
6292
6293 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6294 that we look at equivalences for pseudos that didn't get hard registers. */
6295
6296 int
6297 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6298 {
6299 int regno, endregno;
6300
6301 /* Overly conservative. */
6302 if (GET_CODE (x) == STRICT_LOW_PART
6303 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6304 x = XEXP (x, 0);
6305
6306 /* If either argument is a constant, then modifying X cannot affect IN. */
6307 if (CONSTANT_P (x) || CONSTANT_P (in))
6308 return 0;
6309 else if (GET_CODE (x) == SUBREG)
6310 {
6311 regno = REGNO (SUBREG_REG (x));
6312 if (regno < FIRST_PSEUDO_REGISTER)
6313 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6314 GET_MODE (SUBREG_REG (x)),
6315 SUBREG_BYTE (x),
6316 GET_MODE (x));
6317 }
6318 else if (REG_P (x))
6319 {
6320 regno = REGNO (x);
6321
6322 /* If this is a pseudo, it must not have been assigned a hard register.
6323 Therefore, it must either be in memory or be a constant. */
6324
6325 if (regno >= FIRST_PSEUDO_REGISTER)
6326 {
6327 if (reg_equiv_memory_loc[regno])
6328 return refers_to_mem_for_reload_p (in);
6329 gcc_assert (reg_equiv_constant[regno]);
6330 return 0;
6331 }
6332 }
6333 else if (MEM_P (x))
6334 return refers_to_mem_for_reload_p (in);
6335 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6336 || GET_CODE (x) == CC0)
6337 return reg_mentioned_p (x, in);
6338 else
6339 {
6340 gcc_assert (GET_CODE (x) == PLUS);
6341
6342 /* We actually want to know if X is mentioned somewhere inside IN.
6343 We must not say that (plus (sp) (const_int 124)) is in
6344 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6345 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6346 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6347 while (MEM_P (in))
6348 in = XEXP (in, 0);
6349 if (REG_P (in))
6350 return 0;
6351 else if (GET_CODE (in) == PLUS)
6352 return (reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6353 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6354 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6355 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6356 }
6357
6358 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6359 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
6360
6361 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6362 }
6363
6364 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6365 registers. */
6366
6367 int
6368 refers_to_mem_for_reload_p (rtx x)
6369 {
6370 const char *fmt;
6371 int i;
6372
6373 if (MEM_P (x))
6374 return 1;
6375
6376 if (REG_P (x))
6377 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6378 && reg_equiv_memory_loc[REGNO (x)]);
6379
6380 fmt = GET_RTX_FORMAT (GET_CODE (x));
6381 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6382 if (fmt[i] == 'e'
6383 && (MEM_P (XEXP (x, i))
6384 || refers_to_mem_for_reload_p (XEXP (x, i))))
6385 return 1;
6386
6387 return 0;
6388 }
6389 \f
6390 /* Check the insns before INSN to see if there is a suitable register
6391 containing the same value as GOAL.
6392 If OTHER is -1, look for a register in class CLASS.
6393 Otherwise, just see if register number OTHER shares GOAL's value.
6394
6395 Return an rtx for the register found, or zero if none is found.
6396
6397 If RELOAD_REG_P is (short *)1,
6398 we reject any hard reg that appears in reload_reg_rtx
6399 because such a hard reg is also needed coming into this insn.
6400
6401 If RELOAD_REG_P is any other nonzero value,
6402 it is a vector indexed by hard reg number
6403 and we reject any hard reg whose element in the vector is nonnegative
6404 as well as any that appears in reload_reg_rtx.
6405
6406 If GOAL is zero, then GOALREG is a register number; we look
6407 for an equivalent for that register.
6408
6409 MODE is the machine mode of the value we want an equivalence for.
6410 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6411
6412 This function is used by jump.c as well as in the reload pass.
6413
6414 If GOAL is the sum of the stack pointer and a constant, we treat it
6415 as if it were a constant except that sp is required to be unchanging. */
6416
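/* Added illustrative sketch (hypothetical insns and registers): if a
   preceding insn was (set (reg:SI 3) (mem:SI (reg:SI 100))) and GOAL is
   that same MEM in SImode, (reg:SI 3) can be returned, provided reg 3 is
   acceptable for CLASS (or equals OTHER), and nothing between that insn
   and INSN clobbered reg 3, stored into the memory, or otherwise
   invalidated the equivalence.  */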
6417 rtx
6418 find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
6419 short *reload_reg_p, int goalreg, enum machine_mode mode)
6420 {
6421 rtx p = insn;
6422 rtx goaltry, valtry, value, where;
6423 rtx pat;
6424 int regno = -1;
6425 int valueno;
6426 int goal_mem = 0;
6427 int goal_const = 0;
6428 int goal_mem_addr_varies = 0;
6429 int need_stable_sp = 0;
6430 int nregs;
6431 int valuenregs;
6432 int num = 0;
6433
6434 if (goal == 0)
6435 regno = goalreg;
6436 else if (REG_P (goal))
6437 regno = REGNO (goal);
6438 else if (MEM_P (goal))
6439 {
6440 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6441 if (MEM_VOLATILE_P (goal))
6442 return 0;
6443 if (flag_float_store && GET_MODE_CLASS (GET_MODE (goal)) == MODE_FLOAT)
6444 return 0;
6445 /* An address with side effects must be reexecuted. */
6446 switch (code)
6447 {
6448 case POST_INC:
6449 case PRE_INC:
6450 case POST_DEC:
6451 case PRE_DEC:
6452 case POST_MODIFY:
6453 case PRE_MODIFY:
6454 return 0;
6455 default:
6456 break;
6457 }
6458 goal_mem = 1;
6459 }
6460 else if (CONSTANT_P (goal))
6461 goal_const = 1;
6462 else if (GET_CODE (goal) == PLUS
6463 && XEXP (goal, 0) == stack_pointer_rtx
6464 && CONSTANT_P (XEXP (goal, 1)))
6465 goal_const = need_stable_sp = 1;
6466 else if (GET_CODE (goal) == PLUS
6467 && XEXP (goal, 0) == frame_pointer_rtx
6468 && CONSTANT_P (XEXP (goal, 1)))
6469 goal_const = 1;
6470 else
6471 return 0;
6472
6473 num = 0;
6474 /* Scan insns back from INSN, looking for one that copies
6475 a value into or out of GOAL.
6476 Stop and give up if we reach a label. */
6477
6478 while (1)
6479 {
6480 p = PREV_INSN (p);
6481 num++;
6482 if (p == 0 || LABEL_P (p)
6483 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6484 return 0;
6485
6486 if (NONJUMP_INSN_P (p)
6487 /* If we don't want spill regs ... */
6488 && (! (reload_reg_p != 0
6489 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6490 /* ... then ignore insns introduced by reload; they aren't
6491 useful and can cause results in reload_as_needed to be
6492 different from what they were when calculating the need for
6493 spills. If we notice an input-reload insn here, we will
6494 reject it below, but it might hide a usable equivalent.
6495 That makes bad code. It may even abort: perhaps no reg was
6496 spilled for this insn because it was assumed we would find
6497 that equivalent. */
6498 || INSN_UID (p) < reload_first_uid))
6499 {
6500 rtx tem;
6501 pat = single_set (p);
6502
6503 /* First check for something that sets some reg equal to GOAL. */
6504 if (pat != 0
6505 && ((regno >= 0
6506 && true_regnum (SET_SRC (pat)) == regno
6507 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6508 ||
6509 (regno >= 0
6510 && true_regnum (SET_DEST (pat)) == regno
6511 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6512 ||
6513 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6514 /* When looking for stack pointer + const,
6515 make sure we don't use a stack adjust. */
6516 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6517 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6518 || (goal_mem
6519 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6520 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6521 || (goal_mem
6522 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6523 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6524 /* If we are looking for a constant,
6525 and something equivalent to that constant was copied
6526 into a reg, we can use that reg. */
6527 || (goal_const && REG_NOTES (p) != 0
6528 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6529 && ((rtx_equal_p (XEXP (tem, 0), goal)
6530 && (valueno
6531 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6532 || (REG_P (SET_DEST (pat))
6533 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6534 && (GET_MODE_CLASS (GET_MODE (XEXP (tem, 0)))
6535 == MODE_FLOAT)
6536 && GET_CODE (goal) == CONST_INT
6537 && 0 != (goaltry
6538 = operand_subword (XEXP (tem, 0), 0, 0,
6539 VOIDmode))
6540 && rtx_equal_p (goal, goaltry)
6541 && (valtry
6542 = operand_subword (SET_DEST (pat), 0, 0,
6543 VOIDmode))
6544 && (valueno = true_regnum (valtry)) >= 0)))
6545 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6546 NULL_RTX))
6547 && REG_P (SET_DEST (pat))
6548 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6549 && (GET_MODE_CLASS (GET_MODE (XEXP (tem, 0)))
6550 == MODE_FLOAT)
6551 && GET_CODE (goal) == CONST_INT
6552 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6553 VOIDmode))
6554 && rtx_equal_p (goal, goaltry)
6555 && (valtry
6556 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6557 && (valueno = true_regnum (valtry)) >= 0)))
6558 {
6559 if (other >= 0)
6560 {
6561 if (valueno != other)
6562 continue;
6563 }
6564 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6565 continue;
6566 else
6567 {
6568 int i;
6569
6570 for (i = hard_regno_nregs[valueno][mode] - 1; i >= 0; i--)
6571 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
6572 valueno + i))
6573 break;
6574 if (i >= 0)
6575 continue;
6576 }
6577 value = valtry;
6578 where = p;
6579 break;
6580 }
6581 }
6582 }
6583
6584 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6585 (or copying VALUE into GOAL, if GOAL is also a register).
6586 Now verify that VALUE is really valid. */
6587
6588 /* VALUENO is the register number of VALUE; a hard register. */
6589
6590 /* Don't try to re-use something that is killed in this insn. We want
6591 to be able to trust REG_UNUSED notes. */
6592 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6593 return 0;
6594
6595 /* If we propose to get the value from the stack pointer or if GOAL is
6596 a MEM based on the stack pointer, we need a stable SP. */
6597 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6598 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6599 goal)))
6600 need_stable_sp = 1;
6601
6602 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6603 if (GET_MODE (value) != mode)
6604 return 0;
6605
6606 /* Reject VALUE if it was loaded from GOAL
6607 and is also a register that appears in the address of GOAL. */
6608
6609 if (goal_mem && value == SET_DEST (single_set (where))
6610 && refers_to_regno_for_reload_p (valueno,
6611 (valueno
6612 + hard_regno_nregs[valueno][mode]),
6613 goal, (rtx*) 0))
6614 return 0;
6615
6616 /* Reject registers that overlap GOAL. */
6617
6618 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6619 nregs = hard_regno_nregs[regno][mode];
6620 else
6621 nregs = 1;
6622 valuenregs = hard_regno_nregs[valueno][mode];
6623
6624 if (!goal_mem && !goal_const
6625 && regno + nregs > valueno && regno < valueno + valuenregs)
6626 return 0;
6627
6628 /* Reject VALUE if it is one of the regs reserved for reloads.
6629 Reload1 knows how to reuse them anyway, and it would get
6630 confused if we allocated one without its knowledge.
6631 (Now that insns introduced by reload are ignored above,
6632 this case shouldn't happen, but I'm not positive.) */
6633
6634 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6635 {
6636 int i;
6637 for (i = 0; i < valuenregs; ++i)
6638 if (reload_reg_p[valueno + i] >= 0)
6639 return 0;
6640 }
6641
6642 /* Reject VALUE if it is a register being used for an input reload
6643 even if it is not one of those reserved. */
6644
6645 if (reload_reg_p != 0)
6646 {
6647 int i;
6648 for (i = 0; i < n_reloads; i++)
6649 if (rld[i].reg_rtx != 0 && rld[i].in)
6650 {
6651 int regno1 = REGNO (rld[i].reg_rtx);
6652 int nregs1 = hard_regno_nregs[regno1]
6653 [GET_MODE (rld[i].reg_rtx)];
6654 if (regno1 < valueno + valuenregs
6655 && regno1 + nregs1 > valueno)
6656 return 0;
6657 }
6658 }
6659
6660 if (goal_mem)
6661 /* We must treat the frame pointer as varying here,
6662 since it can vary, e.g. across a nonlocal goto as generated by expand_goto. */
6663 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6664
6665 /* Now verify that the values of GOAL and VALUE remain unaltered
6666 until INSN is reached. */
6667
6668 p = insn;
6669 while (1)
6670 {
6671 p = PREV_INSN (p);
6672 if (p == where)
6673 return value;
6674
6675 /* Don't trust the equivalence past a function call
6676 if either of the two values is in a call-clobbered register or in memory. */
6677 if (CALL_P (p))
6678 {
6679 int i;
6680
6681 if (goal_mem || need_stable_sp)
6682 return 0;
6683
6684 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6685 for (i = 0; i < nregs; ++i)
6686 if (call_used_regs[regno + i])
6687 return 0;
6688
6689 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6690 for (i = 0; i < valuenregs; ++i)
6691 if (call_used_regs[valueno + i])
6692 return 0;
6693 #ifdef NON_SAVING_SETJMP
6694 if (NON_SAVING_SETJMP && find_reg_note (p, REG_SETJMP, NULL))
6695 return 0;
6696 #endif
6697 }
6698
6699 if (INSN_P (p))
6700 {
6701 pat = PATTERN (p);
6702
6703 /* Watch out for unspec_volatile, and volatile asms. */
6704 if (volatile_insn_p (pat))
6705 return 0;
6706
6707 /* If this insn P stores in either GOAL or VALUE, return 0.
6708 If GOAL is a memory ref and this insn writes memory, return 0.
6709 If GOAL is a memory ref and its address is not constant,
6710 and this insn P changes a register used in GOAL, return 0. */
6711
6712 if (GET_CODE (pat) == COND_EXEC)
6713 pat = COND_EXEC_CODE (pat);
6714 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6715 {
6716 rtx dest = SET_DEST (pat);
6717 while (GET_CODE (dest) == SUBREG
6718 || GET_CODE (dest) == ZERO_EXTRACT
6719 || GET_CODE (dest) == SIGN_EXTRACT
6720 || GET_CODE (dest) == STRICT_LOW_PART)
6721 dest = XEXP (dest, 0);
6722 if (REG_P (dest))
6723 {
6724 int xregno = REGNO (dest);
6725 int xnregs;
6726 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6727 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6728 else
6729 xnregs = 1;
6730 if (xregno < regno + nregs && xregno + xnregs > regno)
6731 return 0;
6732 if (xregno < valueno + valuenregs
6733 && xregno + xnregs > valueno)
6734 return 0;
6735 if (goal_mem_addr_varies
6736 && reg_overlap_mentioned_for_reload_p (dest, goal))
6737 return 0;
6738 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6739 return 0;
6740 }
6741 else if (goal_mem && MEM_P (dest)
6742 && ! push_operand (dest, GET_MODE (dest)))
6743 return 0;
6744 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6745 && reg_equiv_memory_loc[regno] != 0)
6746 return 0;
6747 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6748 return 0;
6749 }
6750 else if (GET_CODE (pat) == PARALLEL)
6751 {
6752 int i;
6753 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6754 {
6755 rtx v1 = XVECEXP (pat, 0, i);
6756 if (GET_CODE (v1) == COND_EXEC)
6757 v1 = COND_EXEC_CODE (v1);
6758 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6759 {
6760 rtx dest = SET_DEST (v1);
6761 while (GET_CODE (dest) == SUBREG
6762 || GET_CODE (dest) == ZERO_EXTRACT
6763 || GET_CODE (dest) == SIGN_EXTRACT
6764 || GET_CODE (dest) == STRICT_LOW_PART)
6765 dest = XEXP (dest, 0);
6766 if (REG_P (dest))
6767 {
6768 int xregno = REGNO (dest);
6769 int xnregs;
6770 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6771 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6772 else
6773 xnregs = 1;
6774 if (xregno < regno + nregs
6775 && xregno + xnregs > regno)
6776 return 0;
6777 if (xregno < valueno + valuenregs
6778 && xregno + xnregs > valueno)
6779 return 0;
6780 if (goal_mem_addr_varies
6781 && reg_overlap_mentioned_for_reload_p (dest,
6782 goal))
6783 return 0;
6784 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6785 return 0;
6786 }
6787 else if (goal_mem && MEM_P (dest)
6788 && ! push_operand (dest, GET_MODE (dest)))
6789 return 0;
6790 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6791 && reg_equiv_memory_loc[regno] != 0)
6792 return 0;
6793 else if (need_stable_sp
6794 && push_operand (dest, GET_MODE (dest)))
6795 return 0;
6796 }
6797 }
6798 }
6799
6800 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
6801 {
6802 rtx link;
6803
6804 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
6805 link = XEXP (link, 1))
6806 {
6807 pat = XEXP (link, 0);
6808 if (GET_CODE (pat) == CLOBBER)
6809 {
6810 rtx dest = SET_DEST (pat);
6811
6812 if (REG_P (dest))
6813 {
6814 int xregno = REGNO (dest);
6815 int xnregs
6816 = hard_regno_nregs[xregno][GET_MODE (dest)];
6817
6818 if (xregno < regno + nregs
6819 && xregno + xnregs > regno)
6820 return 0;
6821 else if (xregno < valueno + valuenregs
6822 && xregno + xnregs > valueno)
6823 return 0;
6824 else if (goal_mem_addr_varies
6825 && reg_overlap_mentioned_for_reload_p (dest,
6826 goal))
6827 return 0;
6828 }
6829
6830 else if (goal_mem && MEM_P (dest)
6831 && ! push_operand (dest, GET_MODE (dest)))
6832 return 0;
6833 else if (need_stable_sp
6834 && push_operand (dest, GET_MODE (dest)))
6835 return 0;
6836 }
6837 }
6838 }
6839
6840 #ifdef AUTO_INC_DEC
6841 /* If this insn auto-increments or auto-decrements
6842 either regno or valueno, return 0 now.
6843 If GOAL is a memory ref and its address is not constant,
6844 and this insn P increments a register used in GOAL, return 0. */
6845 {
6846 rtx link;
6847
6848 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
6849 if (REG_NOTE_KIND (link) == REG_INC
6850 && REG_P (XEXP (link, 0)))
6851 {
6852 int incno = REGNO (XEXP (link, 0));
6853 if (incno < regno + nregs && incno >= regno)
6854 return 0;
6855 if (incno < valueno + valuenregs && incno >= valueno)
6856 return 0;
6857 if (goal_mem_addr_varies
6858 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
6859 goal))
6860 return 0;
6861 }
6862 }
6863 #endif
6864 }
6865 }
6866 }
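
/* Illustrative sketch only, not used elsewhere in this file: a caller in
   the reload pass that wants to reuse a register already holding the value
   of GOAL before INSN, accepting any register of class GENERAL_REGS and
   rejecting registers already claimed for reloads, might write something
   like

	rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
				    (short *) (HOST_WIDE_INT) 1,
				    0, GET_MODE (goal));
	if (equiv != 0)
	  ... use EQUIV instead of emitting a fresh load of GOAL ...

   Passing -1 for OTHER accepts any register number in the class, and the
   GOALREG argument is ignored because GOAL is nonzero.  GENERAL_REGS is
   only a placeholder class here.  */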
6867 \f
6868 /* Find a place where INCED appears in an increment or decrement operator
6869 within X, and return the amount INCED is incremented or decremented by.
6870 The value is always positive. */
6871
6872 static int
6873 find_inc_amount (rtx x, rtx inced)
6874 {
6875 enum rtx_code code = GET_CODE (x);
6876 const char *fmt;
6877 int i;
6878
6879 if (code == MEM)
6880 {
6881 rtx addr = XEXP (x, 0);
6882 if ((GET_CODE (addr) == PRE_DEC
6883 || GET_CODE (addr) == POST_DEC
6884 || GET_CODE (addr) == PRE_INC
6885 || GET_CODE (addr) == POST_INC)
6886 && XEXP (addr, 0) == inced)
6887 return GET_MODE_SIZE (GET_MODE (x));
6888 else if ((GET_CODE (addr) == PRE_MODIFY
6889 || GET_CODE (addr) == POST_MODIFY)
6890 && GET_CODE (XEXP (addr, 1)) == PLUS
6891 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
6892 && XEXP (addr, 0) == inced
6893 && GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
6894 {
6895 i = INTVAL (XEXP (XEXP (addr, 1), 1));
6896 return i < 0 ? -i : i;
6897 }
6898 }
6899
6900 fmt = GET_RTX_FORMAT (code);
6901 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6902 {
6903 if (fmt[i] == 'e')
6904 {
6905 int tem = find_inc_amount (XEXP (x, i), inced);
6906 if (tem != 0)
6907 return tem;
6908 }
6909 if (fmt[i] == 'E')
6910 {
6911 int j;
6912 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6913 {
6914 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
6915 if (tem != 0)
6916 return tem;
6917 }
6918 }
6919 }
6920
6921 return 0;
6922 }
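
/* Illustrative example, assuming a target where SImode is 4 bytes wide:
   for X = (set (mem:SI (post_inc:SI (reg:SI 2))) (reg:SI 3)) and
   INCED = (reg:SI 2), the recursion finds the MEM and returns
   GET_MODE_SIZE (SImode), i.e. 4.  For an address such as
   (pre_modify:SI (reg:SI 2) (plus:SI (reg:SI 2) (const_int -8)))
   the constant is found inside the PLUS and the absolute value 8 is
   returned.  */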
6923 \f
6924 /* Return 1 if register REGNO, which has machine mode MODE, is the subject
6925 of a clobber in insn INSN. If SETS is nonzero, also consider SETs. */
6926
6927 int
6928 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
6929 int sets)
6930 {
6931 unsigned int nregs = hard_regno_nregs[regno][mode];
6932 unsigned int endregno = regno + nregs;
6933
6934 if ((GET_CODE (PATTERN (insn)) == CLOBBER
6935 || (sets && GET_CODE (PATTERN (insn)) == SET))
6936 && REG_P (XEXP (PATTERN (insn), 0)))
6937 {
6938 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
6939
6940 return test >= regno && test < endregno;
6941 }
6942
6943 if (GET_CODE (PATTERN (insn)) == PARALLEL)
6944 {
6945 int i = XVECLEN (PATTERN (insn), 0) - 1;
6946
6947 for (; i >= 0; i--)
6948 {
6949 rtx elt = XVECEXP (PATTERN (insn), 0, i);
6950 if ((GET_CODE (elt) == CLOBBER
6951 || (sets && GET_CODE (elt) == SET))
6952 && REG_P (XEXP (elt, 0)))
6953 {
6954 unsigned int test = REGNO (XEXP (elt, 0));
6955
6956 if (test >= regno && test < endregno)
6957 return 1;
6958 }
6959 }
6960 }
6961
6962 return 0;
6963 }
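
/* Illustrative example, assuming SImode occupies a single hard register:
   if the pattern of INSN is
     (parallel [(set (reg:SI 0) (reg:SI 1))
		(clobber (reg:SI 2))])
   then regno_clobbered_p (2, insn, SImode, 0) returns 1, while
   regno_clobbered_p (1, insn, SImode, 0) returns 0, since register 1
   is only read and SETs are ignored when SETS is zero.  */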
6964
6965 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
6966 rtx
6967 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
6968 {
6969 int regno;
6970
6971 if (GET_MODE (reloadreg) == mode)
6972 return reloadreg;
6973
6974 regno = REGNO (reloadreg);
6975
6976 if (WORDS_BIG_ENDIAN)
6977 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
6978 - (int) hard_regno_nregs[regno][mode];
6979
6980 return gen_rtx_REG (mode, regno);
6981 }
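
/* Illustrative example, assuming 32-bit words, WORDS_BIG_ENDIAN, and a
   DImode value occupying hard registers 4 and 5: for
   RELOADREG = (reg:DI 4) and MODE = SImode the low word lives in the
   higher-numbered register, so the regno is adjusted by
   hard_regno_nregs[4][DImode] - hard_regno_nregs[4][SImode] = 2 - 1 = 1
   and (reg:SI 5) is returned; on a little-endian target the register
   number is left alone and (reg:SI 4) is returned.  */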
6982
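/* Printable names for the possible values of rld[].when_needed, used only
   by the debug output below; the order here is assumed to match that of
   enum reload_type in reload.h.  */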
6983 static const char *const reload_when_needed_name[] =
6984 {
6985 "RELOAD_FOR_INPUT",
6986 "RELOAD_FOR_OUTPUT",
6987 "RELOAD_FOR_INSN",
6988 "RELOAD_FOR_INPUT_ADDRESS",
6989 "RELOAD_FOR_INPADDR_ADDRESS",
6990 "RELOAD_FOR_OUTPUT_ADDRESS",
6991 "RELOAD_FOR_OUTADDR_ADDRESS",
6992 "RELOAD_FOR_OPERAND_ADDRESS",
6993 "RELOAD_FOR_OPADDR_ADDR",
6994 "RELOAD_OTHER",
6995 "RELOAD_FOR_OTHER_ADDRESS"
6996 };
6997
6998 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6999
7000 /* These functions are used to print the variables set by `find_reloads'. */
7001
7002 void
7003 debug_reload_to_stream (FILE *f)
7004 {
7005 int r;
7006 const char *prefix;
7007
7008 if (! f)
7009 f = stderr;
7010 for (r = 0; r < n_reloads; r++)
7011 {
7012 fprintf (f, "Reload %d: ", r);
7013
7014 if (rld[r].in != 0)
7015 {
7016 fprintf (f, "reload_in (%s) = ",
7017 GET_MODE_NAME (rld[r].inmode));
7018 print_inline_rtx (f, rld[r].in, 24);
7019 fprintf (f, "\n\t");
7020 }
7021
7022 if (rld[r].out != 0)
7023 {
7024 fprintf (f, "reload_out (%s) = ",
7025 GET_MODE_NAME (rld[r].outmode));
7026 print_inline_rtx (f, rld[r].out, 24);
7027 fprintf (f, "\n\t");
7028 }
7029
7030 fprintf (f, "%s, ", reg_class_names[(int) rld[r].class]);
7031
7032 fprintf (f, "%s (opnum = %d)",
7033 reload_when_needed_name[(int) rld[r].when_needed],
7034 rld[r].opnum);
7035
7036 if (rld[r].optional)
7037 fprintf (f, ", optional");
7038
7039 if (rld[r].nongroup)
7040 fprintf (f, ", nongroup");
7041
7042 if (rld[r].inc != 0)
7043 fprintf (f, ", inc by %d", rld[r].inc);
7044
7045 if (rld[r].nocombine)
7046 fprintf (f, ", can't combine");
7047
7048 if (rld[r].secondary_p)
7049 fprintf (f, ", secondary_reload_p");
7050
7051 if (rld[r].in_reg != 0)
7052 {
7053 fprintf (f, "\n\treload_in_reg: ");
7054 print_inline_rtx (f, rld[r].in_reg, 24);
7055 }
7056
7057 if (rld[r].out_reg != 0)
7058 {
7059 fprintf (f, "\n\treload_out_reg: ");
7060 print_inline_rtx (f, rld[r].out_reg, 24);
7061 }
7062
7063 if (rld[r].reg_rtx != 0)
7064 {
7065 fprintf (f, "\n\treload_reg_rtx: ");
7066 print_inline_rtx (f, rld[r].reg_rtx, 24);
7067 }
7068
7069 prefix = "\n\t";
7070 if (rld[r].secondary_in_reload != -1)
7071 {
7072 fprintf (f, "%ssecondary_in_reload = %d",
7073 prefix, rld[r].secondary_in_reload);
7074 prefix = ", ";
7075 }
7076
7077 if (rld[r].secondary_out_reload != -1)
7078 fprintf (f, "%ssecondary_out_reload = %d\n",
7079 prefix, rld[r].secondary_out_reload);
7080
7081 prefix = "\n\t";
7082 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7083 {
7084 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7085 insn_data[rld[r].secondary_in_icode].name);
7086 prefix = ", ";
7087 }
7088
7089 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7090 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7091 insn_data[rld[r].secondary_out_icode].name);
7092
7093 fprintf (f, "\n");
7094 }
7095 }
7096
7097 void
7098 debug_reload (void)
7099 {
7100 debug_reload_to_stream (stderr);
7101 }
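
/* Usage note, reflecting convention rather than anything enforced here:
   debug_reload and debug_reload_to_stream are intended to be called by
   hand from a debugger once find_reloads has filled in the rld[] array
   for an insn, e.g.

	(gdb) call debug_reload ()

   which dumps every reload in rld[0 .. n_reloads - 1] to stderr.  */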