1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "toplev.h"
39 #include "basic-block.h"
40 #include "output.h"
41 #include "reload.h"
42
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
46 #else
47 #define STACK_PUSH_CODE PRE_INC
48 #endif
49 #endif
50
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
54 #else
55 #define STACK_POP_CODE POST_DEC
56 #endif
57 #endif
58
59 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
60 static rtx *find_single_use_1 (rtx, rtx *);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
63
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
69
70 init_recog and init_recog_no_volatile are responsible for setting this. */
71
72 int volatile_ok;
73
74 struct recog_data recog_data;
75
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
82
83 int which_alternative;
84
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
88
89 int reload_completed;
90
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
93
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
97
98 void
99 init_recog_no_volatile (void)
100 {
101 volatile_ok = 0;
102 }
103
104 void
105 init_recog (void)
106 {
107 volatile_ok = 1;
108 }
109
110 /* Try recognizing the instruction INSN,
111 and return the code number that results.
112 Remember the code so that repeated calls do not
113 need to spend the time for actual rerecognition.
114
115 This function is the normal interface to instruction recognition.
116 The automatically-generated function `recog' is normally called
117 through this one. (The only exception is in combine.c.) */
118
119 int
120 recog_memoized_1 (rtx insn)
121 {
122 if (INSN_CODE (insn) < 0)
123 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
124 return INSN_CODE (insn);
125 }
126 \f
127 /* Check that X is an insn-body for an `asm' with operands
128 and that the operands mentioned in it are legitimate. */
129
130 int
131 check_asm_operands (rtx x)
132 {
133 int noperands;
134 rtx *operands;
135 const char **constraints;
136 int i;
137
138 /* Post-reload, be more strict with things. */
139 if (reload_completed)
140 {
141 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
142 extract_insn (make_insn_raw (x));
143 constrain_operands (1);
144 return which_alternative >= 0;
145 }
146
147 noperands = asm_noperands (x);
148 if (noperands < 0)
149 return 0;
150 if (noperands == 0)
151 return 1;
152
153 operands = alloca (noperands * sizeof (rtx));
154 constraints = alloca (noperands * sizeof (char *));
155
156 decode_asm_operands (x, operands, NULL, constraints, NULL);
157
158 for (i = 0; i < noperands; i++)
159 {
160 const char *c = constraints[i];
161 if (c[0] == '%')
162 c++;
163 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
164 c = constraints[c[0] - '0'];
165
166 if (! asm_operand_ok (operands[i], c))
167 return 0;
168 }
169
170 return 1;
171 }
172 \f
173 /* Static data for the next two routines. */
174
175 typedef struct change_t
176 {
177 rtx object;
178 int old_code;
179 rtx *loc;
180 rtx old;
181 } change_t;
182
183 static change_t *changes;
184 static int changes_allocated;
185
186 static int num_changes = 0;
187
188 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
189 at which NEW will be placed. If OBJECT is zero, no validation is done,
190 the change is simply made.
191
192 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
193 will be called with the address and mode as parameters. If OBJECT is
194 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 the change in place.
196
197 IN_GROUP is nonzero if this is part of a group of changes that must be
198 performed as a group. In that case, the changes will be stored. The
199 function `apply_change_group' will validate and apply the changes.
200
201 If IN_GROUP is zero, this is a single change. Try to recognize the insn
202 or validate the memory reference with the change applied. If the result
203 is not valid for the machine, suppress the change and return zero.
204 Otherwise, perform the change and return 1. */
205
206 int
207 validate_change (rtx object, rtx *loc, rtx new, int in_group)
208 {
209 rtx old = *loc;
210
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
213
214 if (in_group == 0 && num_changes != 0)
215 abort ();
216
217 *loc = new;
218
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
221 {
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
228
229 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
230 }
231
232 changes[num_changes].object = object;
233 changes[num_changes].loc = loc;
234 changes[num_changes].old = old;
235
236 if (object && !MEM_P (object))
237 {
238 /* Set INSN_CODE to force rerecognition of insn. Save old code in
239 case invalid. */
240 changes[num_changes].old_code = INSN_CODE (object);
241 INSN_CODE (object) = -1;
242 }
243
244 num_changes++;
245
246 /* If we are making a group of changes, return 1. Otherwise, validate the
247 change group we made. */
248
249 if (in_group)
250 return 1;
251 else
252 return apply_change_group ();
253 }
254
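/* A minimal usage sketch (hypothetical INSN whose PATTERN is assumed to
   be a SET, with hypothetical replacement rtxes; not taken from any
   caller in this file): queue two related edits as one group, then let
   apply_change_group accept or back out both at once.

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (! apply_change_group ())
       ...                       (both edits have been undone)

   With IN_GROUP == 0 the same call validates immediately and returns
   nonzero only if the change was kept.  */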
255 /* This subroutine of apply_change_group verifies whether the changes to INSN
256 were valid; i.e. whether INSN can still be recognized. */
257
258 int
259 insn_invalid_p (rtx insn)
260 {
261 rtx pat = PATTERN (insn);
262 int num_clobbers = 0;
263 /* If we are before reload and the pattern is a SET, see if we can add
264 clobbers. */
265 int icode = recog (pat, insn,
266 (GET_CODE (pat) == SET
267 && ! reload_completed && ! reload_in_progress)
268 ? &num_clobbers : 0);
269 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
270
271
272   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
273 this is not an asm and the insn wasn't recognized. */
274 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
275 || (!is_asm && icode < 0))
276 return 1;
277
278 /* If we have to add CLOBBERs, fail if we have to add ones that reference
279 hard registers since our callers can't know if they are live or not.
280 Otherwise, add them. */
281 if (num_clobbers > 0)
282 {
283 rtx newpat;
284
285 if (added_clobbers_hard_reg_p (icode))
286 return 1;
287
288 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
289 XVECEXP (newpat, 0, 0) = pat;
290 add_clobbers (newpat, icode);
291 PATTERN (insn) = pat = newpat;
292 }
293
294 /* After reload, verify that all constraints are satisfied. */
295 if (reload_completed)
296 {
297 extract_insn (insn);
298
299 if (! constrain_operands (1))
300 return 1;
301 }
302
303 INSN_CODE (insn) = icode;
304 return 0;
305 }
306
307 /* Return number of changes made and not validated yet. */
308 int
309 num_changes_pending (void)
310 {
311 return num_changes;
312 }
313
314 /* Apply a group of changes previously issued with `validate_change'.
315 Return 1 if all changes are valid, zero otherwise. */
316
317 int
318 apply_change_group (void)
319 {
320 int i;
321 rtx last_validated = NULL_RTX;
322
323 /* The changes have been applied and all INSN_CODEs have been reset to force
324 rerecognition.
325
326 The changes are valid if we aren't given an object, or if we are
327      given a MEM and it still is a valid address, or if this is an insn
328 and it is recognized. In the latter case, if reload has completed,
329 we also require that the operands meet the constraints for
330 the insn. */
331
332 for (i = 0; i < num_changes; i++)
333 {
334 rtx object = changes[i].object;
335
336 /* If there is no object to test or if it is the same as the one we
337 already tested, ignore it. */
338 if (object == 0 || object == last_validated)
339 continue;
340
341 if (MEM_P (object))
342 {
343 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
344 break;
345 }
346 else if (insn_invalid_p (object))
347 {
348 rtx pat = PATTERN (object);
349
350 /* Perhaps we couldn't recognize the insn because there were
351 extra CLOBBERs at the end. If so, try to re-recognize
352 without the last CLOBBER (later iterations will cause each of
353 them to be eliminated, in turn). But don't do this if we
354              have an ASM_OPERANDS.  */
355 if (GET_CODE (pat) == PARALLEL
356 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
357 && asm_noperands (PATTERN (object)) < 0)
358 {
359 rtx newpat;
360
361 if (XVECLEN (pat, 0) == 2)
362 newpat = XVECEXP (pat, 0, 0);
363 else
364 {
365 int j;
366
367 newpat
368 = gen_rtx_PARALLEL (VOIDmode,
369 rtvec_alloc (XVECLEN (pat, 0) - 1));
370 for (j = 0; j < XVECLEN (newpat, 0); j++)
371 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
372 }
373
374 /* Add a new change to this group to replace the pattern
375 with this new pattern. Then consider this change
376 as having succeeded. The change we added will
377 cause the entire call to fail if things remain invalid.
378
379 Note that this can lose if a later change than the one
380 we are processing specified &XVECEXP (PATTERN (object), 0, X)
381 but this shouldn't occur. */
382
383 validate_change (object, &PATTERN (object), newpat, 1);
384 continue;
385 }
386 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
387 /* If this insn is a CLOBBER or USE, it is always valid, but is
388 never recognized. */
389 continue;
390 else
391 break;
392 }
393 last_validated = object;
394 }
395
396 if (i == num_changes)
397 {
398 basic_block bb;
399
400 for (i = 0; i < num_changes; i++)
401 if (changes[i].object
402 && INSN_P (changes[i].object)
403 && (bb = BLOCK_FOR_INSN (changes[i].object)))
404 bb->flags |= BB_DIRTY;
405
406 num_changes = 0;
407 return 1;
408 }
409 else
410 {
411 cancel_changes (0);
412 return 0;
413 }
414 }
415
416 /* Return the number of changes so far in the current group. */
417
418 int
419 num_validated_changes (void)
420 {
421 return num_changes;
422 }
423
424 /* Retract the changes numbered NUM and up. */
425
426 void
427 cancel_changes (int num)
428 {
429 int i;
430
431 /* Back out all the changes. Do this in the opposite order in which
432 they were made. */
433 for (i = num_changes - 1; i >= num; i--)
434 {
435 *changes[i].loc = changes[i].old;
436 if (changes[i].object && !MEM_P (changes[i].object))
437 INSN_CODE (changes[i].object) = changes[i].old_code;
438 }
439 num_changes = num;
440 }
441
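/* Sketch of the tentative-change idiom this supports (the failing
   condition here is illustrative; combine and similar passes use
   variants of it): remember the current depth of the group, queue more
   changes, and roll back to that point if a later test fails.

     int old = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (some_further_test_fails)
       cancel_changes (old);

   Changes numbered OLD and up are undone in reverse order.  */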
442 /* Replace every occurrence of FROM in X with TO. Mark each change with
443 validate_change passing OBJECT. */
444
445 static void
446 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
447 {
448 int i, j;
449 const char *fmt;
450 rtx x = *loc;
451 enum rtx_code code;
452 enum machine_mode op0_mode = VOIDmode;
453 int prev_changes = num_changes;
454 rtx new;
455
456 if (!x)
457 return;
458
459 code = GET_CODE (x);
460 fmt = GET_RTX_FORMAT (code);
461 if (fmt[0] == 'e')
462 op0_mode = GET_MODE (XEXP (x, 0));
463
464 /* X matches FROM if it is the same rtx or they are both referring to the
465 same register in the same mode. Avoid calling rtx_equal_p unless the
466 operands look similar. */
467
468 if (x == from
469 || (REG_P (x) && REG_P (from)
470 && GET_MODE (x) == GET_MODE (from)
471 && REGNO (x) == REGNO (from))
472 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
473 && rtx_equal_p (x, from)))
474 {
475 validate_change (object, loc, to, 1);
476 return;
477 }
478
479 /* Call ourself recursively to perform the replacements.
480      We must not replace inside an already replaced expression, otherwise we
481 get infinite recursion for replacements like (reg X)->(subreg (reg X))
482 done by regmove, so we must special case shared ASM_OPERANDS. */
483
484 if (GET_CODE (x) == PARALLEL)
485 {
486 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
487 {
488 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
489 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
490 {
491 /* Verify that operands are really shared. */
492 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
493 ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
494 abort ();
495 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
496 from, to, object);
497 }
498 else
499 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
500 }
501 }
502 else
503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
504 {
505 if (fmt[i] == 'e')
506 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
507 else if (fmt[i] == 'E')
508 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
509 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
510 }
511
512 /* If we didn't substitute, there is nothing more to do. */
513 if (num_changes == prev_changes)
514 return;
515
516 /* Allow substituted expression to have different mode. This is used by
517      regmove to change the mode of a pseudo register.  */
518 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
519 op0_mode = GET_MODE (XEXP (x, 0));
520
521 /* Do changes needed to keep rtx consistent. Don't do any other
522 simplifications, as it is not our job. */
523
524 if (SWAPPABLE_OPERANDS_P (x)
525 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
526 {
527 validate_change (object, loc,
528 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
529 : swap_condition (code),
530 GET_MODE (x), XEXP (x, 1),
531 XEXP (x, 0)), 1);
532 x = *loc;
533 code = GET_CODE (x);
534 }
535
536 switch (code)
537 {
538 case PLUS:
539 /* If we have a PLUS whose second operand is now a CONST_INT, use
540 simplify_gen_binary to try to simplify it.
541 ??? We may want later to remove this, once simplification is
542 separated from this function. */
543 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
544 validate_change (object, loc,
545 simplify_gen_binary
546 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
547 break;
548 case MINUS:
549 if (GET_CODE (XEXP (x, 1)) == CONST_INT
550 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
551 validate_change (object, loc,
552 simplify_gen_binary
553 (PLUS, GET_MODE (x), XEXP (x, 0),
554 simplify_gen_unary (NEG,
555 GET_MODE (x), XEXP (x, 1),
556 GET_MODE (x))), 1);
557 break;
558 case ZERO_EXTEND:
559 case SIGN_EXTEND:
560 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
561 {
562 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
563 op0_mode);
564 /* If any of the above failed, substitute in something that
565 we know won't be recognized. */
566 if (!new)
567 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
569 }
570 break;
571 case SUBREG:
572 /* All subregs possible to simplify should be simplified. */
573 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
574 SUBREG_BYTE (x));
575
576 /* Subregs of VOIDmode operands are incorrect. */
577 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
578 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
579 if (new)
580 validate_change (object, loc, new, 1);
581 break;
582 case ZERO_EXTRACT:
583 case SIGN_EXTRACT:
584 /* If we are replacing a register with memory, try to change the memory
585 to be the mode required for memory in extract operations (this isn't
586 likely to be an insertion operation; if it was, nothing bad will
587 happen, we might just fail in some cases). */
588
589 if (MEM_P (XEXP (x, 0))
590 && GET_CODE (XEXP (x, 1)) == CONST_INT
591 && GET_CODE (XEXP (x, 2)) == CONST_INT
592 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
593 && !MEM_VOLATILE_P (XEXP (x, 0)))
594 {
595 enum machine_mode wanted_mode = VOIDmode;
596 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
597 int pos = INTVAL (XEXP (x, 2));
598
599 if (GET_CODE (x) == ZERO_EXTRACT)
600 {
601 enum machine_mode new_mode
602 = mode_for_extraction (EP_extzv, 1);
603 if (new_mode != MAX_MACHINE_MODE)
604 wanted_mode = new_mode;
605 }
606 else if (GET_CODE (x) == SIGN_EXTRACT)
607 {
608 enum machine_mode new_mode
609 = mode_for_extraction (EP_extv, 1);
610 if (new_mode != MAX_MACHINE_MODE)
611 wanted_mode = new_mode;
612 }
613
614 /* If we have a narrower mode, we can do something. */
615 if (wanted_mode != VOIDmode
616 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
617 {
618 int offset = pos / BITS_PER_UNIT;
619 rtx newmem;
620
621 /* If the bytes and bits are counted differently, we
622 must adjust the offset. */
623 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
624 offset =
625 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
626 offset);
627
628 pos %= GET_MODE_BITSIZE (wanted_mode);
629
630 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
631
632 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
633 validate_change (object, &XEXP (x, 0), newmem, 1);
634 }
635 }
636
637 break;
638
639 default:
640 break;
641 }
642 }
643
644 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
645 with TO. After all changes have been made, validate by seeing
646 if INSN is still valid. */
647
648 int
649 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
650 {
651 validate_replace_rtx_1 (loc, from, to, insn);
652 return apply_change_group ();
653 }
654
655 /* Try replacing every occurrence of FROM in INSN with TO. After all
656 changes have been made, validate by seeing if INSN is still valid. */
657
658 int
659 validate_replace_rtx (rtx from, rtx to, rtx insn)
660 {
661 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
662 return apply_change_group ();
663 }
664
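/* Usage sketch (FROM and TO are hypothetical pseudo registers): rewrite
   every use of FROM inside INSN and keep the result only if INSN is
   still recognizable.

     if (validate_replace_rtx (from, to, insn))
       ...                       (INSN now uses TO throughout)

   On failure, apply_change_group has already restored every occurrence
   via cancel_changes.  */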
665 /* Try replacing every occurrence of FROM in INSN with TO. */
666
667 void
668 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
669 {
670 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
671 }
672
673 /* Function called by note_uses to replace used subexpressions. */
674 struct validate_replace_src_data
675 {
676 rtx from; /* Old RTX */
677 rtx to; /* New RTX */
678 rtx insn; /* Insn in which substitution is occurring. */
679 };
680
681 static void
682 validate_replace_src_1 (rtx *x, void *data)
683 {
684 struct validate_replace_src_data *d
685 = (struct validate_replace_src_data *) data;
686
687 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
688 }
689
690 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
691 SET_DESTs. */
692
693 void
694 validate_replace_src_group (rtx from, rtx to, rtx insn)
695 {
696 struct validate_replace_src_data d;
697
698 d.from = from;
699 d.to = to;
700 d.insn = insn;
701 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
702 }
703 \f
704 #ifdef HAVE_cc0
705 /* Return 1 if the insn using CC0 set by INSN does not contain
706 any ordered tests applied to the condition codes.
707 EQ and NE tests do not count. */
708
709 int
710 next_insn_tests_no_inequality (rtx insn)
711 {
712 rtx next = next_cc0_user (insn);
713
714 /* If there is no next insn, we have to take the conservative choice. */
715 if (next == 0)
716 return 0;
717
718 return (INSN_P (next)
719 && ! inequality_comparisons_p (PATTERN (next)));
720 }
721 #endif
722 \f
723 /* This is used by find_single_use to locate an rtx that contains exactly one
724 use of DEST, which is typically either a REG or CC0. It returns a
725 pointer to the innermost rtx expression containing DEST. Appearances of
726 DEST that are being used to totally replace it are not counted. */
727
728 static rtx *
729 find_single_use_1 (rtx dest, rtx *loc)
730 {
731 rtx x = *loc;
732 enum rtx_code code = GET_CODE (x);
733 rtx *result = 0;
734 rtx *this_result;
735 int i;
736 const char *fmt;
737
738 switch (code)
739 {
740 case CONST_INT:
741 case CONST:
742 case LABEL_REF:
743 case SYMBOL_REF:
744 case CONST_DOUBLE:
745 case CONST_VECTOR:
746 case CLOBBER:
747 return 0;
748
749 case SET:
750 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
751 of a REG that occupies all of the REG, the insn uses DEST if
752 it is mentioned in the destination or the source. Otherwise, we
753      need only check the source.  */
754 if (GET_CODE (SET_DEST (x)) != CC0
755 && GET_CODE (SET_DEST (x)) != PC
756 && !REG_P (SET_DEST (x))
757 && ! (GET_CODE (SET_DEST (x)) == SUBREG
758 && REG_P (SUBREG_REG (SET_DEST (x)))
759 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
760 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
761 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
762 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
763 break;
764
765 return find_single_use_1 (dest, &SET_SRC (x));
766
767 case MEM:
768 case SUBREG:
769 return find_single_use_1 (dest, &XEXP (x, 0));
770
771 default:
772 break;
773 }
774
775 /* If it wasn't one of the common cases above, check each expression and
776 vector of this code. Look for a unique usage of DEST. */
777
778 fmt = GET_RTX_FORMAT (code);
779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
780 {
781 if (fmt[i] == 'e')
782 {
783 if (dest == XEXP (x, i)
784 || (REG_P (dest) && REG_P (XEXP (x, i))
785 && REGNO (dest) == REGNO (XEXP (x, i))))
786 this_result = loc;
787 else
788 this_result = find_single_use_1 (dest, &XEXP (x, i));
789
790 if (result == 0)
791 result = this_result;
792 else if (this_result)
793 /* Duplicate usage. */
794 return 0;
795 }
796 else if (fmt[i] == 'E')
797 {
798 int j;
799
800 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
801 {
802 if (XVECEXP (x, i, j) == dest
803 || (REG_P (dest)
804 && REG_P (XVECEXP (x, i, j))
805 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
806 this_result = loc;
807 else
808 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
809
810 if (result == 0)
811 result = this_result;
812 else if (this_result)
813 return 0;
814 }
815 }
816 }
817
818 return result;
819 }
820 \f
821 /* See if DEST, produced in INSN, is used only a single time in the
822 sequel. If so, return a pointer to the innermost rtx expression in which
823 it is used.
824
825 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
826
827    This routine will usually return zero either before flow is called (because
828 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
829 note can't be trusted).
830
831 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
832 care about REG_DEAD notes or LOG_LINKS.
833
834 Otherwise, we find the single use by finding an insn that has a
835 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
836 only referenced once in that insn, we know that it must be the first
837 and last insn referencing DEST. */
838
839 rtx *
840 find_single_use (rtx dest, rtx insn, rtx *ploc)
841 {
842 rtx next;
843 rtx *result;
844 rtx link;
845
846 #ifdef HAVE_cc0
847 if (dest == cc0_rtx)
848 {
849 next = NEXT_INSN (insn);
850 if (next == 0
851 || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
852 return 0;
853
854 result = find_single_use_1 (dest, &PATTERN (next));
855 if (result && ploc)
856 *ploc = next;
857 return result;
858 }
859 #endif
860
861 if (reload_completed || reload_in_progress || !REG_P (dest))
862 return 0;
863
864 for (next = next_nonnote_insn (insn);
865 next != 0 && !LABEL_P (next);
866 next = next_nonnote_insn (next))
867 if (INSN_P (next) && dead_or_set_p (next, dest))
868 {
869 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
870 if (XEXP (link, 0) == insn)
871 break;
872
873 if (link)
874 {
875 result = find_single_use_1 (dest, &PATTERN (next));
876 if (ploc)
877 *ploc = next;
878 return result;
879 }
880 }
881
882 return 0;
883 }
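
/* Usage sketch (DEST is a pseudo set by INSN; OTHER_INSN is filled in
   by the call):

     rtx other_insn;
     rtx *usep = find_single_use (dest, insn, &other_insn);
     if (usep)
       ...                       (*USEP is the innermost expression
                                  in OTHER_INSN mentioning DEST)

   A null result only means a single use could not be proven, e.g.
   before flow has built LOG_LINKS or after reload.  */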
884 \f
885 /* Return 1 if OP is a valid general operand for machine mode MODE.
886 This is either a register reference, a memory reference,
887 or a constant. In the case of a memory reference, the address
888 is checked for general validity for the target machine.
889
890 Register and memory references must have mode MODE in order to be valid,
891 but some constants have no machine mode and are valid for any mode.
892
893 If MODE is VOIDmode, OP is checked for validity for whatever mode
894 it has.
895
896 The main use of this function is as a predicate in match_operand
897 expressions in the machine description.
898
899 For an explanation of this function's behavior for registers of
900 class NO_REGS, see the comment for `register_operand'. */
901
902 int
903 general_operand (rtx op, enum machine_mode mode)
904 {
905 enum rtx_code code = GET_CODE (op);
906
907 if (mode == VOIDmode)
908 mode = GET_MODE (op);
909
910 /* Don't accept CONST_INT or anything similar
911 if the caller wants something floating. */
912 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
913 && GET_MODE_CLASS (mode) != MODE_INT
914 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
915 return 0;
916
917 if (GET_CODE (op) == CONST_INT
918 && mode != VOIDmode
919 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
920 return 0;
921
922 if (CONSTANT_P (op))
923 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
924 || mode == VOIDmode)
925 #ifdef LEGITIMATE_PIC_OPERAND_P
926 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
927 #endif
928 && LEGITIMATE_CONSTANT_P (op));
929
930 /* Except for certain constants with VOIDmode, already checked for,
931 OP's mode must match MODE if MODE specifies a mode. */
932
933 if (GET_MODE (op) != mode)
934 return 0;
935
936 if (code == SUBREG)
937 {
938 rtx sub = SUBREG_REG (op);
939
940 #ifdef INSN_SCHEDULING
941 /* On machines that have insn scheduling, we want all memory
942          references to be explicit, so outlaw paradoxical SUBREGs.  */
943 if (MEM_P (sub)
944 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
945 return 0;
946 #endif
947 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
948          may result in an incorrect reference.  We should simplify all valid
949 subregs of MEM anyway. But allow this after reload because we
950 might be called from cleanup_subreg_operands.
951
952 ??? This is a kludge. */
953 if (!reload_completed && SUBREG_BYTE (op) != 0
954 && MEM_P (sub))
955 return 0;
956
957 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
958 create such rtl, and we must reject it. */
959 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
960 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
961 return 0;
962
963 op = sub;
964 code = GET_CODE (op);
965 }
966
967 if (code == REG)
968 /* A register whose class is NO_REGS is not a general operand. */
969 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
970 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
971
972 if (code == MEM)
973 {
974 rtx y = XEXP (op, 0);
975
976 if (! volatile_ok && MEM_VOLATILE_P (op))
977 return 0;
978
979 /* Use the mem's mode, since it will be reloaded thus. */
980 mode = GET_MODE (op);
981 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
982 }
983
984 return 0;
985
986 win:
987 return 1;
988 }
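
/* For instance, with a freshly created SImode pseudo (illustrative
   calls; the CONST_INT case also assumes the constant satisfies
   LEGITIMATE_CONSTANT_P on the target):

     rtx reg = gen_reg_rtx (SImode);
     general_operand (reg, SImode);            yields 1
     general_operand (GEN_INT (4), SImode);    yields 1
     general_operand (reg, QImode);            yields 0, wrong mode  */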
989 \f
990 /* Return 1 if OP is a valid memory address for a memory reference
991 of mode MODE.
992
993 The main use of this function is as a predicate in match_operand
994 expressions in the machine description. */
995
996 int
997 address_operand (rtx op, enum machine_mode mode)
998 {
999 return memory_address_p (mode, op);
1000 }
1001
1002 /* Return 1 if OP is a register reference of mode MODE.
1003 If MODE is VOIDmode, accept a register in any mode.
1004
1005 The main use of this function is as a predicate in match_operand
1006 expressions in the machine description.
1007
1008 As a special exception, registers whose class is NO_REGS are
1009    not accepted by `register_operand'.  The reason for this exception
1010 is to allow the representation of special architecture artifacts
1011 (such as a condition code register) without extending the rtl
1012 definitions. Since registers of class NO_REGS cannot be used
1013 as registers in any case where register classes are examined,
1014 it is most consistent to keep this function from accepting them. */
1015
1016 int
1017 register_operand (rtx op, enum machine_mode mode)
1018 {
1019 if (GET_MODE (op) != mode && mode != VOIDmode)
1020 return 0;
1021
1022 if (GET_CODE (op) == SUBREG)
1023 {
1024 rtx sub = SUBREG_REG (op);
1025
1026 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1027 because it is guaranteed to be reloaded into one.
1028 Just make sure the MEM is valid in itself.
1029 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1030 but currently it does result from (SUBREG (REG)...) where the
1031 reg went on the stack.) */
1032 if (! reload_completed && MEM_P (sub))
1033 return general_operand (op, mode);
1034
1035 #ifdef CANNOT_CHANGE_MODE_CLASS
1036 if (REG_P (sub)
1037 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1038 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1039 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1040 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1041 return 0;
1042 #endif
1043
1044 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1045 create such rtl, and we must reject it. */
1046 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1047 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1048 return 0;
1049
1050 op = sub;
1051 }
1052
1053 /* We don't consider registers whose class is NO_REGS
1054      to be register operands.  */
1055 return (REG_P (op)
1056 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1057 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1058 }
1059
1060 /* Return 1 for a register in Pmode; ignore the tested mode. */
1061
1062 int
1063 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1064 {
1065 return register_operand (op, Pmode);
1066 }
1067
1068 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1069 or a hard register. */
1070
1071 int
1072 scratch_operand (rtx op, enum machine_mode mode)
1073 {
1074 if (GET_MODE (op) != mode && mode != VOIDmode)
1075 return 0;
1076
1077 return (GET_CODE (op) == SCRATCH
1078 || (REG_P (op)
1079 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1080 }
1081
1082 /* Return 1 if OP is a valid immediate operand for mode MODE.
1083
1084 The main use of this function is as a predicate in match_operand
1085 expressions in the machine description. */
1086
1087 int
1088 immediate_operand (rtx op, enum machine_mode mode)
1089 {
1090 /* Don't accept CONST_INT or anything similar
1091 if the caller wants something floating. */
1092 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1093 && GET_MODE_CLASS (mode) != MODE_INT
1094 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1095 return 0;
1096
1097 if (GET_CODE (op) == CONST_INT
1098 && mode != VOIDmode
1099 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1100 return 0;
1101
1102 return (CONSTANT_P (op)
1103 && (GET_MODE (op) == mode || mode == VOIDmode
1104 || GET_MODE (op) == VOIDmode)
1105 #ifdef LEGITIMATE_PIC_OPERAND_P
1106 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1107 #endif
1108 && LEGITIMATE_CONSTANT_P (op));
1109 }
1110
1111 /* Returns 1 if OP is an operand that is a CONST_INT. */
1112
1113 int
1114 const_int_operand (rtx op, enum machine_mode mode)
1115 {
1116 if (GET_CODE (op) != CONST_INT)
1117 return 0;
1118
1119 if (mode != VOIDmode
1120 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1121 return 0;
1122
1123 return 1;
1124 }
1125
1126 /* Returns 1 if OP is an operand that is a constant integer or constant
1127 floating-point number. */
1128
1129 int
1130 const_double_operand (rtx op, enum machine_mode mode)
1131 {
1132 /* Don't accept CONST_INT or anything similar
1133 if the caller wants something floating. */
1134 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1135 && GET_MODE_CLASS (mode) != MODE_INT
1136 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1137 return 0;
1138
1139 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1140 && (mode == VOIDmode || GET_MODE (op) == mode
1141 || GET_MODE (op) == VOIDmode));
1142 }
1143
1144 /* Return 1 if OP is a general operand that is not an immediate operand. */
1145
1146 int
1147 nonimmediate_operand (rtx op, enum machine_mode mode)
1148 {
1149 return (general_operand (op, mode) && ! CONSTANT_P (op));
1150 }
1151
1152 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1153
1154 int
1155 nonmemory_operand (rtx op, enum machine_mode mode)
1156 {
1157 if (CONSTANT_P (op))
1158 {
1159 /* Don't accept CONST_INT or anything similar
1160 if the caller wants something floating. */
1161 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1162 && GET_MODE_CLASS (mode) != MODE_INT
1163 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1164 return 0;
1165
1166 if (GET_CODE (op) == CONST_INT
1167 && mode != VOIDmode
1168 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1169 return 0;
1170
1171 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1172 || mode == VOIDmode)
1173 #ifdef LEGITIMATE_PIC_OPERAND_P
1174 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1175 #endif
1176 && LEGITIMATE_CONSTANT_P (op));
1177 }
1178
1179 if (GET_MODE (op) != mode && mode != VOIDmode)
1180 return 0;
1181
1182 if (GET_CODE (op) == SUBREG)
1183 {
1184 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1185 because it is guaranteed to be reloaded into one.
1186 Just make sure the MEM is valid in itself.
1187 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1188 but currently it does result from (SUBREG (REG)...) where the
1189 reg went on the stack.) */
1190 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1191 return general_operand (op, mode);
1192 op = SUBREG_REG (op);
1193 }
1194
1195 /* We don't consider registers whose class is NO_REGS
1196      to be register operands.  */
1197 return (REG_P (op)
1198 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1199 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1200 }
1201
1202 /* Return 1 if OP is a valid operand that stands for pushing a
1203 value of mode MODE onto the stack.
1204
1205 The main use of this function is as a predicate in match_operand
1206 expressions in the machine description. */
1207
1208 int
1209 push_operand (rtx op, enum machine_mode mode)
1210 {
1211 unsigned int rounded_size = GET_MODE_SIZE (mode);
1212
1213 #ifdef PUSH_ROUNDING
1214 rounded_size = PUSH_ROUNDING (rounded_size);
1215 #endif
1216
1217 if (!MEM_P (op))
1218 return 0;
1219
1220 if (mode != VOIDmode && GET_MODE (op) != mode)
1221 return 0;
1222
1223 op = XEXP (op, 0);
1224
1225 if (rounded_size == GET_MODE_SIZE (mode))
1226 {
1227 if (GET_CODE (op) != STACK_PUSH_CODE)
1228 return 0;
1229 }
1230 else
1231 {
1232 if (GET_CODE (op) != PRE_MODIFY
1233 || GET_CODE (XEXP (op, 1)) != PLUS
1234 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1235 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1236 #ifdef STACK_GROWS_DOWNWARD
1237 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1238 #else
1239 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1240 #endif
1241 )
1242 return 0;
1243 }
1244
1245 return XEXP (op, 0) == stack_pointer_rtx;
1246 }
1247
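/* Concretely, on a target whose stack grows downward and whose
   PUSH_ROUNDING is the identity, this accepts addresses of the form

     (mem:SI (pre_dec:P (reg sp)))

   while a target that rounds a 2-byte push up to 4 bytes would instead
   need the PRE_MODIFY shape

     (mem:HI (pre_modify:P (reg sp) (plus:P (reg sp) (const_int -4))))

   (the modes and the -4 are illustrative).  */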
1248 /* Return 1 if OP is a valid operand that stands for popping a
1249 value of mode MODE off the stack.
1250
1251 The main use of this function is as a predicate in match_operand
1252 expressions in the machine description. */
1253
1254 int
1255 pop_operand (rtx op, enum machine_mode mode)
1256 {
1257 if (!MEM_P (op))
1258 return 0;
1259
1260 if (mode != VOIDmode && GET_MODE (op) != mode)
1261 return 0;
1262
1263 op = XEXP (op, 0);
1264
1265 if (GET_CODE (op) != STACK_POP_CODE)
1266 return 0;
1267
1268 return XEXP (op, 0) == stack_pointer_rtx;
1269 }
1270
1271 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1272
1273 int
1274 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1275 {
1276 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1277 return 0;
1278
1279 win:
1280 return 1;
1281 }
1282
1283 /* Return 1 if OP is a valid memory reference with mode MODE,
1284 including a valid address.
1285
1286 The main use of this function is as a predicate in match_operand
1287 expressions in the machine description. */
1288
1289 int
1290 memory_operand (rtx op, enum machine_mode mode)
1291 {
1292 rtx inner;
1293
1294 if (! reload_completed)
1295 /* Note that no SUBREG is a memory operand before end of reload pass,
1296 because (SUBREG (MEM...)) forces reloading into a register. */
1297 return MEM_P (op) && general_operand (op, mode);
1298
1299 if (mode != VOIDmode && GET_MODE (op) != mode)
1300 return 0;
1301
1302 inner = op;
1303 if (GET_CODE (inner) == SUBREG)
1304 inner = SUBREG_REG (inner);
1305
1306 return (MEM_P (inner) && general_operand (op, mode));
1307 }
1308
1309 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1310 that is, a memory reference whose address is a general_operand. */
1311
1312 int
1313 indirect_operand (rtx op, enum machine_mode mode)
1314 {
1315 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1316 if (! reload_completed
1317 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1318 {
1319 int offset = SUBREG_BYTE (op);
1320 rtx inner = SUBREG_REG (op);
1321
1322 if (mode != VOIDmode && GET_MODE (op) != mode)
1323 return 0;
1324
1325 /* The only way that we can have a general_operand as the resulting
1326 address is if OFFSET is zero and the address already is an operand
1327 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1328 operand. */
1329
1330 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1331 || (GET_CODE (XEXP (inner, 0)) == PLUS
1332 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1333 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1334 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1335 }
1336
1337 return (MEM_P (op)
1338 && memory_operand (op, mode)
1339 && general_operand (XEXP (op, 0), Pmode));
1340 }
1341
1342 /* Return 1 if this is a comparison operator. This allows the use of
1343 MATCH_OPERATOR to recognize all the branch insns. */
1344
1345 int
1346 comparison_operator (rtx op, enum machine_mode mode)
1347 {
1348 return ((mode == VOIDmode || GET_MODE (op) == mode)
1349 && COMPARISON_P (op));
1350 }
1351 \f
1352 /* If BODY is an insn body that uses ASM_OPERANDS,
1353 return the number of operands (both input and output) in the insn.
1354 Otherwise return -1. */
1355
1356 int
1357 asm_noperands (rtx body)
1358 {
1359 switch (GET_CODE (body))
1360 {
1361 case ASM_OPERANDS:
1362 /* No output operands: return number of input operands. */
1363 return ASM_OPERANDS_INPUT_LENGTH (body);
1364 case SET:
1365 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1366 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1367 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1368 else
1369 return -1;
1370 case PARALLEL:
1371 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1372 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1373 {
1374 /* Multiple output operands, or 1 output plus some clobbers:
1375 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1376 int i;
1377 int n_sets;
1378
1379 /* Count backwards through CLOBBERs to determine number of SETs. */
1380 for (i = XVECLEN (body, 0); i > 0; i--)
1381 {
1382 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1383 break;
1384 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1385 return -1;
1386 }
1387
1388 /* N_SETS is now number of output operands. */
1389 n_sets = i;
1390
1391 /* Verify that all the SETs we have
1392 came from a single original asm_operands insn
1393 (so that invalid combinations are blocked). */
1394 for (i = 0; i < n_sets; i++)
1395 {
1396 rtx elt = XVECEXP (body, 0, i);
1397 if (GET_CODE (elt) != SET)
1398 return -1;
1399 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1400 return -1;
1401 /* If these ASM_OPERANDS rtx's came from different original insns
1402 then they aren't allowed together. */
1403 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1404 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1405 return -1;
1406 }
1407 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1408 + n_sets);
1409 }
1410 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1411 {
1412 /* 0 outputs, but some clobbers:
1413 body is [(asm_operands ...) (clobber (reg ...))...]. */
1414 int i;
1415
1416 /* Make sure all the other parallel things really are clobbers. */
1417 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1418 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1419 return -1;
1420
1421 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1422 }
1423 else
1424 return -1;
1425 default:
1426 return -1;
1427 }
1428 }
1429
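/* For example (operand counts are inputs plus outputs):

     (asm_operands ...) with 2 inputs                            -> 2
     (set (reg) (asm_operands ...)) with 2 inputs                -> 3
     (parallel [(set ...) (set ...) (clobber ...)]),
        each SET wrapping the shared asm with 2 inputs           -> 4
     (parallel [(asm_operands ...) (clobber ...)]) with 2 inputs -> 2
     any other body shape                                        -> -1  */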
1430 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1431 copy its operands (both input and output) into the vector OPERANDS,
1432 the locations of the operands within the insn into the vector OPERAND_LOCS,
1433 and the constraints for the operands into CONSTRAINTS.
1434 Write the modes of the operands into MODES.
1435 Return the assembler-template.
1436
1437 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1438 we don't store that info. */
1439
1440 const char *
1441 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1442 const char **constraints, enum machine_mode *modes)
1443 {
1444 int i;
1445 int noperands;
1446 const char *template = 0;
1447
1448 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1449 {
1450 rtx asmop = SET_SRC (body);
1451 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1452
1453 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1454
1455 for (i = 1; i < noperands; i++)
1456 {
1457 if (operand_locs)
1458 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1459 if (operands)
1460 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1461 if (constraints)
1462 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1463 if (modes)
1464 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1465 }
1466
1467 /* The output is in the SET.
1468 Its constraint is in the ASM_OPERANDS itself. */
1469 if (operands)
1470 operands[0] = SET_DEST (body);
1471 if (operand_locs)
1472 operand_locs[0] = &SET_DEST (body);
1473 if (constraints)
1474 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1475 if (modes)
1476 modes[0] = GET_MODE (SET_DEST (body));
1477 template = ASM_OPERANDS_TEMPLATE (asmop);
1478 }
1479 else if (GET_CODE (body) == ASM_OPERANDS)
1480 {
1481 rtx asmop = body;
1482 /* No output operands: BODY is (asm_operands ....). */
1483
1484 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1485
1486 /* The input operands are found in the 1st element vector. */
1487 /* Constraints for inputs are in the 2nd element vector. */
1488 for (i = 0; i < noperands; i++)
1489 {
1490 if (operand_locs)
1491 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1492 if (operands)
1493 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1494 if (constraints)
1495 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1496 if (modes)
1497 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1498 }
1499 template = ASM_OPERANDS_TEMPLATE (asmop);
1500 }
1501 else if (GET_CODE (body) == PARALLEL
1502 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1503 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1504 {
1505 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1506 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1507 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1508 int nout = 0; /* Does not include CLOBBERs. */
1509
1510 /* At least one output, plus some CLOBBERs. */
1511
1512 /* The outputs are in the SETs.
1513 Their constraints are in the ASM_OPERANDS itself. */
1514 for (i = 0; i < nparallel; i++)
1515 {
1516 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1517 break; /* Past last SET */
1518
1519 if (operands)
1520 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1521 if (operand_locs)
1522 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1523 if (constraints)
1524 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1525 if (modes)
1526 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1527 nout++;
1528 }
1529
1530 for (i = 0; i < nin; i++)
1531 {
1532 if (operand_locs)
1533 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1534 if (operands)
1535 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1536 if (constraints)
1537 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1538 if (modes)
1539 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1540 }
1541
1542 template = ASM_OPERANDS_TEMPLATE (asmop);
1543 }
1544 else if (GET_CODE (body) == PARALLEL
1545 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1546 {
1547 /* No outputs, but some CLOBBERs. */
1548
1549 rtx asmop = XVECEXP (body, 0, 0);
1550 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1551
1552 for (i = 0; i < nin; i++)
1553 {
1554 if (operand_locs)
1555 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1556 if (operands)
1557 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1558 if (constraints)
1559 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1560 if (modes)
1561 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1562 }
1563
1564 template = ASM_OPERANDS_TEMPLATE (asmop);
1565 }
1566
1567 return template;
1568 }
1569
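/* Usage sketch, mirroring check_asm_operands above (NOPERANDS is
   assumed to come from a successful asm_noperands call):

     rtx *operands = alloca (noperands * sizeof (rtx));
     const char **constraints = alloca (noperands * sizeof (char *));
     const char *tmpl
       = decode_asm_operands (body, operands, NULL, constraints, NULL);

   Output operands, if any, occupy the first slots, followed by the
   inputs; TMPL is the assembler template string.  */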
1570 /* Check if an asm_operand matches its constraints.
1571 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1572
1573 int
1574 asm_operand_ok (rtx op, const char *constraint)
1575 {
1576 int result = 0;
1577
1578 /* Use constrain_operands after reload. */
1579 if (reload_completed)
1580 abort ();
1581
1582 while (*constraint)
1583 {
1584 char c = *constraint;
1585 int len;
1586 switch (c)
1587 {
1588 case ',':
1589 constraint++;
1590 continue;
1591 case '=':
1592 case '+':
1593 case '*':
1594 case '%':
1595 case '!':
1596 case '#':
1597 case '&':
1598 case '?':
1599 break;
1600
1601 case '0': case '1': case '2': case '3': case '4':
1602 case '5': case '6': case '7': case '8': case '9':
1603 /* For best results, our caller should have given us the
1604 proper matching constraint, but we can't actually fail
1605 the check if they didn't. Indicate that results are
1606 inconclusive. */
1607 do
1608 constraint++;
1609 while (ISDIGIT (*constraint));
1610 if (! result)
1611 result = -1;
1612 continue;
1613
1614 case 'p':
1615 if (address_operand (op, VOIDmode))
1616 result = 1;
1617 break;
1618
1619 case 'm':
1620 case 'V': /* non-offsettable */
1621 if (memory_operand (op, VOIDmode))
1622 result = 1;
1623 break;
1624
1625 case 'o': /* offsettable */
1626 if (offsettable_nonstrict_memref_p (op))
1627 result = 1;
1628 break;
1629
1630 case '<':
1631 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1632 excepting those that expand_call created. Further, on some
1633 machines which do not have generalized auto inc/dec, an inc/dec
1634 is not a memory_operand.
1635
1636 Match any memory and hope things are resolved after reload. */
1637
1638 if (MEM_P (op)
1639 && (1
1640 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1641 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1642 result = 1;
1643 break;
1644
1645 case '>':
1646 if (MEM_P (op)
1647 && (1
1648 || GET_CODE (XEXP (op, 0)) == PRE_INC
1649 || GET_CODE (XEXP (op, 0)) == POST_INC))
1650 result = 1;
1651 break;
1652
1653 case 'E':
1654 case 'F':
1655 if (GET_CODE (op) == CONST_DOUBLE
1656 || (GET_CODE (op) == CONST_VECTOR
1657 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1658 result = 1;
1659 break;
1660
1661 case 'G':
1662 if (GET_CODE (op) == CONST_DOUBLE
1663 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1664 result = 1;
1665 break;
1666 case 'H':
1667 if (GET_CODE (op) == CONST_DOUBLE
1668 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1669 result = 1;
1670 break;
1671
1672 case 's':
1673 if (GET_CODE (op) == CONST_INT
1674 || (GET_CODE (op) == CONST_DOUBLE
1675 && GET_MODE (op) == VOIDmode))
1676 break;
1677 /* Fall through. */
1678
1679 case 'i':
1680 if (CONSTANT_P (op)
1681 #ifdef LEGITIMATE_PIC_OPERAND_P
1682 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1683 #endif
1684 )
1685 result = 1;
1686 break;
1687
1688 case 'n':
1689 if (GET_CODE (op) == CONST_INT
1690 || (GET_CODE (op) == CONST_DOUBLE
1691 && GET_MODE (op) == VOIDmode))
1692 result = 1;
1693 break;
1694
1695 case 'I':
1696 if (GET_CODE (op) == CONST_INT
1697 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1698 result = 1;
1699 break;
1700 case 'J':
1701 if (GET_CODE (op) == CONST_INT
1702 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1703 result = 1;
1704 break;
1705 case 'K':
1706 if (GET_CODE (op) == CONST_INT
1707 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1708 result = 1;
1709 break;
1710 case 'L':
1711 if (GET_CODE (op) == CONST_INT
1712 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1713 result = 1;
1714 break;
1715 case 'M':
1716 if (GET_CODE (op) == CONST_INT
1717 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1718 result = 1;
1719 break;
1720 case 'N':
1721 if (GET_CODE (op) == CONST_INT
1722 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1723 result = 1;
1724 break;
1725 case 'O':
1726 if (GET_CODE (op) == CONST_INT
1727 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1728 result = 1;
1729 break;
1730 case 'P':
1731 if (GET_CODE (op) == CONST_INT
1732 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1733 result = 1;
1734 break;
1735
1736 case 'X':
1737 result = 1;
1738 break;
1739
1740 case 'g':
1741 if (general_operand (op, VOIDmode))
1742 result = 1;
1743 break;
1744
1745 default:
1746 /* For all other letters, we first check for a register class,
1747 otherwise it is an EXTRA_CONSTRAINT. */
1748 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1749 {
1750 case 'r':
1751 if (GET_MODE (op) == BLKmode)
1752 break;
1753 if (register_operand (op, VOIDmode))
1754 result = 1;
1755 }
1756 #ifdef EXTRA_CONSTRAINT_STR
1757 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1758 result = 1;
1759 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1760 /* Every memory operand can be reloaded to fit. */
1761 && memory_operand (op, VOIDmode))
1762 result = 1;
1763 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1764 /* Every address operand can be reloaded to fit. */
1765 && address_operand (op, VOIDmode))
1766 result = 1;
1767 #endif
1768 break;
1769 }
1770 len = CONSTRAINT_LEN (c, constraint);
1771 do
1772 constraint++;
1773 while (--len && *constraint);
1774 if (len)
1775 return 0;
1776 }
1777
1778 return result;
1779 }
1780 \f
1781 /* Given an rtx *P, if it is a sum containing an integer constant term,
1782 return the location (type rtx *) of the pointer to that constant term.
1783 Otherwise, return a null pointer. */
1784
1785 rtx *
1786 find_constant_term_loc (rtx *p)
1787 {
1788 rtx *tem;
1789 enum rtx_code code = GET_CODE (*p);
1790
1791 /* If *P IS such a constant term, P is its location. */
1792
1793 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1794 || code == CONST)
1795 return p;
1796
1797 /* Otherwise, if not a sum, it has no constant term. */
1798
1799 if (GET_CODE (*p) != PLUS)
1800 return 0;
1801
1802 /* If one of the summands is constant, return its location. */
1803
1804 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1805 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1806 return p;
1807
1808 /* Otherwise, check each summand for containing a constant term. */
1809
1810 if (XEXP (*p, 0) != 0)
1811 {
1812 tem = find_constant_term_loc (&XEXP (*p, 0));
1813 if (tem != 0)
1814 return tem;
1815 }
1816
1817 if (XEXP (*p, 1) != 0)
1818 {
1819 tem = find_constant_term_loc (&XEXP (*p, 1));
1820 if (tem != 0)
1821 return tem;
1822 }
1823
1824 return 0;
1825 }
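
/* For example, if *P is (plus (reg X) (const_int 4)) the result points
   at the CONST_INT; for (plus (reg X) (reg Y)) the result is 0.  */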
1826 \f
1827 /* Return 1 if OP is a memory reference
1828 whose address contains no side effects
1829 and remains valid after the addition
1830 of a positive integer less than the
1831 size of the object being referenced.
1832
1833 We assume that the original address is valid and do not check it.
1834
1835 This uses strict_memory_address_p as a subroutine, so
1836 don't use it before reload. */
1837
1838 int
1839 offsettable_memref_p (rtx op)
1840 {
1841 return ((MEM_P (op))
1842 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1843 }
1844
1845 /* Similar, but don't require a strictly valid mem ref:
1846 consider pseudo-regs valid as index or base regs. */
1847
1848 int
1849 offsettable_nonstrict_memref_p (rtx op)
1850 {
1851 return ((MEM_P (op))
1852 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1853 }
1854
1855 /* Return 1 if Y is a memory address which contains no side effects
1856 and would remain valid after the addition of a positive integer
1857 less than the size of that mode.
1858
1859 We assume that the original address is valid and do not check it.
1860 We do check that it is valid for narrower modes.
1861
1862 If STRICTP is nonzero, we require a strictly valid address,
1863 for the sake of use in reload.c. */
1864
1865 int
1866 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1867 {
1868 enum rtx_code ycode = GET_CODE (y);
1869 rtx z;
1870 rtx y1 = y;
1871 rtx *y2;
1872 int (*addressp) (enum machine_mode, rtx) =
1873 (strictp ? strict_memory_address_p : memory_address_p);
1874 unsigned int mode_sz = GET_MODE_SIZE (mode);
1875
1876 if (CONSTANT_ADDRESS_P (y))
1877 return 1;
1878
1879 /* Adjusting an offsettable address involves changing to a narrower mode.
1880 Make sure that's OK. */
1881
1882 if (mode_dependent_address_p (y))
1883 return 0;
1884
1885 /* ??? How much offset does an offsettable BLKmode reference need?
1886 Clearly that depends on the situation in which it's being used.
1887 However, the current situation in which we test 0xffffffff is
1888 less than ideal. Caveat user. */
1889 if (mode_sz == 0)
1890 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1891
1892 /* If the expression contains a constant term,
1893 see if it remains valid when max possible offset is added. */
1894
1895 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1896 {
1897 int good;
1898
1899 y1 = *y2;
1900 *y2 = plus_constant (*y2, mode_sz - 1);
1901 /* Use QImode because an odd displacement may be automatically invalid
1902 for any wider mode. But it should be valid for a single byte. */
1903 good = (*addressp) (QImode, y);
1904
1905 /* In any case, restore old contents of memory. */
1906 *y2 = y1;
1907 return good;
1908 }
1909
1910 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1911 return 0;
1912
1913 /* The offset added here is chosen as the maximum offset that
1914 any instruction could need to add when operating on something
1915 of the specified mode. We assume that if Y and Y+c are
1916 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1917 go inside a LO_SUM here, so we do so as well. */
1918 if (GET_CODE (y) == LO_SUM
1919 && mode != BLKmode
1920 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1921 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1922 plus_constant (XEXP (y, 1), mode_sz - 1));
1923 else
1924 z = plus_constant (y, mode_sz - 1);
1925
1926 /* Use QImode because an odd displacement may be automatically invalid
1927 for any wider mode. But it should be valid for a single byte. */
1928 return (*addressp) (QImode, z);
1929 }
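/* For example, for a 4-byte SImode reference whose address is
   (plus (reg X) (const_int 20)), the code above in effect asks whether
   (plus (reg X) (const_int 23)) -- the displacement plus mode_sz - 1 --
   is still a valid QImode address.  A hypothetical caller might write:

       rtx addr = gen_rtx_PLUS (Pmode, x, GEN_INT (20));
       if (offsettable_address_p (reload_completed, SImode, addr))
         ... adjust_address may be used safely on a MEM at ADDR ...  */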
1930
1931 /* Return 1 if ADDR is an address-expression whose effect depends
1932 on the mode of the memory reference it is used in.
1933
1934 Autoincrement addressing is a typical example of mode-dependence
1935 because the amount of the increment depends on the mode. */
1936
1937 int
1938 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1939 {
1940 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1941 return 0;
1942 /* The label `win' may or may not be used, depending on how the target defines GO_IF_MODE_DEPENDENT_ADDRESS. */
1943 win: ATTRIBUTE_UNUSED_LABEL
1944 return 1;
1945 }
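/* A target's GO_IF_MODE_DEPENDENT_ADDRESS jumps to its label argument
   for addresses whose meaning varies with the access mode.  A minimal
   sketch for a hypothetical port whose only mode-dependent addresses
   are the auto-modification forms:

       #define GO_IF_MODE_DEPENDENT_ADDRESS(ADDR, LABEL)	\
         if (GET_RTX_CLASS (GET_CODE (ADDR)) == RTX_AUTOINC)	\
           goto LABEL;  */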
1946 \f
1947 /* Like extract_insn, but save the extracted insn and don't extract it again
1948 when called again for the same insn, expecting that recog_data still
1949 contains valid information. This is used primarily by the gen_attr
1950 infrastructure, which often extracts the same insn again and again. */
1951 void
1952 extract_insn_cached (rtx insn)
1953 {
1954 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1955 return;
1956 extract_insn (insn);
1957 recog_data.insn = insn;
1958 }
1959 /* Do cached extract_insn, constrain_operands and complain about failures.
1960 Used by insn_attrtab. */
1961 void
1962 extract_constrain_insn_cached (rtx insn)
1963 {
1964 extract_insn_cached (insn);
1965 if (which_alternative == -1
1966 && !constrain_operands (reload_completed))
1967 fatal_insn_not_found (insn);
1968 }
1969 /* Do cached constrain_operands and complain about failures. */
1970 int
1971 constrain_operands_cached (int strict)
1972 {
1973 if (which_alternative == -1)
1974 return constrain_operands (strict);
1975 else
1976 return 1;
1977 }
1978 \f
1979 /* Analyze INSN and fill in recog_data. */
1980
1981 void
1982 extract_insn (rtx insn)
1983 {
1984 int i;
1985 int icode;
1986 int noperands;
1987 rtx body = PATTERN (insn);
1988
1989 recog_data.insn = NULL;
1990 recog_data.n_operands = 0;
1991 recog_data.n_alternatives = 0;
1992 recog_data.n_dups = 0;
1993 which_alternative = -1;
1994
1995 switch (GET_CODE (body))
1996 {
1997 case USE:
1998 case CLOBBER:
1999 case ASM_INPUT:
2000 case ADDR_VEC:
2001 case ADDR_DIFF_VEC:
2002 return;
2003
2004 case SET:
2005 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2006 goto asm_insn;
2007 else
2008 goto normal_insn;
2009 case PARALLEL:
2010 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2011 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2012 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2013 goto asm_insn;
2014 else
2015 goto normal_insn;
2016 case ASM_OPERANDS:
2017 asm_insn:
2018 recog_data.n_operands = noperands = asm_noperands (body);
2019 if (noperands >= 0)
2020 {
2021 /* This insn is an `asm' with operands. */
2022
2023 /* expand_asm_operands makes sure there aren't too many operands. */
2024 if (noperands > MAX_RECOG_OPERANDS)
2025 abort ();
2026
2027 /* Now get the operand values and constraints out of the insn. */
2028 decode_asm_operands (body, recog_data.operand,
2029 recog_data.operand_loc,
2030 recog_data.constraints,
2031 recog_data.operand_mode);
2032 if (noperands > 0)
2033 {
2034 const char *p = recog_data.constraints[0];
2035 recog_data.n_alternatives = 1;
2036 while (*p)
2037 recog_data.n_alternatives += (*p++ == ',');
2038 }
2039 break;
2040 }
2041 fatal_insn_not_found (insn);
2042
2043 default:
2044 normal_insn:
2045 /* Ordinary insn: recognize it, get the operands via insn_extract
2046 and get the constraints. */
2047
2048 icode = recog_memoized (insn);
2049 if (icode < 0)
2050 fatal_insn_not_found (insn);
2051
2052 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2053 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2054 recog_data.n_dups = insn_data[icode].n_dups;
2055
2056 insn_extract (insn);
2057
2058 for (i = 0; i < noperands; i++)
2059 {
2060 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2061 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2062 /* A VOIDmode match_operand gets its mode from the real operand. */
2063 if (recog_data.operand_mode[i] == VOIDmode)
2064 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2065 }
2066 }
2067 for (i = 0; i < noperands; i++)
2068 recog_data.operand_type[i]
2069 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2070 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2071 : OP_IN);
2072
2073 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2074 abort ();
2075 }
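/* A minimal sketch of a consumer (hypothetical helper; INSN is assumed
   to be recognizable):

       extract_insn (insn);
       for (i = 0; i < recog_data.n_operands; i++)
         if (recog_data.operand_type[i] != OP_IN)
           handle_output_operand (recog_data.operand[i]);

   where handle_output_operand stands in for whatever the pass does with
   operands that may be written.  */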
2076
2077 /* After calling extract_insn, you can use this function to extract some
2078 information from the constraint strings into a more usable form.
2079 The collected data is stored in recog_op_alt. */
2080 void
2081 preprocess_constraints (void)
2082 {
2083 int i;
2084
2085 for (i = 0; i < recog_data.n_operands; i++)
2086 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2087 * sizeof (struct operand_alternative)));
2088
2089 for (i = 0; i < recog_data.n_operands; i++)
2090 {
2091 int j;
2092 struct operand_alternative *op_alt;
2093 const char *p = recog_data.constraints[i];
2094
2095 op_alt = recog_op_alt[i];
2096
2097 for (j = 0; j < recog_data.n_alternatives; j++)
2098 {
2099 op_alt[j].class = NO_REGS;
2100 op_alt[j].constraint = p;
2101 op_alt[j].matches = -1;
2102 op_alt[j].matched = -1;
2103
2104 if (*p == '\0' || *p == ',')
2105 {
2106 op_alt[j].anything_ok = 1;
2107 continue;
2108 }
2109
2110 for (;;)
2111 {
2112 char c = *p;
2113 if (c == '#')
2114 do
2115 c = *++p;
2116 while (c != ',' && c != '\0');
2117 if (c == ',' || c == '\0')
2118 {
2119 p++;
2120 break;
2121 }
2122
2123 switch (c)
2124 {
2125 case '=': case '+': case '*': case '%':
2126 case 'E': case 'F': case 'G': case 'H':
2127 case 's': case 'i': case 'n':
2128 case 'I': case 'J': case 'K': case 'L':
2129 case 'M': case 'N': case 'O': case 'P':
2130 /* These don't say anything we care about. */
2131 break;
2132
2133 case '?':
2134 op_alt[j].reject += 6;
2135 break;
2136 case '!':
2137 op_alt[j].reject += 600;
2138 break;
2139 case '&':
2140 op_alt[j].earlyclobber = 1;
2141 break;
2142
2143 case '0': case '1': case '2': case '3': case '4':
2144 case '5': case '6': case '7': case '8': case '9':
2145 {
2146 char *end;
2147 op_alt[j].matches = strtoul (p, &end, 10);
2148 recog_op_alt[op_alt[j].matches][j].matched = i;
2149 p = end;
2150 }
2151 continue;
2152
2153 case 'm':
2154 op_alt[j].memory_ok = 1;
2155 break;
2156 case '<':
2157 op_alt[j].decmem_ok = 1;
2158 break;
2159 case '>':
2160 op_alt[j].incmem_ok = 1;
2161 break;
2162 case 'V':
2163 op_alt[j].nonoffmem_ok = 1;
2164 break;
2165 case 'o':
2166 op_alt[j].offmem_ok = 1;
2167 break;
2168 case 'X':
2169 op_alt[j].anything_ok = 1;
2170 break;
2171
2172 case 'p':
2173 op_alt[j].is_address = 1;
2174 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2175 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2176 break;
2177
2178 case 'g': case 'r':
2179 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2180 break;
2181
2182 default:
2183 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2184 {
2185 op_alt[j].memory_ok = 1;
2186 break;
2187 }
2188 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2189 {
2190 op_alt[j].is_address = 1;
2191 op_alt[j].class
2192 = (reg_class_subunion
2193 [(int) op_alt[j].class]
2194 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2195 break;
2196 }
2197
2198 op_alt[j].class
2199 = (reg_class_subunion
2200 [(int) op_alt[j].class]
2201 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2202 break;
2203 }
2204 p += CONSTRAINT_LEN (c, p);
2205 }
2206 }
2207 }
2208 }
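/* Once the constraints have been digested, a pass can query
   recog_op_alt instead of re-parsing the strings.  Illustrative sketch:

       extract_insn (insn);
       preprocess_constraints ();
       if (recog_op_alt[opno][alt].memory_ok
           || recog_op_alt[opno][alt].offmem_ok)
         ... operand OPNO accepts memory in alternative ALT ...  */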
2209
2210 /* Check the operands of an insn against the insn's operand constraints
2211 and return 1 if they are valid.
2212 The information about the insn's operands, constraints, operand modes
2213 etc. is obtained from the global variables set up by extract_insn.
2214
2215 WHICH_ALTERNATIVE is set to a number which indicates which
2216 alternative of constraints was matched: 0 for the first alternative,
2217 1 for the next, etc.
2218
2219 In addition, when two operands are required to match
2220 and it happens that the output operand is (reg) while the
2221 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2222 make the output operand look like the input.
2223 This is because the output operand is the one the template will print.
2224
2225 This is used in final, just before printing the assembler code and by
2226 the routines that determine an insn's attribute.
2227
2228 If STRICT is a positive nonzero value, it means that we have been
2229 called after reload has been completed. In that case, we must
2230 do all checks strictly. If it is zero, it means that we have been called
2231 before reload has completed. In that case, we first try to see if we can
2232 find an alternative that matches strictly. If not, we try again, this
2233 time assuming that reload will fix up the insn. This provides a "best
2234 guess" for the alternative and is used to compute attributes of insns prior
2235 to reload. A negative value of STRICT is used for this internal call. */
2236
2237 struct funny_match
2238 {
2239 int this, other;
2240 };
2241
2242 int
2243 constrain_operands (int strict)
2244 {
2245 const char *constraints[MAX_RECOG_OPERANDS];
2246 int matching_operands[MAX_RECOG_OPERANDS];
2247 int earlyclobber[MAX_RECOG_OPERANDS];
2248 int c;
2249
2250 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2251 int funny_match_index;
2252
2253 which_alternative = 0;
2254 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2255 return 1;
2256
2257 for (c = 0; c < recog_data.n_operands; c++)
2258 {
2259 constraints[c] = recog_data.constraints[c];
2260 matching_operands[c] = -1;
2261 }
2262
2263 do
2264 {
2265 int opno;
2266 int lose = 0;
2267 funny_match_index = 0;
2268
2269 for (opno = 0; opno < recog_data.n_operands; opno++)
2270 {
2271 rtx op = recog_data.operand[opno];
2272 enum machine_mode mode = GET_MODE (op);
2273 const char *p = constraints[opno];
2274 int offset = 0;
2275 int win = 0;
2276 int val;
2277 int len;
2278
2279 earlyclobber[opno] = 0;
2280
2281 /* A unary operator may be accepted by the predicate, but it
2282 is irrelevant for matching constraints. */
2283 if (UNARY_P (op))
2284 op = XEXP (op, 0);
2285
2286 if (GET_CODE (op) == SUBREG)
2287 {
2288 if (REG_P (SUBREG_REG (op))
2289 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2290 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2291 GET_MODE (SUBREG_REG (op)),
2292 SUBREG_BYTE (op),
2293 GET_MODE (op));
2294 op = SUBREG_REG (op);
2295 }
2296
2297 /* An empty constraint or empty alternative
2298 allows anything which matched the pattern. */
2299 if (*p == 0 || *p == ',')
2300 win = 1;
2301
2302 do
2303 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2304 {
2305 case '\0':
2306 len = 0;
2307 break;
2308 case ',':
2309 c = '\0';
2310 break;
2311
2312 case '?': case '!': case '*': case '%':
2313 case '=': case '+':
2314 break;
2315
2316 case '#':
2317 /* Ignore rest of this alternative as far as
2318 constraint checking is concerned. */
2319 do
2320 p++;
2321 while (*p && *p != ',');
2322 len = 0;
2323 break;
2324
2325 case '&':
2326 earlyclobber[opno] = 1;
2327 break;
2328
2329 case '0': case '1': case '2': case '3': case '4':
2330 case '5': case '6': case '7': case '8': case '9':
2331 {
2332 /* This operand must be the same as a previous one.
2333 This kind of constraint is used for instructions such
2334 as add when they take only two operands.
2335
2336 Note that the lower-numbered operand is passed first.
2337
2338 If we are not testing strictly, assume that this
2339 constraint will be satisfied. */
2340
2341 char *end;
2342 int match;
2343
2344 match = strtoul (p, &end, 10);
2345 p = end;
2346
2347 if (strict < 0)
2348 val = 1;
2349 else
2350 {
2351 rtx op1 = recog_data.operand[match];
2352 rtx op2 = recog_data.operand[opno];
2353
2354 /* A unary operator may be accepted by the predicate,
2355 but it is irrelevant for matching constraints. */
2356 if (UNARY_P (op1))
2357 op1 = XEXP (op1, 0);
2358 if (UNARY_P (op2))
2359 op2 = XEXP (op2, 0);
2360
2361 val = operands_match_p (op1, op2);
2362 }
2363
2364 matching_operands[opno] = match;
2365 matching_operands[match] = opno;
2366
2367 if (val != 0)
2368 win = 1;
2369
2370 /* If output is *x and input is *--x, arrange later
2371 to change the output to *--x as well, since the
2372 output op is the one that will be printed. */
2373 if (val == 2 && strict > 0)
2374 {
2375 funny_match[funny_match_index].this = opno;
2376 funny_match[funny_match_index++].other = match;
2377 }
2378 }
2379 len = 0;
2380 break;
2381
2382 case 'p':
2383 /* p is used for address_operands. When we are called by
2384 gen_reload, no one will have checked that the address is
2385 strictly valid, i.e., that all pseudos requiring hard regs
2386 have gotten them. */
2387 if (strict <= 0
2388 || (strict_memory_address_p (recog_data.operand_mode[opno],
2389 op)))
2390 win = 1;
2391 break;
2392
2393 /* No need to check general_operand again;
2394 it was done in insn-recog.c. */
2395 case 'g':
2396 /* Anything goes unless it is a REG and really has a hard reg
2397 but the hard reg is not in the class GENERAL_REGS. */
2398 if (strict < 0
2399 || GENERAL_REGS == ALL_REGS
2400 || !REG_P (op)
2401 || (reload_in_progress
2402 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2403 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2404 win = 1;
2405 break;
2406
2407 case 'X':
2408 /* This is used for a MATCH_SCRATCH in the cases when
2409 we don't actually need anything. So anything goes
2410 any time. */
2411 win = 1;
2412 break;
2413
2414 case 'm':
2415 /* Memory operands must be valid, to the extent
2416 required by STRICT. */
2417 if (MEM_P (op))
2418 {
2419 if (strict > 0
2420 && !strict_memory_address_p (GET_MODE (op),
2421 XEXP (op, 0)))
2422 break;
2423 if (strict == 0
2424 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2425 break;
2426 win = 1;
2427 }
2428 /* Before reload, accept what reload can turn into mem. */
2429 else if (strict < 0 && CONSTANT_P (op))
2430 win = 1;
2431 /* During reload, accept a pseudo. */
2432 else if (reload_in_progress && REG_P (op)
2433 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2434 win = 1;
2435 break;
2436
2437 case '<':
2438 if (MEM_P (op)
2439 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2440 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2441 win = 1;
2442 break;
2443
2444 case '>':
2445 if (MEM_P (op)
2446 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2447 || GET_CODE (XEXP (op, 0)) == POST_INC))
2448 win = 1;
2449 break;
2450
2451 case 'E':
2452 case 'F':
2453 if (GET_CODE (op) == CONST_DOUBLE
2454 || (GET_CODE (op) == CONST_VECTOR
2455 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2456 win = 1;
2457 break;
2458
2459 case 'G':
2460 case 'H':
2461 if (GET_CODE (op) == CONST_DOUBLE
2462 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2463 win = 1;
2464 break;
2465
2466 case 's':
2467 if (GET_CODE (op) == CONST_INT
2468 || (GET_CODE (op) == CONST_DOUBLE
2469 && GET_MODE (op) == VOIDmode))
2470 break;
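/* Fall through. */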
2471 case 'i':
2472 if (CONSTANT_P (op))
2473 win = 1;
2474 break;
2475
2476 case 'n':
2477 if (GET_CODE (op) == CONST_INT
2478 || (GET_CODE (op) == CONST_DOUBLE
2479 && GET_MODE (op) == VOIDmode))
2480 win = 1;
2481 break;
2482
2483 case 'I':
2484 case 'J':
2485 case 'K':
2486 case 'L':
2487 case 'M':
2488 case 'N':
2489 case 'O':
2490 case 'P':
2491 if (GET_CODE (op) == CONST_INT
2492 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2493 win = 1;
2494 break;
2495
2496 case 'V':
2497 if (MEM_P (op)
2498 && ((strict > 0 && ! offsettable_memref_p (op))
2499 || (strict < 0
2500 && !(CONSTANT_P (op) || MEM_P (op)))
2501 || (reload_in_progress
2502 && !(REG_P (op)
2503 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2504 win = 1;
2505 break;
2506
2507 case 'o':
2508 if ((strict > 0 && offsettable_memref_p (op))
2509 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2510 /* Before reload, accept what reload can handle. */
2511 || (strict < 0
2512 && (CONSTANT_P (op) || MEM_P (op)))
2513 /* During reload, accept a pseudo. */
2514 || (reload_in_progress && REG_P (op)
2515 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2516 win = 1;
2517 break;
2518
2519 default:
2520 {
2521 enum reg_class class;
2522
2523 class = (c == 'r'
2524 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2525 if (class != NO_REGS)
2526 {
2527 if (strict < 0
2528 || (strict == 0
2529 && REG_P (op)
2530 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2531 || (strict == 0 && GET_CODE (op) == SCRATCH)
2532 || (REG_P (op)
2533 && reg_fits_class_p (op, class, offset, mode)))
2534 win = 1;
2535 }
2536 #ifdef EXTRA_CONSTRAINT_STR
2537 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2538 win = 1;
2539
2540 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2541 /* Every memory operand can be reloaded to fit. */
2542 && ((strict < 0 && MEM_P (op))
2543 /* Before reload, accept what reload can turn
2544 into mem. */
2545 || (strict < 0 && CONSTANT_P (op))
2546 /* During reload, accept a pseudo. */
2547 || (reload_in_progress && REG_P (op)
2548 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2549 win = 1;
2550 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2551 /* Every address operand can be reloaded to fit. */
2552 && strict < 0)
2553 win = 1;
2554 #endif
2555 break;
2556 }
2557 }
2558 while (p += len, c);
2559
2560 constraints[opno] = p;
2561 /* If this operand did not win somehow,
2562 this alternative loses. */
2563 if (! win)
2564 lose = 1;
2565 }
2566 /* This alternative won; the operands are ok.
2567 Change whichever operands this alternative says to change. */
2568 if (! lose)
2569 {
2570 int opno, eopno;
2571
2572 /* See if any earlyclobber operand conflicts with some other
2573 operand. */
2574
2575 if (strict > 0)
2576 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2577 /* Ignore earlyclobber operands now in memory,
2578 because we would often report failure when we have
2579 two memory operands, one of which was formerly a REG. */
2580 if (earlyclobber[eopno]
2581 && REG_P (recog_data.operand[eopno]))
2582 for (opno = 0; opno < recog_data.n_operands; opno++)
2583 if ((MEM_P (recog_data.operand[opno])
2584 || recog_data.operand_type[opno] != OP_OUT)
2585 && opno != eopno
2586 /* Ignore things like match_operator operands. */
2587 && *recog_data.constraints[opno] != 0
2588 && ! (matching_operands[opno] == eopno
2589 && operands_match_p (recog_data.operand[opno],
2590 recog_data.operand[eopno]))
2591 && ! safe_from_earlyclobber (recog_data.operand[opno],
2592 recog_data.operand[eopno]))
2593 lose = 1;
2594
2595 if (! lose)
2596 {
2597 while (--funny_match_index >= 0)
2598 {
2599 recog_data.operand[funny_match[funny_match_index].other]
2600 = recog_data.operand[funny_match[funny_match_index].this];
2601 }
2602
2603 return 1;
2604 }
2605 }
2606
2607 which_alternative++;
2608 }
2609 while (which_alternative < recog_data.n_alternatives);
2610
2611 which_alternative = -1;
2612 /* If we are about to reject this, but we are not to test strictly,
2613 try a very loose test. Only return failure if it fails also. */
2614 if (strict == 0)
2615 return constrain_operands (-1);
2616 else
2617 return 0;
2618 }
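/* Typical use, modeled on extract_constrain_insn_cached above:

       extract_insn (insn);
       if (! constrain_operands (reload_completed))
         fatal_insn_not_found (insn);

   On success, which_alternative records which alternative matched.  */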
2619
2620 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2621 is a hard reg in class CLASS when its regno is offset by OFFSET
2622 and changed to mode MODE.
2623 If REG occupies multiple hard regs, all of them must be in CLASS. */
2624
2625 int
2626 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2627 enum machine_mode mode)
2628 {
2629 int regno = REGNO (operand);
2630 if (regno < FIRST_PSEUDO_REGISTER
2631 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2632 regno + offset))
2633 {
2634 int sr;
2635 regno += offset;
2636 for (sr = hard_regno_nregs[regno][mode] - 1;
2637 sr > 0; sr--)
2638 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2639 regno + sr))
2640 break;
2641 return sr == 0;
2642 }
2643
2644 return 0;
2645 }
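/* For example, on a hypothetical 32-bit target where DImode occupies
   two hard registers, reg_fits_class_p ((reg:DI 4), CLASS, 0, DImode)
   returns 1 only if hard regs 4 and 5 are both in CLASS.  */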
2646 \f
2647 /* Split a single instruction. Helper function for split_all_insns and
2648 split_all_insns_noflow. Return the last insn in the sequence if
2649 successful, or NULL if unsuccessful. */
2650
2651 static rtx
2652 split_insn (rtx insn)
2653 {
2654 /* Split insns here to get max fine-grain parallelism. */
2655 rtx first = PREV_INSN (insn);
2656 rtx last = try_split (PATTERN (insn), insn, 1);
2657
2658 if (last == insn)
2659 return NULL_RTX;
2660
2661 /* try_split returns the NOTE that INSN became. */
2662 SET_INSN_DELETED (insn);
2663
2664 /* ??? Coddle to md files that generate subregs in post-reload
2665 splitters instead of computing the proper hard register. */
2666 if (reload_completed && first != last)
2667 {
2668 first = NEXT_INSN (first);
2669 for (;;)
2670 {
2671 if (INSN_P (first))
2672 cleanup_subreg_operands (first);
2673 if (first == last)
2674 break;
2675 first = NEXT_INSN (first);
2676 }
2677 }
2678 return last;
2679 }
2680
2681 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2682
2683 void
2684 split_all_insns (int upd_life)
2685 {
2686 sbitmap blocks;
2687 bool changed;
2688 basic_block bb;
2689
2690 blocks = sbitmap_alloc (last_basic_block);
2691 sbitmap_zero (blocks);
2692 changed = false;
2693
2694 FOR_EACH_BB_REVERSE (bb)
2695 {
2696 rtx insn, next;
2697 bool finish = false;
2698
2699 for (insn = BB_HEAD (bb); !finish ; insn = next)
2700 {
2701 /* Can't use `next_real_insn' because that might go across
2702 CODE_LABELS and short-out basic blocks. */
2703 next = NEXT_INSN (insn);
2704 finish = (insn == BB_END (bb));
2705 if (INSN_P (insn))
2706 {
2707 rtx set = single_set (insn);
2708
2709 /* Don't split no-op move insns. These should silently
2710 disappear later in final. Splitting such insns would
2711 break the code that handles REG_NO_CONFLICT blocks. */
2712 if (set && set_noop_p (set))
2713 {
2714 /* Nops get in the way while scheduling, so delete them
2715 now if register allocation has already been done. It
2716 is too risky to try to do this before register
2717 allocation, and there are unlikely to be very many
2718 nops then anyway. */
2719 if (reload_completed)
2720 {
2721 /* If the no-op set has a REG_UNUSED note, we need
2722 to update liveness information. */
2723 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2724 {
2725 SET_BIT (blocks, bb->index);
2726 changed = true;
2727 }
2728 /* ??? Is life info affected by deleting edges? */
2729 delete_insn_and_edges (insn);
2730 }
2731 }
2732 else
2733 {
2734 rtx last = split_insn (insn);
2735 if (last)
2736 {
2737 /* The split sequence may include a barrier, but the
2738 BB boundary we are interested in will be set to the
2739 previous one. */
2740
2741 while (BARRIER_P (last))
2742 last = PREV_INSN (last);
2743 SET_BIT (blocks, bb->index);
2744 changed = true;
2745 }
2746 }
2747 }
2748 }
2749 }
2750
2751 if (changed)
2752 {
2753 int old_last_basic_block = last_basic_block;
2754
2755 find_many_sub_basic_blocks (blocks);
2756
2757 if (old_last_basic_block != last_basic_block && upd_life)
2758 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2759 }
2760
2761 if (changed && upd_life)
2762 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2763 PROP_DEATH_NOTES);
2764
2765 #ifdef ENABLE_CHECKING
2766 verify_flow_info ();
2767 #endif
2768
2769 sbitmap_free (blocks);
2770 }
2771
2772 /* Same as split_all_insns, but do not expect CFG to be available.
2773 Used by machine dependent reorg passes. */
2774
2775 void
2776 split_all_insns_noflow (void)
2777 {
2778 rtx next, insn;
2779
2780 for (insn = get_insns (); insn; insn = next)
2781 {
2782 next = NEXT_INSN (insn);
2783 if (INSN_P (insn))
2784 {
2785 /* Don't split no-op move insns. These should silently
2786 disappear later in final. Splitting such insns would
2787 break the code that handles REG_NO_CONFLICT blocks. */
2788 rtx set = single_set (insn);
2789 if (set && set_noop_p (set))
2790 {
2791 /* Nops get in the way while scheduling, so delete them
2792 now if register allocation has already been done. It
2793 is too risky to try to do this before register
2794 allocation, and there are unlikely to be very many
2795 nops then anyway.
2796
2797 ??? Should we use delete_insn when the CFG isn't valid? */
2798 if (reload_completed)
2799 delete_insn_and_edges (insn);
2800 }
2801 else
2802 split_insn (insn);
2803 }
2804 }
2805 }
2806 \f
2807 #ifdef HAVE_peephole2
2808 struct peep2_insn_data
2809 {
2810 rtx insn;
2811 regset live_before;
2812 };
2813
2814 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2815 static int peep2_current;
2816
2817 /* A non-insn marker indicating the last insn of the block.
2818 The live_before regset for this element is correct, indicating
2819 global_live_at_end for the block. */
2820 #define PEEP2_EOB pc_rtx
2821
2822 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2823 does not exist. Used by the recognizer to find the next insn to match
2824 in a multi-insn pattern. */
2825
2826 rtx
2827 peep2_next_insn (int n)
2828 {
2829 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2830 abort ();
2831
2832 n += peep2_current;
2833 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2834 n -= MAX_INSNS_PER_PEEP2 + 1;
2835
2836 if (peep2_insn_data[n].insn == PEEP2_EOB)
2837 return NULL_RTX;
2838 return peep2_insn_data[n].insn;
2839 }
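/* The peep2_insn_data buffer is circular: the slot inspected is
   (peep2_current + N) modulo MAX_INSNS_PER_PEEP2 + 1.  Generated
   peephole2 matching code calls this as, for example:

       rtx second = peep2_next_insn (1);

   to fetch the insn following the one at the window start.  */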
2840
2841 /* Return true if REGNO is dead before the Nth non-note insn
2842 after `current'. */
2843
2844 int
2845 peep2_regno_dead_p (int ofs, int regno)
2846 {
2847 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2848 abort ();
2849
2850 ofs += peep2_current;
2851 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2852 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2853
2854 if (peep2_insn_data[ofs].insn == NULL_RTX)
2855 abort ();
2856
2857 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2858 }
2859
2860 /* Similarly for a REG. */
2861
2862 int
2863 peep2_reg_dead_p (int ofs, rtx reg)
2864 {
2865 int regno, n;
2866
2867 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2868 abort ();
2869
2870 ofs += peep2_current;
2871 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2872 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2873
2874 if (peep2_insn_data[ofs].insn == NULL_RTX)
2875 abort ();
2876
2877 regno = REGNO (reg);
2878 n = hard_regno_nregs[regno][GET_MODE (reg)];
2879 while (--n >= 0)
2880 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2881 return 0;
2882 return 1;
2883 }
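/* Both predicates are intended for the condition of a define_peephole2
   in the machine description.  An illustrative (hypothetical) pattern:

       (define_peephole2
         [(set (match_operand:SI 0 "register_operand" "")
               (match_operand:SI 1 "register_operand" ""))]
         "peep2_reg_dead_p (1, operands[1])"
         [...])  */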
2884
2885 /* Try to find a hard register of mode MODE, matching the register class in
2886 CLASS_STR, which is available at the beginning of the insn at window
2887 offset FROM and remains available up to the insn at window offset TO;
2888 both offsets are interpreted relative to the current peephole window,
2889 as in peep2_next_insn.
2890 Registers that already have bits set in REG_SET will not be considered.
2891
2892 If an appropriate register is available, it will be returned and the
2893 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2894 returned. */
2895
2896 rtx
2897 peep2_find_free_register (int from, int to, const char *class_str,
2898 enum machine_mode mode, HARD_REG_SET *reg_set)
2899 {
2900 static int search_ofs;
2901 enum reg_class class;
2902 HARD_REG_SET live;
2903 int i;
2904
2905 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2906 abort ();
2907
2908 from += peep2_current;
2909 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2910 from -= MAX_INSNS_PER_PEEP2 + 1;
2911 to += peep2_current;
2912 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2913 to -= MAX_INSNS_PER_PEEP2 + 1;
2914
2915 if (peep2_insn_data[from].insn == NULL_RTX)
2916 abort ();
2917 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2918
2919 while (from != to)
2920 {
2921 HARD_REG_SET this_live;
2922
2923 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2924 from = 0;
2925 if (peep2_insn_data[from].insn == NULL_RTX)
2926 abort ();
2927 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2928 IOR_HARD_REG_SET (live, this_live);
2929 }
2930
2931 class = (class_str[0] == 'r' ? GENERAL_REGS
2932 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2933
2934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2935 {
2936 int raw_regno, regno, success, j;
2937
2938 /* Distribute the free registers as much as possible. */
2939 raw_regno = search_ofs + i;
2940 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2941 raw_regno -= FIRST_PSEUDO_REGISTER;
2942 #ifdef REG_ALLOC_ORDER
2943 regno = reg_alloc_order[raw_regno];
2944 #else
2945 regno = raw_regno;
2946 #endif
2947
2948 /* Don't allocate fixed registers. */
2949 if (fixed_regs[regno])
2950 continue;
2951 /* Make sure the register is of the right class. */
2952 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2953 continue;
2954 /* And can support the mode we need. */
2955 if (! HARD_REGNO_MODE_OK (regno, mode))
2956 continue;
2957 /* And that we don't create an extra save/restore: the register must be call-clobbered or already used in this function. */
2958 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2959 continue;
2960 /* And we don't clobber traceback for noreturn functions. */
2961 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2962 && (! reload_completed || frame_pointer_needed))
2963 continue;
2964
2965 success = 1;
2966 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2967 {
2968 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2969 || TEST_HARD_REG_BIT (live, regno + j))
2970 {
2971 success = 0;
2972 break;
2973 }
2974 }
2975 if (success)
2976 {
2977 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2978 SET_HARD_REG_BIT (*reg_set, regno + j);
2979
2980 /* Start the next search with the next register. */
2981 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2982 raw_regno = 0;
2983 search_ofs = raw_regno;
2984
2985 return gen_rtx_REG (mode, regno);
2986 }
2987 }
2988
2989 search_ofs = 0;
2990 return NULL_RTX;
2991 }
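/* In a define_peephole2, scratch registers are normally requested with
   (match_scratch ...), which the generated code resolves by calling
   this function.  A direct call looks like (sketch; REGS starts empty):

       HARD_REG_SET regs;
       CLEAR_HARD_REG_SET (regs);
       reg = peep2_find_free_register (0, 1, "r", SImode, &regs);  */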
2992
2993 /* Perform the peephole2 optimization pass. */
2994
2995 void
2996 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2997 {
2998 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2999 rtx insn, prev;
3000 regset live;
3001 int i;
3002 basic_block bb;
3003 #ifdef HAVE_conditional_execution
3004 sbitmap blocks;
3005 bool changed;
3006 #endif
3007 bool do_cleanup_cfg = false;
3008 bool do_rebuild_jump_labels = false;
3009
3010 /* Initialize the regsets we're going to use. */
3011 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3012 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3013 live = INITIALIZE_REG_SET (rs_heads[i]);
3014
3015 #ifdef HAVE_conditional_execution
3016 blocks = sbitmap_alloc (last_basic_block);
3017 sbitmap_zero (blocks);
3018 changed = false;
3019 #else
3020 count_or_remove_death_notes (NULL, 1);
3021 #endif
3022
3023 FOR_EACH_BB_REVERSE (bb)
3024 {
3025 struct propagate_block_info *pbi;
3026
3027 /* Indicate that all slots except the last hold invalid data. */
3028 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3029 peep2_insn_data[i].insn = NULL_RTX;
3030
3031 /* Indicate that the last slot contains live_after data. */
3032 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3033 peep2_current = MAX_INSNS_PER_PEEP2;
3034
3035 /* Start up propagation. */
3036 COPY_REG_SET (live, bb->global_live_at_end);
3037 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3038
3039 #ifdef HAVE_conditional_execution
3040 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3041 #else
3042 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3043 #endif
3044
3045 for (insn = BB_END (bb); ; insn = prev)
3046 {
3047 prev = PREV_INSN (insn);
3048 if (INSN_P (insn))
3049 {
3050 rtx try, before_try, x;
3051 int match_len;
3052 rtx note;
3053 bool was_call = false;
3054
3055 /* Record this insn. */
3056 if (--peep2_current < 0)
3057 peep2_current = MAX_INSNS_PER_PEEP2;
3058 peep2_insn_data[peep2_current].insn = insn;
3059 propagate_one_insn (pbi, insn);
3060 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3061
3062 /* Match the peephole. */
3063 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3064 if (try != NULL)
3065 {
3066 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3067 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3068 cfg-related call notes. */
3069 for (i = 0; i <= match_len; ++i)
3070 {
3071 int j;
3072 rtx old_insn, new_insn, note;
3073
3074 j = i + peep2_current;
3075 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3076 j -= MAX_INSNS_PER_PEEP2 + 1;
3077 old_insn = peep2_insn_data[j].insn;
3078 if (!CALL_P (old_insn))
3079 continue;
3080 was_call = true;
3081
3082 new_insn = try;
3083 while (new_insn != NULL_RTX)
3084 {
3085 if (CALL_P (new_insn))
3086 break;
3087 new_insn = NEXT_INSN (new_insn);
3088 }
3089
3090 if (new_insn == NULL_RTX)
3091 abort ();
3092
3093 CALL_INSN_FUNCTION_USAGE (new_insn)
3094 = CALL_INSN_FUNCTION_USAGE (old_insn);
3095
3096 for (note = REG_NOTES (old_insn);
3097 note;
3098 note = XEXP (note, 1))
3099 switch (REG_NOTE_KIND (note))
3100 {
3101 case REG_NORETURN:
3102 case REG_SETJMP:
3103 case REG_ALWAYS_RETURN:
3104 REG_NOTES (new_insn)
3105 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3106 XEXP (note, 0),
3107 REG_NOTES (new_insn));
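/* Fall through. */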
3108 default:
3109 /* Discard all other reg notes. */
3110 break;
3111 }
3112
3113 /* Croak if there is another call in the sequence. */
3114 while (++i <= match_len)
3115 {
3116 j = i + peep2_current;
3117 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3118 j -= MAX_INSNS_PER_PEEP2 + 1;
3119 old_insn = peep2_insn_data[j].insn;
3120 if (CALL_P (old_insn))
3121 abort ();
3122 }
3123 break;
3124 }
3125
3126 i = match_len + peep2_current;
3127 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3128 i -= MAX_INSNS_PER_PEEP2 + 1;
3129
3130 note = find_reg_note (peep2_insn_data[i].insn,
3131 REG_EH_REGION, NULL_RTX);
3132
3133 /* Replace the old sequence with the new. */
3134 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3135 INSN_LOCATOR (peep2_insn_data[i].insn));
3136 before_try = PREV_INSN (insn);
3137 delete_insn_chain (insn, peep2_insn_data[i].insn);
3138
3139 /* Re-insert the EH_REGION notes. */
3140 if (note || (was_call && nonlocal_goto_handler_labels))
3141 {
3142 edge eh_edge;
3143
3144 for (eh_edge = bb->succ; eh_edge
3145 ; eh_edge = eh_edge->succ_next)
3146 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3147 break;
3148
3149 for (x = try ; x != before_try ; x = PREV_INSN (x))
3150 if (CALL_P (x)
3151 || (flag_non_call_exceptions
3152 && may_trap_p (PATTERN (x))
3153 && !find_reg_note (x, REG_EH_REGION, NULL)))
3154 {
3155 if (note)
3156 REG_NOTES (x)
3157 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3158 XEXP (note, 0),
3159 REG_NOTES (x));
3160
3161 if (x != BB_END (bb) && eh_edge)
3162 {
3163 edge nfte, nehe;
3164 int flags;
3165
3166 nfte = split_block (bb, x);
3167 flags = (eh_edge->flags
3168 & (EDGE_EH | EDGE_ABNORMAL));
3169 if (CALL_P (x))
3170 flags |= EDGE_ABNORMAL_CALL;
3171 nehe = make_edge (nfte->src, eh_edge->dest,
3172 flags);
3173
3174 nehe->probability = eh_edge->probability;
3175 nfte->probability
3176 = REG_BR_PROB_BASE - nehe->probability;
3177
3178 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3179 #ifdef HAVE_conditional_execution
3180 SET_BIT (blocks, nfte->dest->index);
3181 changed = true;
3182 #endif
3183 bb = nfte->src;
3184 eh_edge = nehe;
3185 }
3186 }
3187
3188 /* The new sequence may have turned a possibly trapping insn
3189 into a non-trapping one. Zap any dummy outgoing edges. */
3190 do_cleanup_cfg |= purge_dead_edges (bb);
3191 }
3192
3193 #ifdef HAVE_conditional_execution
3194 /* With conditional execution, we cannot back up the
3195 live information so easily, since the conditional
3196 death data structures are not so self-contained.
3197 So record that we've made a modification to this
3198 block and update life information at the end. */
3199 SET_BIT (blocks, bb->index);
3200 changed = true;
3201
3202 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3203 peep2_insn_data[i].insn = NULL_RTX;
3204 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3205 #else
3206 /* Back up lifetime information past the end of the
3207 newly created sequence. */
3208 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3209 i = 0;
3210 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3211
3212 /* Update life information for the new sequence. */
3213 x = try;
3214 do
3215 {
3216 if (INSN_P (x))
3217 {
3218 if (--i < 0)
3219 i = MAX_INSNS_PER_PEEP2;
3220 peep2_insn_data[i].insn = x;
3221 propagate_one_insn (pbi, x);
3222 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3223 }
3224 x = PREV_INSN (x);
3225 }
3226 while (x != prev);
3227
3228 /* ??? Should verify that LIVE now matches what we
3229 had before the new sequence. */
3230
3231 peep2_current = i;
3232 #endif
3233
3234 /* If we generated a jump instruction, it won't have
3235 JUMP_LABEL set. Recompute after we're done. */
3236 for (x = try; x != before_try; x = PREV_INSN (x))
3237 if (JUMP_P (x))
3238 {
3239 do_rebuild_jump_labels = true;
3240 break;
3241 }
3242 }
3243 }
3244
3245 if (insn == BB_HEAD (bb))
3246 break;
3247 }
3248
3249 free_propagate_block_info (pbi);
3250 }
3251
3252 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3253 FREE_REG_SET (peep2_insn_data[i].live_before);
3254 FREE_REG_SET (live);
3255
3256 if (do_rebuild_jump_labels)
3257 rebuild_jump_labels (get_insns ());
3258
3259 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3260 we've changed global life since exception handlers are no longer
3261 reachable. */
3262 if (do_cleanup_cfg)
3263 {
3264 cleanup_cfg (0);
3265 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3266 }
3267 #ifdef HAVE_conditional_execution
3268 else
3269 {
3270 count_or_remove_death_notes (blocks, 1);
3271 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3272 }
3273 sbitmap_free (blocks);
3274 #endif
3275 }
3276 #endif /* HAVE_peephole2 */
3277
3278 /* Common predicates for use with define_bypass. */
3279
3280 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3281 data, not the address operand(s) of the store. IN_INSN must be
3282 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3283 SETs inside. */
3284
3285 int
3286 store_data_bypass_p (rtx out_insn, rtx in_insn)
3287 {
3288 rtx out_set, in_set;
3289
3290 in_set = single_set (in_insn);
3291 if (! in_set)
3292 abort ();
3293
3294 if (!MEM_P (SET_DEST (in_set)))
3295 return false;
3296
3297 out_set = single_set (out_insn);
3298 if (out_set)
3299 {
3300 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3301 return false;
3302 }
3303 else
3304 {
3305 rtx out_pat;
3306 int i;
3307
3308 out_pat = PATTERN (out_insn);
3309 if (GET_CODE (out_pat) != PARALLEL)
3310 abort ();
3311
3312 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3313 {
3314 rtx exp = XVECEXP (out_pat, 0, i);
3315
3316 if (GET_CODE (exp) == CLOBBER)
3317 continue;
3318
3319 if (GET_CODE (exp) != SET)
3320 abort ();
3321
3322 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3323 return false;
3324 }
3325 }
3326
3327 return true;
3328 }
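/* Meant to be named as the guard of a define_bypass in a scheduler
   description, e.g. with hypothetical reservation names:

       (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")  */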
3329
3330 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3331 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a
3332 single set or multiple sets; IN_INSN should be single_set for truth, but for
3333 convenience of insn categorization it may be any JUMP or CALL insn. */
3334
3335 int
3336 if_test_bypass_p (rtx out_insn, rtx in_insn)
3337 {
3338 rtx out_set, in_set;
3339
3340 in_set = single_set (in_insn);
3341 if (! in_set)
3342 {
3343 if (JUMP_P (in_insn) || CALL_P (in_insn))
3344 return false;
3345 abort ();
3346 }
3347
3348 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3349 return false;
3350 in_set = SET_SRC (in_set);
3351
3352 out_set = single_set (out_insn);
3353 if (out_set)
3354 {
3355 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3356 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3357 return false;
3358 }
3359 else
3360 {
3361 rtx out_pat;
3362 int i;
3363
3364 out_pat = PATTERN (out_insn);
3365 if (GET_CODE (out_pat) != PARALLEL)
3366 abort ();
3367
3368 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3369 {
3370 rtx exp = XVECEXP (out_pat, 0, i);
3371
3372 if (GET_CODE (exp) == CLOBBER)
3373 continue;
3374
3375 if (GET_CODE (exp) != SET)
3376 abort ();
3377
3378 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3379 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3380 return false;
3381 }
3382 }
3383
3384 return true;
3385 }
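/* Likewise usable as a define_bypass guard, e.g. with hypothetical
   names:

       (define_bypass 1 "my_compare" "my_cmove" "if_test_bypass_p")  */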