gcc/recog.c
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
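
/* An illustrative usage sketch, kept out of the build with #if 0:
   reject an insn that no pattern in the machine description matches.
   INSN is a hypothetical insn obtained elsewhere.  */
#if 0
static int
insn_is_recognizable (rtx insn)
{
  /* The insn code is cached in INSN_CODE, so repeated queries are
     cheap; a negative code means recognition failed.  */
  return recog_memoized_1 (insn) >= 0;
}
#endif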
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t *) xrealloc (changes,
                               sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
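
/* An illustrative usage sketch, kept out of the build with #if 0:
   tentatively replace the source of a single-set insn with zero, as
   one grouped change, and let the recognizer decide.  INSN is a
   hypothetical insn.  */
#if 0
static int
try_zero_source (rtx insn)
{
  rtx set = single_set (insn);

  if (set == 0)
    return 0;

  /* Queue the change (IN_GROUP = 1), then validate the whole group;
     on failure the original rtl is restored automatically.  */
  validate_change (insn, &SET_SRC (set), const0_rtx, 1);
  return apply_change_group ();
}
#endif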

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
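
/* An illustrative sketch, kept out of the build with #if 0, of the
   save/rollback idiom used by passes such as combine: record how many
   changes are already validated, queue more, and retract only the new
   ones if a caller-specific check fails before the group is applied.
   extra_check_ok is a hypothetical predicate.  */
#if 0
static int
try_paired_replacement (rtx insn, rtx *loc1, rtx new1, rtx *loc2, rtx new2)
{
  int old_num = num_validated_changes ();

  validate_change (insn, loc1, new1, 1);
  validate_change (insn, loc2, new2, 1);

  if (! extra_check_ok (insn))  /* hypothetical */
    {
      cancel_changes (old_num);
      return 0;
    }

  /* apply_change_group itself cancels everything on failure.  */
  return apply_change_group ();
}
#endif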

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
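
/* An illustrative sketch, kept out of the build with #if 0: the group
   variants let substitutions in several insns succeed or fail as a
   unit.  INSN1 and INSN2 are hypothetical insns.  */
#if 0
static int
replace_in_two_insns (rtx from, rtx to, rtx insn1, rtx insn2)
{
  /* Queue replacements in both insns, then validate them together:
     either both insns still recognize, or neither is changed.  */
  validate_replace_rtx_group (from, to, insn1);
  validate_replace_rtx_group (from, to, insn2);
  return apply_change_group ();
}
#endif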

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */
int
validate_replace_src (rtx from, rtx to, rtx insn)
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
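
/* An illustrative sketch, kept out of the build with #if 0: locate and
   rewrite the sole use of a register set by INSN.  NEW_SRC is a
   hypothetical replacement expression.  */
#if 0
static int
replace_single_use (rtx dest, rtx insn, rtx new_src)
{
  rtx use_insn;
  rtx *usep = find_single_use (dest, insn, &use_insn);

  if (usep == 0)
    return 0;

  /* *USEP is the innermost expression mentioning DEST; validate the
     replacement against USE_INSN's pattern.  */
  return validate_change (use_insn, usep, new_src, 0);
}
#endif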
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
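
/* An illustrative sketch, kept out of the build with #if 0: the
   predicates in this family are all called with an operand and the
   mode the context requires.  */
#if 0
static int
op_is_general_si (rtx op)
{
  /* Nonzero if OP is a valid SImode register, memory reference, or
     constant, as a (match_operand:SI ... "general_operand") would
     test in a machine description.  */
  return general_operand (op, SImode);
}
#endif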
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
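
/* An illustrative sketch, kept out of the build with #if 0: build the
   rtl shape push_operand accepts in the unrounded case, e.g.
   (mem:SI (pre_dec:SI (reg sp))) on a downward-growing stack.  The
   result is target-dependent: if PUSH_ROUNDING pads SImode, the
   PRE_MODIFY form above is required instead.  */
#if 0
static int
push_operand_example (void)
{
  rtx addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx_MEM (SImode, addr);

  return push_operand (mem, SImode);
}
#endif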

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
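
/* An illustrative sketch, kept out of the build with #if 0, mirroring
   check_asm_operands above: pull the operands and constraints out of
   an asm body once asm_noperands says how many there are.  */
#if 0
static void
walk_asm_operands (rtx body)
{
  int noperands = asm_noperands (body);
  rtx *operands;
  const char **constraints;
  int i;

  if (noperands <= 0)
    return;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));
  decode_asm_operands (body, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      /* operands[i] and constraints[i] now describe operand I,
         outputs first, then inputs.  */
    }
}
#endif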
1582
1583 /* Check if an asm_operand matches it's constraints.
1584 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1585
1586 int
1587 asm_operand_ok (rtx op, const char *constraint)
1588 {
1589 int result = 0;
1590
1591 /* Use constrain_operands after reload. */
1592 if (reload_completed)
1593 abort ();
1594
1595 while (*constraint)
1596 {
1597 char c = *constraint;
1598 int len;
1599 switch (c)
1600 {
1601 case ',':
1602 constraint++;
1603 continue;
1604 case '=':
1605 case '+':
1606 case '*':
1607 case '%':
1608 case '!':
1609 case '#':
1610 case '&':
1611 case '?':
1612 break;
1613
1614 case '0': case '1': case '2': case '3': case '4':
1615 case '5': case '6': case '7': case '8': case '9':
1616 /* For best results, our caller should have given us the
1617 proper matching constraint, but we can't actually fail
1618 the check if they didn't. Indicate that results are
1619 inconclusive. */
1620 do
1621 constraint++;
1622 while (ISDIGIT (*constraint));
1623 if (! result)
1624 result = -1;
1625 continue;
1626
1627 case 'p':
1628 if (address_operand (op, VOIDmode))
1629 result = 1;
1630 break;
1631
1632 case 'm':
1633 case 'V': /* non-offsettable */
1634 if (memory_operand (op, VOIDmode))
1635 result = 1;
1636 break;
1637
1638 case 'o': /* offsettable */
1639 if (offsettable_nonstrict_memref_p (op))
1640 result = 1;
1641 break;
1642
1643 case '<':
1644 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1645 excepting those that expand_call created. Further, on some
1646 machines which do not have generalized auto inc/dec, an inc/dec
1647 is not a memory_operand.
1648
1649 Match any memory and hope things are resolved after reload. */
1650
1651 if (GET_CODE (op) == MEM
1652 && (1
1653 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1654 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1655 result = 1;
1656 break;
1657
1658 case '>':
1659 if (GET_CODE (op) == MEM
1660 && (1
1661 || GET_CODE (XEXP (op, 0)) == PRE_INC
1662 || GET_CODE (XEXP (op, 0)) == POST_INC))
1663 result = 1;
1664 break;
1665
1666 case 'E':
1667 case 'F':
1668 if (GET_CODE (op) == CONST_DOUBLE
1669 || (GET_CODE (op) == CONST_VECTOR
1670 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1671 result = 1;
1672 break;
1673
1674 case 'G':
1675 if (GET_CODE (op) == CONST_DOUBLE
1676 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1677 result = 1;
1678 break;
1679 case 'H':
1680 if (GET_CODE (op) == CONST_DOUBLE
1681 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1682 result = 1;
1683 break;
1684
1685 case 's':
1686 if (GET_CODE (op) == CONST_INT
1687 || (GET_CODE (op) == CONST_DOUBLE
1688 && GET_MODE (op) == VOIDmode))
1689 break;
1690 /* FALLTHRU */
1691
1692 case 'i':
1693 if (CONSTANT_P (op)
1694 #ifdef LEGITIMATE_PIC_OPERAND_P
1695 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1696 #endif
1697 )
1698 result = 1;
1699 break;
1700
1701 case 'n':
1702 if (GET_CODE (op) == CONST_INT
1703 || (GET_CODE (op) == CONST_DOUBLE
1704 && GET_MODE (op) == VOIDmode))
1705 result = 1;
1706 break;
1707
1708 case 'I':
1709 if (GET_CODE (op) == CONST_INT
1710 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1711 result = 1;
1712 break;
1713 case 'J':
1714 if (GET_CODE (op) == CONST_INT
1715 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1716 result = 1;
1717 break;
1718 case 'K':
1719 if (GET_CODE (op) == CONST_INT
1720 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1721 result = 1;
1722 break;
1723 case 'L':
1724 if (GET_CODE (op) == CONST_INT
1725 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1726 result = 1;
1727 break;
1728 case 'M':
1729 if (GET_CODE (op) == CONST_INT
1730 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1731 result = 1;
1732 break;
1733 case 'N':
1734 if (GET_CODE (op) == CONST_INT
1735 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1736 result = 1;
1737 break;
1738 case 'O':
1739 if (GET_CODE (op) == CONST_INT
1740 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1741 result = 1;
1742 break;
1743 case 'P':
1744 if (GET_CODE (op) == CONST_INT
1745 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1746 result = 1;
1747 break;
1748
1749 case 'X':
1750 result = 1;
1751
1752 case 'g':
1753 if (general_operand (op, VOIDmode))
1754 result = 1;
1755 break;
1756
1757 default:
1758 /* For all other letters, we first check for a register class,
1759 otherwise it is an EXTRA_CONSTRAINT. */
1760 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1761 {
1762 case 'r':
1763 if (GET_MODE (op) == BLKmode)
1764 break;
1765 if (register_operand (op, VOIDmode))
1766 result = 1;
1767 }
1768 #ifdef EXTRA_CONSTRAINT_STR
1769 if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1770 result = 1;
1771 if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1772 {
1773 /* Every memory operand can be reloaded to fit. */
1774 if (memory_operand (op, VOIDmode))
1775 result = 1;
1776 }
1777 if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1778 {
1779 /* Every address operand can be reloaded to fit. */
1780 if (address_operand (op, VOIDmode))
1781 result = 1;
1782 }
1783 #endif
1784 break;
1785 }
1786 len = CONSTRAINT_LEN (c, constraint);
1787 do
1788 constraint++;
1789 while (--len && *constraint);
1790 if (len)
1791 return 0;
1792 }
1793
1794 return result;
1795 }
1796 \f
1797 /* Given an rtx *P, if it is a sum containing an integer constant term,
1798 return the location (type rtx *) of the pointer to that constant term.
1799 Otherwise, return a null pointer. */
1800
1801 rtx *
1802 find_constant_term_loc (rtx *p)
1803 {
1804 rtx *tem;
1805 enum rtx_code code = GET_CODE (*p);
1806
1807 /* If *P IS such a constant term, P is its location. */
1808
1809 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1810 || code == CONST)
1811 return p;
1812
1813 /* Otherwise, if not a sum, it has no constant term. */
1814
1815 if (GET_CODE (*p) != PLUS)
1816 return 0;
1817
1818 /* If one of the summands is constant, return its location. */
1819
1820 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1821 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1822 return p;
1823
1824 /* Otherwise, check each summand for containing a constant term. */
1825
1826 if (XEXP (*p, 0) != 0)
1827 {
1828 tem = find_constant_term_loc (&XEXP (*p, 0));
1829 if (tem != 0)
1830 return tem;
1831 }
1832
1833 if (XEXP (*p, 1) != 0)
1834 {
1835 tem = find_constant_term_loc (&XEXP (*p, 1));
1836 if (tem != 0)
1837 return tem;
1838 }
1839
1840 return 0;
1841 }
1842 \f
1843 /* Return 1 if OP is a memory reference
1844 whose address contains no side effects
1845 and remains valid after the addition
1846 of a positive integer less than the
1847 size of the object being referenced.
1848
1849 We assume that the original address is valid and do not check it.
1850
1851 This uses strict_memory_address_p as a subroutine, so
1852 don't use it before reload. */
1853
1854 int
1855 offsettable_memref_p (rtx op)
1856 {
1857 return ((GET_CODE (op) == MEM)
1858 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1859 }
1860
1861 /* Similar, but don't require a strictly valid mem ref:
1862 consider pseudo-regs valid as index or base regs. */
1863
1864 int
1865 offsettable_nonstrict_memref_p (rtx op)
1866 {
1867 return ((GET_CODE (op) == MEM)
1868 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1869 }
1870
1871 /* Return 1 if Y is a memory address which contains no side effects
1872 and would remain valid after the addition of a positive integer
1873 less than the size of that mode.
1874
1875 We assume that the original address is valid and do not check it.
1876 We do check that it is valid for narrower modes.
1877
1878 If STRICTP is nonzero, we require a strictly valid address,
1879 for the sake of use in reload.c. */
1880
1881 int
1882 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1883 {
1884 enum rtx_code ycode = GET_CODE (y);
1885 rtx z;
1886 rtx y1 = y;
1887 rtx *y2;
1888 int (*addressp) (enum machine_mode, rtx) =
1889 (strictp ? strict_memory_address_p : memory_address_p);
1890 unsigned int mode_sz = GET_MODE_SIZE (mode);
1891
1892 if (CONSTANT_ADDRESS_P (y))
1893 return 1;
1894
1895 /* Adjusting an offsettable address involves changing to a narrower mode.
1896 Make sure that's OK. */
1897
1898 if (mode_dependent_address_p (y))
1899 return 0;
1900
1901 /* ??? How much offset does an offsettable BLKmode reference need?
1902 Clearly that depends on the situation in which it's being used.
1903 However, the current situation in which we test 0xffffffff is
1904 less than ideal. Caveat user. */
1905 if (mode_sz == 0)
1906 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1907
1908 /* If the expression contains a constant term,
1909 see if it remains valid when max possible offset is added. */
1910
1911 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1912 {
1913 int good;
1914
1915 y1 = *y2;
1916 *y2 = plus_constant (*y2, mode_sz - 1);
1917 /* Use QImode because an odd displacement may be automatically invalid
1918 for any wider mode. But it should be valid for a single byte. */
1919 good = (*addressp) (QImode, y);
1920
1921 /* In any case, restore old contents of memory. */
1922 *y2 = y1;
1923 return good;
1924 }
1925
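  /* An autoincrement address is never offsettable, since it has a
     side effect.  */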
1926 if (GET_RTX_CLASS (ycode) == 'a')
1927 return 0;
1928
1929 /* The offset added here is chosen as the maximum offset that
1930 any instruction could need to add when operating on something
1931 of the specified mode. We assume that if Y and Y+c are
1932 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1933 go inside a LO_SUM here, so we do so as well. */
1934 if (GET_CODE (y) == LO_SUM
1935 && mode != BLKmode
1936 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1937 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1938 plus_constant (XEXP (y, 1), mode_sz - 1));
1939 else
1940 z = plus_constant (y, mode_sz - 1);
1941
1942 /* Use QImode because an odd displacement may be automatically invalid
1943 for any wider mode. But it should be valid for a single byte. */
1944 return (*addressp) (QImode, z);
1945 }
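/* For example, testing (plus (reg) (const_int 100)) for SImode
   offsettability temporarily rewrites the displacement to 103
   (= 100 + GET_MODE_SIZE (SImode) - 1) and checks whether the result
   is a valid QImode address.  */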
1946
1947 /* Return 1 if ADDR is an address-expression whose effect depends
1948 on the mode of the memory reference it is used in.
1949
1950 Autoincrement addressing is a typical example of mode-dependence
1951 because the amount of the increment depends on the mode. */
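/* For example, (post_inc (reg)) is mode-dependent on most targets: the
   register is advanced by the size of the mode of the containing MEM,
   e.g. by 4 for SImode but by 2 for HImode.  */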
1952
1953 int
1954 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1955 {
1956 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1957 return 0;
1958 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1959 win: ATTRIBUTE_UNUSED_LABEL
1960 return 1;
1961 }
1962 \f
1963 /* Like extract_insn, but save the extracted insn and do not extract it
1964 again when called again for the same insn, expecting that recog_data
1965 still contains valid information.  This is used primarily by the
1966 gen_attr infrastructure, which extracts the same insn over and over. */
1967 void
1968 extract_insn_cached (rtx insn)
1969 {
1970 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1971 return;
1972 extract_insn (insn);
1973 recog_data.insn = insn;
1974 }
1975 /* Do a cached extract_insn and constrain_operands, complaining about
1976 failures.  Used by insn_attrtab. */
1977 void
1978 extract_constrain_insn_cached (rtx insn)
1979 {
1980 extract_insn_cached (insn);
1981 if (which_alternative == -1
1982 && !constrain_operands (reload_completed))
1983 fatal_insn_not_found (insn);
1984 }
1985 /* Do a cached constrain_operands; reuse a previously matched alternative. */
1986 int
1987 constrain_operands_cached (int strict)
1988 {
1989 if (which_alternative == -1)
1990 return constrain_operands (strict);
1991 else
1992 return 1;
1993 }
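/* A typical attribute-computation sequence (a sketch; the real callers
   are in the generated insn-attrtab.c) is:

     extract_constrain_insn_cached (insn);
     ... inspect recog_data.operand[] and which_alternative ...

   Repeated calls for the same insn then cost almost nothing.  */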
1994 \f
1995 /* Analyze INSN and fill in recog_data. */
1996
1997 void
1998 extract_insn (rtx insn)
1999 {
2000 int i;
2001 int icode;
2002 int noperands;
2003 rtx body = PATTERN (insn);
2004
2005 recog_data.insn = NULL;
2006 recog_data.n_operands = 0;
2007 recog_data.n_alternatives = 0;
2008 recog_data.n_dups = 0;
2009 which_alternative = -1;
2010
2011 switch (GET_CODE (body))
2012 {
2013 case USE:
2014 case CLOBBER:
2015 case ASM_INPUT:
2016 case ADDR_VEC:
2017 case ADDR_DIFF_VEC:
2018 return;
2019
2020 case SET:
2021 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2022 goto asm_insn;
2023 else
2024 goto normal_insn;
2025 case PARALLEL:
2026 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2027 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2028 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2029 goto asm_insn;
2030 else
2031 goto normal_insn;
2032 case ASM_OPERANDS:
2033 asm_insn:
2034 recog_data.n_operands = noperands = asm_noperands (body);
2035 if (noperands >= 0)
2036 {
2037 /* This insn is an `asm' with operands. */
2038
2039 /* expand_asm_operands makes sure there aren't too many operands. */
2040 if (noperands > MAX_RECOG_OPERANDS)
2041 abort ();
2042
2043 /* Now get the operand values and constraints out of the insn. */
2044 decode_asm_operands (body, recog_data.operand,
2045 recog_data.operand_loc,
2046 recog_data.constraints,
2047 recog_data.operand_mode);
2048 if (noperands > 0)
2049 {
2050 const char *p = recog_data.constraints[0];
2051 recog_data.n_alternatives = 1;
2052 while (*p)
2053 recog_data.n_alternatives += (*p++ == ',');
2054 }
2055 break;
2056 }
2057 fatal_insn_not_found (insn);
2058
2059 default:
2060 normal_insn:
2061 /* Ordinary insn: recognize it, get the operands via insn_extract
2062 and get the constraints. */
2063
2064 icode = recog_memoized (insn);
2065 if (icode < 0)
2066 fatal_insn_not_found (insn);
2067
2068 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2069 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2070 recog_data.n_dups = insn_data[icode].n_dups;
2071
2072 insn_extract (insn);
2073
2074 for (i = 0; i < noperands; i++)
2075 {
2076 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2077 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2078 /* VOIDmode match_operands get their mode from the real operand. */
2079 if (recog_data.operand_mode[i] == VOIDmode)
2080 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2081 }
2082 }
2083 for (i = 0; i < noperands; i++)
2084 recog_data.operand_type[i]
2085 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2086 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2087 : OP_IN);
2088
2089 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2090 abort ();
2091 }
2092
2093 /* After calling extract_insn, you can use this function to extract some
2094 information from the constraint strings into a more usable form.
2095 The collected data is stored in recog_op_alt. */
2096 void
2097 preprocess_constraints (void)
2098 {
2099 int i;
2100
2101 memset (recog_op_alt, 0, sizeof recog_op_alt);
2102 for (i = 0; i < recog_data.n_operands; i++)
2103 {
2104 int j;
2105 struct operand_alternative *op_alt;
2106 const char *p = recog_data.constraints[i];
2107
2108 op_alt = recog_op_alt[i];
2109
2110 for (j = 0; j < recog_data.n_alternatives; j++)
2111 {
2112 op_alt[j].class = NO_REGS;
2113 op_alt[j].constraint = p;
2114 op_alt[j].matches = -1;
2115 op_alt[j].matched = -1;
2116
2117 if (*p == '\0' || *p == ',')
2118 {
2119 op_alt[j].anything_ok = 1;
2120 continue;
2121 }
2122
2123 for (;;)
2124 {
2125 char c = *p;
2126 if (c == '#')
2127 do
2128 c = *++p;
2129 while (c != ',' && c != '\0');
2130 if (c == ',' || c == '\0')
2131 {
2132 p++;
2133 break;
2134 }
2135
2136 switch (c)
2137 {
2138 case '=': case '+': case '*': case '%':
2139 case 'E': case 'F': case 'G': case 'H':
2140 case 's': case 'i': case 'n':
2141 case 'I': case 'J': case 'K': case 'L':
2142 case 'M': case 'N': case 'O': case 'P':
2143 /* These don't say anything we care about. */
2144 break;
2145
2146 case '?':
2147 op_alt[j].reject += 6;
2148 break;
2149 case '!':
2150 op_alt[j].reject += 600;
2151 break;
2152 case '&':
2153 op_alt[j].earlyclobber = 1;
2154 break;
2155
2156 case '0': case '1': case '2': case '3': case '4':
2157 case '5': case '6': case '7': case '8': case '9':
2158 {
2159 char *end;
2160 op_alt[j].matches = strtoul (p, &end, 10);
2161 recog_op_alt[op_alt[j].matches][j].matched = i;
2162 p = end;
2163 }
2164 continue;
2165
2166 case 'm':
2167 op_alt[j].memory_ok = 1;
2168 break;
2169 case '<':
2170 op_alt[j].decmem_ok = 1;
2171 break;
2172 case '>':
2173 op_alt[j].incmem_ok = 1;
2174 break;
2175 case 'V':
2176 op_alt[j].nonoffmem_ok = 1;
2177 break;
2178 case 'o':
2179 op_alt[j].offmem_ok = 1;
2180 break;
2181 case 'X':
2182 op_alt[j].anything_ok = 1;
2183 break;
2184
2185 case 'p':
2186 op_alt[j].is_address = 1;
2187 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2188 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2189 break;
2190
2191 case 'g': case 'r':
2192 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2193 break;
2194
2195 default:
2196 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2197 {
2198 op_alt[j].memory_ok = 1;
2199 break;
2200 }
2201 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2202 {
2203 op_alt[j].is_address = 1;
2204 op_alt[j].class
2205 = (reg_class_subunion
2206 [(int) op_alt[j].class]
2207 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2208 break;
2209 }
2210
2211 op_alt[j].class
2212 = (reg_class_subunion
2213 [(int) op_alt[j].class]
2214 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2215 break;
2216 }
2217 p += CONSTRAINT_LEN (c, p);
2218 }
2219 }
2220 }
2221 }
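/* For example, an operand whose constraint string is "=r,m" gets two
   operand_alternative entries: the first with class GENERAL_REGS, the
   second with memory_ok set.  (Illustrative only.)  */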
2222
2223 /* Check the operands of an insn against the insn's operand constraints
2224 and return 1 if they are valid.
2225 The information about the insn's operands, constraints, operand modes
2226 etc. is obtained from the global variables set up by extract_insn.
2227
2228 WHICH_ALTERNATIVE is set to a number which indicates which
2229 alternative of constraints was matched: 0 for the first alternative,
2230 1 for the next, etc.
2231
2232 In addition, when two operands are required to match
2233 and it happens that the output operand is (reg) while the
2234 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2235 make the output operand look like the input.
2236 This is because the output operand is the one the template will print.
2237
2238 This is used in final, just before printing the assembler code and by
2239 the routines that determine an insn's attribute.
2240
2241 If STRICT is a positive value, it means that we have been
2242 called after reload has been completed. In that case, we must
2243 do all checks strictly. If it is zero, it means that we have been called
2244 before reload has completed. In that case, we first try to see if we can
2245 find an alternative that matches strictly. If not, we try again, this
2246 time assuming that reload will fix up the insn. This provides a "best
2247 guess" for the alternative and is used to compute attributes of insns prior
2248 to reload. A negative value of STRICT is used for this internal call. */
2249
2250 struct funny_match
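/* Records a pair of operands matched via a digit constraint whose
   rtx'es matched only loosely (operands_match_p returned 2): operand
   OTHER must later be overwritten with operand THIS so that the output
   operand is printed like the input; see the pre-inc/pre-dec note
   above.  */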
2251 {
2252 int this, other;
2253 };
2254
2255 int
2256 constrain_operands (int strict)
2257 {
2258 const char *constraints[MAX_RECOG_OPERANDS];
2259 int matching_operands[MAX_RECOG_OPERANDS];
2260 int earlyclobber[MAX_RECOG_OPERANDS];
2261 int c;
2262
2263 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2264 int funny_match_index;
2265
2266 which_alternative = 0;
2267 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2268 return 1;
2269
2270 for (c = 0; c < recog_data.n_operands; c++)
2271 {
2272 constraints[c] = recog_data.constraints[c];
2273 matching_operands[c] = -1;
2274 }
2275
2276 do
2277 {
2278 int opno;
2279 int lose = 0;
2280 funny_match_index = 0;
2281
2282 for (opno = 0; opno < recog_data.n_operands; opno++)
2283 {
2284 rtx op = recog_data.operand[opno];
2285 enum machine_mode mode = GET_MODE (op);
2286 const char *p = constraints[opno];
2287 int offset = 0;
2288 int win = 0;
2289 int val;
2290 int len;
2291
2292 earlyclobber[opno] = 0;
2293
2294 /* A unary operator may be accepted by the predicate, but it
2295 is irrelevant for matching constraints. */
2296 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2297 op = XEXP (op, 0);
2298
2299 if (GET_CODE (op) == SUBREG)
2300 {
2301 if (GET_CODE (SUBREG_REG (op)) == REG
2302 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2303 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2304 GET_MODE (SUBREG_REG (op)),
2305 SUBREG_BYTE (op),
2306 GET_MODE (op));
2307 op = SUBREG_REG (op);
2308 }
2309
2310 /* An empty constraint or empty alternative
2311 allows anything which matched the pattern. */
2312 if (*p == 0 || *p == ',')
2313 win = 1;
2314
2315 do
2316 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2317 {
2318 case '\0':
2319 len = 0;
2320 break;
2321 case ',':
2322 c = '\0';
2323 break;
2324
2325 case '?': case '!': case '*': case '%':
2326 case '=': case '+':
2327 break;
2328
2329 case '#':
2330 /* Ignore rest of this alternative as far as
2331 constraint checking is concerned. */
2332 do
2333 p++;
2334 while (*p && *p != ',');
2335 len = 0;
2336 break;
2337
2338 case '&':
2339 earlyclobber[opno] = 1;
2340 break;
2341
2342 case '0': case '1': case '2': case '3': case '4':
2343 case '5': case '6': case '7': case '8': case '9':
2344 {
2345 /* This operand must be the same as a previous one.
2346 This kind of constraint is used for instructions such
2347 as add when they take only two operands.
2348
2349 Note that the lower-numbered operand is passed first.
2350
2351 If we are not testing strictly, assume that this
2352 constraint will be satisfied. */
2353
2354 char *end;
2355 int match;
2356
2357 match = strtoul (p, &end, 10);
2358 p = end;
2359
2360 if (strict < 0)
2361 val = 1;
2362 else
2363 {
2364 rtx op1 = recog_data.operand[match];
2365 rtx op2 = recog_data.operand[opno];
2366
2367 /* A unary operator may be accepted by the predicate,
2368 but it is irrelevant for matching constraints. */
2369 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2370 op1 = XEXP (op1, 0);
2371 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2372 op2 = XEXP (op2, 0);
2373
2374 val = operands_match_p (op1, op2);
2375 }
2376
2377 matching_operands[opno] = match;
2378 matching_operands[match] = opno;
2379
2380 if (val != 0)
2381 win = 1;
2382
2383 /* If output is *x and input is *--x, arrange later
2384 to change the output to *--x as well, since the
2385 output op is the one that will be printed. */
2386 if (val == 2 && strict > 0)
2387 {
2388 funny_match[funny_match_index].this = opno;
2389 funny_match[funny_match_index++].other = match;
2390 }
2391 }
2392 len = 0;
2393 break;
2394
2395 case 'p':
2396 /* p is used for address_operands. When we are called by
2397 gen_reload, no one will have checked that the address is
2398 strictly valid, i.e., that all pseudos requiring hard regs
2399 have gotten them. */
2400 if (strict <= 0
2401 || (strict_memory_address_p (recog_data.operand_mode[opno],
2402 op)))
2403 win = 1;
2404 break;
2405
2406 /* No need to check general_operand again;
2407 it was done in insn-recog.c. */
2408 case 'g':
2409 /* Anything goes unless it is a REG and really has a hard reg
2410 but the hard reg is not in the class GENERAL_REGS. */
2411 if (strict < 0
2412 || GENERAL_REGS == ALL_REGS
2413 || GET_CODE (op) != REG
2414 || (reload_in_progress
2415 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2416 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2417 win = 1;
2418 break;
2419
2420 case 'X':
2421 /* This is used for a MATCH_SCRATCH in the cases when
2422 we don't actually need anything. So anything goes
2423 any time. */
2424 win = 1;
2425 break;
2426
2427 case 'm':
2428 if (GET_CODE (op) == MEM
2429 /* Before reload, accept what reload can turn into mem. */
2430 || (strict < 0 && CONSTANT_P (op))
2431 /* During reload, accept a pseudo.  */
2432 || (reload_in_progress && GET_CODE (op) == REG
2433 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2434 win = 1;
2435 break;
2436
2437 case '<':
2438 if (GET_CODE (op) == MEM
2439 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2440 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2441 win = 1;
2442 break;
2443
2444 case '>':
2445 if (GET_CODE (op) == MEM
2446 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2447 || GET_CODE (XEXP (op, 0)) == POST_INC))
2448 win = 1;
2449 break;
2450
2451 case 'E':
2452 case 'F':
2453 if (GET_CODE (op) == CONST_DOUBLE
2454 || (GET_CODE (op) == CONST_VECTOR
2455 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2456 win = 1;
2457 break;
2458
2459 case 'G':
2460 case 'H':
2461 if (GET_CODE (op) == CONST_DOUBLE
2462 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2463 win = 1;
2464 break;
2465
2466 case 's':
2467 if (GET_CODE (op) == CONST_INT
2468 || (GET_CODE (op) == CONST_DOUBLE
2469 && GET_MODE (op) == VOIDmode))
2470 break;
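          /* An explicit integer constant was rejected just above; any
             other constant acceptable to `i' also satisfies `s', so
             fall through.  */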
2471 case 'i':
2472 if (CONSTANT_P (op))
2473 win = 1;
2474 break;
2475
2476 case 'n':
2477 if (GET_CODE (op) == CONST_INT
2478 || (GET_CODE (op) == CONST_DOUBLE
2479 && GET_MODE (op) == VOIDmode))
2480 win = 1;
2481 break;
2482
2483 case 'I':
2484 case 'J':
2485 case 'K':
2486 case 'L':
2487 case 'M':
2488 case 'N':
2489 case 'O':
2490 case 'P':
2491 if (GET_CODE (op) == CONST_INT
2492 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2493 win = 1;
2494 break;
2495
2496 case 'V':
2497 if (GET_CODE (op) == MEM
2498 && ((strict > 0 && ! offsettable_memref_p (op))
2499 || (strict < 0
2500 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2501 || (reload_in_progress
2502 && !(GET_CODE (op) == REG
2503 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2504 win = 1;
2505 break;
2506
2507 case 'o':
2508 if ((strict > 0 && offsettable_memref_p (op))
2509 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2510 /* Before reload, accept what reload can handle. */
2511 || (strict < 0
2512 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2513 /* During reload, accept a pseudo.  */
2514 || (reload_in_progress && GET_CODE (op) == REG
2515 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2516 win = 1;
2517 break;
2518
2519 default:
2520 {
2521 enum reg_class class;
2522
2523 class = (c == 'r'
2524 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2525 if (class != NO_REGS)
2526 {
2527 if (strict < 0
2528 || (strict == 0
2529 && GET_CODE (op) == REG
2530 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2531 || (strict == 0 && GET_CODE (op) == SCRATCH)
2532 || (GET_CODE (op) == REG
2533 && reg_fits_class_p (op, class, offset, mode)))
2534 win = 1;
2535 }
2536 #ifdef EXTRA_CONSTRAINT_STR
2537 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2538 win = 1;
2539
2540 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2541 {
2542 /* Every memory operand can be reloaded to fit. */
2543 if (strict < 0 && GET_CODE (op) == MEM)
2544 win = 1;
2545
2546 /* Before reload, accept what reload can turn into mem. */
2547 if (strict < 0 && CONSTANT_P (op))
2548 win = 1;
2549
2550 /* During reload, accept a pseudo.  */
2551 if (reload_in_progress && GET_CODE (op) == REG
2552 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2553 win = 1;
2554 }
2555 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2556 {
2557 /* Every address operand can be reloaded to fit. */
2558 if (strict < 0)
2559 win = 1;
2560 }
2561 #endif
2562 break;
2563 }
2564 }
2565 while (p += len, c);
2566
2567 constraints[opno] = p;
2568 /* If this operand did not win somehow,
2569 this alternative loses. */
2570 if (! win)
2571 lose = 1;
2572 }
2573 /* This alternative won; the operands are ok.
2574 Change whichever operands this alternative says to change. */
2575 if (! lose)
2576 {
2577 int opno, eopno;
2578
2579 /* See if any earlyclobber operand conflicts with some other
2580 operand. */
2581
2582 if (strict > 0)
2583 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2584 /* Ignore earlyclobber operands now in memory,
2585 because we would often report failure when we have
2586 two memory operands, one of which was formerly a REG. */
2587 if (earlyclobber[eopno]
2588 && GET_CODE (recog_data.operand[eopno]) == REG)
2589 for (opno = 0; opno < recog_data.n_operands; opno++)
2590 if ((GET_CODE (recog_data.operand[opno]) == MEM
2591 || recog_data.operand_type[opno] != OP_OUT)
2592 && opno != eopno
2593 /* Ignore things like match_operator operands. */
2594 && *recog_data.constraints[opno] != 0
2595 && ! (matching_operands[opno] == eopno
2596 && operands_match_p (recog_data.operand[opno],
2597 recog_data.operand[eopno]))
2598 && ! safe_from_earlyclobber (recog_data.operand[opno],
2599 recog_data.operand[eopno]))
2600 lose = 1;
2601
2602 if (! lose)
2603 {
2604 while (--funny_match_index >= 0)
2605 {
2606 recog_data.operand[funny_match[funny_match_index].other]
2607 = recog_data.operand[funny_match[funny_match_index].this];
2608 }
2609
2610 return 1;
2611 }
2612 }
2613
2614 which_alternative++;
2615 }
2616 while (which_alternative < recog_data.n_alternatives);
2617
2618 which_alternative = -1;
2619 /* If we are about to reject this, but we are not to test strictly,
2620 try a very loose test. Only return failure if it fails also. */
2621 if (strict == 0)
2622 return constrain_operands (-1);
2623 else
2624 return 0;
2625 }
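/* For example, for an insn whose operand 0 constraint is "=r,m" and
   whose operand 1 constraint is "rm,r", alternative 0 matches when
   operand 0 is a register and operand 1 is a register or memory, and
   alternative 1 matches when operand 0 is memory and operand 1 is a
   register; constrain_operands returns 1 with which_alternative set to
   the first alternative that matches.  (Illustrative constraints.)  */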
2626
2627 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2628 is a hard reg in class CLASS when its regno is offset by OFFSET
2629 and changed to mode MODE.
2630 If REG occupies multiple hard regs, all of them must be in CLASS. */
2631
2632 int
2633 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2634 enum machine_mode mode)
2635 {
2636 int regno = REGNO (operand);
2637 if (regno < FIRST_PSEUDO_REGISTER
2638 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2639 regno + offset))
2640 {
2641 int sr;
2642 regno += offset;
2643 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2644 sr > 0; sr--)
2645 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2646 regno + sr))
2647 break;
2648 return sr == 0;
2649 }
2650
2651 return 0;
2652 }
2653 \f
2654 /* Split single instruction. Helper function for split_all_insns.
2655 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2656 static rtx
2657 split_insn (rtx insn)
2658 {
2659 rtx set;
2660 if (!INSN_P (insn))
2661 ;
2662 /* Don't split no-op move insns. These should silently
2663 disappear later in final. Splitting such insns would
2664 break the code that handles REG_NO_CONFLICT blocks. */
2665
2666 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2667 {
2668 /* Nops get in the way while scheduling, so delete them
2669 now if register allocation has already been done. It
2670 is too risky to try to do this before register
2671 allocation, and there are unlikely to be very many
2672 nops then anyways. */
2673 if (reload_completed)
2674 delete_insn_and_edges (insn);
2675 }
2676 else
2677 {
2678 /* Split insns here to get max fine-grain parallelism. */
2679 rtx first = PREV_INSN (insn);
2680 rtx last = try_split (PATTERN (insn), insn, 1);
2681
2682 if (last != insn)
2683 {
2684 /* try_split returns the NOTE that INSN became. */
2685 PUT_CODE (insn, NOTE);
2686 NOTE_SOURCE_FILE (insn) = 0;
2687 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2688
2689 /* ??? Coddle to md files that generate subregs in post-
2690 reload splitters instead of computing the proper
2691 hard register. */
2692 if (reload_completed && first != last)
2693 {
2694 first = NEXT_INSN (first);
2695 while (1)
2696 {
2697 if (INSN_P (first))
2698 cleanup_subreg_operands (first);
2699 if (first == last)
2700 break;
2701 first = NEXT_INSN (first);
2702 }
2703 }
2704 return last;
2705 }
2706 }
2707 return NULL_RTX;
2708 }
2709 /* Split all insns in the function.  If UPD_LIFE, update life info afterwards. */
2710
2711 void
2712 split_all_insns (int upd_life)
2713 {
2714 sbitmap blocks;
2715 bool changed;
2716 basic_block bb;
2717
2718 blocks = sbitmap_alloc (last_basic_block);
2719 sbitmap_zero (blocks);
2720 changed = false;
2721
2722 FOR_EACH_BB_REVERSE (bb)
2723 {
2724 rtx insn, next;
2725 bool finish = false;
2726
2727 for (insn = bb->head; !finish ; insn = next)
2728 {
2729 rtx last;
2730
2731 /* Can't use `next_real_insn' because that might go across
2732 CODE_LABELS and short-out basic blocks. */
2733 next = NEXT_INSN (insn);
2734 finish = (insn == bb->end);
2735 last = split_insn (insn);
2736 if (last)
2737 {
2738 /* The split sequence may include a barrier, but the
2739 BB boundary we are interested in will be set to the
2740 previous one. */
2741
2742 while (GET_CODE (last) == BARRIER)
2743 last = PREV_INSN (last);
2744 SET_BIT (blocks, bb->index);
2745 changed = true;
2746 insn = last;
2747 }
2748 }
2749 }
2750
2751 if (changed)
2752 {
2753 int old_last_basic_block = last_basic_block;
2754
2755 find_many_sub_basic_blocks (blocks);
2756
2757 if (old_last_basic_block != last_basic_block && upd_life)
2758 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2759 }
2760
2761 if (changed && upd_life)
2762 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2763 PROP_DEATH_NOTES | PROP_REG_INFO);
2764
2765 #ifdef ENABLE_CHECKING
2766 verify_flow_info ();
2767 #endif
2768
2769 sbitmap_free (blocks);
2770 }
2771
2772 /* Same as split_all_insns, but do not expect CFG to be available.
2773 Used by machine-dependent reorg passes. */
2774
2775 void
2776 split_all_insns_noflow (void)
2777 {
2778 rtx next, insn;
2779
2780 for (insn = get_insns (); insn; insn = next)
2781 {
2782 next = NEXT_INSN (insn);
2783 split_insn (insn);
2784 }
2785 return;
2786 }
2787 \f
2788 #ifdef HAVE_peephole2
2789 struct peep2_insn_data
2790 {
2791 rtx insn;
2792 regset live_before;
2793 };
2794
2795 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2796 static int peep2_current;
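/* peep2_insn_data is maintained as a circular buffer of the last
   MAX_INSNS_PER_PEEP2 + 1 insns scanned and their live-before regsets;
   all indices into it are taken modulo its size, and peep2_current is
   the index of the most recently recorded insn.  */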
2797
2798 /* A non-insn marker indicating the last insn of the block.
2799 The live_before regset for this element is correct, indicating
2800 global_live_at_end for the block. */
2801 #define PEEP2_EOB pc_rtx
2802
2803 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2804 does not exist. Used by the recognizer to find the next insn to match
2805 in a multi-insn pattern. */
2806
2807 rtx
2808 peep2_next_insn (int n)
2809 {
2810 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2811 abort ();
2812
2813 n += peep2_current;
2814 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2815 n -= MAX_INSNS_PER_PEEP2 + 1;
2816
2817 if (peep2_insn_data[n].insn == PEEP2_EOB)
2818 return NULL_RTX;
2819 return peep2_insn_data[n].insn;
2820 }
2821
2822 /* Return true if REGNO is dead before the Nth non-note insn
2823 after `current'. */
2824
2825 int
2826 peep2_regno_dead_p (int ofs, int regno)
2827 {
2828 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2829 abort ();
2830
2831 ofs += peep2_current;
2832 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2833 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2834
2835 if (peep2_insn_data[ofs].insn == NULL_RTX)
2836 abort ();
2837
2838 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2839 }
2840
2841 /* Similarly for a REG. */
2842
2843 int
2844 peep2_reg_dead_p (int ofs, rtx reg)
2845 {
2846 int regno, n;
2847
2848 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2849 abort ();
2850
2851 ofs += peep2_current;
2852 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2853 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2854
2855 if (peep2_insn_data[ofs].insn == NULL_RTX)
2856 abort ();
2857
2858 regno = REGNO (reg);
2859 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2860 while (--n >= 0)
2861 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2862 return 0;
2863 return 1;
2864 }
2865
2866 /* Try to find a hard register of mode MODE, matching the register class in
2867 CLASS_STR, which is available at the beginning of the FROM'th insn and
2868 remains available until the end of the TO'th insn, where FROM and TO
2869 are offsets from `current' interpreted in the same way as for
2870 peep2_next_insn.
2871 Registers that already have bits set in REG_SET will not be considered.
2872
2873 If an appropriate register is available, it will be returned and the
2874 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2875 returned. */
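/* For example, a target's peephole2 condition might ask for a scratch
   register free across the first two insns of the match (a sketch;
   real uses live in the target .md files):

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     reg = peep2_find_free_register (0, 1, "r", SImode, &used);  */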
2876
2877 rtx
2878 peep2_find_free_register (int from, int to, const char *class_str,
2879 enum machine_mode mode, HARD_REG_SET *reg_set)
2880 {
2881 static int search_ofs;
2882 enum reg_class class;
2883 HARD_REG_SET live;
2884 int i;
2885
2886 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2887 abort ();
2888
2889 from += peep2_current;
2890 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2891 from -= MAX_INSNS_PER_PEEP2 + 1;
2892 to += peep2_current;
2893 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2894 to -= MAX_INSNS_PER_PEEP2 + 1;
2895
2896 if (peep2_insn_data[from].insn == NULL_RTX)
2897 abort ();
2898 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2899
2900 while (from != to)
2901 {
2902 HARD_REG_SET this_live;
2903
2904 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2905 from = 0;
2906 if (peep2_insn_data[from].insn == NULL_RTX)
2907 abort ();
2908 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2909 IOR_HARD_REG_SET (live, this_live);
2910 }
2911
2912 class = (class_str[0] == 'r' ? GENERAL_REGS
2913 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2914
2915 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2916 {
2917 int raw_regno, regno, success, j;
2918
2919 /* Distribute the free registers as much as possible. */
2920 raw_regno = search_ofs + i;
2921 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2922 raw_regno -= FIRST_PSEUDO_REGISTER;
2923 #ifdef REG_ALLOC_ORDER
2924 regno = reg_alloc_order[raw_regno];
2925 #else
2926 regno = raw_regno;
2927 #endif
2928
2929 /* Don't allocate fixed registers. */
2930 if (fixed_regs[regno])
2931 continue;
2932 /* Make sure the register is of the right class. */
2933 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2934 continue;
2935 /* And can support the mode we need. */
2936 if (! HARD_REGNO_MODE_OK (regno, mode))
2937 continue;
2938 /* And that we don't create an extra save/restore. */
2939 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2940 continue;
2941 /* And we don't clobber traceback for noreturn functions. */
2942 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2943 && (! reload_completed || frame_pointer_needed))
2944 continue;
2945
2946 success = 1;
2947 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2948 {
2949 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2950 || TEST_HARD_REG_BIT (live, regno + j))
2951 {
2952 success = 0;
2953 break;
2954 }
2955 }
2956 if (success)
2957 {
2958 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2959 SET_HARD_REG_BIT (*reg_set, regno + j);
2960
2961 /* Start the next search with the next register. */
2962 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2963 raw_regno = 0;
2964 search_ofs = raw_regno;
2965
2966 return gen_rtx_REG (mode, regno);
2967 }
2968 }
2969
2970 search_ofs = 0;
2971 return NULL_RTX;
2972 }
2973
2974 /* Perform the peephole2 optimization pass. */
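/* This walks each basic block backwards, maintaining a window of the
   last MAX_INSNS_PER_PEEP2 insns together with their live-register
   sets, and asks the generated peephole2_insns recognizer to match and
   replace sequences inside that window.  */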
2975
2976 void
2977 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2978 {
2979 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2980 rtx insn, prev;
2981 regset live;
2982 int i;
2983 basic_block bb;
2984 #ifdef HAVE_conditional_execution
2985 sbitmap blocks;
2986 bool changed;
2987 #endif
2988 bool do_cleanup_cfg = false;
2989 bool do_rebuild_jump_labels = false;
2990
2991 /* Initialize the regsets we're going to use. */
2992 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2993 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
2994 live = INITIALIZE_REG_SET (rs_heads[i]);
2995
2996 #ifdef HAVE_conditional_execution
2997 blocks = sbitmap_alloc (last_basic_block);
2998 sbitmap_zero (blocks);
2999 changed = false;
3000 #else
3001 count_or_remove_death_notes (NULL, 1);
3002 #endif
3003
3004 FOR_EACH_BB_REVERSE (bb)
3005 {
3006 struct propagate_block_info *pbi;
3007
3008 /* Indicate that all slots except the last hold invalid data. */
3009 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3010 peep2_insn_data[i].insn = NULL_RTX;
3011
3012 /* Indicate that the last slot contains live_after data. */
3013 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3014 peep2_current = MAX_INSNS_PER_PEEP2;
3015
3016 /* Start up propagation. */
3017 COPY_REG_SET (live, bb->global_live_at_end);
3018 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3019
3020 #ifdef HAVE_conditional_execution
3021 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3022 #else
3023 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3024 #endif
3025
3026 for (insn = bb->end; ; insn = prev)
3027 {
3028 prev = PREV_INSN (insn);
3029 if (INSN_P (insn))
3030 {
3031 rtx try, before_try, x;
3032 int match_len;
3033 rtx note;
3034 bool was_call = false;
3035
3036 /* Record this insn. */
3037 if (--peep2_current < 0)
3038 peep2_current = MAX_INSNS_PER_PEEP2;
3039 peep2_insn_data[peep2_current].insn = insn;
3040 propagate_one_insn (pbi, insn);
3041 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3042
3043 /* Match the peephole. */
3044 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3045 if (try != NULL)
3046 {
3047 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3048 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3049 cfg-related call notes. */
3050 for (i = 0; i <= match_len; ++i)
3051 {
3052 int j;
3053 rtx old_insn, new_insn, note;
3054
3055 j = i + peep2_current;
3056 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3057 j -= MAX_INSNS_PER_PEEP2 + 1;
3058 old_insn = peep2_insn_data[j].insn;
3059 if (GET_CODE (old_insn) != CALL_INSN)
3060 continue;
3061 was_call = true;
3062
3063 new_insn = try;
3064 while (new_insn != NULL_RTX)
3065 {
3066 if (GET_CODE (new_insn) == CALL_INSN)
3067 break;
3068 new_insn = NEXT_INSN (new_insn);
3069 }
3070
3071 if (new_insn == NULL_RTX)
3072 abort ();
3073
3074 CALL_INSN_FUNCTION_USAGE (new_insn)
3075 = CALL_INSN_FUNCTION_USAGE (old_insn);
3076
3077 for (note = REG_NOTES (old_insn);
3078 note;
3079 note = XEXP (note, 1))
3080 switch (REG_NOTE_KIND (note))
3081 {
3082 case REG_NORETURN:
3083 case REG_SETJMP:
3084 case REG_ALWAYS_RETURN:
3085 REG_NOTES (new_insn)
3086 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3087 XEXP (note, 0),
3088 REG_NOTES (new_insn));
3089 default:
3090 /* Discard all other reg notes. */
3091 break;
3092 }
3093
3094 /* Croak if there is another call in the sequence. */
3095 while (++i <= match_len)
3096 {
3097 j = i + peep2_current;
3098 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3099 j -= MAX_INSNS_PER_PEEP2 + 1;
3100 old_insn = peep2_insn_data[j].insn;
3101 if (GET_CODE (old_insn) == CALL_INSN)
3102 abort ();
3103 }
3104 break;
3105 }
3106
3107 i = match_len + peep2_current;
3108 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3109 i -= MAX_INSNS_PER_PEEP2 + 1;
3110
3111 note = find_reg_note (peep2_insn_data[i].insn,
3112 REG_EH_REGION, NULL_RTX);
3113
3114 /* Replace the old sequence with the new. */
3115 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3116 INSN_LOCATOR (peep2_insn_data[i].insn));
3117 before_try = PREV_INSN (insn);
3118 delete_insn_chain (insn, peep2_insn_data[i].insn);
3119
3120 /* Re-insert the EH_REGION notes. */
3121 if (note || (was_call && nonlocal_goto_handler_labels))
3122 {
3123 edge eh_edge;
3124
3125 for (eh_edge = bb->succ; eh_edge
3126 ; eh_edge = eh_edge->succ_next)
3127 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3128 break;
3129
3130 for (x = try ; x != before_try ; x = PREV_INSN (x))
3131 if (GET_CODE (x) == CALL_INSN
3132 || (flag_non_call_exceptions
3133 && may_trap_p (PATTERN (x))
3134 && !find_reg_note (x, REG_EH_REGION, NULL)))
3135 {
3136 if (note)
3137 REG_NOTES (x)
3138 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3139 XEXP (note, 0),
3140 REG_NOTES (x));
3141
3142 if (x != bb->end && eh_edge)
3143 {
3144 edge nfte, nehe;
3145 int flags;
3146
3147 nfte = split_block (bb, x);
3148 flags = (eh_edge->flags
3149 & (EDGE_EH | EDGE_ABNORMAL));
3150 if (GET_CODE (x) == CALL_INSN)
3151 flags |= EDGE_ABNORMAL_CALL;
3152 nehe = make_edge (nfte->src, eh_edge->dest,
3153 flags);
3154
3155 nehe->probability = eh_edge->probability;
3156 nfte->probability
3157 = REG_BR_PROB_BASE - nehe->probability;
3158
3159 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3160 #ifdef HAVE_conditional_execution
3161 SET_BIT (blocks, nfte->dest->index);
3162 changed = true;
3163 #endif
3164 bb = nfte->src;
3165 eh_edge = nehe;
3166 }
3167 }
3168
3169 /* We may have turned a possibly trapping insn into a
3170 non-trapping one.  Zap any dummy outgoing edges. */
3171 do_cleanup_cfg |= purge_dead_edges (bb);
3172 }
3173
3174 #ifdef HAVE_conditional_execution
3175 /* With conditional execution, we cannot back up the
3176 live information so easily, since the conditional
3177 death data structures are not so self-contained.
3178 So record that we've made a modification to this
3179 block and update life information at the end. */
3180 SET_BIT (blocks, bb->index);
3181 changed = true;
3182
3183 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3184 peep2_insn_data[i].insn = NULL_RTX;
3185 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3186 #else
3187 /* Back up lifetime information past the end of the
3188 newly created sequence. */
3189 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3190 i = 0;
3191 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3192
3193 /* Update life information for the new sequence. */
3194 x = try;
3195 do
3196 {
3197 if (INSN_P (x))
3198 {
3199 if (--i < 0)
3200 i = MAX_INSNS_PER_PEEP2;
3201 peep2_insn_data[i].insn = x;
3202 propagate_one_insn (pbi, x);
3203 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3204 }
3205 x = PREV_INSN (x);
3206 }
3207 while (x != prev);
3208
3209 /* ??? Should verify that LIVE now matches what we
3210 had before the new sequence. */
3211
3212 peep2_current = i;
3213 #endif
3214
3215 /* If we generated a jump instruction, it won't have
3216 JUMP_LABEL set. Recompute after we're done. */
3217 for (x = try; x != before_try; x = PREV_INSN (x))
3218 if (GET_CODE (x) == JUMP_INSN)
3219 {
3220 do_rebuild_jump_labels = true;
3221 break;
3222 }
3223 }
3224 }
3225
3226 if (insn == bb->head)
3227 break;
3228 }
3229
3230 free_propagate_block_info (pbi);
3231 }
3232
3233 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3234 FREE_REG_SET (peep2_insn_data[i].live_before);
3235 FREE_REG_SET (live);
3236
3237 if (do_rebuild_jump_labels)
3238 rebuild_jump_labels (get_insns ());
3239
3240 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3241 we've changed global life since exception handlers are no longer
3242 reachable. */
3243 if (do_cleanup_cfg)
3244 {
3245 cleanup_cfg (0);
3246 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3247 }
3248 #ifdef HAVE_conditional_execution
3249 else
3250 {
3251 count_or_remove_death_notes (blocks, 1);
3252 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3253 }
3254 sbitmap_free (blocks);
3255 #endif
3256 }
3257 #endif /* HAVE_peephole2 */
3258
3259 /* Common predicates for use with define_bypass. */
3260
3261 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3262 data, not the address operand(s) of the store. IN_INSN must be
3263 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3264 SETs inside. */
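/* For example, with OUT_INSN (set (reg 1) (plus ...)) and IN_INSN
   (set (mem (reg 2)) (reg 1)) the dependence is on the stored data and
   this returns true; if IN_INSN were (set (mem (reg 1)) (reg 3)), the
   dependence would be on the address and it returns false.  */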
3265
3266 int
3267 store_data_bypass_p (rtx out_insn, rtx in_insn)
3268 {
3269 rtx out_set, in_set;
3270
3271 in_set = single_set (in_insn);
3272 if (! in_set)
3273 abort ();
3274
3275 if (GET_CODE (SET_DEST (in_set)) != MEM)
3276 return false;
3277
3278 out_set = single_set (out_insn);
3279 if (out_set)
3280 {
3281 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3282 return false;
3283 }
3284 else
3285 {
3286 rtx out_pat;
3287 int i;
3288
3289 out_pat = PATTERN (out_insn);
3290 if (GET_CODE (out_pat) != PARALLEL)
3291 abort ();
3292
3293 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3294 {
3295 rtx exp = XVECEXP (out_pat, 0, i);
3296
3297 if (GET_CODE (exp) == CLOBBER)
3298 continue;
3299
3300 if (GET_CODE (exp) != SET)
3301 abort ();
3302
3303 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3304 return false;
3305 }
3306 }
3307
3308 return true;
3309 }
3310
3311 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3312 condition, and not in the THEN or ELSE branch. OUT_INSN may be either
3313 a single or a multiple set; IN_INSN should be single_set for this to be
3314 meaningful, but for insn categorization it may be any JUMP or CALL insn. */
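/* For example, with OUT_INSN (set (reg 1) ...) and IN_INSN
   (set (pc) (if_then_else (eq (reg 1) (const_int 0)) ...)) the
   dependence is only in the condition, so this returns true.  */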
3315
3316 int
3317 if_test_bypass_p (rtx out_insn, rtx in_insn)
3318 {
3319 rtx out_set, in_set;
3320
3321 in_set = single_set (in_insn);
3322 if (! in_set)
3323 {
3324 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3325 return false;
3326 abort ();
3327 }
3328
3329 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3330 return false;
3331 in_set = SET_SRC (in_set);
3332
3333 out_set = single_set (out_insn);
3334 if (out_set)
3335 {
3336 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3337 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3338 return false;
3339 }
3340 else
3341 {
3342 rtx out_pat;
3343 int i;
3344
3345 out_pat = PATTERN (out_insn);
3346 if (GET_CODE (out_pat) != PARALLEL)
3347 abort ();
3348
3349 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3350 {
3351 rtx exp = XVECEXP (out_pat, 0, i);
3352
3353 if (GET_CODE (exp) == CLOBBER)
3354 continue;
3355
3356 if (GET_CODE (exp) != SET)
3357 abort ();
3358
3359 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3360 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3361 return false;
3362 }
3363 }
3364
3365 return true;
3366 }