/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

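/* Usage sketch (illustrative, not part of the original file): a pass that
   needs several interdependent substitutions queues them with IN_GROUP = 1
   and then commits or rolls back atomically.  NEW_SRC and NEW_DEST are
   hypothetical replacement rtxes supplied by the caller:

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
       if (! apply_change_group ())
         return;

   On failure both changes are undone and INSN is left unmodified.  A single
   tentative change can instead pass IN_GROUP = 0, in which case
   validate_change itself validates and rolls back.  */
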
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return the number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order from that
     in which they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

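/* Usage sketch (illustrative, not from the original file): a caller can
   checkpoint the pending group and retract only its own speculative tail.
   Here try_more_changes and worthwhile are hypothetical; the helper is
   assumed to queue further changes via validate_change (..., 1):

       int checkpoint = num_validated_changes ();
       try_more_changes (insn);
       if (! worthwhile)
         cancel_changes (checkpoint);

   cancel_changes (0) rolls back the entire pending group, which is what
   apply_change_group does when validation fails.  */
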
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

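/* Usage sketch (illustrative, not from the original file): propagate a known
   constant into a use site and let recognition decide whether the result is
   still a valid insn.  REG and INSN are hypothetical; give_up is a
   hypothetical bail-out in the calling pass:

       if (! validate_replace_rtx (reg, GEN_INT (42), insn))
         give_up ();

   On failure the substitution has already been rolled back, so INSN is
   exactly as it was before the call.  */
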
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         just need to check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
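
/* Usage sketch (illustrative, not from the original file): a combine-style
   pass asking whether the value set by INSN dies in exactly one later use,
   so the two insns can be merged.  use_insn, use_loc and try_merge are all
   hypothetical:

       rtx use_insn;
       rtx *use_loc = find_single_use (SET_DEST (PATTERN (insn)), insn,
                                       &use_insn);
       if (use_loc)
         try_merge (insn, use_insn, use_loc);
*/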
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
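
/* Machine-description sketch (illustrative, not from this file): predicates
   such as general_operand are referenced from match_operand expressions in a
   target's .md file, e.g.

       (define_insn "..."
         [(set (match_operand:SI 0 "general_operand" "=r,m")
               (match_operand:SI 1 "general_operand" "rm,r"))]
         ...)

   The constraint strings shown are generic examples; genrecog compiles such
   templates into the tables that recog consults.  */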
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
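
/* Worked example (illustrative, not from this file): the trunc_int_for_mode
   check above rejects constants that don't fit the requested mode.  For
   MODE == QImode and INTVAL (op) == 300, trunc_int_for_mode sign-extends the
   low 8 bits (300 & 0xff == 0x2c == 44), giving 44 != 300, so the predicate
   returns 0.  A value such as 100 survives truncation unchanged and is
   accepted.  */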

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
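
/* Worked example (illustrative, not from this file): on a target where the
   stack grows downward, STACK_PUSH_CODE defaults to PRE_DEC and a push of an
   SImode value is the memory destination

       (mem:SI (pre_dec:SI (reg:SI sp)))

   which this predicate accepts.  When PUSH_ROUNDING pads the slot, the
   (pre_modify ... (plus ... (const_int -rounded_size))) form is required
   instead.  */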

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before the end of the reload
       pass, because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original
                 insns then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
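
/* Example (illustrative, not from this file): for
   asm ("add %0,%1" : "=r" (x) : "r" (y)) the insn body is
   (set (reg x) (asm_operands ...)) with one input, so asm_noperands returns
   1 + 1 = 2.  With no outputs and two inputs the body would be a bare
   (asm_operands ...) and the function would likewise return 2.  */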

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
\f
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
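
/* Worked example (illustrative, not from this file): for MODE == SImode
   (mode_sz == 4 on most targets) and Y == (plus (reg) (const_int 20)), the
   constant term is temporarily bumped to 23 (20 + mode_sz - 1) and the
   resulting address is rechecked in QImode.  If (plus (reg) (const_int 23))
   is still legitimate, every byte of the SImode reference is addressable, so
   Y is considered offsettable.  */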
1901
1902 /* Return 1 if ADDR is an address-expression whose effect depends
1903 on the mode of the memory reference it is used in.
1904
1905 Autoincrement addressing is a typical example of mode-dependence
1906 because the amount of the increment depends on the mode. */
1907
1908 int
1909 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1910 {
1911 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1912 return 0;
1913 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1914 win: ATTRIBUTE_UNUSED_LABEL
1915 return 1;
1916 }
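
/* Example: on a target whose GO_IF_MODE_DEPENDENT_ADDRESS flags
   autoincrement, (mem:SI (post_inc:SI (reg:SI R))) advances R by 4
   while the same address inside (mem:QI ...) advances R by 1, so the
   POST_INC address is mode-dependent and we return 1 for it.  */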
1917 \f
1918 /* Like extract_insn, but save the insn extracted and don't extract again
1919 when called again for the same insn, expecting that recog_data still
1920 contains valid information.  This is used primarily by the gen_attr
1921 infrastructure, which often extracts the same insn repeatedly.  */
1922 void
1923 extract_insn_cached (rtx insn)
1924 {
1925 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1926 return;
1927 extract_insn (insn);
1928 recog_data.insn = insn;
1929 }
1930 /* Do cached extract_insn, constrain_operands and complain about failures.
1931 Used by insn_attrtab. */
1932 void
1933 extract_constrain_insn_cached (rtx insn)
1934 {
1935 extract_insn_cached (insn);
1936 if (which_alternative == -1
1937 && !constrain_operands (reload_completed))
1938 fatal_insn_not_found (insn);
1939 }
1940 /* Do cached constrain_operands; succeed without rechecking if an alternative has already been chosen.  */
1941 int
1942 constrain_operands_cached (int strict)
1943 {
1944 if (which_alternative == -1)
1945 return constrain_operands (strict);
1946 else
1947 return 1;
1948 }
1949 \f
1950 /* Analyze INSN and fill in recog_data. */
1951
1952 void
1953 extract_insn (rtx insn)
1954 {
1955 int i;
1956 int icode;
1957 int noperands;
1958 rtx body = PATTERN (insn);
1959
1960 recog_data.insn = NULL;
1961 recog_data.n_operands = 0;
1962 recog_data.n_alternatives = 0;
1963 recog_data.n_dups = 0;
1964 which_alternative = -1;
1965
1966 switch (GET_CODE (body))
1967 {
1968 case USE:
1969 case CLOBBER:
1970 case ASM_INPUT:
1971 case ADDR_VEC:
1972 case ADDR_DIFF_VEC:
1973 return;
1974
1975 case SET:
1976 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1977 goto asm_insn;
1978 else
1979 goto normal_insn;
1980 case PARALLEL:
1981 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1982 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1983 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1984 goto asm_insn;
1985 else
1986 goto normal_insn;
1987 case ASM_OPERANDS:
1988 asm_insn:
1989 recog_data.n_operands = noperands = asm_noperands (body);
1990 if (noperands >= 0)
1991 {
1992 /* This insn is an `asm' with operands. */
1993
1994 /* expand_asm_operands makes sure there aren't too many operands. */
1995 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
1996
1997 /* Now get the operand values and constraints out of the insn. */
1998 decode_asm_operands (body, recog_data.operand,
1999 recog_data.operand_loc,
2000 recog_data.constraints,
2001 recog_data.operand_mode);
2002 if (noperands > 0)
2003 {
2004 const char *p = recog_data.constraints[0];
2005 recog_data.n_alternatives = 1;
2006 while (*p)
2007 recog_data.n_alternatives += (*p++ == ',');
2008 }
2009 break;
2010 }
2011 fatal_insn_not_found (insn);
2012
2013 default:
2014 normal_insn:
2015 /* Ordinary insn: recognize it, get the operands via insn_extract
2016 and get the constraints. */
2017
2018 icode = recog_memoized (insn);
2019 if (icode < 0)
2020 fatal_insn_not_found (insn);
2021
2022 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2023 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2024 recog_data.n_dups = insn_data[icode].n_dups;
2025
2026 insn_extract (insn);
2027
2028 for (i = 0; i < noperands; i++)
2029 {
2030 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2031 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2032 /* VOIDmode match_operands get their mode from the real operand.  */
2033 if (recog_data.operand_mode[i] == VOIDmode)
2034 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2035 }
2036 }
2037 for (i = 0; i < noperands; i++)
2038 recog_data.operand_type[i]
2039 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2040 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2041 : OP_IN);
2042
2043 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2044 }
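
/* Illustrative sketch (hypothetical debug helper, assuming the includes
   above): after extract_insn, recog_data describes the insn's operands,
   constraints and modes.  */
#if 0
static void
debug_insn_operands (FILE *f, rtx insn)
{
  int i;

  extract_insn (insn);
  fprintf (f, "insn %d: %d operands, %d alternatives\n",
	   INSN_UID (insn), recog_data.n_operands,
	   recog_data.n_alternatives);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      fprintf (f, "  %d \"%s\": ", i, recog_data.constraints[i]);
      print_inline_rtx (f, recog_data.operand[i], 4);
      fputc ('\n', f);
    }
}
#endif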
2045
2046 /* After calling extract_insn, you can use this function to extract some
2047 information from the constraint strings into a more usable form.
2048 The collected data is stored in recog_op_alt. */
2049 void
2050 preprocess_constraints (void)
2051 {
2052 int i;
2053
2054 for (i = 0; i < recog_data.n_operands; i++)
2055 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2056 * sizeof (struct operand_alternative)));
2057
2058 for (i = 0; i < recog_data.n_operands; i++)
2059 {
2060 int j;
2061 struct operand_alternative *op_alt;
2062 const char *p = recog_data.constraints[i];
2063
2064 op_alt = recog_op_alt[i];
2065
2066 for (j = 0; j < recog_data.n_alternatives; j++)
2067 {
2068 op_alt[j].cl = NO_REGS;
2069 op_alt[j].constraint = p;
2070 op_alt[j].matches = -1;
2071 op_alt[j].matched = -1;
2072
2073 if (*p == '\0' || *p == ',')
2074 {
2075 op_alt[j].anything_ok = 1;
2076 continue;
2077 }
2078
2079 for (;;)
2080 {
2081 char c = *p;
2082 if (c == '#')
2083 do
2084 c = *++p;
2085 while (c != ',' && c != '\0');
2086 if (c == ',' || c == '\0')
2087 {
2088 p++;
2089 break;
2090 }
2091
2092 switch (c)
2093 {
2094 case '=': case '+': case '*': case '%':
2095 case 'E': case 'F': case 'G': case 'H':
2096 case 's': case 'i': case 'n':
2097 case 'I': case 'J': case 'K': case 'L':
2098 case 'M': case 'N': case 'O': case 'P':
2099 /* These don't say anything we care about. */
2100 break;
2101
2102 case '?':
2103 op_alt[j].reject += 6;
2104 break;
2105 case '!':
2106 op_alt[j].reject += 600;
2107 break;
2108 case '&':
2109 op_alt[j].earlyclobber = 1;
2110 break;
2111
2112 case '0': case '1': case '2': case '3': case '4':
2113 case '5': case '6': case '7': case '8': case '9':
2114 {
2115 char *end;
2116 op_alt[j].matches = strtoul (p, &end, 10);
2117 recog_op_alt[op_alt[j].matches][j].matched = i;
2118 p = end;
2119 }
2120 continue;
2121
2122 case 'm':
2123 op_alt[j].memory_ok = 1;
2124 break;
2125 case '<':
2126 op_alt[j].decmem_ok = 1;
2127 break;
2128 case '>':
2129 op_alt[j].incmem_ok = 1;
2130 break;
2131 case 'V':
2132 op_alt[j].nonoffmem_ok = 1;
2133 break;
2134 case 'o':
2135 op_alt[j].offmem_ok = 1;
2136 break;
2137 case 'X':
2138 op_alt[j].anything_ok = 1;
2139 break;
2140
2141 case 'p':
2142 op_alt[j].is_address = 1;
2143 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2144 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2145 break;
2146
2147 case 'g':
2148 case 'r':
2149 op_alt[j].cl =
2150 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2151 break;
2152
2153 default:
2154 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2155 {
2156 op_alt[j].memory_ok = 1;
2157 break;
2158 }
2159 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2160 {
2161 op_alt[j].is_address = 1;
2162 op_alt[j].cl
2163 = (reg_class_subunion
2164 [(int) op_alt[j].cl]
2165 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2166 break;
2167 }
2168
2169 op_alt[j].cl
2170 = (reg_class_subunion
2171 [(int) op_alt[j].cl]
2172 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2173 break;
2174 }
2175 p += CONSTRAINT_LEN (c, p);
2176 }
2177 }
2178 }
2179 }
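
/* Illustrative sketch (hypothetical helper): once extract_insn and
   preprocess_constraints have run, recog_op_alt can be queried directly,
   e.g. to ask whether operand OPNO may be a memory reference in some
   alternative.  */
#if 0
static int
operand_may_be_memory_p (int opno)
{
  int j;

  for (j = 0; j < recog_data.n_alternatives; j++)
    if (recog_op_alt[opno][j].memory_ok
	|| recog_op_alt[opno][j].offmem_ok
	|| recog_op_alt[opno][j].anything_ok)
      return 1;
  return 0;
}
#endif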
2180
2181 /* Check the operands of an insn against the insn's operand constraints
2182 and return 1 if they are valid.
2183 The information about the insn's operands, constraints, operand modes
2184 etc. is obtained from the global variables set up by extract_insn.
2185
2186 WHICH_ALTERNATIVE is set to a number which indicates which
2187 alternative of constraints was matched: 0 for the first alternative,
2188 1 for the next, etc.
2189
2190 In addition, when two operands are required to match
2191 and it happens that the output operand is (reg) while the
2192 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2193 make the output operand look like the input.
2194 This is because the output operand is the one the template will print.
2195
2196 This is used in final, just before printing the assembler code and by
2197 the routines that determine an insn's attribute.
2198
2199 If STRICT is a positive nonzero value, it means that we have been
2200 called after reload has been completed. In that case, we must
2201 do all checks strictly. If it is zero, it means that we have been called
2202 before reload has completed. In that case, we first try to see if we can
2203 find an alternative that matches strictly. If not, we try again, this
2204 time assuming that reload will fix up the insn. This provides a "best
2205 guess" for the alternative and is used to compute attributes of insns prior
2206 to reload. A negative value of STRICT is used for this internal call. */
2207
2208 struct funny_match
2209 {
2210 int this, other;
2211 };
2212
2213 int
2214 constrain_operands (int strict)
2215 {
2216 const char *constraints[MAX_RECOG_OPERANDS];
2217 int matching_operands[MAX_RECOG_OPERANDS];
2218 int earlyclobber[MAX_RECOG_OPERANDS];
2219 int c;
2220
2221 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2222 int funny_match_index;
2223
2224 which_alternative = 0;
2225 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2226 return 1;
2227
2228 for (c = 0; c < recog_data.n_operands; c++)
2229 {
2230 constraints[c] = recog_data.constraints[c];
2231 matching_operands[c] = -1;
2232 }
2233
2234 do
2235 {
2236 int seen_earlyclobber_at = -1;
2237 int opno;
2238 int lose = 0;
2239 funny_match_index = 0;
2240
2241 for (opno = 0; opno < recog_data.n_operands; opno++)
2242 {
2243 rtx op = recog_data.operand[opno];
2244 enum machine_mode mode = GET_MODE (op);
2245 const char *p = constraints[opno];
2246 int offset = 0;
2247 int win = 0;
2248 int val;
2249 int len;
2250
2251 earlyclobber[opno] = 0;
2252
2253 /* A unary operator may be accepted by the predicate, but it
2254 is irrelevant for matching constraints. */
2255 if (UNARY_P (op))
2256 op = XEXP (op, 0);
2257
2258 if (GET_CODE (op) == SUBREG)
2259 {
2260 if (REG_P (SUBREG_REG (op))
2261 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2262 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2263 GET_MODE (SUBREG_REG (op)),
2264 SUBREG_BYTE (op),
2265 GET_MODE (op));
2266 op = SUBREG_REG (op);
2267 }
2268
2269 /* An empty constraint or empty alternative
2270 allows anything which matched the pattern. */
2271 if (*p == 0 || *p == ',')
2272 win = 1;
2273
2274 do
2275 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2276 {
2277 case '\0':
2278 len = 0;
2279 break;
2280 case ',':
2281 c = '\0';
2282 break;
2283
2284 case '?': case '!': case '*': case '%':
2285 case '=': case '+':
2286 break;
2287
2288 case '#':
2289 /* Ignore rest of this alternative as far as
2290 constraint checking is concerned. */
2291 do
2292 p++;
2293 while (*p && *p != ',');
2294 len = 0;
2295 break;
2296
2297 case '&':
2298 earlyclobber[opno] = 1;
2299 if (seen_earlyclobber_at < 0)
2300 seen_earlyclobber_at = opno;
2301 break;
2302
2303 case '0': case '1': case '2': case '3': case '4':
2304 case '5': case '6': case '7': case '8': case '9':
2305 {
2306 /* This operand must be the same as a previous one.
2307 This kind of constraint is used for instructions such
2308 as add when they take only two operands.
2309
2310 Note that the lower-numbered operand is passed first.
2311
2312 If we are not testing strictly, assume that this
2313 constraint will be satisfied. */
2314
2315 char *end;
2316 int match;
2317
2318 match = strtoul (p, &end, 10);
2319 p = end;
2320
2321 if (strict < 0)
2322 val = 1;
2323 else
2324 {
2325 rtx op1 = recog_data.operand[match];
2326 rtx op2 = recog_data.operand[opno];
2327
2328 /* A unary operator may be accepted by the predicate,
2329 but it is irrelevant for matching constraints. */
2330 if (UNARY_P (op1))
2331 op1 = XEXP (op1, 0);
2332 if (UNARY_P (op2))
2333 op2 = XEXP (op2, 0);
2334
2335 val = operands_match_p (op1, op2);
2336 }
2337
2338 matching_operands[opno] = match;
2339 matching_operands[match] = opno;
2340
2341 if (val != 0)
2342 win = 1;
2343
2344 /* If output is *x and input is *--x, arrange later
2345 to change the output to *--x as well, since the
2346 output op is the one that will be printed. */
2347 if (val == 2 && strict > 0)
2348 {
2349 funny_match[funny_match_index].this = opno;
2350 funny_match[funny_match_index++].other = match;
2351 }
2352 }
2353 len = 0;
2354 break;
2355
2356 case 'p':
2357 /* p is used for address_operands. When we are called by
2358 gen_reload, no one will have checked that the address is
2359 strictly valid, i.e., that all pseudos requiring hard regs
2360 have gotten them. */
2361 if (strict <= 0
2362 || (strict_memory_address_p (recog_data.operand_mode[opno],
2363 op)))
2364 win = 1;
2365 break;
2366
2367 /* No need to check general_operand again;
2368 it was done in insn-recog.c. */
2369 case 'g':
2370 /* Anything goes unless it is a REG and really has a hard reg
2371 but the hard reg is not in the class GENERAL_REGS. */
2372 if (strict < 0
2373 || GENERAL_REGS == ALL_REGS
2374 || !REG_P (op)
2375 || (reload_in_progress
2376 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2377 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2378 win = 1;
2379 break;
2380
2381 case 'X':
2382 /* This is used for a MATCH_SCRATCH in the cases when
2383 we don't actually need anything. So anything goes
2384 any time. */
2385 win = 1;
2386 break;
2387
2388 case 'm':
2389 /* Memory operands must be valid, to the extent
2390 required by STRICT. */
2391 if (MEM_P (op))
2392 {
2393 if (strict > 0
2394 && !strict_memory_address_p (GET_MODE (op),
2395 XEXP (op, 0)))
2396 break;
2397 if (strict == 0
2398 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2399 break;
2400 win = 1;
2401 }
2402 /* Before reload, accept what reload can turn into mem. */
2403 else if (strict < 0 && CONSTANT_P (op))
2404 win = 1;
2405 /* During reload, accept a pseudo.  */
2406 else if (reload_in_progress && REG_P (op)
2407 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2408 win = 1;
2409 break;
2410
2411 case '<':
2412 if (MEM_P (op)
2413 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2414 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2415 win = 1;
2416 break;
2417
2418 case '>':
2419 if (MEM_P (op)
2420 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2421 || GET_CODE (XEXP (op, 0)) == POST_INC))
2422 win = 1;
2423 break;
2424
2425 case 'E':
2426 case 'F':
2427 if (GET_CODE (op) == CONST_DOUBLE
2428 || (GET_CODE (op) == CONST_VECTOR
2429 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2430 win = 1;
2431 break;
2432
2433 case 'G':
2434 case 'H':
2435 if (GET_CODE (op) == CONST_DOUBLE
2436 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2437 win = 1;
2438 break;
2439
2440 case 's':
2441 if (GET_CODE (op) == CONST_INT
2442 || (GET_CODE (op) == CONST_DOUBLE
2443 && GET_MODE (op) == VOIDmode))
2444 break;
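/* Fall through.  */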
2445 case 'i':
2446 if (CONSTANT_P (op))
2447 win = 1;
2448 break;
2449
2450 case 'n':
2451 if (GET_CODE (op) == CONST_INT
2452 || (GET_CODE (op) == CONST_DOUBLE
2453 && GET_MODE (op) == VOIDmode))
2454 win = 1;
2455 break;
2456
2457 case 'I':
2458 case 'J':
2459 case 'K':
2460 case 'L':
2461 case 'M':
2462 case 'N':
2463 case 'O':
2464 case 'P':
2465 if (GET_CODE (op) == CONST_INT
2466 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2467 win = 1;
2468 break;
2469
2470 case 'V':
2471 if (MEM_P (op)
2472 && ((strict > 0 && ! offsettable_memref_p (op))
2473 || (strict < 0
2474 && !(CONSTANT_P (op) || MEM_P (op)))
2475 || (reload_in_progress
2476 && !(REG_P (op)
2477 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2478 win = 1;
2479 break;
2480
2481 case 'o':
2482 if ((strict > 0 && offsettable_memref_p (op))
2483 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2484 /* Before reload, accept what reload can handle. */
2485 || (strict < 0
2486 && (CONSTANT_P (op) || MEM_P (op)))
2487 /* During reload, accept a pseudo.  */
2488 || (reload_in_progress && REG_P (op)
2489 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2490 win = 1;
2491 break;
2492
2493 default:
2494 {
2495 enum reg_class cl;
2496
2497 cl = (c == 'r'
2498 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2499 if (cl != NO_REGS)
2500 {
2501 if (strict < 0
2502 || (strict == 0
2503 && REG_P (op)
2504 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2505 || (strict == 0 && GET_CODE (op) == SCRATCH)
2506 || (REG_P (op)
2507 && reg_fits_class_p (op, cl, offset, mode)))
2508 win = 1;
2509 }
2510 #ifdef EXTRA_CONSTRAINT_STR
2511 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2512 win = 1;
2513
2514 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2515 /* Every memory operand can be reloaded to fit. */
2516 && ((strict < 0 && MEM_P (op))
2517 /* Before reload, accept what reload can turn
2518 into mem. */
2519 || (strict < 0 && CONSTANT_P (op))
2520 /* During reload, accept a pseudo.  */
2521 || (reload_in_progress && REG_P (op)
2522 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2523 win = 1;
2524 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2525 /* Every address operand can be reloaded to fit. */
2526 && strict < 0)
2527 win = 1;
2528 #endif
2529 break;
2530 }
2531 }
2532 while (p += len, c);
2533
2534 constraints[opno] = p;
2535 /* If this operand did not win somehow,
2536 this alternative loses. */
2537 if (! win)
2538 lose = 1;
2539 }
2540 /* This alternative won; the operands are ok.
2541 Change whichever operands this alternative says to change. */
2542 if (! lose)
2543 {
2544 int opno, eopno;
2545
2546 /* See if any earlyclobber operand conflicts with some other
2547 operand. */
2548
2549 if (strict > 0 && seen_earlyclobber_at >= 0)
2550 for (eopno = seen_earlyclobber_at;
2551 eopno < recog_data.n_operands;
2552 eopno++)
2553 /* Ignore earlyclobber operands now in memory,
2554 because we would often report failure when we have
2555 two memory operands, one of which was formerly a REG. */
2556 if (earlyclobber[eopno]
2557 && REG_P (recog_data.operand[eopno]))
2558 for (opno = 0; opno < recog_data.n_operands; opno++)
2559 if ((MEM_P (recog_data.operand[opno])
2560 || recog_data.operand_type[opno] != OP_OUT)
2561 && opno != eopno
2562 /* Ignore things like match_operator operands. */
2563 && *recog_data.constraints[opno] != 0
2564 && ! (matching_operands[opno] == eopno
2565 && operands_match_p (recog_data.operand[opno],
2566 recog_data.operand[eopno]))
2567 && ! safe_from_earlyclobber (recog_data.operand[opno],
2568 recog_data.operand[eopno]))
2569 lose = 1;
2570
2571 if (! lose)
2572 {
2573 while (--funny_match_index >= 0)
2574 {
2575 recog_data.operand[funny_match[funny_match_index].other]
2576 = recog_data.operand[funny_match[funny_match_index].this];
2577 }
2578
2579 return 1;
2580 }
2581 }
2582
2583 which_alternative++;
2584 }
2585 while (which_alternative < recog_data.n_alternatives);
2586
2587 which_alternative = -1;
2588 /* If we are about to reject this, but we are not to test strictly,
2589 try a very loose test. Only return failure if it fails also. */
2590 if (strict == 0)
2591 return constrain_operands (-1);
2592 else
2593 return 0;
2594 }
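
/* Illustrative sketch (hypothetical helper): the canonical calling
   sequence pairs extract_insn with constrain_operands, choosing STRICT
   from the pass phase just as extract_constrain_insn_cached does.  */
#if 0
static int
insn_operands_valid_p (rtx insn)
{
  extract_insn (insn);
  /* After reload, demand that hard registers really fit; before
     reload, allow the loose fallback described above.  */
  return constrain_operands (reload_completed);
}
#endif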
2595
2596 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2597 is a hard reg in class CL when its regno is offset by OFFSET
2598 and changed to mode MODE.
2599 If OPERAND occupies multiple hard regs, all of them must be in CL.  */
2600
2601 int
2602 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2603 enum machine_mode mode)
2604 {
2605 int regno = REGNO (operand);
2606 if (regno < FIRST_PSEUDO_REGISTER
2607 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2608 regno + offset))
2609 {
2610 int sr;
2611 regno += offset;
2612 for (sr = hard_regno_nregs[regno][mode] - 1;
2613 sr > 0; sr--)
2614 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2615 regno + sr))
2616 break;
2617 return sr == 0;
2618 }
2619
2620 return 0;
2621 }
2622 \f
2623 /* Split a single instruction.  Helper function for split_all_insns and
2624 split_all_insns_noflow.  Return the last insn in the sequence if
2625 successful, or NULL if unsuccessful.  */
2626
2627 static rtx
2628 split_insn (rtx insn)
2629 {
2630 /* Split insns here to get max fine-grain parallelism. */
2631 rtx first = PREV_INSN (insn);
2632 rtx last = try_split (PATTERN (insn), insn, 1);
2633
2634 if (last == insn)
2635 return NULL_RTX;
2636
2637 /* try_split returns the NOTE that INSN became. */
2638 SET_INSN_DELETED (insn);
2639
2640 /* ??? Coddle to md files that generate subregs in post-reload
2641 splitters instead of computing the proper hard register. */
2642 if (reload_completed && first != last)
2643 {
2644 first = NEXT_INSN (first);
2645 for (;;)
2646 {
2647 if (INSN_P (first))
2648 cleanup_subreg_operands (first);
2649 if (first == last)
2650 break;
2651 first = NEXT_INSN (first);
2652 }
2653 }
2654 return last;
2655 }
2656
2657 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2658
2659 void
2660 split_all_insns (int upd_life)
2661 {
2662 sbitmap blocks;
2663 bool changed;
2664 basic_block bb;
2665
2666 blocks = sbitmap_alloc (last_basic_block);
2667 sbitmap_zero (blocks);
2668 changed = false;
2669
2670 FOR_EACH_BB_REVERSE (bb)
2671 {
2672 rtx insn, next;
2673 bool finish = false;
2674
2675 for (insn = BB_HEAD (bb); !finish ; insn = next)
2676 {
2677 /* Can't use `next_real_insn' because that might go across
2678 CODE_LABELS and short-out basic blocks. */
2679 next = NEXT_INSN (insn);
2680 finish = (insn == BB_END (bb));
2681 if (INSN_P (insn))
2682 {
2683 rtx set = single_set (insn);
2684
2685 /* Don't split no-op move insns. These should silently
2686 disappear later in final. Splitting such insns would
2687 break the code that handles REG_NO_CONFLICT blocks. */
2688 if (set && set_noop_p (set))
2689 {
2690 /* Nops get in the way while scheduling, so delete them
2691 now if register allocation has already been done. It
2692 is too risky to try to do this before register
2693 allocation, and there are unlikely to be very many
2694 nops then anyways. */
2695 if (reload_completed)
2696 {
2697 /* If the no-op set has a REG_UNUSED note, we need
2698 to update liveness information. */
2699 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2700 {
2701 SET_BIT (blocks, bb->index);
2702 changed = true;
2703 }
2704 /* ??? Is life info affected by deleting edges? */
2705 delete_insn_and_edges (insn);
2706 }
2707 }
2708 else
2709 {
2710 rtx last = split_insn (insn);
2711 if (last)
2712 {
2713 /* The split sequence may include a barrier, but the
2714 BB boundary we are interested in will be set to the
2715 previous one.  */
2716
2717 while (BARRIER_P (last))
2718 last = PREV_INSN (last);
2719 SET_BIT (blocks, bb->index);
2720 changed = true;
2721 }
2722 }
2723 }
2724 }
2725 }
2726
2727 if (changed)
2728 {
2729 int old_last_basic_block = last_basic_block;
2730
2731 find_many_sub_basic_blocks (blocks);
2732
2733 if (old_last_basic_block != last_basic_block && upd_life)
2734 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2735 }
2736
2737 if (changed && upd_life)
2738 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2739 PROP_DEATH_NOTES);
2740
2741 #ifdef ENABLE_CHECKING
2742 verify_flow_info ();
2743 #endif
2744
2745 sbitmap_free (blocks);
2746 }
2747
2748 /* Same as split_all_insns, but do not expect CFG to be available.
2749 Used by machine dependent reorg passes. */
2750
2751 void
2752 split_all_insns_noflow (void)
2753 {
2754 rtx next, insn;
2755
2756 for (insn = get_insns (); insn; insn = next)
2757 {
2758 next = NEXT_INSN (insn);
2759 if (INSN_P (insn))
2760 {
2761 /* Don't split no-op move insns. These should silently
2762 disappear later in final. Splitting such insns would
2763 break the code that handles REG_NO_CONFLICT blocks. */
2764 rtx set = single_set (insn);
2765 if (set && set_noop_p (set))
2766 {
2767 /* Nops get in the way while scheduling, so delete them
2768 now if register allocation has already been done. It
2769 is too risky to try to do this before register
2770 allocation, and there are unlikely to be very many
2771 nops then anyways.
2772
2773 ??? Should we use delete_insn when the CFG isn't valid? */
2774 if (reload_completed)
2775 delete_insn_and_edges (insn);
2776 }
2777 else
2778 split_insn (insn);
2779 }
2780 }
2781 }
2782 \f
2783 #ifdef HAVE_peephole2
2784 struct peep2_insn_data
2785 {
2786 rtx insn;
2787 regset live_before;
2788 };
2789
2790 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2791 static int peep2_current;
2792
2793 /* A non-insn marker indicating the last insn of the block.
2794 The live_before regset for this element is correct, indicating
2795 global_live_at_end for the block. */
2796 #define PEEP2_EOB pc_rtx
2797
2798 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2799 does not exist. Used by the recognizer to find the next insn to match
2800 in a multi-insn pattern. */
2801
2802 rtx
2803 peep2_next_insn (int n)
2804 {
2805 gcc_assert (n < MAX_INSNS_PER_PEEP2 + 1);
2806
2807 n += peep2_current;
2808 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2809 n -= MAX_INSNS_PER_PEEP2 + 1;
2810
2811 if (peep2_insn_data[n].insn == PEEP2_EOB)
2812 return NULL_RTX;
2813 return peep2_insn_data[n].insn;
2814 }
2815
2816 /* Return true if REGNO is dead before the Nth non-note insn
2817 after `current'. */
2818
2819 int
2820 peep2_regno_dead_p (int ofs, int regno)
2821 {
2822 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2823
2824 ofs += peep2_current;
2825 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2826 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2827
2828 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2829
2830 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2831 }
2832
2833 /* Similarly for a REG. */
2834
2835 int
2836 peep2_reg_dead_p (int ofs, rtx reg)
2837 {
2838 int regno, n;
2839
2840 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2841
2842 ofs += peep2_current;
2843 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2844 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2845
2846 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2847
2848 regno = REGNO (reg);
2849 n = hard_regno_nregs[regno][GET_MODE (reg)];
2850 while (--n >= 0)
2851 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2852 return 0;
2853 return 1;
2854 }
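
/* Illustrative use (hypothetical machine-description fragment): the
   peep2_* predicates are normally called from the C condition of a
   define_peephole2, with offsets counted from the first insn of the
   matched sequence.  A two-insn pattern whose transformation is only
   safe when operand 1 dies with the sequence might use

       "peep2_reg_dead_p (2, operands[1])"

   i.e. operand 1 is dead after the second matched insn.  */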
2855
2856 /* Try to find a hard register of mode MODE, matching the register class in
2857 CLASS_STR, which is available from the beginning of the insn at offset
2858 FROM and remains available until the end of the insn at offset TO.  The
2859 offsets are counted relative to `current', in the same way as for
2860 peep2_next_insn.
2861 Registers that already have bits set in REG_SET will not be considered.
2862 
2863 If an appropriate register is available, it will be returned and the
2864 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2865 returned.  */
2866
2867 rtx
2868 peep2_find_free_register (int from, int to, const char *class_str,
2869 enum machine_mode mode, HARD_REG_SET *reg_set)
2870 {
2871 static int search_ofs;
2872 enum reg_class cl;
2873 HARD_REG_SET live;
2874 int i;
2875
2876 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2877 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2878
2879 from += peep2_current;
2880 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2881 from -= MAX_INSNS_PER_PEEP2 + 1;
2882 to += peep2_current;
2883 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2884 to -= MAX_INSNS_PER_PEEP2 + 1;
2885
2886 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2887 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2888
2889 while (from != to)
2890 {
2891 HARD_REG_SET this_live;
2892
2893 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2894 from = 0;
2895 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2896 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2897 IOR_HARD_REG_SET (live, this_live);
2898 }
2899
2900 cl = (class_str[0] == 'r' ? GENERAL_REGS
2901 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2902
2903 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2904 {
2905 int raw_regno, regno, success, j;
2906
2907 /* Distribute the free registers as much as possible. */
2908 raw_regno = search_ofs + i;
2909 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2910 raw_regno -= FIRST_PSEUDO_REGISTER;
2911 #ifdef REG_ALLOC_ORDER
2912 regno = reg_alloc_order[raw_regno];
2913 #else
2914 regno = raw_regno;
2915 #endif
2916
2917 /* Don't allocate fixed registers. */
2918 if (fixed_regs[regno])
2919 continue;
2920 /* Make sure the register is of the right class. */
2921 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2922 continue;
2923 /* And can support the mode we need. */
2924 if (! HARD_REGNO_MODE_OK (regno, mode))
2925 continue;
2926 /* And that we don't create an extra save/restore. */
2927 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2928 continue;
2929 /* And we don't clobber traceback for noreturn functions. */
2930 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2931 && (! reload_completed || frame_pointer_needed))
2932 continue;
2933
2934 success = 1;
2935 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2936 {
2937 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2938 || TEST_HARD_REG_BIT (live, regno + j))
2939 {
2940 success = 0;
2941 break;
2942 }
2943 }
2944 if (success)
2945 {
2946 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2947 SET_HARD_REG_BIT (*reg_set, regno + j);
2948
2949 /* Start the next search with the next register. */
2950 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2951 raw_regno = 0;
2952 search_ofs = raw_regno;
2953
2954 return gen_rtx_REG (mode, regno);
2955 }
2956 }
2957
2958 search_ofs = 0;
2959 return NULL_RTX;
2960 }
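
/* Illustrative use (hypothetical fragment of a define_peephole2
   preparation statement): grab a SImode scratch register in class "r"
   that is free across matched insns 0 through 2.  */
#if 0
  {
    HARD_REG_SET used;
    rtx scratch;

    CLEAR_HARD_REG_SET (used);
    scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
    if (scratch == NULL_RTX)
      FAIL;
  }
#endif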
2961
2962 /* Perform the peephole2 optimization pass. */
2963
2964 void
2965 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2966 {
2967 rtx insn, prev;
2968 regset live;
2969 int i;
2970 basic_block bb;
2971 #ifdef HAVE_conditional_execution
2972 sbitmap blocks;
2973 bool changed;
2974 #endif
2975 bool do_cleanup_cfg = false;
2976 bool do_global_life_update = false;
2977 bool do_rebuild_jump_labels = false;
2978
2979 /* Initialize the regsets we're going to use. */
2980 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2981 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
2982 live = ALLOC_REG_SET (&reg_obstack);
2983
2984 #ifdef HAVE_conditional_execution
2985 blocks = sbitmap_alloc (last_basic_block);
2986 sbitmap_zero (blocks);
2987 changed = false;
2988 #else
2989 count_or_remove_death_notes (NULL, 1);
2990 #endif
2991
2992 FOR_EACH_BB_REVERSE (bb)
2993 {
2994 struct propagate_block_info *pbi;
2995 reg_set_iterator rsi;
2996 unsigned int j;
2997
2998 /* Indicate that all slots except the last hold invalid data.  */
2999 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3000 peep2_insn_data[i].insn = NULL_RTX;
3001
3002 /* Indicate that the last slot contains live_after data. */
3003 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3004 peep2_current = MAX_INSNS_PER_PEEP2;
3005
3006 /* Start up propagation. */
3007 COPY_REG_SET (live, bb->global_live_at_end);
3008 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3009
3010 #ifdef HAVE_conditional_execution
3011 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3012 #else
3013 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3014 #endif
3015
3016 for (insn = BB_END (bb); ; insn = prev)
3017 {
3018 prev = PREV_INSN (insn);
3019 if (INSN_P (insn))
3020 {
3021 rtx try, before_try, x;
3022 int match_len;
3023 rtx note;
3024 bool was_call = false;
3025
3026 /* Record this insn. */
3027 if (--peep2_current < 0)
3028 peep2_current = MAX_INSNS_PER_PEEP2;
3029 peep2_insn_data[peep2_current].insn = insn;
3030 propagate_one_insn (pbi, insn);
3031 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3032
3033 /* Match the peephole. */
3034 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3035 if (try != NULL)
3036 {
3037 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3038 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3039 cfg-related call notes. */
3040 for (i = 0; i <= match_len; ++i)
3041 {
3042 int j;
3043 rtx old_insn, new_insn, note;
3044
3045 j = i + peep2_current;
3046 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3047 j -= MAX_INSNS_PER_PEEP2 + 1;
3048 old_insn = peep2_insn_data[j].insn;
3049 if (!CALL_P (old_insn))
3050 continue;
3051 was_call = true;
3052
3053 new_insn = try;
3054 while (new_insn != NULL_RTX)
3055 {
3056 if (CALL_P (new_insn))
3057 break;
3058 new_insn = NEXT_INSN (new_insn);
3059 }
3060
3061 gcc_assert (new_insn != NULL_RTX);
3062
3063 CALL_INSN_FUNCTION_USAGE (new_insn)
3064 = CALL_INSN_FUNCTION_USAGE (old_insn);
3065
3066 for (note = REG_NOTES (old_insn);
3067 note;
3068 note = XEXP (note, 1))
3069 switch (REG_NOTE_KIND (note))
3070 {
3071 case REG_NORETURN:
3072 case REG_SETJMP:
3073 REG_NOTES (new_insn)
3074 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3075 XEXP (note, 0),
3076 REG_NOTES (new_insn));
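/* Fall through.  */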
3077 default:
3078 /* Discard all other reg notes. */
3079 break;
3080 }
3081
3082 /* Croak if there is another call in the sequence. */
3083 while (++i <= match_len)
3084 {
3085 j = i + peep2_current;
3086 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3087 j -= MAX_INSNS_PER_PEEP2 + 1;
3088 old_insn = peep2_insn_data[j].insn;
3089 gcc_assert (!CALL_P (old_insn));
3090 }
3091 break;
3092 }
3093
3094 i = match_len + peep2_current;
3095 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3096 i -= MAX_INSNS_PER_PEEP2 + 1;
3097
3098 note = find_reg_note (peep2_insn_data[i].insn,
3099 REG_EH_REGION, NULL_RTX);
3100
3101 /* Replace the old sequence with the new. */
3102 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3103 INSN_LOCATOR (peep2_insn_data[i].insn));
3104 before_try = PREV_INSN (insn);
3105 delete_insn_chain (insn, peep2_insn_data[i].insn);
3106
3107 /* Re-insert the EH_REGION notes. */
3108 if (note || (was_call && nonlocal_goto_handler_labels))
3109 {
3110 edge eh_edge;
3111 edge_iterator ei;
3112
3113 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3114 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3115 break;
3116
3117 for (x = try ; x != before_try ; x = PREV_INSN (x))
3118 if (CALL_P (x)
3119 || (flag_non_call_exceptions
3120 && may_trap_p (PATTERN (x))
3121 && !find_reg_note (x, REG_EH_REGION, NULL)))
3122 {
3123 if (note)
3124 REG_NOTES (x)
3125 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3126 XEXP (note, 0),
3127 REG_NOTES (x));
3128
3129 if (x != BB_END (bb) && eh_edge)
3130 {
3131 edge nfte, nehe;
3132 int flags;
3133
3134 nfte = split_block (bb, x);
3135 flags = (eh_edge->flags
3136 & (EDGE_EH | EDGE_ABNORMAL));
3137 if (CALL_P (x))
3138 flags |= EDGE_ABNORMAL_CALL;
3139 nehe = make_edge (nfte->src, eh_edge->dest,
3140 flags);
3141
3142 nehe->probability = eh_edge->probability;
3143 nfte->probability
3144 = REG_BR_PROB_BASE - nehe->probability;
3145
3146 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3147 #ifdef HAVE_conditional_execution
3148 SET_BIT (blocks, nfte->dest->index);
3149 changed = true;
3150 #endif
3151 bb = nfte->src;
3152 eh_edge = nehe;
3153 }
3154 }
3155
3156 /* A possibly trapping insn may have been converted into a
3157 non-trapping one; zap any dummy outgoing edges.  */
3158 do_cleanup_cfg |= purge_dead_edges (bb);
3159 }
3160
3161 #ifdef HAVE_conditional_execution
3162 /* With conditional execution, we cannot back up the
3163 live information so easily, since the conditional
3164 death data structures are not so self-contained.
3165 So record that we've made a modification to this
3166 block and update life information at the end. */
3167 SET_BIT (blocks, bb->index);
3168 changed = true;
3169
3170 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3171 peep2_insn_data[i].insn = NULL_RTX;
3172 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3173 #else
3174 /* Back up lifetime information past the end of the
3175 newly created sequence. */
3176 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3177 i = 0;
3178 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3179
3180 /* Update life information for the new sequence. */
3181 x = try;
3182 do
3183 {
3184 if (INSN_P (x))
3185 {
3186 if (--i < 0)
3187 i = MAX_INSNS_PER_PEEP2;
3188 peep2_insn_data[i].insn = x;
3189 propagate_one_insn (pbi, x);
3190 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3191 }
3192 x = PREV_INSN (x);
3193 }
3194 while (x != prev);
3195
3196 /* ??? Should verify that LIVE now matches what we
3197 had before the new sequence. */
3198
3199 peep2_current = i;
3200 #endif
3201
3202 /* If we generated a jump instruction, it won't have
3203 JUMP_LABEL set. Recompute after we're done. */
3204 for (x = try; x != before_try; x = PREV_INSN (x))
3205 if (JUMP_P (x))
3206 {
3207 do_rebuild_jump_labels = true;
3208 break;
3209 }
3210 }
3211 }
3212
3213 if (insn == BB_HEAD (bb))
3214 break;
3215 }
3216
3217 /* Some peepholes can decide they don't need one or more of their
3218 inputs.  If this happens, a local life update is not enough.  */
3219 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->global_live_at_start, live,
3220 0, j, rsi)
3221 {
3222 do_global_life_update = true;
3223 break;
3224 }
3225
3226 free_propagate_block_info (pbi);
3227 }
3228
3229 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3230 FREE_REG_SET (peep2_insn_data[i].live_before);
3231 FREE_REG_SET (live);
3232
3233 if (do_rebuild_jump_labels)
3234 rebuild_jump_labels (get_insns ());
3235
3236 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3237 we've changed global life since exception handlers are no longer
3238 reachable. */
3239 if (do_cleanup_cfg)
3240 {
3241 cleanup_cfg (0);
3242 do_global_life_update = true;
3243 }
3244 if (do_global_life_update)
3245 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3246 #ifdef HAVE_conditional_execution
3247 else
3248 {
3249 count_or_remove_death_notes (blocks, 1);
3250 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3251 }
3252 sbitmap_free (blocks);
3253 #endif
3254 }
3255 #endif /* HAVE_peephole2 */
3256
3257 /* Common predicates for use with define_bypass. */
3258
3259 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3260 data, not the address operand(s) of the store.  IN_INSN must be
3261 single_set.  OUT_INSN must be either a single_set or a PARALLEL with
3262 SETs inside.  */
3263
3264 int
3265 store_data_bypass_p (rtx out_insn, rtx in_insn)
3266 {
3267 rtx out_set, in_set;
3268
3269 in_set = single_set (in_insn);
3270 gcc_assert (in_set);
3271
3272 if (!MEM_P (SET_DEST (in_set)))
3273 return false;
3274
3275 out_set = single_set (out_insn);
3276 if (out_set)
3277 {
3278 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3279 return false;
3280 }
3281 else
3282 {
3283 rtx out_pat;
3284 int i;
3285
3286 out_pat = PATTERN (out_insn);
3287 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3288
3289 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3290 {
3291 rtx exp = XVECEXP (out_pat, 0, i);
3292
3293 if (GET_CODE (exp) == CLOBBER)
3294 continue;
3295
3296 gcc_assert (GET_CODE (exp) == SET);
3297
3298 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3299 return false;
3300 }
3301 }
3302
3303 return true;
3304 }
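
/* Illustrative use (hypothetical scheduling description): with
   reservation names invented for the example, a bypass that forwards an
   ALU result to the store-data operand one cycle early would be written

       (define_bypass 1 "cpu_alu" "cpu_store" "store_data_bypass_p")  */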
3305
3306 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3307 condition, and not in the THEN or ELSE branch.  OUT_INSN may be a single
3308 set or a PARALLEL of sets; IN_INSN should be single_set for this to hold,
3309 but for convenience of insn categorization may be any JUMP or CALL insn.  */
3310
3311 int
3312 if_test_bypass_p (rtx out_insn, rtx in_insn)
3313 {
3314 rtx out_set, in_set;
3315
3316 in_set = single_set (in_insn);
3317 if (! in_set)
3318 {
3319 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3320 return false;
3321 }
3322
3323 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3324 return false;
3325 in_set = SET_SRC (in_set);
3326
3327 out_set = single_set (out_insn);
3328 if (out_set)
3329 {
3330 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3331 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3332 return false;
3333 }
3334 else
3335 {
3336 rtx out_pat;
3337 int i;
3338
3339 out_pat = PATTERN (out_insn);
3340 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3341
3342 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3343 {
3344 rtx exp = XVECEXP (out_pat, 0, i);
3345
3346 if (GET_CODE (exp) == CLOBBER)
3347 continue;
3348
3349 gcc_assert (GET_CODE (exp) == SET);
3350
3351 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3352 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3353 return false;
3354 }
3355 }
3356
3357 return true;
3358 }
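
/* Illustrative use (hypothetical, analogous to the above): forward a
   compare result into the condition input of a conditional move:

       (define_bypass 1 "cpu_compare" "cpu_cmov" "if_test_bypass_p")  */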