recog.c (volatile_mem_p, [...]): Remove.
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
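
/* Editorial usage sketch (not part of the original source; INSN and
   NEW_SRC stand for caller-provided values).  A single, immediately
   validated change to the source of a SET insn looks like:

     rtx set = single_set (insn);
     if (set && validate_change (insn, &SET_SRC (set), new_src, 0))
       ... the change was kept and INSN re-recognized ...
     else
       ... the change was rejected and automatically undone ...
*/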

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERANDS.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
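
/* Editorial sketch of the grouped-change protocol (not from the original
   source; INSN, SET, NEW_SRC and NEW_DEST are assumed caller context):

     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (! apply_change_group ())
       ... both changes have already been undone via cancel_changes ...
*/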

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
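
/* Editorial usage sketch (not in the original source): replace every use
   of the pseudo REG in INSN by zero, keeping the result only if INSN is
   still recognizable:

     if (validate_replace_rtx (reg, const0_rtx, insn))
       ... INSN now uses const0_rtx and was re-recognized ...
*/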

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need only check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && !REG_P (SET_DEST (x))
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (x)))
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (REG_P (dest) && REG_P (XEXP (x, i))
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (REG_P (dest)
		      && REG_P (XVECEXP (x, i, j))
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
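
/* Editorial usage sketch (not in the original source; DEST and INSN are
   assumed caller context):

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);
     if (usep)
       ... *USEP is the innermost expression using DEST, and USE_INSN
	   is the insn containing that single use ...
*/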
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in an incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}
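
/* Editorial note (not in the original source): in a machine description,
   this predicate typically appears in match_operand; the canonical move
   expander is one common user (a sketch, details vary by target):

     (define_expand "movsi"
       [(set (match_operand:SI 0 "nonimmediate_operand" "")
	     (match_operand:SI 1 "general_operand" ""))]
       ""
       "...")
*/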
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
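
/* Editorial examples (not in the original source): on a target where the
   stack grows downward and PUSH_ROUNDING makes no adjustment, a matching
   operand has the shape

     (mem:SI (pre_dec:SI (reg/f:SI sp)))

   while a mode whose size is rounded up (say HImode rounded to 4 bytes)
   must use the PRE_MODIFY form

     (mem:HI (pre_modify:SI (reg/f:SI sp)
			    (plus:SI (reg/f:SI sp) (const_int -4))))

   Modes, the register name and the constant are illustrative only.  */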

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
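
/* Editorial summary of the accepted shapes (not in the original source):

     (asm_operands ...)                          -> number of inputs
     (set OUTPUT (asm_operands ...))             -> inputs + 1
     (parallel [(set ...) ... (clobber ...)...]) -> inputs + number of SETs
     (parallel [(asm_operands ...) (clobber ...)...]) -> number of inputs

   Any other shape yields -1.  */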

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.
	 Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
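
/* Editorial usage sketch (not in the original source), mirroring
   check_asm_operands above: size the arrays from asm_noperands, then
   pull the pieces out of BODY:

     int n = asm_noperands (body);
     rtx *ops = alloca (n * sizeof (rtx));
     const char **cons = alloca (n * sizeof (char *));
     const char *tmpl = decode_asm_operands (body, ops, NULL, cons, NULL);
*/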

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  do
	    constraint++;
	  while (ISDIGIT (*constraint));
	  if (! result)
	    result = -1;
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
		   /* Every memory operand can be reloaded to fit.  */
		   && memory_operand (op, VOIDmode))
	    result = 1;
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
		   /* Every address operand can be reloaded to fit.  */
		   && address_operand (op, VOIDmode))
	    result = 1;
#endif
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}
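
/* Editorial usage sketch (not in the original source): a pre-reload
   caller typically treats the three return classes separately:

     int ok = asm_operand_ok (operands[i], constraints[i]);
     if (ok == 0)
       ... operand definitely fails its constraint ...
     else if (ok < 0)
       ... matching-digit constraint; defer to constrain_operands ...
*/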
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
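
/* Editorial examples (not in the original source): for
   *P == (plus (reg A) (const_int 4)) the result points at the const_int;
   for a non-sum such as (mult (reg A) (const_int 4)) it is a null
   pointer.  */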
\f
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
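
/* Editorial example (not in the original source):
   offsettable_address_p (0, SImode, addr) asks whether ADDR through
   ADDR+3 would all be valid byte addresses; a plain base register is
   normally offsettable, while any autoincrement address never is.  */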
1915
1916 /* Return 1 if ADDR is an address-expression whose effect depends
1917 on the mode of the memory reference it is used in.
1918
1919 Autoincrement addressing is a typical example of mode-dependence
1920 because the amount of the increment depends on the mode. */
1921
1922 int
1923 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1924 {
1925 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1926 return 0;
1927 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1928 win: ATTRIBUTE_UNUSED_LABEL
1929 return 1;
1930 }
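/* Illustrative sketch, not part of the build: autoincrement is the
   canonical mode-dependent address, because (post_inc (reg)) advances
   the register by the size of the enclosing MEM's mode.  Whether the
   predicate actually returns 1 depends on the target's
   GO_IF_MODE_DEPENDENT_ADDRESS; the register number is hypothetical.  */
#if 0
static int
example_autoinc_is_mode_dependent (void)
{
  rtx reg = gen_rtx_REG (Pmode, 0);
  rtx addr = gen_rtx_POST_INC (Pmode, reg);
  return mode_dependent_address_p (addr);
}
#endif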
1931 \f
1932 /* Like extract_insn, but remember the insn extracted and don't extract it
1933 again when called repeatedly for the same insn, assuming that recog_data
1934 still contains valid information. This is used primarily by the gen_attr
1935 infrastructure, which extracts the same insn over and over. */
1936 void
1937 extract_insn_cached (rtx insn)
1938 {
1939 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1940 return;
1941 extract_insn (insn);
1942 recog_data.insn = insn;
1943 }
1944
1945 /* Do a cached extract_insn followed by constrain_operands, and complain
1946 about failures. Used by insn_attrtab. */
1947 void
1948 extract_constrain_insn_cached (rtx insn)
1949 {
1950 extract_insn_cached (insn);
1951 if (which_alternative == -1
1952 && !constrain_operands (reload_completed))
1953 fatal_insn_not_found (insn);
1954 }
1955
1956 /* Do cached constrain_operands and complain about failures. */
1957 int
1958 constrain_operands_cached (int strict)
1959 {
1960 if (which_alternative == -1)
1961 return constrain_operands (strict);
1962 else
1963 return 1;
1964 }
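/* Illustrative sketch, not part of the build: the calling sequence the
   generated attribute code effectively performs for any recognized
   insn.  On return, which_alternative identifies the matched
   constraint alternative.  */
#if 0
static int
example_matched_alternative (rtx insn)
{
  extract_constrain_insn_cached (insn);
  return which_alternative;
}
#endif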
1965 \f
1966 /* Analyze INSN and fill in recog_data. */
1967
1968 void
1969 extract_insn (rtx insn)
1970 {
1971 int i;
1972 int icode;
1973 int noperands;
1974 rtx body = PATTERN (insn);
1975
1976 recog_data.insn = NULL;
1977 recog_data.n_operands = 0;
1978 recog_data.n_alternatives = 0;
1979 recog_data.n_dups = 0;
1980 which_alternative = -1;
1981
1982 switch (GET_CODE (body))
1983 {
1984 case USE:
1985 case CLOBBER:
1986 case ASM_INPUT:
1987 case ADDR_VEC:
1988 case ADDR_DIFF_VEC:
1989 return;
1990
1991 case SET:
1992 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1993 goto asm_insn;
1994 else
1995 goto normal_insn;
1996 case PARALLEL:
1997 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1998 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1999 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2000 goto asm_insn;
2001 else
2002 goto normal_insn;
2003 case ASM_OPERANDS:
2004 asm_insn:
2005 recog_data.n_operands = noperands = asm_noperands (body);
2006 if (noperands >= 0)
2007 {
2008 /* This insn is an `asm' with operands. */
2009
2010 /* expand_asm_operands makes sure there aren't too many operands. */
2011 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2012
2013 /* Now get the operand values and constraints out of the insn. */
2014 decode_asm_operands (body, recog_data.operand,
2015 recog_data.operand_loc,
2016 recog_data.constraints,
2017 recog_data.operand_mode);
2018 if (noperands > 0)
2019 {
2020 const char *p = recog_data.constraints[0];
2021 recog_data.n_alternatives = 1;
2022 while (*p)
2023 recog_data.n_alternatives += (*p++ == ',');
2024 }
2025 break;
2026 }
2027 fatal_insn_not_found (insn);
2028
2029 default:
2030 normal_insn:
2031 /* Ordinary insn: recognize it, get the operands via insn_extract
2032 and get the constraints. */
2033
2034 icode = recog_memoized (insn);
2035 if (icode < 0)
2036 fatal_insn_not_found (insn);
2037
2038 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2039 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2040 recog_data.n_dups = insn_data[icode].n_dups;
2041
2042 insn_extract (insn);
2043
2044 for (i = 0; i < noperands; i++)
2045 {
2046 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2047 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2048 /* A VOIDmode match_operand gets its mode from the real operand. */
2049 if (recog_data.operand_mode[i] == VOIDmode)
2050 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2051 }
2052 }
2053 for (i = 0; i < noperands; i++)
2054 recog_data.operand_type[i]
2055 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2056 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2057 : OP_IN);
2058
2059 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2060 }
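/* Illustrative sketch, not part of the build: walking the operands of
   a recognized insn after extraction.  The count_reg hook is
   hypothetical.  */
#if 0
static void
example_walk_outputs (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] != OP_IN
        && REG_P (recog_data.operand[i]))
      count_reg (recog_data.operand[i]);
}
#endif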
2061
2062 /* After calling extract_insn, you can use this function to extract some
2063 information from the constraint strings into a more usable form.
2064 The collected data is stored in recog_op_alt. */
2065 void
2066 preprocess_constraints (void)
2067 {
2068 int i;
2069
2070 for (i = 0; i < recog_data.n_operands; i++)
2071 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2072 * sizeof (struct operand_alternative)));
2073
2074 for (i = 0; i < recog_data.n_operands; i++)
2075 {
2076 int j;
2077 struct operand_alternative *op_alt;
2078 const char *p = recog_data.constraints[i];
2079
2080 op_alt = recog_op_alt[i];
2081
2082 for (j = 0; j < recog_data.n_alternatives; j++)
2083 {
2084 op_alt[j].cl = NO_REGS;
2085 op_alt[j].constraint = p;
2086 op_alt[j].matches = -1;
2087 op_alt[j].matched = -1;
2088
2089 if (*p == '\0' || *p == ',')
2090 {
2091 op_alt[j].anything_ok = 1;
2092 continue;
2093 }
2094
2095 for (;;)
2096 {
2097 char c = *p;
2098 if (c == '#')
2099 do
2100 c = *++p;
2101 while (c != ',' && c != '\0');
2102 if (c == ',' || c == '\0')
2103 {
2104 p++;
2105 break;
2106 }
2107
2108 switch (c)
2109 {
2110 case '=': case '+': case '*': case '%':
2111 case 'E': case 'F': case 'G': case 'H':
2112 case 's': case 'i': case 'n':
2113 case 'I': case 'J': case 'K': case 'L':
2114 case 'M': case 'N': case 'O': case 'P':
2115 /* These don't say anything we care about. */
2116 break;
2117
2118 case '?':
2119 op_alt[j].reject += 6;
2120 break;
2121 case '!':
2122 op_alt[j].reject += 600;
2123 break;
2124 case '&':
2125 op_alt[j].earlyclobber = 1;
2126 break;
2127
2128 case '0': case '1': case '2': case '3': case '4':
2129 case '5': case '6': case '7': case '8': case '9':
2130 {
2131 char *end;
2132 op_alt[j].matches = strtoul (p, &end, 10);
2133 recog_op_alt[op_alt[j].matches][j].matched = i;
2134 p = end;
2135 }
2136 continue;
2137
2138 case 'm':
2139 op_alt[j].memory_ok = 1;
2140 break;
2141 case '<':
2142 op_alt[j].decmem_ok = 1;
2143 break;
2144 case '>':
2145 op_alt[j].incmem_ok = 1;
2146 break;
2147 case 'V':
2148 op_alt[j].nonoffmem_ok = 1;
2149 break;
2150 case 'o':
2151 op_alt[j].offmem_ok = 1;
2152 break;
2153 case 'X':
2154 op_alt[j].anything_ok = 1;
2155 break;
2156
2157 case 'p':
2158 op_alt[j].is_address = 1;
2159 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2160 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2161 break;
2162
2163 case 'g':
2164 case 'r':
2165 op_alt[j].cl =
2166 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2167 break;
2168
2169 default:
2170 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2171 {
2172 op_alt[j].memory_ok = 1;
2173 break;
2174 }
2175 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2176 {
2177 op_alt[j].is_address = 1;
2178 op_alt[j].cl
2179 = (reg_class_subunion
2180 [(int) op_alt[j].cl]
2181 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2182 break;
2183 }
2184
2185 op_alt[j].cl
2186 = (reg_class_subunion
2187 [(int) op_alt[j].cl]
2188 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2189 break;
2190 }
2191 p += CONSTRAINT_LEN (c, p);
2192 }
2193 }
2194 }
2195 }
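/* Illustrative sketch, not part of the build: once extract_insn and
   preprocess_constraints have run, per-alternative questions can be
   answered from recog_op_alt without reparsing the constraint strings.
   The function name is hypothetical.  */
#if 0
static int
example_operand_allows_mem_p (int opno, int alt)
{
  const struct operand_alternative *oa = &recog_op_alt[opno][alt];
  return oa->memory_ok || oa->offmem_ok || oa->anything_ok;
}
#endif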
2196
2197 /* Check the operands of an insn against the insn's operand constraints
2198 and return 1 if they are valid.
2199 The information about the insn's operands, constraints, operand modes
2200 etc. is obtained from the global variables set up by extract_insn.
2201
2202 WHICH_ALTERNATIVE is set to a number which indicates which
2203 alternative of constraints was matched: 0 for the first alternative,
2204 1 for the next, etc.
2205
2206 In addition, when two operands are required to match
2207 and it happens that the output operand is (reg) while the
2208 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2209 make the output operand look like the input.
2210 This is because the output operand is the one the template will print.
2211
2212 This is used in final, just before printing the assembler code and by
2213 the routines that determine an insn's attribute.
2214
2215 If STRICT is a positive nonzero value, it means that we have been
2216 called after reload has been completed. In that case, we must
2217 do all checks strictly. If it is zero, it means that we have been called
2218 before reload has completed. In that case, we first try to see if we can
2219 find an alternative that matches strictly. If not, we try again, this
2220 time assuming that reload will fix up the insn. This provides a "best
2221 guess" for the alternative and is used to compute attributes of insns prior
2222 to reload. A negative value of STRICT is used for this internal call. */
2223
2224 struct funny_match
2225 {
2226 int this, other;
2227 };
2228
2229 int
2230 constrain_operands (int strict)
2231 {
2232 const char *constraints[MAX_RECOG_OPERANDS];
2233 int matching_operands[MAX_RECOG_OPERANDS];
2234 int earlyclobber[MAX_RECOG_OPERANDS];
2235 int c;
2236
2237 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2238 int funny_match_index;
2239
2240 which_alternative = 0;
2241 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2242 return 1;
2243
2244 for (c = 0; c < recog_data.n_operands; c++)
2245 {
2246 constraints[c] = recog_data.constraints[c];
2247 matching_operands[c] = -1;
2248 }
2249
2250 do
2251 {
2252 int seen_earlyclobber_at = -1;
2253 int opno;
2254 int lose = 0;
2255 funny_match_index = 0;
2256
2257 for (opno = 0; opno < recog_data.n_operands; opno++)
2258 {
2259 rtx op = recog_data.operand[opno];
2260 enum machine_mode mode = GET_MODE (op);
2261 const char *p = constraints[opno];
2262 int offset = 0;
2263 int win = 0;
2264 int val;
2265 int len;
2266
2267 earlyclobber[opno] = 0;
2268
2269 /* A unary operator may be accepted by the predicate, but it
2270 is irrelevant for matching constraints. */
2271 if (UNARY_P (op))
2272 op = XEXP (op, 0);
2273
2274 if (GET_CODE (op) == SUBREG)
2275 {
2276 if (REG_P (SUBREG_REG (op))
2277 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2278 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2279 GET_MODE (SUBREG_REG (op)),
2280 SUBREG_BYTE (op),
2281 GET_MODE (op));
2282 op = SUBREG_REG (op);
2283 }
2284
2285 /* An empty constraint or empty alternative
2286 allows anything which matched the pattern. */
2287 if (*p == 0 || *p == ',')
2288 win = 1;
2289
2290 do
2291 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2292 {
2293 case '\0':
2294 len = 0;
2295 break;
2296 case ',':
2297 c = '\0';
2298 break;
2299
2300 case '?': case '!': case '*': case '%':
2301 case '=': case '+':
2302 break;
2303
2304 case '#':
2305 /* Ignore rest of this alternative as far as
2306 constraint checking is concerned. */
2307 do
2308 p++;
2309 while (*p && *p != ',');
2310 len = 0;
2311 break;
2312
2313 case '&':
2314 earlyclobber[opno] = 1;
2315 if (seen_earlyclobber_at < 0)
2316 seen_earlyclobber_at = opno;
2317 break;
2318
2319 case '0': case '1': case '2': case '3': case '4':
2320 case '5': case '6': case '7': case '8': case '9':
2321 {
2322 /* This operand must be the same as a previous one.
2323 This kind of constraint is used for instructions such
2324 as add when they take only two operands.
2325
2326 Note that the lower-numbered operand is passed first.
2327
2328 If we are not testing strictly, assume that this
2329 constraint will be satisfied. */
2330
2331 char *end;
2332 int match;
2333
2334 match = strtoul (p, &end, 10);
2335 p = end;
2336
2337 if (strict < 0)
2338 val = 1;
2339 else
2340 {
2341 rtx op1 = recog_data.operand[match];
2342 rtx op2 = recog_data.operand[opno];
2343
2344 /* A unary operator may be accepted by the predicate,
2345 but it is irrelevant for matching constraints. */
2346 if (UNARY_P (op1))
2347 op1 = XEXP (op1, 0);
2348 if (UNARY_P (op2))
2349 op2 = XEXP (op2, 0);
2350
2351 val = operands_match_p (op1, op2);
2352 }
2353
2354 matching_operands[opno] = match;
2355 matching_operands[match] = opno;
2356
2357 if (val != 0)
2358 win = 1;
2359
2360 /* If output is *x and input is *--x, arrange later
2361 to change the output to *--x as well, since the
2362 output op is the one that will be printed. */
2363 if (val == 2 && strict > 0)
2364 {
2365 funny_match[funny_match_index].this = opno;
2366 funny_match[funny_match_index++].other = match;
2367 }
2368 }
2369 len = 0;
2370 break;
2371
2372 case 'p':
2373 /* 'p' is used for address operands. When we are called by
2374 gen_reload, no one will have checked that the address is
2375 strictly valid, i.e., that all pseudos requiring hard regs
2376 have gotten them. */
2377 if (strict <= 0
2378 || (strict_memory_address_p (recog_data.operand_mode[opno],
2379 op)))
2380 win = 1;
2381 break;
2382
2383 /* No need to check general_operand again;
2384 it was done in insn-recog.c. Well, except that reload
2385 doesn't check the validity of its replacements, but
2386 that should only matter when there's a bug. */
2387 case 'g':
2388 /* Anything goes unless it is a REG and really has a hard reg
2389 but the hard reg is not in the class GENERAL_REGS. */
2390 if (REG_P (op))
2391 {
2392 if (strict < 0
2393 || GENERAL_REGS == ALL_REGS
2394 || (reload_in_progress
2395 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2396 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2397 win = 1;
2398 }
2399 else if (strict < 0 || general_operand (op, mode))
2400 win = 1;
2401 break;
2402
2403 case 'X':
2404 /* This is used for a MATCH_SCRATCH in the cases when
2405 we don't actually need anything. So anything goes
2406 any time. */
2407 win = 1;
2408 break;
2409
2410 case 'm':
2411 /* Memory operands must be valid, to the extent
2412 required by STRICT. */
2413 if (MEM_P (op))
2414 {
2415 if (strict > 0
2416 && !strict_memory_address_p (GET_MODE (op),
2417 XEXP (op, 0)))
2418 break;
2419 if (strict == 0
2420 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2421 break;
2422 win = 1;
2423 }
2424 /* Before reload, accept what reload can turn into mem. */
2425 else if (strict < 0 && CONSTANT_P (op))
2426 win = 1;
2427 /* During reload, accept a pseudo. */
2428 else if (reload_in_progress && REG_P (op)
2429 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2430 win = 1;
2431 break;
2432
2433 case '<':
2434 if (MEM_P (op)
2435 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2436 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2437 win = 1;
2438 break;
2439
2440 case '>':
2441 if (MEM_P (op)
2442 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2443 || GET_CODE (XEXP (op, 0)) == POST_INC))
2444 win = 1;
2445 break;
2446
2447 case 'E':
2448 case 'F':
2449 if (GET_CODE (op) == CONST_DOUBLE
2450 || (GET_CODE (op) == CONST_VECTOR
2451 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2452 win = 1;
2453 break;
2454
2455 case 'G':
2456 case 'H':
2457 if (GET_CODE (op) == CONST_DOUBLE
2458 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2459 win = 1;
2460 break;
2461
2462 case 's':
2463 if (GET_CODE (op) == CONST_INT
2464 || (GET_CODE (op) == CONST_DOUBLE
2465 && GET_MODE (op) == VOIDmode))
2466 break;
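/* Fall through: for 's', any remaining constant is validated by
   the 'i' case below.  */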
2467 case 'i':
2468 if (CONSTANT_P (op))
2469 win = 1;
2470 break;
2471
2472 case 'n':
2473 if (GET_CODE (op) == CONST_INT
2474 || (GET_CODE (op) == CONST_DOUBLE
2475 && GET_MODE (op) == VOIDmode))
2476 win = 1;
2477 break;
2478
2479 case 'I':
2480 case 'J':
2481 case 'K':
2482 case 'L':
2483 case 'M':
2484 case 'N':
2485 case 'O':
2486 case 'P':
2487 if (GET_CODE (op) == CONST_INT
2488 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2489 win = 1;
2490 break;
2491
2492 case 'V':
2493 if (MEM_P (op)
2494 && ((strict > 0 && ! offsettable_memref_p (op))
2495 || (strict < 0
2496 && !(CONSTANT_P (op) || MEM_P (op)))
2497 || (reload_in_progress
2498 && !(REG_P (op)
2499 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2500 win = 1;
2501 break;
2502
2503 case 'o':
2504 if ((strict > 0 && offsettable_memref_p (op))
2505 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2506 /* Before reload, accept what reload can handle. */
2507 || (strict < 0
2508 && (CONSTANT_P (op) || MEM_P (op)))
2509 /* During reload, accept a pseudo. */
2510 || (reload_in_progress && REG_P (op)
2511 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2512 win = 1;
2513 break;
2514
2515 default:
2516 {
2517 enum reg_class cl;
2518
2519 cl = (c == 'r'
2520 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2521 if (cl != NO_REGS)
2522 {
2523 if (strict < 0
2524 || (strict == 0
2525 && REG_P (op)
2526 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2527 || (strict == 0 && GET_CODE (op) == SCRATCH)
2528 || (REG_P (op)
2529 && reg_fits_class_p (op, cl, offset, mode)))
2530 win = 1;
2531 }
2532 #ifdef EXTRA_CONSTRAINT_STR
2533 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2534 win = 1;
2535
2536 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2537 /* Every memory operand can be reloaded to fit. */
2538 && ((strict < 0 && MEM_P (op))
2539 /* Before reload, accept what reload can turn
2540 into mem. */
2541 || (strict < 0 && CONSTANT_P (op))
2542 /* During reload, accept a pseudo. */
2543 || (reload_in_progress && REG_P (op)
2544 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2545 win = 1;
2546 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2547 /* Every address operand can be reloaded to fit. */
2548 && strict < 0)
2549 win = 1;
2550 #endif
2551 break;
2552 }
2553 }
2554 while (p += len, c);
2555
2556 constraints[opno] = p;
2557 /* If this operand did not win somehow,
2558 this alternative loses. */
2559 if (! win)
2560 lose = 1;
2561 }
2562 /* This alternative won; the operands are ok.
2563 Change whichever operands this alternative says to change. */
2564 if (! lose)
2565 {
2566 int opno, eopno;
2567
2568 /* See if any earlyclobber operand conflicts with some other
2569 operand. */
2570
2571 if (strict > 0 && seen_earlyclobber_at >= 0)
2572 for (eopno = seen_earlyclobber_at;
2573 eopno < recog_data.n_operands;
2574 eopno++)
2575 /* Ignore earlyclobber operands now in memory,
2576 because we would often report failure when we have
2577 two memory operands, one of which was formerly a REG. */
2578 if (earlyclobber[eopno]
2579 && REG_P (recog_data.operand[eopno]))
2580 for (opno = 0; opno < recog_data.n_operands; opno++)
2581 if ((MEM_P (recog_data.operand[opno])
2582 || recog_data.operand_type[opno] != OP_OUT)
2583 && opno != eopno
2584 /* Ignore things like match_operator operands. */
2585 && *recog_data.constraints[opno] != 0
2586 && ! (matching_operands[opno] == eopno
2587 && operands_match_p (recog_data.operand[opno],
2588 recog_data.operand[eopno]))
2589 && ! safe_from_earlyclobber (recog_data.operand[opno],
2590 recog_data.operand[eopno]))
2591 lose = 1;
2592
2593 if (! lose)
2594 {
2595 while (--funny_match_index >= 0)
2596 {
2597 recog_data.operand[funny_match[funny_match_index].other]
2598 = recog_data.operand[funny_match[funny_match_index].this];
2599 }
2600
2601 return 1;
2602 }
2603 }
2604
2605 which_alternative++;
2606 }
2607 while (which_alternative < recog_data.n_alternatives);
2608
2609 which_alternative = -1;
2610 /* If we are about to reject this, but we are not to test strictly,
2611 try a very loose test. Only return failure if it fails also. */
2612 if (strict == 0)
2613 return constrain_operands (-1);
2614 else
2615 return 0;
2616 }
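/* Illustrative sketch, not part of the build: how a consumer such as
   final pairs extraction with strict constraint checking before
   relying on which_alternative.  */
#if 0
static void
example_check_operands (rtx insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif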
2617
2618 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2619 is a hard reg in class CLASS when its regno is offset by OFFSET
2620 and changed to mode MODE.
2621 If REG occupies multiple hard regs, all of them must be in CLASS. */
2622
2623 int
2624 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2625 enum machine_mode mode)
2626 {
2627 int regno = REGNO (operand);
2628
2629 if (cl == NO_REGS)
2630 return 0;
2631
2632 if (regno < FIRST_PSEUDO_REGISTER
2633 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2634 regno + offset))
2635 {
2636 int sr;
2637 regno += offset;
2638 for (sr = hard_regno_nregs[regno][mode] - 1;
2639 sr > 0; sr--)
2640 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2641 regno + sr))
2642 break;
2643 return sr == 0;
2644 }
2645
2646 return 0;
2647 }
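/* Illustrative sketch, not part of the build: testing whether hard
   register 2 (a hypothetical register number), viewed in DImode, lies
   entirely within GENERAL_REGS; on a target where DImode needs two
   word registers this checks registers 2 and 3.  */
#if 0
static int
example_reg_fits_p (void)
{
  rtx reg = gen_rtx_REG (DImode, 2);
  return reg_fits_class_p (reg, GENERAL_REGS, 0, DImode);
}
#endif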
2648 \f
2649 /* Split a single instruction. Helper function for split_all_insns and
2650 split_all_insns_noflow. Return last insn in the sequence if successful,
2651 or NULL if unsuccessful. */
2652
2653 static rtx
2654 split_insn (rtx insn)
2655 {
2656 /* Split insns here to get max fine-grain parallelism. */
2657 rtx first = PREV_INSN (insn);
2658 rtx last = try_split (PATTERN (insn), insn, 1);
2659
2660 if (last == insn)
2661 return NULL_RTX;
2662
2663 /* try_split returns the NOTE that INSN became. */
2664 SET_INSN_DELETED (insn);
2665
2666 /* ??? Coddle to md files that generate subregs in post-reload
2667 splitters instead of computing the proper hard register. */
2668 if (reload_completed && first != last)
2669 {
2670 first = NEXT_INSN (first);
2671 for (;;)
2672 {
2673 if (INSN_P (first))
2674 cleanup_subreg_operands (first);
2675 if (first == last)
2676 break;
2677 first = NEXT_INSN (first);
2678 }
2679 }
2680 return last;
2681 }
2682
2683 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2684
2685 void
2686 split_all_insns (int upd_life)
2687 {
2688 sbitmap blocks;
2689 bool changed;
2690 basic_block bb;
2691
2692 blocks = sbitmap_alloc (last_basic_block);
2693 sbitmap_zero (blocks);
2694 changed = false;
2695
2696 FOR_EACH_BB_REVERSE (bb)
2697 {
2698 rtx insn, next;
2699 bool finish = false;
2700
2701 for (insn = BB_HEAD (bb); !finish ; insn = next)
2702 {
2703 /* Can't use `next_real_insn', because that might step across
2704 CODE_LABELs and out of this basic block. */
2705 next = NEXT_INSN (insn);
2706 finish = (insn == BB_END (bb));
2707 if (INSN_P (insn))
2708 {
2709 rtx set = single_set (insn);
2710
2711 /* Don't split no-op move insns. These should silently
2712 disappear later in final. Splitting such insns would
2713 break the code that handles REG_NO_CONFLICT blocks. */
2714 if (set && set_noop_p (set))
2715 {
2716 /* Nops get in the way while scheduling, so delete them
2717 now if register allocation has already been done. It
2718 is too risky to try to do this before register
2719 allocation, and there are unlikely to be very many
2720 nops then anyway. */
2721 if (reload_completed)
2722 {
2723 /* If the no-op set has a REG_UNUSED note, we need
2724 to update liveness information. */
2725 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2726 {
2727 SET_BIT (blocks, bb->index);
2728 changed = true;
2729 }
2730 /* ??? Is life info affected by deleting edges? */
2731 delete_insn_and_edges (insn);
2732 }
2733 }
2734 else
2735 {
2736 rtx last = split_insn (insn);
2737 if (last)
2738 {
2739 /* The split sequence may end with a barrier, but the
2740 BB boundary we are interested in is the insn just
2741 before it. */
2742
2743 while (BARRIER_P (last))
2744 last = PREV_INSN (last);
2745 SET_BIT (blocks, bb->index);
2746 changed = true;
2747 }
2748 }
2749 }
2750 }
2751 }
2752
2753 if (changed)
2754 {
2755 int old_last_basic_block = last_basic_block;
2756
2757 find_many_sub_basic_blocks (blocks);
2758
2759 if (old_last_basic_block != last_basic_block && upd_life)
2760 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2761 }
2762
2763 if (changed && upd_life)
2764 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2765 PROP_DEATH_NOTES);
2766
2767 #ifdef ENABLE_CHECKING
2768 verify_flow_info ();
2769 #endif
2770
2771 sbitmap_free (blocks);
2772 }
2773
2774 /* Same as split_all_insns, but do not expect CFG to be available.
2775 Used by machine dependent reorg passes. */
2776
2777 unsigned int
2778 split_all_insns_noflow (void)
2779 {
2780 rtx next, insn;
2781
2782 for (insn = get_insns (); insn; insn = next)
2783 {
2784 next = NEXT_INSN (insn);
2785 if (INSN_P (insn))
2786 {
2787 /* Don't split no-op move insns. These should silently
2788 disappear later in final. Splitting such insns would
2789 break the code that handles REG_NO_CONFLICT blocks. */
2790 rtx set = single_set (insn);
2791 if (set && set_noop_p (set))
2792 {
2793 /* Nops get in the way while scheduling, so delete them
2794 now if register allocation has already been done. It
2795 is too risky to try to do this before register
2796 allocation, and there are unlikely to be very many
2797 nops then anyway.
2798
2799 ??? Should we use delete_insn when the CFG isn't valid? */
2800 if (reload_completed)
2801 delete_insn_and_edges (insn);
2802 }
2803 else
2804 split_insn (insn);
2805 }
2806 }
2807 return 0;
2808 }
2809 \f
2810 #ifdef HAVE_peephole2
2811 struct peep2_insn_data
2812 {
2813 rtx insn;
2814 regset live_before;
2815 };
2816
2817 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2818 static int peep2_current;
2819 /* The number of instructions available to match a peep2. */
2820 int peep2_current_count;
2821
2822 /* A non-insn marker indicating the last insn of the block.
2823 The live_before regset for this element is correct, indicating
2824 global_live_at_end for the block. */
2825 #define PEEP2_EOB pc_rtx
2826
2827 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2828 does not exist. Used by the recognizer to find the next insn to match
2829 in a multi-insn pattern. */
2830
2831 rtx
2832 peep2_next_insn (int n)
2833 {
2834 gcc_assert (n <= peep2_current_count);
2835
2836 n += peep2_current;
2837 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2838 n -= MAX_INSNS_PER_PEEP2 + 1;
2839
2840 return peep2_insn_data[n].insn;
2841 }
2842
2843 /* Return true if REGNO is dead before the Nth non-note insn
2844 after `current'. */
2845
2846 int
2847 peep2_regno_dead_p (int ofs, int regno)
2848 {
2849 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2850
2851 ofs += peep2_current;
2852 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2853 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2854
2855 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2856
2857 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2858 }
2859
2860 /* Similarly for a REG. */
2861
2862 int
2863 peep2_reg_dead_p (int ofs, rtx reg)
2864 {
2865 int regno, n;
2866
2867 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2868
2869 ofs += peep2_current;
2870 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2871 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2872
2873 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2874
2875 regno = REGNO (reg);
2876 n = hard_regno_nregs[regno][GET_MODE (reg)];
2877 while (--n >= 0)
2878 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2879 return 0;
2880 return 1;
2881 }
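/* Illustrative sketch, not part of the build: the body of a
   hypothetical define_peephole2 condition, accepting the match only if
   the register in operands[0] is dead after the two matched insns.  */
#if 0
static int
example_peep2_condition (rtx *operands)
{
  return REG_P (operands[0]) && peep2_reg_dead_p (2, operands[0]);
}
#endif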
2882
2883 /* Try to find a hard register of mode MODE, matching the register class in
2884 CLASS_STR, which is available from the start of the FROM'th insn of the
2885 current peephole window and remains available until the end of the TO'th
2886 insn. FROM and TO are window offsets, counted like the argument of
2887 peep2_next_insn.
2888 Registers that already have bits set in REG_SET will not be considered.
2889
2890 If an appropriate register is available, it will be returned and the
2891 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2892 returned. */
2893
2894 rtx
2895 peep2_find_free_register (int from, int to, const char *class_str,
2896 enum machine_mode mode, HARD_REG_SET *reg_set)
2897 {
2898 static int search_ofs;
2899 enum reg_class cl;
2900 HARD_REG_SET live;
2901 int i;
2902
2903 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2904 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2905
2906 from += peep2_current;
2907 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2908 from -= MAX_INSNS_PER_PEEP2 + 1;
2909 to += peep2_current;
2910 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2911 to -= MAX_INSNS_PER_PEEP2 + 1;
2912
2913 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2914 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2915
2916 while (from != to)
2917 {
2918 HARD_REG_SET this_live;
2919
2920 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2921 from = 0;
2922 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2923 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2924 IOR_HARD_REG_SET (live, this_live);
2925 }
2926
2927 cl = (class_str[0] == 'r' ? GENERAL_REGS
2928 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2929
2930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2931 {
2932 int raw_regno, regno, success, j;
2933
2934 /* Distribute the free registers as much as possible. */
2935 raw_regno = search_ofs + i;
2936 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2937 raw_regno -= FIRST_PSEUDO_REGISTER;
2938 #ifdef REG_ALLOC_ORDER
2939 regno = reg_alloc_order[raw_regno];
2940 #else
2941 regno = raw_regno;
2942 #endif
2943
2944 /* Don't allocate fixed registers. */
2945 if (fixed_regs[regno])
2946 continue;
2947 /* Make sure the register is of the right class. */
2948 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2949 continue;
2950 /* And can support the mode we need. */
2951 if (! HARD_REGNO_MODE_OK (regno, mode))
2952 continue;
2953 /* And that we don't create an extra save/restore. */
2954 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2955 continue;
2956 /* And we don't clobber traceback for noreturn functions. */
2957 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2958 && (! reload_completed || frame_pointer_needed))
2959 continue;
2960
2961 success = 1;
2962 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2963 {
2964 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2965 || TEST_HARD_REG_BIT (live, regno + j))
2966 {
2967 success = 0;
2968 break;
2969 }
2970 }
2971 if (success)
2972 {
2973 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2974 SET_HARD_REG_BIT (*reg_set, regno + j);
2975
2976 /* Start the next search with the next register. */
2977 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2978 raw_regno = 0;
2979 search_ofs = raw_regno;
2980
2981 return gen_rtx_REG (mode, regno);
2982 }
2983 }
2984
2985 search_ofs = 0;
2986 return NULL_RTX;
2987 }
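/* Illustrative sketch, not part of the build: a define_peephole2 body
   might allocate a scratch general register that stays free across a
   two-insn window (offsets 0 and 1), getting NULL_RTX if none
   exists.  */
#if 0
static rtx
example_grab_scratch (void)
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", SImode, &used);
}
#endif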
2988
2989 /* Perform the peephole2 optimization pass. */
2990
2991 static void
2992 peephole2_optimize (void)
2993 {
2994 rtx insn, prev;
2995 regset live;
2996 int i;
2997 basic_block bb;
2998 #ifdef HAVE_conditional_execution
2999 sbitmap blocks;
3000 bool changed;
3001 #endif
3002 bool do_cleanup_cfg = false;
3003 bool do_global_life_update = false;
3004 bool do_rebuild_jump_labels = false;
3005
3006 /* Initialize the regsets we're going to use. */
3007 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3008 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
3009 live = ALLOC_REG_SET (&reg_obstack);
3010
3011 #ifdef HAVE_conditional_execution
3012 blocks = sbitmap_alloc (last_basic_block);
3013 sbitmap_zero (blocks);
3014 changed = false;
3015 #else
3016 count_or_remove_death_notes (NULL, 1);
3017 #endif
3018
3019 FOR_EACH_BB_REVERSE (bb)
3020 {
3021 struct propagate_block_info *pbi;
3022 reg_set_iterator rsi;
3023 unsigned int j;
3024
3025 /* Indicate that all slots except the last hold invalid data. */
3026 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3027 peep2_insn_data[i].insn = NULL_RTX;
3028 peep2_current_count = 0;
3029
3030 /* Indicate that the last slot contains live_after data. */
3031 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3032 peep2_current = MAX_INSNS_PER_PEEP2;
3033
3034 /* Start up propagation. */
3035 COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
3036 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3037
3038 #ifdef HAVE_conditional_execution
3039 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3040 #else
3041 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3042 #endif
3043
3044 for (insn = BB_END (bb); ; insn = prev)
3045 {
3046 prev = PREV_INSN (insn);
3047 if (INSN_P (insn))
3048 {
3049 rtx try, before_try, x;
3050 int match_len;
3051 rtx note;
3052 bool was_call = false;
3053
3054 /* Record this insn. */
3055 if (--peep2_current < 0)
3056 peep2_current = MAX_INSNS_PER_PEEP2;
3057 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3058 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3059 peep2_current_count++;
3060 peep2_insn_data[peep2_current].insn = insn;
3061 propagate_one_insn (pbi, insn);
3062 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3063
3064 if (RTX_FRAME_RELATED_P (insn))
3065 {
3066 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3067 substitution would lose the
3068 REG_FRAME_RELATED_EXPR that is attached. */
3069 peep2_current_count = 0;
3070 try = NULL;
3071 }
3072 else
3073 /* Match the peephole. */
3074 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3075
3076 if (try != NULL)
3077 {
3078 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3079 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3080 cfg-related call notes. */
3081 for (i = 0; i <= match_len; ++i)
3082 {
3083 int j;
3084 rtx old_insn, new_insn, note;
3085
3086 j = i + peep2_current;
3087 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3088 j -= MAX_INSNS_PER_PEEP2 + 1;
3089 old_insn = peep2_insn_data[j].insn;
3090 if (!CALL_P (old_insn))
3091 continue;
3092 was_call = true;
3093
3094 new_insn = try;
3095 while (new_insn != NULL_RTX)
3096 {
3097 if (CALL_P (new_insn))
3098 break;
3099 new_insn = NEXT_INSN (new_insn);
3100 }
3101
3102 gcc_assert (new_insn != NULL_RTX);
3103
3104 CALL_INSN_FUNCTION_USAGE (new_insn)
3105 = CALL_INSN_FUNCTION_USAGE (old_insn);
3106
3107 for (note = REG_NOTES (old_insn);
3108 note;
3109 note = XEXP (note, 1))
3110 switch (REG_NOTE_KIND (note))
3111 {
3112 case REG_NORETURN:
3113 case REG_SETJMP:
3114 REG_NOTES (new_insn)
3115 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3116 XEXP (note, 0),
3117 REG_NOTES (new_insn));
3118 default:
3119 /* Discard all other reg notes. */
3120 break;
3121 }
3122
3123 /* Croak if there is another call in the sequence. */
3124 while (++i <= match_len)
3125 {
3126 j = i + peep2_current;
3127 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3128 j -= MAX_INSNS_PER_PEEP2 + 1;
3129 old_insn = peep2_insn_data[j].insn;
3130 gcc_assert (!CALL_P (old_insn));
3131 }
3132 break;
3133 }
3134
3135 i = match_len + peep2_current;
3136 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3137 i -= MAX_INSNS_PER_PEEP2 + 1;
3138
3139 note = find_reg_note (peep2_insn_data[i].insn,
3140 REG_EH_REGION, NULL_RTX);
3141
3142 /* Replace the old sequence with the new. */
3143 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3144 INSN_LOCATOR (peep2_insn_data[i].insn));
3145 before_try = PREV_INSN (insn);
3146 delete_insn_chain (insn, peep2_insn_data[i].insn);
3147
3148 /* Re-insert the EH_REGION notes. */
3149 if (note || (was_call && nonlocal_goto_handler_labels))
3150 {
3151 edge eh_edge;
3152 edge_iterator ei;
3153
3154 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3155 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3156 break;
3157
3158 for (x = try ; x != before_try ; x = PREV_INSN (x))
3159 if (CALL_P (x)
3160 || (flag_non_call_exceptions
3161 && may_trap_p (PATTERN (x))
3162 && !find_reg_note (x, REG_EH_REGION, NULL)))
3163 {
3164 if (note)
3165 REG_NOTES (x)
3166 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3167 XEXP (note, 0),
3168 REG_NOTES (x));
3169
3170 if (x != BB_END (bb) && eh_edge)
3171 {
3172 edge nfte, nehe;
3173 int flags;
3174
3175 nfte = split_block (bb, x);
3176 flags = (eh_edge->flags
3177 & (EDGE_EH | EDGE_ABNORMAL));
3178 if (CALL_P (x))
3179 flags |= EDGE_ABNORMAL_CALL;
3180 nehe = make_edge (nfte->src, eh_edge->dest,
3181 flags);
3182
3183 nehe->probability = eh_edge->probability;
3184 nfte->probability
3185 = REG_BR_PROB_BASE - nehe->probability;
3186
3187 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3188 #ifdef HAVE_conditional_execution
3189 SET_BIT (blocks, nfte->dest->index);
3190 changed = true;
3191 #endif
3192 bb = nfte->src;
3193 eh_edge = nehe;
3194 }
3195 }
3196
3197 /* A possibly trapping insn may have become non-trapping;
3198 zap any outgoing edges that are now dummies. */
3199 do_cleanup_cfg |= purge_dead_edges (bb);
3200 }
3201
3202 #ifdef HAVE_conditional_execution
3203 /* With conditional execution, we cannot back up the
3204 live information so easily, since the conditional
3205 death data structures are not so self-contained.
3206 So record that we've made a modification to this
3207 block and update life information at the end. */
3208 SET_BIT (blocks, bb->index);
3209 changed = true;
3210
3211 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3212 peep2_insn_data[i].insn = NULL_RTX;
3213 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3214 peep2_current_count = 0;
3215 #else
3216 /* Back up lifetime information past the end of the
3217 newly created sequence. */
3218 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3219 i = 0;
3220 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3221
3222 /* Update life information for the new sequence. */
3223 x = try;
3224 do
3225 {
3226 if (INSN_P (x))
3227 {
3228 if (--i < 0)
3229 i = MAX_INSNS_PER_PEEP2;
3230 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3231 && peep2_insn_data[i].insn == NULL_RTX)
3232 peep2_current_count++;
3233 peep2_insn_data[i].insn = x;
3234 propagate_one_insn (pbi, x);
3235 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3236 }
3237 x = PREV_INSN (x);
3238 }
3239 while (x != prev);
3240
3241 /* ??? Should verify that LIVE now matches what we
3242 had before the new sequence. */
3243
3244 peep2_current = i;
3245 #endif
3246
3247 /* If we generated a jump instruction, it won't have
3248 JUMP_LABEL set. Recompute after we're done. */
3249 for (x = try; x != before_try; x = PREV_INSN (x))
3250 if (JUMP_P (x))
3251 {
3252 do_rebuild_jump_labels = true;
3253 break;
3254 }
3255 }
3256 }
3257
3258 if (insn == BB_HEAD (bb))
3259 break;
3260 }
3261
3262 /* Some peepholes can decide they don't need one or more of their
3263 inputs. If this happens, a local life update is not enough. */
3264 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
3265 0, j, rsi)
3266 {
3267 do_global_life_update = true;
3268 break;
3269 }
3270
3271 free_propagate_block_info (pbi);
3272 }
3273
3274 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3275 FREE_REG_SET (peep2_insn_data[i].live_before);
3276 FREE_REG_SET (live);
3277
3278 if (do_rebuild_jump_labels)
3279 rebuild_jump_labels (get_insns ());
3280
3281 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3282 we've changed global life since exception handlers are no longer
3283 reachable. */
3284 if (do_cleanup_cfg)
3285 {
3286 cleanup_cfg (0);
3287 do_global_life_update = true;
3288 }
3289 if (do_global_life_update)
3290 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3291 #ifdef HAVE_conditional_execution
3292 else
3293 {
3294 count_or_remove_death_notes (blocks, 1);
3295 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3296 }
3297 sbitmap_free (blocks);
3298 #endif
3299 }
3300 #endif /* HAVE_peephole2 */
3301
3302 /* Common predicates for use with define_bypass. */
3303
3304 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3305 data not the address operand(s) of the store. IN_INSN must be
3306 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3307 SETs inside. */
3308
3309 int
3310 store_data_bypass_p (rtx out_insn, rtx in_insn)
3311 {
3312 rtx out_set, in_set;
3313
3314 in_set = single_set (in_insn);
3315 gcc_assert (in_set);
3316
3317 if (!MEM_P (SET_DEST (in_set)))
3318 return false;
3319
3320 out_set = single_set (out_insn);
3321 if (out_set)
3322 {
3323 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3324 return false;
3325 }
3326 else
3327 {
3328 rtx out_pat;
3329 int i;
3330
3331 out_pat = PATTERN (out_insn);
3332 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3333
3334 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3335 {
3336 rtx exp = XVECEXP (out_pat, 0, i);
3337
3338 if (GET_CODE (exp) == CLOBBER)
3339 continue;
3340
3341 gcc_assert (GET_CODE (exp) == SET);
3342
3343 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3344 return false;
3345 }
3346 }
3347
3348 return true;
3349 }
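/* Illustrative sketch, with hypothetical reservation names: a machine
   description's pipeline model can use this predicate in a
   define_bypass to shorten the latency seen when an ALU result is
   consumed only as store data, not as part of the address:

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")  */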
3350
3351 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3352 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a
3353 single_set or a PARALLEL with SETs inside; IN_INSN should be a single_set
3354 for a true result, but may be any JUMP or CALL insn for convenience. */
3355
3356 int
3357 if_test_bypass_p (rtx out_insn, rtx in_insn)
3358 {
3359 rtx out_set, in_set;
3360
3361 in_set = single_set (in_insn);
3362 if (! in_set)
3363 {
3364 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3365 return false;
3366 }
3367
3368 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3369 return false;
3370 in_set = SET_SRC (in_set);
3371
3372 out_set = single_set (out_insn);
3373 if (out_set)
3374 {
3375 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3376 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3377 return false;
3378 }
3379 else
3380 {
3381 rtx out_pat;
3382 int i;
3383
3384 out_pat = PATTERN (out_insn);
3385 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3386
3387 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3388 {
3389 rtx exp = XVECEXP (out_pat, 0, i);
3390
3391 if (GET_CODE (exp) == CLOBBER)
3392 continue;
3393
3394 gcc_assert (GET_CODE (exp) == SET);
3395
3396 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3397 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3398 return false;
3399 }
3400 }
3401
3402 return true;
3403 }
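/* Illustrative sketch, with hypothetical reservation names: forwarding
   a comparison result into the test of a conditional move:

     (define_bypass 1 "cmp_insn" "cmov_insn" "if_test_bypass_p")  */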
3404 \f
3405 static bool
3406 gate_handle_peephole2 (void)
3407 {
3408 return (optimize > 0 && flag_peephole2);
3409 }
3410
3411 static unsigned int
3412 rest_of_handle_peephole2 (void)
3413 {
3414 #ifdef HAVE_peephole2
3415 peephole2_optimize ();
3416 #endif
3417 return 0;
3418 }
3419
3420 struct tree_opt_pass pass_peephole2 =
3421 {
3422 "peephole2", /* name */
3423 gate_handle_peephole2, /* gate */
3424 rest_of_handle_peephole2, /* execute */
3425 NULL, /* sub */
3426 NULL, /* next */
3427 0, /* static_pass_number */
3428 TV_PEEPHOLE2, /* tv_id */
3429 0, /* properties_required */
3430 0, /* properties_provided */
3431 0, /* properties_destroyed */
3432 0, /* todo_flags_start */
3433 TODO_dump_func, /* todo_flags_finish */
3434 'z' /* letter */
3435 };
3436
3437 static unsigned int
3438 rest_of_handle_split_all_insns (void)
3439 {
3440 split_all_insns (1);
3441 return 0;
3442 }
3443
3444 struct tree_opt_pass pass_split_all_insns =
3445 {
3446 "split1", /* name */
3447 NULL, /* gate */
3448 rest_of_handle_split_all_insns, /* execute */
3449 NULL, /* sub */
3450 NULL, /* next */
3451 0, /* static_pass_number */
3452 0, /* tv_id */
3453 0, /* properties_required */
3454 0, /* properties_provided */
3455 0, /* properties_destroyed */
3456 0, /* todo_flags_start */
3457 TODO_dump_func, /* todo_flags_finish */
3458 0 /* letter */
3459 };
3460
3461 /* The placement of the splitting that we do for shorten_branches
3462 depends on whether regstack is used by the target or not. */
3463 static bool
3464 gate_do_final_split (void)
3465 {
3466 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3467 return 1;
3468 #else
3469 return 0;
3470 #endif
3471 }
3472
3473 struct tree_opt_pass pass_split_for_shorten_branches =
3474 {
3475 "split3", /* name */
3476 gate_do_final_split, /* gate */
3477 split_all_insns_noflow, /* execute */
3478 NULL, /* sub */
3479 NULL, /* next */
3480 0, /* static_pass_number */
3481 TV_SHORTEN_BRANCH, /* tv_id */
3482 0, /* properties_required */
3483 0, /* properties_provided */
3484 0, /* properties_destroyed */
3485 0, /* todo_flags_start */
3486 TODO_dump_func, /* todo_flags_finish */
3487 0 /* letter */
3488 };
3489
3490
3491 static bool
3492 gate_handle_split_before_regstack (void)
3493 {
3494 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3495 /* If flow2 creates new instructions which need splitting, and
3496 scheduling after reload is not done, those instructions might
3497 not be split until final, which does not allow splitting when
3498 HAVE_ATTR_length is defined. */
3499 # ifdef INSN_SCHEDULING
3500 return (optimize && !flag_schedule_insns_after_reload);
3501 # else
3502 return (optimize);
3503 # endif
3504 #else
3505 return 0;
3506 #endif
3507 }
3508
3509 struct tree_opt_pass pass_split_before_regstack =
3510 {
3511 "split2", /* name */
3512 gate_handle_split_before_regstack, /* gate */
3513 rest_of_handle_split_all_insns, /* execute */
3514 NULL, /* sub */
3515 NULL, /* next */
3516 0, /* static_pass_number */
3517 TV_SHORTEN_BRANCH, /* tv_id */
3518 0, /* properties_required */
3519 0, /* properties_provided */
3520 0, /* properties_destroyed */
3521 0, /* todo_flags_start */
3522 TODO_dump_func, /* todo_flags_finish */
3523 0 /* letter */
3524 };