/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
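
/* Usage sketch (illustrative only; INSN, PAT, NEW_OP0 and NEW_OP1 are
   hypothetical rtxes, not names from this file).  A typical grouped use
   queues each change with IN_GROUP == 1 and then validates them atomically:

     validate_change (insn, &XEXP (SET_SRC (pat), 0), new_op0, 1);
     validate_change (insn, &XEXP (SET_SRC (pat), 1), new_op1, 1);
     if (! apply_change_group ())
       ;   -- both changes were rolled back; INSN is untouched

   Either every queued change is kept or none is.  */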


/* Function to be passed to for_each_rtx to test whether a piece of
   RTL contains any mem/v.  */
static int
volatile_mem_p (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return (MEM_P (*x) && MEM_VOLATILE_P (*x));
}

/* Same as validate_change, but doesn't support groups, and it accepts
   volatile mems if they're already present in the original insn.  */

int
validate_change_maybe_volatile (rtx object, rtx *loc, rtx new)
{
  int result;

  if (validate_change (object, loc, new, 0))
    return 1;

  if (volatile_ok
      /* If there isn't a volatile MEM, there's nothing we can do.  */
      || !for_each_rtx (&PATTERN (object), volatile_mem_p, 0)
      /* Make sure we're not adding or removing volatile MEMs.  */
      || for_each_rtx (loc, volatile_mem_p, 0)
      || for_each_rtx (&new, volatile_mem_p, 0)
      || !insn_invalid_p (object))
    return 0;

  volatile_ok = 1;

  gcc_assert (!insn_invalid_p (object));

  result = validate_change (object, loc, new, 0);

  volatile_ok = 0;

  return result;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

static int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
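
/* Usage sketch (illustrative only; NEW_RTX and WATERMARK are hypothetical
   names).  num_validated_changes and cancel_changes together support
   speculative sub-groups: record a watermark, queue further changes, and
   retract only those if they fail to verify:

     int watermark = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (! verify_changes (watermark))
       cancel_changes (watermark);   -- earlier queued changes survive
*/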

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
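
/* Usage sketch (illustrative only; OLD_REG and NEW_REG are hypothetical REG
   rtxes).  A pass that wants to rewrite one insn to use a different register
   could do

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ...   -- INSN now uses NEW_REG and still matches its pattern

   If the rewritten insn no longer matches, every substitution is undone
   before the call returns 0.  */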

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
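
/* Usage sketch (illustrative only; USE_INSN is a hypothetical local).
   A combine-style transformation might use find_single_use to decide
   whether the value set by INSN can be substituted into its sole consumer:

     rtx use_insn;
     rtx *use = find_single_use (SET_DEST (PATTERN (insn)), insn, &use_insn);
     if (use)
       ...   -- *USE is the one place USE_INSN reads the value
*/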
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
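
/* Usage sketch (illustrative only).  In a machine description, a pattern
   operand such as

     (match_operand:SI 1 "general_operand" "g")

   causes the genrecog-generated matcher to call
   general_operand (operands[1], SImode), so the predicate accepts a pseudo
   register, a valid SImode memory reference, or a legitimate constant in
   that position.  */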
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
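
/* Usage sketch (illustrative only).  On a target where the stack grows
   downward, STACK_PUSH_CODE is PRE_DEC, so an SImode push operand has the
   shape

     (mem:SI (pre_dec (reg sp)))

   and when PUSH_ROUNDING pads the size (say, a QImode push rounded to 4
   bytes), the PRE_MODIFY form checked above is matched instead:

     (mem:QI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))  */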

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
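
/* Worked example (illustrative only).  For an asm statement such as

     asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs whose sources share one ASM_OPERANDS
   with a single input, so N_SETS is 2 and asm_noperands returns
   1 + 2 == 3 operands in total.  */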

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
1815 \f
1816 /* Given an rtx *P, if it is a sum containing an integer constant term,
1817 return the location (type rtx *) of the pointer to that constant term.
1818 Otherwise, return a null pointer. */
1819
1820 rtx *
1821 find_constant_term_loc (rtx *p)
1822 {
1823 rtx *tem;
1824 enum rtx_code code = GET_CODE (*p);
1825
1826 /* If *P IS such a constant term, P is its location. */
1827
1828 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1829 || code == CONST)
1830 return p;
1831
1832 /* Otherwise, if not a sum, it has no constant term. */
1833
1834 if (GET_CODE (*p) != PLUS)
1835 return 0;
1836
1837 /* If one of the summands is constant, return its location. */
1838
1839 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1840 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1841 return p;
1842
1843 /* Otherwise, check each summand for containing a constant term. */
1844
1845 if (XEXP (*p, 0) != 0)
1846 {
1847 tem = find_constant_term_loc (&XEXP (*p, 0));
1848 if (tem != 0)
1849 return tem;
1850 }
1851
1852 if (XEXP (*p, 1) != 0)
1853 {
1854 tem = find_constant_term_loc (&XEXP (*p, 1));
1855 if (tem != 0)
1856 return tem;
1857 }
1858
1859 return 0;
1860 }
1861 \f
1862 /* Return 1 if OP is a memory reference
1863 whose address contains no side effects
1864 and remains valid after the addition
1865 of a positive integer less than the
1866 size of the object being referenced.
1867
1868 We assume that the original address is valid and do not check it.
1869
1870 This uses strict_memory_address_p as a subroutine, so
1871 don't use it before reload. */
1872
1873 int
1874 offsettable_memref_p (rtx op)
1875 {
1876 return ((MEM_P (op))
1877 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1878 }
1879
1880 /* Similar, but don't require a strictly valid mem ref:
1881 consider pseudo-regs valid as index or base regs. */
1882
1883 int
1884 offsettable_nonstrict_memref_p (rtx op)
1885 {
1886 return ((MEM_P (op))
1887 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1888 }
1889
1890 /* Return 1 if Y is a memory address which contains no side effects
1891 and would remain valid after the addition of a positive integer
1892 less than the size of that mode.
1893
1894 We assume that the original address is valid and do not check it.
1895 We do check that it is valid for narrower modes.
1896
1897 If STRICTP is nonzero, we require a strictly valid address,
1898 for the sake of use in reload.c. */
1899
1900 int
1901 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1902 {
1903 enum rtx_code ycode = GET_CODE (y);
1904 rtx z;
1905 rtx y1 = y;
1906 rtx *y2;
1907 int (*addressp) (enum machine_mode, rtx) =
1908 (strictp ? strict_memory_address_p : memory_address_p);
1909 unsigned int mode_sz = GET_MODE_SIZE (mode);
1910
1911 if (CONSTANT_ADDRESS_P (y))
1912 return 1;
1913
1914 /* Adjusting an offsettable address involves changing to a narrower mode.
1915 Make sure that's OK. */
1916
1917 if (mode_dependent_address_p (y))
1918 return 0;
1919
1920 /* ??? How much offset does an offsettable BLKmode reference need?
1921 Clearly that depends on the situation in which it's being used.
1922 However, the current situation in which we test 0xffffffff is
1923 less than ideal. Caveat user. */
1924 if (mode_sz == 0)
1925 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1926
1927 /* If the expression contains a constant term,
1928 see if it remains valid when max possible offset is added. */
1929
1930 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1931 {
1932 int good;
1933
1934 y1 = *y2;
1935 *y2 = plus_constant (*y2, mode_sz - 1);
1936 /* Use QImode because an odd displacement may be automatically invalid
1937 for any wider mode. But it should be valid for a single byte. */
1938 good = (*addressp) (QImode, y);
1939
1940 /* In any case, restore old contents of memory. */
1941 *y2 = y1;
1942 return good;
1943 }
1944
1945 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1946 return 0;
1947
1948 /* The offset added here is chosen as the maximum offset that
1949 any instruction could need to add when operating on something
1950 of the specified mode. We assume that if Y and Y+c are
1951 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1952 go inside a LO_SUM here, so we do so as well. */
1953 if (GET_CODE (y) == LO_SUM
1954 && mode != BLKmode
1955 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1956 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1957 plus_constant (XEXP (y, 1), mode_sz - 1));
1958 else
1959 z = plus_constant (y, mode_sz - 1);
1960
1961 /* Use QImode because an odd displacement may be automatically invalid
1962 for any wider mode. But it should be valid for a single byte. */
1963 return (*addressp) (QImode, z);
1964 }
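
/* Editorial sketch, not part of recog.c: the typical question these
   predicates answer.  The helper is hypothetical; OP is assumed to be
   an operand taken from recog_data after reload, where the strict
   form applies.  */

static int
example_can_absorb_small_offset (rtx op)
{
  /* Nonzero if OP is a MEM whose address stays valid when offset by
     any amount smaller than the access size, e.g. for splitting a
     multi-word access into word-sized pieces.  */
  return MEM_P (op) && offsettable_memref_p (op);
}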
1965
1966 /* Return 1 if ADDR is an address-expression whose effect depends
1967 on the mode of the memory reference it is used in.
1968
1969 Autoincrement addressing is a typical example of mode-dependence
1970 because the amount of the increment depends on the mode. */
1971
1972 int
1973 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1974 {
1975 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1976 return 0;
1977 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1978 win: ATTRIBUTE_UNUSED_LABEL
1979 return 1;
1980 }
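
/* Editorial sketch, not part of recog.c: a hypothetical guard built
   on mode_dependent_address_p.  An autoincrement address such as
   (pre_inc (reg)) is the canonical mode-dependent case, because the
   increment amount equals the access size.  */

static int
example_address_is_offsettable_form (rtx addr)
{
  /* Reject addresses whose meaning changes with the access mode;
     they cannot safely be reused at a narrower mode.  */
  return ! mode_dependent_address_p (addr);
}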
1981 \f
1982 /* Like extract_insn, but save the insn extracted and don't extract again
1983    when called again for the same insn, expecting that recog_data still
1984    contains valid information.  This is used primarily by the gen_attr
1985    infrastructure, which often extracts the same insn again and again.  */
1986 void
1987 extract_insn_cached (rtx insn)
1988 {
1989 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1990 return;
1991 extract_insn (insn);
1992 recog_data.insn = insn;
1993 }
1994 /* Do cached extract_insn, constrain_operands and complain about failures.
1995 Used by insn_attrtab. */
1996 void
1997 extract_constrain_insn_cached (rtx insn)
1998 {
1999 extract_insn_cached (insn);
2000 if (which_alternative == -1
2001 && !constrain_operands (reload_completed))
2002 fatal_insn_not_found (insn);
2003 }
2004 /* Do cached constrain_operands and complain about failures. */
2005 int
2006 constrain_operands_cached (int strict)
2007 {
2008 if (which_alternative == -1)
2009 return constrain_operands (strict);
2010 else
2011 return 1;
2012 }
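
/* Editorial sketch, not part of recog.c: the pattern the attribute
   code relies on.  The helper name is hypothetical; calling the
   cached variants repeatedly on the same insn re-extracts nothing.  */

static int
example_count_output_operands (rtx insn)
{
  int i, n = 0;

  extract_constrain_insn_cached (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT)
      n++;
  return n;
}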
2013 \f
2014 /* Analyze INSN and fill in recog_data. */
2015
2016 void
2017 extract_insn (rtx insn)
2018 {
2019 int i;
2020 int icode;
2021 int noperands;
2022 rtx body = PATTERN (insn);
2023
2024 recog_data.insn = NULL;
2025 recog_data.n_operands = 0;
2026 recog_data.n_alternatives = 0;
2027 recog_data.n_dups = 0;
2028 which_alternative = -1;
2029
2030 switch (GET_CODE (body))
2031 {
2032 case USE:
2033 case CLOBBER:
2034 case ASM_INPUT:
2035 case ADDR_VEC:
2036 case ADDR_DIFF_VEC:
2037 return;
2038
2039 case SET:
2040 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2041 goto asm_insn;
2042 else
2043 goto normal_insn;
2044 case PARALLEL:
2045 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2046 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2047 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2048 goto asm_insn;
2049 else
2050 goto normal_insn;
2051 case ASM_OPERANDS:
2052 asm_insn:
2053 recog_data.n_operands = noperands = asm_noperands (body);
2054 if (noperands >= 0)
2055 {
2056 /* This insn is an `asm' with operands. */
2057
2058 /* expand_asm_operands makes sure there aren't too many operands. */
2059 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2060
2061 /* Now get the operand values and constraints out of the insn. */
2062 decode_asm_operands (body, recog_data.operand,
2063 recog_data.operand_loc,
2064 recog_data.constraints,
2065 recog_data.operand_mode);
2066 if (noperands > 0)
2067 {
2068 const char *p = recog_data.constraints[0];
2069 recog_data.n_alternatives = 1;
2070 while (*p)
2071 recog_data.n_alternatives += (*p++ == ',');
2072 }
2073 break;
2074 }
2075 fatal_insn_not_found (insn);
2076
2077 default:
2078 normal_insn:
2079 /* Ordinary insn: recognize it, get the operands via insn_extract
2080 and get the constraints. */
2081
2082 icode = recog_memoized (insn);
2083 if (icode < 0)
2084 fatal_insn_not_found (insn);
2085
2086 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2087 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2088 recog_data.n_dups = insn_data[icode].n_dups;
2089
2090 insn_extract (insn);
2091
2092 for (i = 0; i < noperands; i++)
2093 {
2094 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2095 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2096 	  /* VOIDmode match_operands get their mode from the real operand.  */
2097 if (recog_data.operand_mode[i] == VOIDmode)
2098 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2099 }
2100 }
2101 for (i = 0; i < noperands; i++)
2102 recog_data.operand_type[i]
2103 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2104 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2105 : OP_IN);
2106
2107 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2108 }
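
/* Editorial sketch, not part of recog.c: what a consumer of
   extract_insn sees.  The helper is hypothetical; after the call, the
   operands, their modes, and their constraint strings are available
   positionally in recog_data.  */

static void
example_dump_operands (FILE *f, rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (f, "operand %d: mode %s, constraint \"%s\"\n",
	     i, GET_MODE_NAME (recog_data.operand_mode[i]),
	     recog_data.constraints[i]);
}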
2109
2110 /* After calling extract_insn, you can use this function to extract some
2111 information from the constraint strings into a more usable form.
2112 The collected data is stored in recog_op_alt. */
2113 void
2114 preprocess_constraints (void)
2115 {
2116 int i;
2117
2118 for (i = 0; i < recog_data.n_operands; i++)
2119 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2120 * sizeof (struct operand_alternative)));
2121
2122 for (i = 0; i < recog_data.n_operands; i++)
2123 {
2124 int j;
2125 struct operand_alternative *op_alt;
2126 const char *p = recog_data.constraints[i];
2127
2128 op_alt = recog_op_alt[i];
2129
2130 for (j = 0; j < recog_data.n_alternatives; j++)
2131 {
2132 op_alt[j].cl = NO_REGS;
2133 op_alt[j].constraint = p;
2134 op_alt[j].matches = -1;
2135 op_alt[j].matched = -1;
2136
2137 if (*p == '\0' || *p == ',')
2138 {
2139 op_alt[j].anything_ok = 1;
2140 continue;
2141 }
2142
2143 for (;;)
2144 {
2145 char c = *p;
2146 if (c == '#')
2147 do
2148 c = *++p;
2149 while (c != ',' && c != '\0');
2150 if (c == ',' || c == '\0')
2151 {
2152 p++;
2153 break;
2154 }
2155
2156 switch (c)
2157 {
2158 case '=': case '+': case '*': case '%':
2159 case 'E': case 'F': case 'G': case 'H':
2160 case 's': case 'i': case 'n':
2161 case 'I': case 'J': case 'K': case 'L':
2162 case 'M': case 'N': case 'O': case 'P':
2163 /* These don't say anything we care about. */
2164 break;
2165
2166 case '?':
2167 op_alt[j].reject += 6;
2168 break;
2169 case '!':
2170 op_alt[j].reject += 600;
2171 break;
2172 case '&':
2173 op_alt[j].earlyclobber = 1;
2174 break;
2175
2176 case '0': case '1': case '2': case '3': case '4':
2177 case '5': case '6': case '7': case '8': case '9':
2178 {
2179 char *end;
2180 op_alt[j].matches = strtoul (p, &end, 10);
2181 recog_op_alt[op_alt[j].matches][j].matched = i;
2182 p = end;
2183 }
2184 continue;
2185
2186 case 'm':
2187 op_alt[j].memory_ok = 1;
2188 break;
2189 case '<':
2190 op_alt[j].decmem_ok = 1;
2191 break;
2192 case '>':
2193 op_alt[j].incmem_ok = 1;
2194 break;
2195 case 'V':
2196 op_alt[j].nonoffmem_ok = 1;
2197 break;
2198 case 'o':
2199 op_alt[j].offmem_ok = 1;
2200 break;
2201 case 'X':
2202 op_alt[j].anything_ok = 1;
2203 break;
2204
2205 case 'p':
2206 op_alt[j].is_address = 1;
2207 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2208 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2209 break;
2210
2211 case 'g':
2212 case 'r':
2213 op_alt[j].cl =
2214 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2215 break;
2216
2217 default:
2218 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2219 {
2220 op_alt[j].memory_ok = 1;
2221 break;
2222 }
2223 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2224 {
2225 op_alt[j].is_address = 1;
2226 op_alt[j].cl
2227 = (reg_class_subunion
2228 [(int) op_alt[j].cl]
2229 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2230 break;
2231 }
2232
2233 op_alt[j].cl
2234 = (reg_class_subunion
2235 [(int) op_alt[j].cl]
2236 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2237 break;
2238 }
2239 p += CONSTRAINT_LEN (c, p);
2240 }
2241 }
2242 }
2243 }
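
/* Editorial sketch, not part of recog.c: querying the preprocessed
   data.  The helper is hypothetical and assumes extract_insn and
   preprocess_constraints have just run for the insn of interest.  */

static int
example_alternative_allows_memory (int opno, int alt)
{
  const struct operand_alternative *oa = &recog_op_alt[opno][alt];

  /* Nonzero if operand OPNO may be a memory reference in alternative
     ALT, either directly or because anything is accepted.  */
  return oa->memory_ok || oa->offmem_ok || oa->anything_ok;
}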
2244
2245 /* Check the operands of an insn against the insn's operand constraints
2246 and return 1 if they are valid.
2247 The information about the insn's operands, constraints, operand modes
2248 etc. is obtained from the global variables set up by extract_insn.
2249
2250 WHICH_ALTERNATIVE is set to a number which indicates which
2251 alternative of constraints was matched: 0 for the first alternative,
2252 1 for the next, etc.
2253
2254 In addition, when two operands are required to match
2255 and it happens that the output operand is (reg) while the
2256 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2257 make the output operand look like the input.
2258 This is because the output operand is the one the template will print.
2259
2260 This is used in final, just before printing the assembler code and by
2261 the routines that determine an insn's attribute.
2262
2263    If STRICT is a positive value, it means that we have been
2264 called after reload has been completed. In that case, we must
2265 do all checks strictly. If it is zero, it means that we have been called
2266 before reload has completed. In that case, we first try to see if we can
2267 find an alternative that matches strictly. If not, we try again, this
2268 time assuming that reload will fix up the insn. This provides a "best
2269 guess" for the alternative and is used to compute attributes of insns prior
2270 to reload. A negative value of STRICT is used for this internal call. */
2271
2272 struct funny_match
2273 {
2274 int this, other;
2275 };
2276
2277 int
2278 constrain_operands (int strict)
2279 {
2280 const char *constraints[MAX_RECOG_OPERANDS];
2281 int matching_operands[MAX_RECOG_OPERANDS];
2282 int earlyclobber[MAX_RECOG_OPERANDS];
2283 int c;
2284
2285 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2286 int funny_match_index;
2287
2288 which_alternative = 0;
2289 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2290 return 1;
2291
2292 for (c = 0; c < recog_data.n_operands; c++)
2293 {
2294 constraints[c] = recog_data.constraints[c];
2295 matching_operands[c] = -1;
2296 }
2297
2298 do
2299 {
2300 int seen_earlyclobber_at = -1;
2301 int opno;
2302 int lose = 0;
2303 funny_match_index = 0;
2304
2305 for (opno = 0; opno < recog_data.n_operands; opno++)
2306 {
2307 rtx op = recog_data.operand[opno];
2308 enum machine_mode mode = GET_MODE (op);
2309 const char *p = constraints[opno];
2310 int offset = 0;
2311 int win = 0;
2312 int val;
2313 int len;
2314
2315 earlyclobber[opno] = 0;
2316
2317 /* A unary operator may be accepted by the predicate, but it
2318 is irrelevant for matching constraints. */
2319 if (UNARY_P (op))
2320 op = XEXP (op, 0);
2321
2322 if (GET_CODE (op) == SUBREG)
2323 {
2324 if (REG_P (SUBREG_REG (op))
2325 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2326 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2327 GET_MODE (SUBREG_REG (op)),
2328 SUBREG_BYTE (op),
2329 GET_MODE (op));
2330 op = SUBREG_REG (op);
2331 }
2332
2333 /* An empty constraint or empty alternative
2334 allows anything which matched the pattern. */
2335 if (*p == 0 || *p == ',')
2336 win = 1;
2337
2338 do
2339 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2340 {
2341 case '\0':
2342 len = 0;
2343 break;
2344 case ',':
2345 c = '\0';
2346 break;
2347
2348 case '?': case '!': case '*': case '%':
2349 case '=': case '+':
2350 break;
2351
2352 case '#':
2353 /* Ignore rest of this alternative as far as
2354 constraint checking is concerned. */
2355 do
2356 p++;
2357 while (*p && *p != ',');
2358 len = 0;
2359 break;
2360
2361 case '&':
2362 earlyclobber[opno] = 1;
2363 if (seen_earlyclobber_at < 0)
2364 seen_earlyclobber_at = opno;
2365 break;
2366
2367 case '0': case '1': case '2': case '3': case '4':
2368 case '5': case '6': case '7': case '8': case '9':
2369 {
2370 /* This operand must be the same as a previous one.
2371 This kind of constraint is used for instructions such
2372 as add when they take only two operands.
2373
2374 Note that the lower-numbered operand is passed first.
2375
2376 If we are not testing strictly, assume that this
2377 constraint will be satisfied. */
2378
2379 char *end;
2380 int match;
2381
2382 match = strtoul (p, &end, 10);
2383 p = end;
2384
2385 if (strict < 0)
2386 val = 1;
2387 else
2388 {
2389 rtx op1 = recog_data.operand[match];
2390 rtx op2 = recog_data.operand[opno];
2391
2392 /* A unary operator may be accepted by the predicate,
2393 but it is irrelevant for matching constraints. */
2394 if (UNARY_P (op1))
2395 op1 = XEXP (op1, 0);
2396 if (UNARY_P (op2))
2397 op2 = XEXP (op2, 0);
2398
2399 val = operands_match_p (op1, op2);
2400 }
2401
2402 matching_operands[opno] = match;
2403 matching_operands[match] = opno;
2404
2405 if (val != 0)
2406 win = 1;
2407
2408 /* If output is *x and input is *--x, arrange later
2409 to change the output to *--x as well, since the
2410 output op is the one that will be printed. */
2411 if (val == 2 && strict > 0)
2412 {
2413 funny_match[funny_match_index].this = opno;
2414 funny_match[funny_match_index++].other = match;
2415 }
2416 }
2417 len = 0;
2418 break;
2419
2420 case 'p':
2421 /* p is used for address_operands. When we are called by
2422 gen_reload, no one will have checked that the address is
2423 strictly valid, i.e., that all pseudos requiring hard regs
2424 have gotten them. */
2425 if (strict <= 0
2426 || (strict_memory_address_p (recog_data.operand_mode[opno],
2427 op)))
2428 win = 1;
2429 break;
2430
2431 /* No need to check general_operand again;
2432 it was done in insn-recog.c. */
2433 case 'g':
2434 /* Anything goes unless it is a REG and really has a hard reg
2435 but the hard reg is not in the class GENERAL_REGS. */
2436 if (strict < 0
2437 || GENERAL_REGS == ALL_REGS
2438 || !REG_P (op)
2439 || (reload_in_progress
2440 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2441 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2442 win = 1;
2443 break;
2444
2445 case 'X':
2446 /* This is used for a MATCH_SCRATCH in the cases when
2447 we don't actually need anything. So anything goes
2448 any time. */
2449 win = 1;
2450 break;
2451
2452 case 'm':
2453 /* Memory operands must be valid, to the extent
2454 required by STRICT. */
2455 if (MEM_P (op))
2456 {
2457 if (strict > 0
2458 && !strict_memory_address_p (GET_MODE (op),
2459 XEXP (op, 0)))
2460 break;
2461 if (strict == 0
2462 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2463 break;
2464 win = 1;
2465 }
2466 /* Before reload, accept what reload can turn into mem. */
2467 else if (strict < 0 && CONSTANT_P (op))
2468 win = 1;
2469 		/* During reload, accept a pseudo.  */
2470 else if (reload_in_progress && REG_P (op)
2471 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2472 win = 1;
2473 break;
2474
2475 case '<':
2476 if (MEM_P (op)
2477 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2478 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2479 win = 1;
2480 break;
2481
2482 case '>':
2483 if (MEM_P (op)
2484 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2485 || GET_CODE (XEXP (op, 0)) == POST_INC))
2486 win = 1;
2487 break;
2488
2489 case 'E':
2490 case 'F':
2491 if (GET_CODE (op) == CONST_DOUBLE
2492 || (GET_CODE (op) == CONST_VECTOR
2493 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2494 win = 1;
2495 break;
2496
2497 case 'G':
2498 case 'H':
2499 if (GET_CODE (op) == CONST_DOUBLE
2500 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2501 win = 1;
2502 break;
2503
2504 case 's':
2505 if (GET_CODE (op) == CONST_INT
2506 || (GET_CODE (op) == CONST_DOUBLE
2507 && GET_MODE (op) == VOIDmode))
2508 break;
2509 	      case 'i':	/* 's' above falls through to here unless OP
			   is an explicit integer constant.  */
2510 if (CONSTANT_P (op))
2511 win = 1;
2512 break;
2513
2514 case 'n':
2515 if (GET_CODE (op) == CONST_INT
2516 || (GET_CODE (op) == CONST_DOUBLE
2517 && GET_MODE (op) == VOIDmode))
2518 win = 1;
2519 break;
2520
2521 case 'I':
2522 case 'J':
2523 case 'K':
2524 case 'L':
2525 case 'M':
2526 case 'N':
2527 case 'O':
2528 case 'P':
2529 if (GET_CODE (op) == CONST_INT
2530 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2531 win = 1;
2532 break;
2533
2534 case 'V':
2535 if (MEM_P (op)
2536 && ((strict > 0 && ! offsettable_memref_p (op))
2537 || (strict < 0
2538 && !(CONSTANT_P (op) || MEM_P (op)))
2539 || (reload_in_progress
2540 && !(REG_P (op)
2541 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2542 win = 1;
2543 break;
2544
2545 case 'o':
2546 if ((strict > 0 && offsettable_memref_p (op))
2547 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2548 /* Before reload, accept what reload can handle. */
2549 || (strict < 0
2550 && (CONSTANT_P (op) || MEM_P (op)))
2551 		    /* During reload, accept a pseudo.  */
2552 || (reload_in_progress && REG_P (op)
2553 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2554 win = 1;
2555 break;
2556
2557 default:
2558 {
2559 enum reg_class cl;
2560
2561 cl = (c == 'r'
2562 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2563 if (cl != NO_REGS)
2564 {
2565 if (strict < 0
2566 || (strict == 0
2567 && REG_P (op)
2568 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2569 || (strict == 0 && GET_CODE (op) == SCRATCH)
2570 || (REG_P (op)
2571 && reg_fits_class_p (op, cl, offset, mode)))
2572 win = 1;
2573 }
2574 #ifdef EXTRA_CONSTRAINT_STR
2575 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2576 win = 1;
2577
2578 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2579 /* Every memory operand can be reloaded to fit. */
2580 && ((strict < 0 && MEM_P (op))
2581 /* Before reload, accept what reload can turn
2582 into mem. */
2583 || (strict < 0 && CONSTANT_P (op))
2584 			     /* During reload, accept a pseudo.  */
2585 || (reload_in_progress && REG_P (op)
2586 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2587 win = 1;
2588 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2589 /* Every address operand can be reloaded to fit. */
2590 && strict < 0)
2591 win = 1;
2592 #endif
2593 break;
2594 }
2595 }
2596 while (p += len, c);
2597
2598 constraints[opno] = p;
2599 /* If this operand did not win somehow,
2600 this alternative loses. */
2601 if (! win)
2602 lose = 1;
2603 }
2604       /* If this alternative won, the operands are ok.
2605 	 Change whichever operands this alternative says to change.  */
2606 if (! lose)
2607 {
2608 int opno, eopno;
2609
2610 /* See if any earlyclobber operand conflicts with some other
2611 operand. */
2612
2613 if (strict > 0 && seen_earlyclobber_at >= 0)
2614 for (eopno = seen_earlyclobber_at;
2615 eopno < recog_data.n_operands;
2616 eopno++)
2617 /* Ignore earlyclobber operands now in memory,
2618 because we would often report failure when we have
2619 two memory operands, one of which was formerly a REG. */
2620 if (earlyclobber[eopno]
2621 && REG_P (recog_data.operand[eopno]))
2622 for (opno = 0; opno < recog_data.n_operands; opno++)
2623 if ((MEM_P (recog_data.operand[opno])
2624 || recog_data.operand_type[opno] != OP_OUT)
2625 && opno != eopno
2626 /* Ignore things like match_operator operands. */
2627 && *recog_data.constraints[opno] != 0
2628 && ! (matching_operands[opno] == eopno
2629 && operands_match_p (recog_data.operand[opno],
2630 recog_data.operand[eopno]))
2631 && ! safe_from_earlyclobber (recog_data.operand[opno],
2632 recog_data.operand[eopno]))
2633 lose = 1;
2634
2635 if (! lose)
2636 {
2637 while (--funny_match_index >= 0)
2638 {
2639 recog_data.operand[funny_match[funny_match_index].other]
2640 = recog_data.operand[funny_match[funny_match_index].this];
2641 }
2642
2643 return 1;
2644 }
2645 }
2646
2647 which_alternative++;
2648 }
2649 while (which_alternative < recog_data.n_alternatives);
2650
2651 which_alternative = -1;
2652 /* If we are about to reject this, but we are not to test strictly,
2653 try a very loose test. Only return failure if it fails also. */
2654 if (strict == 0)
2655 return constrain_operands (-1);
2656 else
2657 return 0;
2658 }
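
/* Editorial sketch, not part of recog.c: how STRICT is chosen by
   typical callers.  The helper is hypothetical; before reload it
   passes 0, so a failed strict match falls back to the loose
   "reload will fix it" test, and after reload it passes 1 so that
   only a strictly valid alternative is accepted.  */

static int
example_insn_constraints_ok (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed);
}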
2659
2660 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2661 is a hard reg in class CLASS when its regno is offset by OFFSET
2662 and changed to mode MODE.
2663 If REG occupies multiple hard regs, all of them must be in CLASS. */
2664
2665 int
2666 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2667 enum machine_mode mode)
2668 {
2669 int regno = REGNO (operand);
2670 if (regno < FIRST_PSEUDO_REGISTER
2671 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2672 regno + offset))
2673 {
2674 int sr;
2675 regno += offset;
2676 for (sr = hard_regno_nregs[regno][mode] - 1;
2677 sr > 0; sr--)
2678 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2679 regno + sr))
2680 break;
2681 return sr == 0;
2682 }
2683
2684 return 0;
2685 }
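
/* Editorial sketch, not part of recog.c: reg_fits_class_p is the
   hard-register backstop of constraint matching.  The helper is
   hypothetical; it asks whether OPERAND, with no subreg offset,
   could serve as a GENERAL_REGS register in MODE.  */

static int
example_hard_reg_is_general (rtx operand, enum machine_mode mode)
{
  return REG_P (operand)
	 && reg_fits_class_p (operand, GENERAL_REGS, 0, mode);
}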
2686 \f
2687 /* Split single instruction. Helper function for split_all_insns and
2688 split_all_insns_noflow. Return last insn in the sequence if successful,
2689 or NULL if unsuccessful. */
2690
2691 static rtx
2692 split_insn (rtx insn)
2693 {
2694 /* Split insns here to get max fine-grain parallelism. */
2695 rtx first = PREV_INSN (insn);
2696 rtx last = try_split (PATTERN (insn), insn, 1);
2697
2698 if (last == insn)
2699 return NULL_RTX;
2700
2701 /* try_split returns the NOTE that INSN became. */
2702 SET_INSN_DELETED (insn);
2703
2704 /* ??? Coddle to md files that generate subregs in post-reload
2705 splitters instead of computing the proper hard register. */
2706 if (reload_completed && first != last)
2707 {
2708 first = NEXT_INSN (first);
2709 for (;;)
2710 {
2711 if (INSN_P (first))
2712 cleanup_subreg_operands (first);
2713 if (first == last)
2714 break;
2715 first = NEXT_INSN (first);
2716 }
2717 }
2718 return last;
2719 }
2720
2721 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2722
2723 void
2724 split_all_insns (int upd_life)
2725 {
2726 sbitmap blocks;
2727 bool changed;
2728 basic_block bb;
2729
2730 blocks = sbitmap_alloc (last_basic_block);
2731 sbitmap_zero (blocks);
2732 changed = false;
2733
2734 FOR_EACH_BB_REVERSE (bb)
2735 {
2736 rtx insn, next;
2737 bool finish = false;
2738
2739 for (insn = BB_HEAD (bb); !finish ; insn = next)
2740 {
2741 	  /* Can't use `next_real_insn', because that might go across
2742 	     CODE_LABELs and thus skip over whole basic blocks.  */
2743 next = NEXT_INSN (insn);
2744 finish = (insn == BB_END (bb));
2745 if (INSN_P (insn))
2746 {
2747 rtx set = single_set (insn);
2748
2749 /* Don't split no-op move insns. These should silently
2750 disappear later in final. Splitting such insns would
2751 break the code that handles REG_NO_CONFLICT blocks. */
2752 if (set && set_noop_p (set))
2753 {
2754 /* Nops get in the way while scheduling, so delete them
2755 now if register allocation has already been done. It
2756 is too risky to try to do this before register
2757 allocation, and there are unlikely to be very many
2758 			     nops then anyway.  */
2759 if (reload_completed)
2760 {
2761 /* If the no-op set has a REG_UNUSED note, we need
2762 to update liveness information. */
2763 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2764 {
2765 SET_BIT (blocks, bb->index);
2766 changed = true;
2767 }
2768 /* ??? Is life info affected by deleting edges? */
2769 delete_insn_and_edges (insn);
2770 }
2771 }
2772 else
2773 {
2774 rtx last = split_insn (insn);
2775 if (last)
2776 {
2777 		      /* The split sequence may include a barrier, but the
2778 			 BB boundary we are interested in will be set to
2779 			 the previous insn.  */
2780
2781 while (BARRIER_P (last))
2782 last = PREV_INSN (last);
2783 SET_BIT (blocks, bb->index);
2784 changed = true;
2785 }
2786 }
2787 }
2788 }
2789 }
2790
2791 if (changed)
2792 {
2793 int old_last_basic_block = last_basic_block;
2794
2795 find_many_sub_basic_blocks (blocks);
2796
2797 if (old_last_basic_block != last_basic_block && upd_life)
2798 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2799 }
2800
2801 if (changed && upd_life)
2802 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2803 PROP_DEATH_NOTES);
2804
2805 #ifdef ENABLE_CHECKING
2806 verify_flow_info ();
2807 #endif
2808
2809 sbitmap_free (blocks);
2810 }
2811
2812 /* Same as split_all_insns, but do not expect CFG to be available.
2813 Used by machine dependent reorg passes. */
2814
2815 void
2816 split_all_insns_noflow (void)
2817 {
2818 rtx next, insn;
2819
2820 for (insn = get_insns (); insn; insn = next)
2821 {
2822 next = NEXT_INSN (insn);
2823 if (INSN_P (insn))
2824 {
2825 /* Don't split no-op move insns. These should silently
2826 disappear later in final. Splitting such insns would
2827 break the code that handles REG_NO_CONFLICT blocks. */
2828 rtx set = single_set (insn);
2829 if (set && set_noop_p (set))
2830 {
2831 /* Nops get in the way while scheduling, so delete them
2832 now if register allocation has already been done. It
2833 is too risky to try to do this before register
2834 allocation, and there are unlikely to be very many
2835 	     nops then anyway.
2836
2837 ??? Should we use delete_insn when the CFG isn't valid? */
2838 if (reload_completed)
2839 delete_insn_and_edges (insn);
2840 }
2841 else
2842 split_insn (insn);
2843 }
2844 }
2845 }
2846 \f
2847 #ifdef HAVE_peephole2
2848 struct peep2_insn_data
2849 {
2850 rtx insn;
2851 regset live_before;
2852 };
2853
2854 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2855 static int peep2_current;
2856 /* The number of instructions available to match a peep2. */
2857 int peep2_current_count;
2858
2859 /* A non-insn marker indicating the last insn of the block.
2860 The live_before regset for this element is correct, indicating
2861 global_live_at_end for the block. */
2862 #define PEEP2_EOB pc_rtx
2863
2864 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2865 does not exist. Used by the recognizer to find the next insn to match
2866 in a multi-insn pattern. */
2867
2868 rtx
2869 peep2_next_insn (int n)
2870 {
2871 gcc_assert (n <= peep2_current_count);
2872
2873 n += peep2_current;
2874 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2875 n -= MAX_INSNS_PER_PEEP2 + 1;
2876
2877 return peep2_insn_data[n].insn;
2878 }
2879
2880 /* Return true if REGNO is dead before the Nth non-note insn
2881 after `current'. */
2882
2883 int
2884 peep2_regno_dead_p (int ofs, int regno)
2885 {
2886 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2887
2888 ofs += peep2_current;
2889 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2890 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2891
2892 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2893
2894 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2895 }
2896
2897 /* Similarly for a REG. */
2898
2899 int
2900 peep2_reg_dead_p (int ofs, rtx reg)
2901 {
2902 int regno, n;
2903
2904 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2905
2906 ofs += peep2_current;
2907 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2908 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2909
2910 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2911
2912 regno = REGNO (reg);
2913 n = hard_regno_nregs[regno][GET_MODE (reg)];
2914 while (--n >= 0)
2915 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2916 return 0;
2917 return 1;
2918 }
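
/* Editorial sketch, not part of recog.c: how a define_peephole2
   condition in a machine description typically uses these
   predicates.  The helper is hypothetical; for a two-insn window,
   offset 2 denotes the point just after the matched sequence.  */

static int
example_peep2_condition (rtx *operands)
{
  /* Allow the combination only if the register in operands[0] is
     dead once the two matched insns have executed.  */
  return peep2_reg_dead_p (2, operands[0]);
}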
2919
2920 /* Try to find a hard register of mode MODE, matching the register class in
2921    CLASS_STR, which is available from the start of the window insn at
2922    offset FROM and remains available through the end of the window insn
2923    at offset TO.  FROM and TO are offsets into the current peephole
2924    window, as for peep2_next_insn.
2925 Registers that already have bits set in REG_SET will not be considered.
2926
2927 If an appropriate register is available, it will be returned and the
2928 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2929 returned. */
2930
2931 rtx
2932 peep2_find_free_register (int from, int to, const char *class_str,
2933 enum machine_mode mode, HARD_REG_SET *reg_set)
2934 {
2935 static int search_ofs;
2936 enum reg_class cl;
2937 HARD_REG_SET live;
2938 int i;
2939
2940 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2941 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2942
2943 from += peep2_current;
2944 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2945 from -= MAX_INSNS_PER_PEEP2 + 1;
2946 to += peep2_current;
2947 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2948 to -= MAX_INSNS_PER_PEEP2 + 1;
2949
2950 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2951 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2952
2953 while (from != to)
2954 {
2955 HARD_REG_SET this_live;
2956
2957 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2958 from = 0;
2959 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2960 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2961 IOR_HARD_REG_SET (live, this_live);
2962 }
2963
2964 cl = (class_str[0] == 'r' ? GENERAL_REGS
2965 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2966
2967 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2968 {
2969 int raw_regno, regno, success, j;
2970
2971 /* Distribute the free registers as much as possible. */
2972 raw_regno = search_ofs + i;
2973 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2974 raw_regno -= FIRST_PSEUDO_REGISTER;
2975 #ifdef REG_ALLOC_ORDER
2976 regno = reg_alloc_order[raw_regno];
2977 #else
2978 regno = raw_regno;
2979 #endif
2980
2981 /* Don't allocate fixed registers. */
2982 if (fixed_regs[regno])
2983 continue;
2984 /* Make sure the register is of the right class. */
2985 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2986 continue;
2987 /* And can support the mode we need. */
2988 if (! HARD_REGNO_MODE_OK (regno, mode))
2989 continue;
2990 /* And that we don't create an extra save/restore. */
2991 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2992 continue;
2993 /* And we don't clobber traceback for noreturn functions. */
2994 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2995 && (! reload_completed || frame_pointer_needed))
2996 continue;
2997
2998 success = 1;
2999 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3000 {
3001 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3002 || TEST_HARD_REG_BIT (live, regno + j))
3003 {
3004 success = 0;
3005 break;
3006 }
3007 }
3008 if (success)
3009 {
3010 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3011 SET_HARD_REG_BIT (*reg_set, regno + j);
3012
3013 /* Start the next search with the next register. */
3014 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3015 raw_regno = 0;
3016 search_ofs = raw_regno;
3017
3018 return gen_rtx_REG (mode, regno);
3019 }
3020 }
3021
3022 search_ofs = 0;
3023 return NULL_RTX;
3024 }
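
/* Editorial sketch, not part of recog.c: the idiom generated for a
   match_scratch in a define_peephole2.  The helper is hypothetical;
   it asks for a general register in SImode that is free across a
   two-insn window, tracking grants in a local HARD_REG_SET.  */

static rtx
example_find_simode_scratch (void)
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", SImode, &used);
}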
3025
3026 /* Perform the peephole2 optimization pass. */
3027
3028 void
3029 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
3030 {
3031 rtx insn, prev;
3032 regset live;
3033 int i;
3034 basic_block bb;
3035 #ifdef HAVE_conditional_execution
3036 sbitmap blocks;
3037 bool changed;
3038 #endif
3039 bool do_cleanup_cfg = false;
3040 bool do_global_life_update = false;
3041 bool do_rebuild_jump_labels = false;
3042
3043 /* Initialize the regsets we're going to use. */
3044 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3045 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
3046 live = ALLOC_REG_SET (&reg_obstack);
3047
3048 #ifdef HAVE_conditional_execution
3049 blocks = sbitmap_alloc (last_basic_block);
3050 sbitmap_zero (blocks);
3051 changed = false;
3052 #else
3053 count_or_remove_death_notes (NULL, 1);
3054 #endif
3055
3056 FOR_EACH_BB_REVERSE (bb)
3057 {
3058 struct propagate_block_info *pbi;
3059 reg_set_iterator rsi;
3060 unsigned int j;
3061
3062       /* Indicate that all slots except the last hold invalid data.  */
3063 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3064 peep2_insn_data[i].insn = NULL_RTX;
3065 peep2_current_count = 0;
3066
3067 /* Indicate that the last slot contains live_after data. */
3068 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3069 peep2_current = MAX_INSNS_PER_PEEP2;
3070
3071 /* Start up propagation. */
3072 COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
3073 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3074
3075 #ifdef HAVE_conditional_execution
3076 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3077 #else
3078 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3079 #endif
3080
3081 for (insn = BB_END (bb); ; insn = prev)
3082 {
3083 prev = PREV_INSN (insn);
3084 if (INSN_P (insn))
3085 {
3086 rtx try, before_try, x;
3087 int match_len;
3088 rtx note;
3089 bool was_call = false;
3090
3091 /* Record this insn. */
3092 if (--peep2_current < 0)
3093 peep2_current = MAX_INSNS_PER_PEEP2;
3094 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3095 peep2_current_count++;
3096 peep2_insn_data[peep2_current].insn = insn;
3097 propagate_one_insn (pbi, insn);
3098 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3099
3100 /* Match the peephole. */
3101 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3102 if (try != NULL)
3103 {
3104 		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3105 		     in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3106 		     and other cfg-related call notes to it.  */
3107 for (i = 0; i <= match_len; ++i)
3108 {
3109 int j;
3110 rtx old_insn, new_insn, note;
3111
3112 j = i + peep2_current;
3113 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3114 j -= MAX_INSNS_PER_PEEP2 + 1;
3115 old_insn = peep2_insn_data[j].insn;
3116 if (!CALL_P (old_insn))
3117 continue;
3118 was_call = true;
3119
3120 new_insn = try;
3121 while (new_insn != NULL_RTX)
3122 {
3123 if (CALL_P (new_insn))
3124 break;
3125 new_insn = NEXT_INSN (new_insn);
3126 }
3127
3128 gcc_assert (new_insn != NULL_RTX);
3129
3130 CALL_INSN_FUNCTION_USAGE (new_insn)
3131 = CALL_INSN_FUNCTION_USAGE (old_insn);
3132
3133 for (note = REG_NOTES (old_insn);
3134 note;
3135 note = XEXP (note, 1))
3136 switch (REG_NOTE_KIND (note))
3137 {
3138 case REG_NORETURN:
3139 case REG_SETJMP:
3140 REG_NOTES (new_insn)
3141 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3142 XEXP (note, 0),
3143 REG_NOTES (new_insn));
3144 default:
3145 /* Discard all other reg notes. */
3146 break;
3147 }
3148
3149 /* Croak if there is another call in the sequence. */
3150 while (++i <= match_len)
3151 {
3152 j = i + peep2_current;
3153 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3154 j -= MAX_INSNS_PER_PEEP2 + 1;
3155 old_insn = peep2_insn_data[j].insn;
3156 gcc_assert (!CALL_P (old_insn));
3157 }
3158 break;
3159 }
3160
3161 i = match_len + peep2_current;
3162 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3163 i -= MAX_INSNS_PER_PEEP2 + 1;
3164
3165 note = find_reg_note (peep2_insn_data[i].insn,
3166 REG_EH_REGION, NULL_RTX);
3167
3168 /* Replace the old sequence with the new. */
3169 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3170 INSN_LOCATOR (peep2_insn_data[i].insn));
3171 before_try = PREV_INSN (insn);
3172 delete_insn_chain (insn, peep2_insn_data[i].insn);
3173
3174 /* Re-insert the EH_REGION notes. */
3175 if (note || (was_call && nonlocal_goto_handler_labels))
3176 {
3177 edge eh_edge;
3178 edge_iterator ei;
3179
3180 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3181 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3182 break;
3183
3184 for (x = try ; x != before_try ; x = PREV_INSN (x))
3185 if (CALL_P (x)
3186 || (flag_non_call_exceptions
3187 && may_trap_p (PATTERN (x))
3188 && !find_reg_note (x, REG_EH_REGION, NULL)))
3189 {
3190 if (note)
3191 REG_NOTES (x)
3192 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3193 XEXP (note, 0),
3194 REG_NOTES (x));
3195
3196 if (x != BB_END (bb) && eh_edge)
3197 {
3198 edge nfte, nehe;
3199 int flags;
3200
3201 nfte = split_block (bb, x);
3202 flags = (eh_edge->flags
3203 & (EDGE_EH | EDGE_ABNORMAL));
3204 if (CALL_P (x))
3205 flags |= EDGE_ABNORMAL_CALL;
3206 nehe = make_edge (nfte->src, eh_edge->dest,
3207 flags);
3208
3209 nehe->probability = eh_edge->probability;
3210 nfte->probability
3211 = REG_BR_PROB_BASE - nehe->probability;
3212
3213 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3214 #ifdef HAVE_conditional_execution
3215 SET_BIT (blocks, nfte->dest->index);
3216 changed = true;
3217 #endif
3218 bb = nfte->src;
3219 eh_edge = nehe;
3220 }
3221 }
3222
3223 		  /* A possibly trapping insn may have become non-trapping
3224 		     in the new sequence.  Zap any dummy outgoing edges.  */
3225 do_cleanup_cfg |= purge_dead_edges (bb);
3226 }
3227
3228 #ifdef HAVE_conditional_execution
3229 /* With conditional execution, we cannot back up the
3230 live information so easily, since the conditional
3231 death data structures are not so self-contained.
3232 So record that we've made a modification to this
3233 block and update life information at the end. */
3234 SET_BIT (blocks, bb->index);
3235 changed = true;
3236
3237 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3238 peep2_insn_data[i].insn = NULL_RTX;
3239 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3240 peep2_current_count = 0;
3241 #else
3242 /* Back up lifetime information past the end of the
3243 newly created sequence. */
3244 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3245 i = 0;
3246 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3247
3248 /* Update life information for the new sequence. */
3249 x = try;
3250 do
3251 {
3252 if (INSN_P (x))
3253 {
3254 if (--i < 0)
3255 i = MAX_INSNS_PER_PEEP2;
3256 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3257 peep2_current_count++;
3258 peep2_insn_data[i].insn = x;
3259 propagate_one_insn (pbi, x);
3260 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3261 }
3262 x = PREV_INSN (x);
3263 }
3264 while (x != prev);
3265
3266 /* ??? Should verify that LIVE now matches what we
3267 had before the new sequence. */
3268
3269 peep2_current = i;
3270 #endif
3271
3272 /* If we generated a jump instruction, it won't have
3273 JUMP_LABEL set. Recompute after we're done. */
3274 for (x = try; x != before_try; x = PREV_INSN (x))
3275 if (JUMP_P (x))
3276 {
3277 do_rebuild_jump_labels = true;
3278 break;
3279 }
3280 }
3281 }
3282
3283 if (insn == BB_HEAD (bb))
3284 break;
3285 }
3286
3287       /* Some peepholes can decide they don't need one or more of their
3288 	 inputs.  If this happens, a local life update is not enough.  */
3289 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
3290 0, j, rsi)
3291 {
3292 do_global_life_update = true;
3293 break;
3294 }
3295
3296 free_propagate_block_info (pbi);
3297 }
3298
3299 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3300 FREE_REG_SET (peep2_insn_data[i].live_before);
3301 FREE_REG_SET (live);
3302
3303 if (do_rebuild_jump_labels)
3304 rebuild_jump_labels (get_insns ());
3305
3306 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3307 we've changed global life since exception handlers are no longer
3308 reachable. */
3309 if (do_cleanup_cfg)
3310 {
3311 cleanup_cfg (0);
3312 do_global_life_update = true;
3313 }
3314 if (do_global_life_update)
3315 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3316 #ifdef HAVE_conditional_execution
3317 else
3318 {
3319 count_or_remove_death_notes (blocks, 1);
3320 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3321 }
3322 sbitmap_free (blocks);
3323 #endif
3324 }
3325 #endif /* HAVE_peephole2 */
3326
3327 /* Common predicates for use with define_bypass. */
3328
3329 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3330 data not the address operand(s) of the store. IN_INSN must be
3331 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3332 SETs inside. */
3333
3334 int
3335 store_data_bypass_p (rtx out_insn, rtx in_insn)
3336 {
3337 rtx out_set, in_set;
3338
3339 in_set = single_set (in_insn);
3340 gcc_assert (in_set);
3341
3342 if (!MEM_P (SET_DEST (in_set)))
3343 return false;
3344
3345 out_set = single_set (out_insn);
3346 if (out_set)
3347 {
3348 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3349 return false;
3350 }
3351 else
3352 {
3353 rtx out_pat;
3354 int i;
3355
3356 out_pat = PATTERN (out_insn);
3357 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3358
3359 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3360 {
3361 rtx exp = XVECEXP (out_pat, 0, i);
3362
3363 if (GET_CODE (exp) == CLOBBER)
3364 continue;
3365
3366 gcc_assert (GET_CODE (exp) == SET);
3367
3368 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3369 return false;
3370 }
3371 }
3372
3373 return true;
3374 }
3375
3376 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3377 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3378    or multiple set; IN_INSN should be single_set for the result to be
3379    meaningful, but for convenience of insn categorization it may be any
   JUMP or CALL insn.  */
3380
3381 int
3382 if_test_bypass_p (rtx out_insn, rtx in_insn)
3383 {
3384 rtx out_set, in_set;
3385
3386 in_set = single_set (in_insn);
3387 if (! in_set)
3388 {
3389 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3390 return false;
3391 }
3392
3393 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3394 return false;
3395 in_set = SET_SRC (in_set);
3396
3397 out_set = single_set (out_insn);
3398 if (out_set)
3399 {
3400 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3401 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3402 return false;
3403 }
3404 else
3405 {
3406 rtx out_pat;
3407 int i;
3408
3409 out_pat = PATTERN (out_insn);
3410 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3411
3412 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3413 {
3414 rtx exp = XVECEXP (out_pat, 0, i);
3415
3416 if (GET_CODE (exp) == CLOBBER)
3417 continue;
3418
3419 gcc_assert (GET_CODE (exp) == SET);
3420
3421 	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3422 	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3423 return false;
3424 }
3425 }
3426
3427 return true;
3428 }
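
/* Editorial sketch, not part of recog.c: these predicates are named
   as the guard of a define_bypass in a machine description, and the
   scheduler calls them with the producing and consuming insns.  The
   wrapper below is hypothetical; it guards against consumers that
   are not single_set, which store_data_bypass_p asserts on.  */

static int
example_store_bypass_applies (rtx producer, rtx consumer)
{
  return single_set (consumer) != NULL_RTX
	 && store_data_bypass_p (producer, consumer);
}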
3429 \f
3430 static bool
3431 gate_handle_peephole2 (void)
3432 {
3433 return (optimize > 0 && flag_peephole2);
3434 }
3435
3436 static void
3437 rest_of_handle_peephole2 (void)
3438 {
3439 #ifdef HAVE_peephole2
3440 peephole2_optimize (dump_file);
3441 #endif
3442 }
3443
3444 struct tree_opt_pass pass_peephole2 =
3445 {
3446 "peephole2", /* name */
3447 gate_handle_peephole2, /* gate */
3448 rest_of_handle_peephole2, /* execute */
3449 NULL, /* sub */
3450 NULL, /* next */
3451 0, /* static_pass_number */
3452 TV_PEEPHOLE2, /* tv_id */
3453 0, /* properties_required */
3454 0, /* properties_provided */
3455 0, /* properties_destroyed */
3456 0, /* todo_flags_start */
3457 TODO_dump_func, /* todo_flags_finish */
3458 'z' /* letter */
3459 };
3460
3461 static void
3462 rest_of_handle_split_all_insns (void)
3463 {
3464 split_all_insns (1);
3465 }
3466
3467 struct tree_opt_pass pass_split_all_insns =
3468 {
3469 "split1", /* name */
3470 NULL, /* gate */
3471 rest_of_handle_split_all_insns, /* execute */
3472 NULL, /* sub */
3473 NULL, /* next */
3474 0, /* static_pass_number */
3475 0, /* tv_id */
3476 0, /* properties_required */
3477 0, /* properties_provided */
3478 0, /* properties_destroyed */
3479 0, /* todo_flags_start */
3480 TODO_dump_func, /* todo_flags_finish */
3481 0 /* letter */
3482 };
3483
3484 /* The placement of the splitting that we do for shorten_branches
3485 depends on whether regstack is used by the target or not. */
3486 static bool
3487 gate_do_final_split (void)
3488 {
3489 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3490 return 1;
3491 #else
3492 return 0;
3493 #endif
3494 }
3495
3496 struct tree_opt_pass pass_split_for_shorten_branches =
3497 {
3498 "split3", /* name */
3499 gate_do_final_split, /* gate */
3500 split_all_insns_noflow, /* execute */
3501 NULL, /* sub */
3502 NULL, /* next */
3503 0, /* static_pass_number */
3504 TV_SHORTEN_BRANCH, /* tv_id */
3505 0, /* properties_required */
3506 0, /* properties_provided */
3507 0, /* properties_destroyed */
3508 0, /* todo_flags_start */
3509 TODO_dump_func, /* todo_flags_finish */
3510 0 /* letter */
3511 };
3512
3513
3514 static bool
3515 gate_handle_split_before_regstack (void)
3516 {
3517 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3518 /* If flow2 creates new instructions which need splitting,
3519    and scheduling after reload is not done, they might not be
3520    split until final, which does not allow splitting
3521    if HAVE_ATTR_length is defined.  */
3522 # ifdef INSN_SCHEDULING
3523 return (optimize && !flag_schedule_insns_after_reload);
3524 # else
3525 return (optimize);
3526 # endif
3527 #else
3528 return 0;
3529 #endif
3530 }
3531
3532 struct tree_opt_pass pass_split_before_regstack =
3533 {
3534 "split2", /* name */
3535 gate_handle_split_before_regstack, /* gate */
3536 rest_of_handle_split_all_insns, /* execute */
3537 NULL, /* sub */
3538 NULL, /* next */
3539 0, /* static_pass_number */
3540 TV_SHORTEN_BRANCH, /* tv_id */
3541 0, /* properties_required */
3542 0, /* properties_provided */
3543 0, /* properties_destroyed */
3544 0, /* todo_flags_start */
3545 TODO_dump_func, /* todo_flags_finish */
3546 0 /* letter */
3547 };