re PR bootstrap/41395 (Revision 151800 failed bootstrap)
[gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "addresses.h"
35 #include "expr.h"
36 #include "function.h"
37 #include "flags.h"
38 #include "real.h"
39 #include "toplev.h"
40 #include "basic-block.h"
41 #include "output.h"
42 #include "reload.h"
43 #include "target.h"
44 #include "timevar.h"
45 #include "tree-pass.h"
46 #include "df.h"
47
48 #ifndef STACK_PUSH_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_PUSH_CODE PRE_DEC
51 #else
52 #define STACK_PUSH_CODE PRE_INC
53 #endif
54 #endif
55
56 #ifndef STACK_POP_CODE
57 #ifdef STACK_GROWS_DOWNWARD
58 #define STACK_POP_CODE POST_INC
59 #else
60 #define STACK_POP_CODE POST_DEC
61 #endif
62 #endif
63
64 #ifndef HAVE_ATTR_enabled
65 static inline bool
66 get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
67 {
68 return true;
69 }
70 #endif
71
72 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
73 static void validate_replace_src_1 (rtx *, void *);
74 static rtx split_insn (rtx);
75
76 /* Nonzero means allow operands to be volatile.
77 This should be 0 if you are generating rtl, such as if you are calling
78 the functions in optabs.c and expmed.c (most of the time).
79 This should be 1 if all valid insns need to be recognized,
80 such as in reginfo.c and final.c and reload.c.
81
82 init_recog and init_recog_no_volatile are responsible for setting this. */
83
84 int volatile_ok;
85
86 struct recog_data recog_data;
87
88 /* Contains a vector of operand_alternative structures for every operand.
89 Set up by preprocess_constraints. */
90 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
91
92 /* On return from `constrain_operands', indicate which alternative
93 was satisfied. */
94
95 int which_alternative;
96
97 /* Nonzero after end of reload pass.
98 Set to 1 or 0 by toplev.c.
99 Controls the significance of (SUBREG (MEM)). */
100
101 int reload_completed;
102
103 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
104 int epilogue_completed;
105
106 /* Initialize data used by the function `recog'.
107 This must be called once in the compilation of a function
108 before any insn recognition may be done in the function. */
109
110 void
111 init_recog_no_volatile (void)
112 {
113 volatile_ok = 0;
114 }
115
116 void
117 init_recog (void)
118 {
119 volatile_ok = 1;
120 }
121
122 \f
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
125
126 int
127 check_asm_operands (rtx x)
128 {
129 int noperands;
130 rtx *operands;
131 const char **constraints;
132 int i;
133
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
136 {
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
141 }
142
143 noperands = asm_noperands (x);
144 if (noperands < 0)
145 return 0;
146 if (noperands == 0)
147 return 1;
148
149 operands = XALLOCAVEC (rtx, noperands);
150 constraints = XALLOCAVEC (const char *, noperands);
151
152 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
153
154 for (i = 0; i < noperands; i++)
155 {
156 const char *c = constraints[i];
157 if (c[0] == '%')
158 c++;
159 if (! asm_operand_ok (operands[i], c, constraints))
160 return 0;
161 }
162
163 return 1;
164 }
165 \f
166 /* Static data for the next two routines. */
167
168 typedef struct change_t
169 {
170 rtx object;
171 int old_code;
172 rtx *loc;
173 rtx old;
174 bool unshare;
175 } change_t;
176
177 static change_t *changes;
178 static int changes_allocated;
179
180 static int num_changes = 0;
181
182 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
183 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
184 the change is simply made.
185
186 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
187 will be called with the address and mode as parameters. If OBJECT is
188 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
189 the change in place.
190
191 IN_GROUP is nonzero if this is part of a group of changes that must be
192 performed as a group. In that case, the changes will be stored. The
193 function `apply_change_group' will validate and apply the changes.
194
195 If IN_GROUP is zero, this is a single change. Try to recognize the insn
196 or validate the memory reference with the change applied. If the result
197 is not valid for the machine, suppress the change and return zero.
198 Otherwise, perform the change and return 1. */
199
200 static bool
201 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
202 {
203 rtx old = *loc;
204
205 if (old == new_rtx || rtx_equal_p (old, new_rtx))
206 return 1;
207
208 gcc_assert (in_group != 0 || num_changes == 0);
209
210 *loc = new_rtx;
211
212 /* Save the information describing this change. */
213 if (num_changes >= changes_allocated)
214 {
215 if (changes_allocated == 0)
216 /* This value allows for repeated substitutions inside complex
217 indexed addresses, or changes in up to 5 insns. */
218 changes_allocated = MAX_RECOG_OPERANDS * 5;
219 else
220 changes_allocated *= 2;
221
222 changes = XRESIZEVEC (change_t, changes, changes_allocated);
223 }
224
225 changes[num_changes].object = object;
226 changes[num_changes].loc = loc;
227 changes[num_changes].old = old;
228 changes[num_changes].unshare = unshare;
229
230 if (object && !MEM_P (object))
231 {
232 /* Set INSN_CODE to force rerecognition of insn. Save old code in
233 case invalid. */
234 changes[num_changes].old_code = INSN_CODE (object);
235 INSN_CODE (object) = -1;
236 }
237
238 num_changes++;
239
240 /* If we are making a group of changes, return 1. Otherwise, validate the
241 change group we made. */
242
243 if (in_group)
244 return 1;
245 else
246 return apply_change_group ();
247 }
248
249 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
250 UNSHARE to false. */
251
252 bool
253 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
254 {
255 return validate_change_1 (object, loc, new_rtx, in_group, false);
256 }
257
258 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
259 UNSHARE to true. */
260
261 bool
262 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
263 {
264 return validate_change_1 (object, loc, new_rtx, in_group, true);
265 }
266
267
268 /* Keep X canonicalized if some changes have made it non-canonical; only
269 modifies the operands of X, not (for example) its code. Simplifications
270 are not the job of this routine.
271
272 Return true if anything was changed. */
273 bool
274 canonicalize_change_group (rtx insn, rtx x)
275 {
276 if (COMMUTATIVE_P (x)
277 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
278 {
279 /* Oops, the caller has made X no longer canonical.
280 Let's redo the changes in the correct order. */
281 rtx tem = XEXP (x, 0);
282 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
283 validate_change (insn, &XEXP (x, 1), tem, 1);
284 return true;
285 }
286 else
287 return false;
288 }
289
290
291 /* This subroutine of apply_change_group verifies whether the changes to INSN
292 were valid; i.e. whether INSN can still be recognized. */
293
294 int
295 insn_invalid_p (rtx insn)
296 {
297 rtx pat = PATTERN (insn);
298 int num_clobbers = 0;
299 /* If we are before reload and the pattern is a SET, see if we can add
300 clobbers. */
301 int icode = recog (pat, insn,
302 (GET_CODE (pat) == SET
303 && ! reload_completed && ! reload_in_progress)
304 ? &num_clobbers : 0);
305 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
306
307
308 /* If this is an asm and the operands aren't legal, then fail. Likewise if
309 this is not an asm and the insn wasn't recognized. */
310 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
311 || (!is_asm && icode < 0))
312 return 1;
313
314 /* If we have to add CLOBBERs, fail if we have to add ones that reference
315 hard registers since our callers can't know if they are live or not.
316 Otherwise, add them. */
317 if (num_clobbers > 0)
318 {
319 rtx newpat;
320
321 if (added_clobbers_hard_reg_p (icode))
322 return 1;
323
324 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
325 XVECEXP (newpat, 0, 0) = pat;
326 add_clobbers (newpat, icode);
327 PATTERN (insn) = pat = newpat;
328 }
329
330 /* After reload, verify that all constraints are satisfied. */
331 if (reload_completed)
332 {
333 extract_insn (insn);
334
335 if (! constrain_operands (1))
336 return 1;
337 }
338
339 INSN_CODE (insn) = icode;
340 return 0;
341 }
342
343 /* Return number of changes made and not validated yet. */
344 int
345 num_changes_pending (void)
346 {
347 return num_changes;
348 }
349
350 /* Tentatively apply the changes numbered NUM and up.
351 Return 1 if all changes are valid, zero otherwise. */
352
353 int
354 verify_changes (int num)
355 {
356 int i;
357 rtx last_validated = NULL_RTX;
358
359 /* The changes have been applied and all INSN_CODEs have been reset to force
360 rerecognition.
361
362 The changes are valid if we aren't given an object, or if we are
363 given a MEM and it is still a valid address, or if this is an insn
364 and it is recognized. In the latter case, if reload has completed,
365 we also require that the operands meet the constraints for
366 the insn. */
367
368 for (i = num; i < num_changes; i++)
369 {
370 rtx object = changes[i].object;
371
372 /* If there is no object to test or if it is the same as the one we
373 already tested, ignore it. */
374 if (object == 0 || object == last_validated)
375 continue;
376
377 if (MEM_P (object))
378 {
379 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
380 break;
381 }
382 else if (REG_P (changes[i].old)
383 && asm_noperands (PATTERN (object)) > 0
384 && REG_EXPR (changes[i].old) != NULL_TREE
385 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
386 && DECL_REGISTER (REG_EXPR (changes[i].old)))
387 {
388 /* Don't allow changes of hard register operands to inline
389 assemblies if they have been defined as register asm ("x"). */
390 break;
391 }
392 else if (DEBUG_INSN_P (object))
393 continue;
394 else if (insn_invalid_p (object))
395 {
396 rtx pat = PATTERN (object);
397
398 /* Perhaps we couldn't recognize the insn because there were
399 extra CLOBBERs at the end. If so, try to re-recognize
400 without the last CLOBBER (later iterations will cause each of
401 them to be eliminated, in turn). But don't do this if we
402 have an ASM_OPERAND. */
403 if (GET_CODE (pat) == PARALLEL
404 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
405 && asm_noperands (PATTERN (object)) < 0)
406 {
407 rtx newpat;
408
409 if (XVECLEN (pat, 0) == 2)
410 newpat = XVECEXP (pat, 0, 0);
411 else
412 {
413 int j;
414
415 newpat
416 = gen_rtx_PARALLEL (VOIDmode,
417 rtvec_alloc (XVECLEN (pat, 0) - 1));
418 for (j = 0; j < XVECLEN (newpat, 0); j++)
419 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
420 }
421
422 /* Add a new change to this group to replace the pattern
423 with this new pattern. Then consider this change
424 as having succeeded. The change we added will
425 cause the entire call to fail if things remain invalid.
426
427 Note that this can lose if a later change than the one
428 we are processing specified &XVECEXP (PATTERN (object), 0, X)
429 but this shouldn't occur. */
430
431 validate_change (object, &PATTERN (object), newpat, 1);
432 continue;
433 }
434 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
435 || GET_CODE (pat) == VAR_LOCATION)
436 /* If this insn is a CLOBBER or USE, it is always valid, but is
437 never recognized. */
438 continue;
439 else
440 break;
441 }
442 last_validated = object;
443 }
444
445 return (i == num_changes);
446 }
447
448 /* A group of changes has previously been issued with validate_change
449 and verified with verify_changes. Call df_insn_rescan for each of
450 the insns changed and clear num_changes.
451
452 void
453 confirm_change_group (void)
454 {
455 int i;
456 rtx last_object = NULL;
457
458 for (i = 0; i < num_changes; i++)
459 {
460 rtx object = changes[i].object;
461
462 if (changes[i].unshare)
463 *changes[i].loc = copy_rtx (*changes[i].loc);
464
465 /* Avoid unnecessary rescanning when multiple changes to the same instruction
466 are made. */
467 if (object)
468 {
469 if (object != last_object && last_object && INSN_P (last_object))
470 df_insn_rescan (last_object);
471 last_object = object;
472 }
473 }
474
475 if (last_object && INSN_P (last_object))
476 df_insn_rescan (last_object);
477 num_changes = 0;
478 }
479
480 /* Apply a group of changes previously issued with `validate_change'.
481 If all changes are valid, call confirm_change_group and return 1,
482 otherwise, call cancel_changes and return 0. */
483
484 int
485 apply_change_group (void)
486 {
487 if (verify_changes (0))
488 {
489 confirm_change_group ();
490 return 1;
491 }
492 else
493 {
494 cancel_changes (0);
495 return 0;
496 }
497 }
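/* A minimal usage sketch of the change-group API above (kept under #if 0,
   illustrative only).  The helper name and the assumption that PLUS_EXPR_RTX
   is a PLUS somewhere inside INSN are hypothetical.  Both substitutions are
   queued with IN_GROUP nonzero and then validated together:
   apply_change_group either confirms the whole group or cancels it, leaving
   INSN untouched.  */
#if 0
static int
swap_plus_operands (rtx insn, rtx plus_expr_rtx)
{
  rtx op0 = XEXP (plus_expr_rtx, 0);
  rtx op1 = XEXP (plus_expr_rtx, 1);

  /* Queue both changes; INSN is not re-recognized yet.  */
  validate_change (insn, &XEXP (plus_expr_rtx, 0), op1, 1);
  validate_change (insn, &XEXP (plus_expr_rtx, 1), op0, 1);

  /* Verify, then either confirm_change_group or cancel_changes.  */
  return apply_change_group ();
}
#endif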
498
499
500 /* Return the number of changes so far in the current group. */
501
502 int
503 num_validated_changes (void)
504 {
505 return num_changes;
506 }
507
508 /* Retract the changes numbered NUM and up. */
509
510 void
511 cancel_changes (int num)
512 {
513 int i;
514
515 /* Back out all the changes. Do this in the opposite order in which
516 they were made. */
517 for (i = num_changes - 1; i >= num; i--)
518 {
519 *changes[i].loc = changes[i].old;
520 if (changes[i].object && !MEM_P (changes[i].object))
521 INSN_CODE (changes[i].object) = changes[i].old_code;
522 }
523 num_changes = num;
524 }
525
526 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
527 rtx. */
528
529 static void
530 simplify_while_replacing (rtx *loc, rtx to, rtx object,
531 enum machine_mode op0_mode)
532 {
533 rtx x = *loc;
534 enum rtx_code code = GET_CODE (x);
535 rtx new_rtx;
536
537 if (SWAPPABLE_OPERANDS_P (x)
538 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
539 {
540 validate_unshare_change (object, loc,
541 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
542 : swap_condition (code),
543 GET_MODE (x), XEXP (x, 1),
544 XEXP (x, 0)), 1);
545 x = *loc;
546 code = GET_CODE (x);
547 }
548
549 switch (code)
550 {
551 case PLUS:
552 /* If we have a PLUS whose second operand is now a CONST_INT, use
553 simplify_gen_binary to try to simplify it.
554 ??? We may want later to remove this, once simplification is
555 separated from this function. */
556 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
557 validate_change (object, loc,
558 simplify_gen_binary
559 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
560 break;
561 case MINUS:
562 if (CONST_INT_P (XEXP (x, 1))
563 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
564 validate_change (object, loc,
565 simplify_gen_binary
566 (PLUS, GET_MODE (x), XEXP (x, 0),
567 simplify_gen_unary (NEG,
568 GET_MODE (x), XEXP (x, 1),
569 GET_MODE (x))), 1);
570 break;
571 case ZERO_EXTEND:
572 case SIGN_EXTEND:
573 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
574 {
575 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
576 op0_mode);
577 /* If any of the above failed, substitute in something that
578 we know won't be recognized. */
579 if (!new_rtx)
580 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
581 validate_change (object, loc, new_rtx, 1);
582 }
583 break;
584 case SUBREG:
585 /* All subregs possible to simplify should be simplified. */
586 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
587 SUBREG_BYTE (x));
588
589 /* Subregs of VOIDmode operands are incorrect. */
590 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
591 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
592 if (new_rtx)
593 validate_change (object, loc, new_rtx, 1);
594 break;
595 case ZERO_EXTRACT:
596 case SIGN_EXTRACT:
597 /* If we are replacing a register with memory, try to change the memory
598 to be the mode required for memory in extract operations (this isn't
599 likely to be an insertion operation; if it were, nothing bad would
600 happen, we might just fail in some cases). */
601
602 if (MEM_P (XEXP (x, 0))
603 && CONST_INT_P (XEXP (x, 1))
604 && CONST_INT_P (XEXP (x, 2))
605 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
606 && !MEM_VOLATILE_P (XEXP (x, 0)))
607 {
608 enum machine_mode wanted_mode = VOIDmode;
609 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
610 int pos = INTVAL (XEXP (x, 2));
611
612 if (GET_CODE (x) == ZERO_EXTRACT)
613 {
614 enum machine_mode new_mode
615 = mode_for_extraction (EP_extzv, 1);
616 if (new_mode != MAX_MACHINE_MODE)
617 wanted_mode = new_mode;
618 }
619 else if (GET_CODE (x) == SIGN_EXTRACT)
620 {
621 enum machine_mode new_mode
622 = mode_for_extraction (EP_extv, 1);
623 if (new_mode != MAX_MACHINE_MODE)
624 wanted_mode = new_mode;
625 }
626
627 /* If we have a narrower mode, we can do something. */
628 if (wanted_mode != VOIDmode
629 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
630 {
631 int offset = pos / BITS_PER_UNIT;
632 rtx newmem;
633
634 /* If the bytes and bits are counted differently, we
635 must adjust the offset. */
636 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
637 offset =
638 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
639 offset);
640
641 pos %= GET_MODE_BITSIZE (wanted_mode);
642
643 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
644
645 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
646 validate_change (object, &XEXP (x, 0), newmem, 1);
647 }
648 }
649
650 break;
651
652 default:
653 break;
654 }
655 }
656
657 /* Replace every occurrence of FROM in X with TO. Mark each change with
658 validate_change passing OBJECT. */
659
660 static void
661 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
662 bool simplify)
663 {
664 int i, j;
665 const char *fmt;
666 rtx x = *loc;
667 enum rtx_code code;
668 enum machine_mode op0_mode = VOIDmode;
669 int prev_changes = num_changes;
670
671 if (!x)
672 return;
673
674 code = GET_CODE (x);
675 fmt = GET_RTX_FORMAT (code);
676 if (fmt[0] == 'e')
677 op0_mode = GET_MODE (XEXP (x, 0));
678
679 /* X matches FROM if it is the same rtx or they are both referring to the
680 same register in the same mode. Avoid calling rtx_equal_p unless the
681 operands look similar. */
682
683 if (x == from
684 || (REG_P (x) && REG_P (from)
685 && GET_MODE (x) == GET_MODE (from)
686 && REGNO (x) == REGNO (from))
687 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
688 && rtx_equal_p (x, from)))
689 {
690 validate_unshare_change (object, loc, to, 1);
691 return;
692 }
693
694 /* Call ourselves recursively to perform the replacements.
695 We must not replace inside an already replaced expression, otherwise we
696 get infinite recursion for replacements like (reg X)->(subreg (reg X))
697 done by regmove, so we must special case shared ASM_OPERANDS. */
698
699 if (GET_CODE (x) == PARALLEL)
700 {
701 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
702 {
703 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
704 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
705 {
706 /* Verify that operands are really shared. */
707 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
708 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
709 (x, 0, j))));
710 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
711 from, to, object, simplify);
712 }
713 else
714 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
715 simplify);
716 }
717 }
718 else
719 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
720 {
721 if (fmt[i] == 'e')
722 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
723 else if (fmt[i] == 'E')
724 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
725 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
726 simplify);
727 }
728
729 /* If we didn't substitute, there is nothing more to do. */
730 if (num_changes == prev_changes)
731 return;
732
733 /* Allow the substituted expression to have a different mode. This is used
734 by regmove to change the mode of a pseudo register. */
735 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
736 op0_mode = GET_MODE (XEXP (x, 0));
737
738 /* Do changes needed to keep rtx consistent. Don't do any other
739 simplifications, as it is not our job. */
740 if (simplify)
741 simplify_while_replacing (loc, to, object, op0_mode);
742 }
743
744 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
745 with TO. After all changes have been made, validate by seeing
746 if INSN is still valid. */
747
748 int
749 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
750 {
751 validate_replace_rtx_1 (loc, from, to, insn, true);
752 return apply_change_group ();
753 }
754
755 /* Try replacing every occurrence of FROM in INSN with TO. After all
756 changes have been made, validate by seeing if INSN is still valid. */
757
758 int
759 validate_replace_rtx (rtx from, rtx to, rtx insn)
760 {
761 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
762 return apply_change_group ();
763 }
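/* Illustrative sketch (under #if 0) of the validate_replace_rtx entry point
   above: try to substitute a known constant for a pseudo throughout INSN and
   keep the change only if INSN is still recognizable.  The helper name and
   its arguments are hypothetical.  */
#if 0
static int
try_substitute_constant (rtx insn, rtx pseudo, HOST_WIDE_INT value)
{
  /* Replaces every occurrence and internally calls apply_change_group,
     so INSN is left unchanged on failure.  */
  return validate_replace_rtx (pseudo, GEN_INT (value), insn);
}
#endif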
764
765 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
766 is a part of INSN. After all changes have been made, validate by seeing if
767 INSN is still valid.
768 validate_replace_rtx (from, to, insn) is equivalent to
769 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
770
771 int
772 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
773 {
774 validate_replace_rtx_1 (where, from, to, insn, true);
775 return apply_change_group ();
776 }
777
778 /* Same as above, but do not simplify rtx afterwards. */
779 int
780 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
781 rtx insn)
782 {
783 validate_replace_rtx_1 (where, from, to, insn, false);
784 return apply_change_group ();
785
786 }
787
788 /* Try replacing every occurrence of FROM in INSN with TO. */
789
790 void
791 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
792 {
793 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
794 }
795
796 /* Function called by note_uses to replace used subexpressions. */
797 struct validate_replace_src_data
798 {
799 rtx from; /* Old RTX */
800 rtx to; /* New RTX */
801 rtx insn; /* Insn in which substitution is occurring. */
802 };
803
804 static void
805 validate_replace_src_1 (rtx *x, void *data)
806 {
807 struct validate_replace_src_data *d
808 = (struct validate_replace_src_data *) data;
809
810 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
811 }
812
813 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
814 SET_DESTs. */
815
816 void
817 validate_replace_src_group (rtx from, rtx to, rtx insn)
818 {
819 struct validate_replace_src_data d;
820
821 d.from = from;
822 d.to = to;
823 d.insn = insn;
824 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
825 }
826
827 /* Try to simplify INSN.
828 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
829 pattern and return true if something was simplified. */
830
831 bool
832 validate_simplify_insn (rtx insn)
833 {
834 int i;
835 rtx pat = NULL;
836 rtx newpat = NULL;
837
838 pat = PATTERN (insn);
839
840 if (GET_CODE (pat) == SET)
841 {
842 newpat = simplify_rtx (SET_SRC (pat));
843 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
844 validate_change (insn, &SET_SRC (pat), newpat, 1);
845 newpat = simplify_rtx (SET_DEST (pat));
846 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
847 validate_change (insn, &SET_DEST (pat), newpat, 1);
848 }
849 else if (GET_CODE (pat) == PARALLEL)
850 for (i = 0; i < XVECLEN (pat, 0); i++)
851 {
852 rtx s = XVECEXP (pat, 0, i);
853
854 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
855 {
856 newpat = simplify_rtx (SET_SRC (s));
857 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
858 validate_change (insn, &SET_SRC (s), newpat, 1);
859 newpat = simplify_rtx (SET_DEST (s));
860 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
861 validate_change (insn, &SET_DEST (s), newpat, 1);
862 }
863 }
864 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
865 }
866 \f
867 #ifdef HAVE_cc0
868 /* Return 1 if the insn using CC0 set by INSN does not contain
869 any ordered tests applied to the condition codes.
870 EQ and NE tests do not count. */
871
872 int
873 next_insn_tests_no_inequality (rtx insn)
874 {
875 rtx next = next_cc0_user (insn);
876
877 /* If there is no next insn, we have to take the conservative choice. */
878 if (next == 0)
879 return 0;
880
881 return (INSN_P (next)
882 && ! inequality_comparisons_p (PATTERN (next)));
883 }
884 #endif
885 \f
886 /* Return 1 if OP is a valid general operand for machine mode MODE.
887 This is either a register reference, a memory reference,
888 or a constant. In the case of a memory reference, the address
889 is checked for general validity for the target machine.
890
891 Register and memory references must have mode MODE in order to be valid,
892 but some constants have no machine mode and are valid for any mode.
893
894 If MODE is VOIDmode, OP is checked for validity for whatever mode
895 it has.
896
897 The main use of this function is as a predicate in match_operand
898 expressions in the machine description.
899
900 For an explanation of this function's behavior for registers of
901 class NO_REGS, see the comment for `register_operand'. */
902
903 int
904 general_operand (rtx op, enum machine_mode mode)
905 {
906 enum rtx_code code = GET_CODE (op);
907
908 if (mode == VOIDmode)
909 mode = GET_MODE (op);
910
911 /* Don't accept CONST_INT or anything similar
912 if the caller wants something floating. */
913 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
914 && GET_MODE_CLASS (mode) != MODE_INT
915 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
916 return 0;
917
918 if (CONST_INT_P (op)
919 && mode != VOIDmode
920 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
921 return 0;
922
923 if (CONSTANT_P (op))
924 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
925 || mode == VOIDmode)
926 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
927 && LEGITIMATE_CONSTANT_P (op));
928
929 /* Except for certain constants with VOIDmode, already checked for,
930 OP's mode must match MODE if MODE specifies a mode. */
931
932 if (GET_MODE (op) != mode)
933 return 0;
934
935 if (code == SUBREG)
936 {
937 rtx sub = SUBREG_REG (op);
938
939 #ifdef INSN_SCHEDULING
940 /* On machines that have insn scheduling, we want all memory
941 references to be explicit, so outlaw paradoxical SUBREGs.
942 However, we must allow them after reload so that they can
943 get cleaned up by cleanup_subreg_operands. */
944 if (!reload_completed && MEM_P (sub)
945 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
946 return 0;
947 #endif
948 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
949 may result in an incorrect reference. We should simplify all valid
950 subregs of MEM anyway. But allow this after reload because we
951 might be called from cleanup_subreg_operands.
952
953 ??? This is a kludge. */
954 if (!reload_completed && SUBREG_BYTE (op) != 0
955 && MEM_P (sub))
956 return 0;
957
958 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
959 create such rtl, and we must reject it. */
960 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
961 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
962 return 0;
963
964 op = sub;
965 code = GET_CODE (op);
966 }
967
968 if (code == REG)
969 /* A register whose class is NO_REGS is not a general operand. */
970 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
971 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
972
973 if (code == MEM)
974 {
975 rtx y = XEXP (op, 0);
976
977 if (! volatile_ok && MEM_VOLATILE_P (op))
978 return 0;
979
980 /* Use the mem's mode, since it will be reloaded thus. */
981 if (memory_address_p (GET_MODE (op), y))
982 return 1;
983 }
984
985 return 0;
986 }
987 \f
988 /* Return 1 if OP is a valid memory address for a memory reference
989 of mode MODE.
990
991 The main use of this function is as a predicate in match_operand
992 expressions in the machine description. */
993
994 int
995 address_operand (rtx op, enum machine_mode mode)
996 {
997 return memory_address_p (mode, op);
998 }
999
1000 /* Return 1 if OP is a register reference of mode MODE.
1001 If MODE is VOIDmode, accept a register in any mode.
1002
1003 The main use of this function is as a predicate in match_operand
1004 expressions in the machine description.
1005
1006 As a special exception, registers whose class is NO_REGS are
1007 not accepted by `register_operand'. The reason for this change
1008 is to allow the representation of special architecture artifacts
1009 (such as a condition code register) without extending the rtl
1010 definitions. Since registers of class NO_REGS cannot be used
1011 as registers in any case where register classes are examined,
1012 it is most consistent to keep this function from accepting them. */
1013
1014 int
1015 register_operand (rtx op, enum machine_mode mode)
1016 {
1017 if (GET_MODE (op) != mode && mode != VOIDmode)
1018 return 0;
1019
1020 if (GET_CODE (op) == SUBREG)
1021 {
1022 rtx sub = SUBREG_REG (op);
1023
1024 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1025 because it is guaranteed to be reloaded into one.
1026 Just make sure the MEM is valid in itself.
1027 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1028 but currently it does result from (SUBREG (REG)...) where the
1029 reg went on the stack.) */
1030 if (! reload_completed && MEM_P (sub))
1031 return general_operand (op, mode);
1032
1033 #ifdef CANNOT_CHANGE_MODE_CLASS
1034 if (REG_P (sub)
1035 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1036 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1037 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1038 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1039 return 0;
1040 #endif
1041
1042 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1043 create such rtl, and we must reject it. */
1044 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1045 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1046 return 0;
1047
1048 op = sub;
1049 }
1050
1051 /* We don't consider registers whose class is NO_REGS
1052 to be register operands. */
1053 return (REG_P (op)
1054 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1055 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1056 }
1057
1058 /* Return 1 for a register in Pmode; ignore the tested mode. */
1059
1060 int
1061 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1062 {
1063 return register_operand (op, Pmode);
1064 }
1065
1066 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1067 or a hard register. */
1068
1069 int
1070 scratch_operand (rtx op, enum machine_mode mode)
1071 {
1072 if (GET_MODE (op) != mode && mode != VOIDmode)
1073 return 0;
1074
1075 return (GET_CODE (op) == SCRATCH
1076 || (REG_P (op)
1077 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1078 }
1079
1080 /* Return 1 if OP is a valid immediate operand for mode MODE.
1081
1082 The main use of this function is as a predicate in match_operand
1083 expressions in the machine description. */
1084
1085 int
1086 immediate_operand (rtx op, enum machine_mode mode)
1087 {
1088 /* Don't accept CONST_INT or anything similar
1089 if the caller wants something floating. */
1090 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1091 && GET_MODE_CLASS (mode) != MODE_INT
1092 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1093 return 0;
1094
1095 if (CONST_INT_P (op)
1096 && mode != VOIDmode
1097 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1098 return 0;
1099
1100 return (CONSTANT_P (op)
1101 && (GET_MODE (op) == mode || mode == VOIDmode
1102 || GET_MODE (op) == VOIDmode)
1103 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1104 && LEGITIMATE_CONSTANT_P (op));
1105 }
1106
1107 /* Returns 1 if OP is an operand that is a CONST_INT. */
1108
1109 int
1110 const_int_operand (rtx op, enum machine_mode mode)
1111 {
1112 if (!CONST_INT_P (op))
1113 return 0;
1114
1115 if (mode != VOIDmode
1116 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1117 return 0;
1118
1119 return 1;
1120 }
1121
1122 /* Returns 1 if OP is an operand that is a constant integer or constant
1123 floating-point number. */
1124
1125 int
1126 const_double_operand (rtx op, enum machine_mode mode)
1127 {
1128 /* Don't accept CONST_INT or anything similar
1129 if the caller wants something floating. */
1130 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1131 && GET_MODE_CLASS (mode) != MODE_INT
1132 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1133 return 0;
1134
1135 return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
1136 && (mode == VOIDmode || GET_MODE (op) == mode
1137 || GET_MODE (op) == VOIDmode));
1138 }
1139
1140 /* Return 1 if OP is a general operand that is not an immediate operand. */
1141
1142 int
1143 nonimmediate_operand (rtx op, enum machine_mode mode)
1144 {
1145 return (general_operand (op, mode) && ! CONSTANT_P (op));
1146 }
1147
1148 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1149
1150 int
1151 nonmemory_operand (rtx op, enum machine_mode mode)
1152 {
1153 if (CONSTANT_P (op))
1154 {
1155 /* Don't accept CONST_INT or anything similar
1156 if the caller wants something floating. */
1157 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1158 && GET_MODE_CLASS (mode) != MODE_INT
1159 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1160 return 0;
1161
1162 if (CONST_INT_P (op)
1163 && mode != VOIDmode
1164 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1165 return 0;
1166
1167 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1168 || mode == VOIDmode)
1169 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1170 && LEGITIMATE_CONSTANT_P (op));
1171 }
1172
1173 if (GET_MODE (op) != mode && mode != VOIDmode)
1174 return 0;
1175
1176 if (GET_CODE (op) == SUBREG)
1177 {
1178 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1179 because it is guaranteed to be reloaded into one.
1180 Just make sure the MEM is valid in itself.
1181 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1182 but currently it does result from (SUBREG (REG)...) where the
1183 reg went on the stack.) */
1184 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1185 return general_operand (op, mode);
1186 op = SUBREG_REG (op);
1187 }
1188
1189 /* We don't consider registers whose class is NO_REGS
1190 to be register operands. */
1191 return (REG_P (op)
1192 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1193 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1194 }
1195
1196 /* Return 1 if OP is a valid operand that stands for pushing a
1197 value of mode MODE onto the stack.
1198
1199 The main use of this function is as a predicate in match_operand
1200 expressions in the machine description. */
1201
1202 int
1203 push_operand (rtx op, enum machine_mode mode)
1204 {
1205 unsigned int rounded_size = GET_MODE_SIZE (mode);
1206
1207 #ifdef PUSH_ROUNDING
1208 rounded_size = PUSH_ROUNDING (rounded_size);
1209 #endif
1210
1211 if (!MEM_P (op))
1212 return 0;
1213
1214 if (mode != VOIDmode && GET_MODE (op) != mode)
1215 return 0;
1216
1217 op = XEXP (op, 0);
1218
1219 if (rounded_size == GET_MODE_SIZE (mode))
1220 {
1221 if (GET_CODE (op) != STACK_PUSH_CODE)
1222 return 0;
1223 }
1224 else
1225 {
1226 if (GET_CODE (op) != PRE_MODIFY
1227 || GET_CODE (XEXP (op, 1)) != PLUS
1228 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1229 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1230 #ifdef STACK_GROWS_DOWNWARD
1231 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1232 #else
1233 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1234 #endif
1235 )
1236 return 0;
1237 }
1238
1239 return XEXP (op, 0) == stack_pointer_rtx;
1240 }
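/* Illustrative sketch (under #if 0): on a STACK_GROWS_DOWNWARD target with no
   PUSH_ROUNDING adjustment, the rtl that push_operand accepts for SImode is a
   MEM whose address applies STACK_PUSH_CODE (here PRE_DEC) to the stack
   pointer, i.e. (mem:SI (pre_dec (reg sp))).  */
#if 0
static rtx
make_simple_push_mem (void)
{
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  return gen_rtx_MEM (SImode, addr);	/* push_operand of this MEM is nonzero.  */
}
#endif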
1241
1242 /* Return 1 if OP is a valid operand that stands for popping a
1243 value of mode MODE off the stack.
1244
1245 The main use of this function is as a predicate in match_operand
1246 expressions in the machine description. */
1247
1248 int
1249 pop_operand (rtx op, enum machine_mode mode)
1250 {
1251 if (!MEM_P (op))
1252 return 0;
1253
1254 if (mode != VOIDmode && GET_MODE (op) != mode)
1255 return 0;
1256
1257 op = XEXP (op, 0);
1258
1259 if (GET_CODE (op) != STACK_POP_CODE)
1260 return 0;
1261
1262 return XEXP (op, 0) == stack_pointer_rtx;
1263 }
1264
1265 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1266
1267 int
1268 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1269 {
1270 #ifdef GO_IF_LEGITIMATE_ADDRESS
1271 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1272 return 0;
1273
1274 win:
1275 return 1;
1276 #else
1277 return targetm.legitimate_address_p (mode, addr, 0);
1278 #endif
1279 }
1280
1281 /* Return 1 if OP is a valid memory reference with mode MODE,
1282 including a valid address.
1283
1284 The main use of this function is as a predicate in match_operand
1285 expressions in the machine description. */
1286
1287 int
1288 memory_operand (rtx op, enum machine_mode mode)
1289 {
1290 rtx inner;
1291
1292 if (! reload_completed)
1293 /* Note that no SUBREG is a memory operand before the end of the reload pass,
1294 because (SUBREG (MEM...)) forces reloading into a register. */
1295 return MEM_P (op) && general_operand (op, mode);
1296
1297 if (mode != VOIDmode && GET_MODE (op) != mode)
1298 return 0;
1299
1300 inner = op;
1301 if (GET_CODE (inner) == SUBREG)
1302 inner = SUBREG_REG (inner);
1303
1304 return (MEM_P (inner) && general_operand (op, mode));
1305 }
1306
1307 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1308 that is, a memory reference whose address is a general_operand. */
1309
1310 int
1311 indirect_operand (rtx op, enum machine_mode mode)
1312 {
1313 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1314 if (! reload_completed
1315 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1316 {
1317 int offset = SUBREG_BYTE (op);
1318 rtx inner = SUBREG_REG (op);
1319
1320 if (mode != VOIDmode && GET_MODE (op) != mode)
1321 return 0;
1322
1323 /* The only way that we can have a general_operand as the resulting
1324 address is if OFFSET is zero and the address already is an operand
1325 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1326 operand. */
1327
1328 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1329 || (GET_CODE (XEXP (inner, 0)) == PLUS
1330 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1331 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1332 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1333 }
1334
1335 return (MEM_P (op)
1336 && memory_operand (op, mode)
1337 && general_operand (XEXP (op, 0), Pmode));
1338 }
1339
1340 /* Return 1 if this is an ordered comparison operator (not including
1341 ORDERED and UNORDERED). */
1342
1343 int
1344 ordered_comparison_operator (rtx op, enum machine_mode mode)
1345 {
1346 if (mode != VOIDmode && GET_MODE (op) != mode)
1347 return false;
1348 switch (GET_CODE (op))
1349 {
1350 case EQ:
1351 case NE:
1352 case LT:
1353 case LTU:
1354 case LE:
1355 case LEU:
1356 case GT:
1357 case GTU:
1358 case GE:
1359 case GEU:
1360 return true;
1361 default:
1362 return false;
1363 }
1364 }
1365
1366 /* Return 1 if this is a comparison operator. This allows the use of
1367 MATCH_OPERATOR to recognize all the branch insns. */
1368
1369 int
1370 comparison_operator (rtx op, enum machine_mode mode)
1371 {
1372 return ((mode == VOIDmode || GET_MODE (op) == mode)
1373 && COMPARISON_P (op));
1374 }
1375 \f
1376 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1377
1378 rtx
1379 extract_asm_operands (rtx body)
1380 {
1381 rtx tmp;
1382 switch (GET_CODE (body))
1383 {
1384 case ASM_OPERANDS:
1385 return body;
1386
1387 case SET:
1388 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1389 tmp = SET_SRC (body);
1390 if (GET_CODE (tmp) == ASM_OPERANDS)
1391 return tmp;
1392 break;
1393
1394 case PARALLEL:
1395 tmp = XVECEXP (body, 0, 0);
1396 if (GET_CODE (tmp) == ASM_OPERANDS)
1397 return tmp;
1398 if (GET_CODE (tmp) == SET)
1399 {
1400 tmp = SET_SRC (tmp);
1401 if (GET_CODE (tmp) == ASM_OPERANDS)
1402 return tmp;
1403 }
1404 break;
1405
1406 default:
1407 break;
1408 }
1409 return NULL;
1410 }
1411
1412 /* If BODY is an insn body that uses ASM_OPERANDS,
1413 return the number of operands (both input and output) in the insn.
1414 Otherwise return -1. */
1415
1416 int
1417 asm_noperands (const_rtx body)
1418 {
1419 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1420 int n_sets = 0;
1421
1422 if (asm_op == NULL)
1423 return -1;
1424
1425 if (GET_CODE (body) == SET)
1426 n_sets = 1;
1427 else if (GET_CODE (body) == PARALLEL)
1428 {
1429 int i;
1430 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1431 {
1432 /* Multiple output operands, or 1 output plus some clobbers:
1433 body is
1434 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1435 /* Count backwards through CLOBBERs to determine number of SETs. */
1436 for (i = XVECLEN (body, 0); i > 0; i--)
1437 {
1438 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1439 break;
1440 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1441 return -1;
1442 }
1443
1444 /* N_SETS is now number of output operands. */
1445 n_sets = i;
1446
1447 /* Verify that all the SETs we have
1448 came from a single original asm_operands insn
1449 (so that invalid combinations are blocked). */
1450 for (i = 0; i < n_sets; i++)
1451 {
1452 rtx elt = XVECEXP (body, 0, i);
1453 if (GET_CODE (elt) != SET)
1454 return -1;
1455 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1456 return -1;
1457 /* If these ASM_OPERANDS rtx's came from different original insns
1458 then they aren't allowed together. */
1459 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1460 != ASM_OPERANDS_INPUT_VEC (asm_op))
1461 return -1;
1462 }
1463 }
1464 else
1465 {
1466 /* 0 outputs, but some clobbers:
1467 body is [(asm_operands ...) (clobber (reg ...))...]. */
1468 /* Make sure all the other parallel things really are clobbers. */
1469 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1470 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1471 return -1;
1472 }
1473 }
1474
1475 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1476 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1477 }
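/* Illustrative sketch (under #if 0): for an asm statement with two outputs,
   one input and a clobber, the body counted above is a PARALLEL of the form
     (parallel [(set OUT0 (asm_operands ...))
                (set OUT1 (asm_operands ...))
                (clobber (reg ...))])
   so asm_noperands returns 2 outputs + 1 input + 0 labels = 3.  */
#if 0
static int
count_asm_operands (rtx insn)
{
  int n = asm_noperands (PATTERN (insn));
  return n < 0 ? 0 : n;	/* A negative result means not an asm with operands.  */
}
#endif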
1478
1479 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1480 copy its operands (both input and output) into the vector OPERANDS,
1481 the locations of the operands within the insn into the vector OPERAND_LOCS,
1482 and the constraints for the operands into CONSTRAINTS.
1483 Write the modes of the operands into MODES.
1484 Return the assembler-template.
1485
1486 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1487 we don't store that info. */
1488
1489 const char *
1490 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1491 const char **constraints, enum machine_mode *modes,
1492 location_t *loc)
1493 {
1494 int noperands, nbase = 0, n, i;
1495 rtx asmop;
1496
1497 switch (GET_CODE (body))
1498 {
1499 case ASM_OPERANDS:
1500 /* Zero output asm: BODY is (asm_operands ...). */
1501 asmop = body;
1502 break;
1503
1504 case SET:
1505 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1506 asmop = SET_SRC (body);
1507
1508 /* The output is in the SET.
1509 Its constraint is in the ASM_OPERANDS itself. */
1510 if (operands)
1511 operands[0] = SET_DEST (body);
1512 if (operand_locs)
1513 operand_locs[0] = &SET_DEST (body);
1514 if (constraints)
1515 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1516 if (modes)
1517 modes[0] = GET_MODE (SET_DEST (body));
1518 nbase = 1;
1519 break;
1520
1521 case PARALLEL:
1522 {
1523 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1524
1525 asmop = XVECEXP (body, 0, 0);
1526 if (GET_CODE (asmop) == SET)
1527 {
1528 asmop = SET_SRC (asmop);
1529
1530 /* At least one output, plus some CLOBBERs. The outputs are in
1531 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1532 for (i = 0; i < nparallel; i++)
1533 {
1534 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1535 break; /* Past last SET */
1536 if (operands)
1537 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1538 if (operand_locs)
1539 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1540 if (constraints)
1541 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1542 if (modes)
1543 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1544 }
1545 nbase = i;
1546 }
1547 break;
1548 }
1549
1550 default:
1551 gcc_unreachable ();
1552 }
1553
1554 noperands = (ASM_OPERANDS_INPUT_LENGTH (asmop)
1555 + ASM_OPERANDS_LABEL_LENGTH (asmop) + nbase);
1556
1557 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1558 for (i = 0; i < n; i++)
1559 {
1560 if (operand_locs)
1561 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1562 if (operands)
1563 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1564 if (constraints)
1565 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1566 if (modes)
1567 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1568 }
1569 nbase += n;
1570
1571 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1572 for (i = 0; i < n; i++)
1573 {
1574 if (operand_locs)
1575 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1576 if (operands)
1577 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1578 if (constraints)
1579 constraints[nbase + i] = "";
1580 if (modes)
1581 modes[nbase + i] = Pmode;
1582 }
1583
1584 if (loc)
1585 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1586
1587 return ASM_OPERANDS_TEMPLATE (asmop);
1588 }
1589
1590 /* Check if an asm_operand matches its constraints.
1591 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1592
1593 int
1594 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1595 {
1596 int result = 0;
1597
1598 /* Use constrain_operands after reload. */
1599 gcc_assert (!reload_completed);
1600
1601 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1602 many alternatives as required to match the other operands. */
1603 if (*constraint == '\0')
1604 return 1;
1605
1606 while (*constraint)
1607 {
1608 char c = *constraint;
1609 int len;
1610 switch (c)
1611 {
1612 case ',':
1613 constraint++;
1614 continue;
1615 case '=':
1616 case '+':
1617 case '*':
1618 case '%':
1619 case '!':
1620 case '#':
1621 case '&':
1622 case '?':
1623 break;
1624
1625 case '0': case '1': case '2': case '3': case '4':
1626 case '5': case '6': case '7': case '8': case '9':
1627 /* If the caller provided a constraints pointer, look up
1628 the matching constraint. Otherwise, our caller should have
1629 given us the proper matching constraint, but we can't
1630 actually fail the check if they didn't. Indicate that
1631 results are inconclusive. */
1632 if (constraints)
1633 {
1634 char *end;
1635 unsigned long match;
1636
1637 match = strtoul (constraint, &end, 10);
1638 if (!result)
1639 result = asm_operand_ok (op, constraints[match], NULL);
1640 constraint = (const char *) end;
1641 }
1642 else
1643 {
1644 do
1645 constraint++;
1646 while (ISDIGIT (*constraint));
1647 if (! result)
1648 result = -1;
1649 }
1650 continue;
1651
1652 case 'p':
1653 if (address_operand (op, VOIDmode))
1654 result = 1;
1655 break;
1656
1657 case TARGET_MEM_CONSTRAINT:
1658 case 'V': /* non-offsettable */
1659 if (memory_operand (op, VOIDmode))
1660 result = 1;
1661 break;
1662
1663 case 'o': /* offsettable */
1664 if (offsettable_nonstrict_memref_p (op))
1665 result = 1;
1666 break;
1667
1668 case '<':
1669 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1670 excepting those that expand_call created. Further, on some
1671 machines which do not have generalized auto inc/dec, an inc/dec
1672 is not a memory_operand.
1673
1674 Match any memory and hope things are resolved after reload. */
1675
1676 if (MEM_P (op)
1677 && (1
1678 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1679 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1680 result = 1;
1681 break;
1682
1683 case '>':
1684 if (MEM_P (op)
1685 && (1
1686 || GET_CODE (XEXP (op, 0)) == PRE_INC
1687 || GET_CODE (XEXP (op, 0)) == POST_INC))
1688 result = 1;
1689 break;
1690
1691 case 'E':
1692 case 'F':
1693 if (GET_CODE (op) == CONST_DOUBLE
1694 || (GET_CODE (op) == CONST_VECTOR
1695 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1696 result = 1;
1697 break;
1698
1699 case 'G':
1700 if (GET_CODE (op) == CONST_DOUBLE
1701 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1702 result = 1;
1703 break;
1704 case 'H':
1705 if (GET_CODE (op) == CONST_DOUBLE
1706 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1707 result = 1;
1708 break;
1709
1710 case 's':
1711 if (CONST_INT_P (op)
1712 || (GET_CODE (op) == CONST_DOUBLE
1713 && GET_MODE (op) == VOIDmode))
1714 break;
1715 /* Fall through. */
1716
1717 case 'i':
1718 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1719 result = 1;
1720 break;
1721
1722 case 'n':
1723 if (CONST_INT_P (op)
1724 || (GET_CODE (op) == CONST_DOUBLE
1725 && GET_MODE (op) == VOIDmode))
1726 result = 1;
1727 break;
1728
1729 case 'I':
1730 if (CONST_INT_P (op)
1731 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1732 result = 1;
1733 break;
1734 case 'J':
1735 if (CONST_INT_P (op)
1736 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1737 result = 1;
1738 break;
1739 case 'K':
1740 if (CONST_INT_P (op)
1741 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1742 result = 1;
1743 break;
1744 case 'L':
1745 if (CONST_INT_P (op)
1746 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1747 result = 1;
1748 break;
1749 case 'M':
1750 if (CONST_INT_P (op)
1751 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1752 result = 1;
1753 break;
1754 case 'N':
1755 if (CONST_INT_P (op)
1756 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1757 result = 1;
1758 break;
1759 case 'O':
1760 if (CONST_INT_P (op)
1761 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1762 result = 1;
1763 break;
1764 case 'P':
1765 if (CONST_INT_P (op)
1766 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1767 result = 1;
1768 break;
1769
1770 case 'X':
1771 result = 1;
1772 break;
1773
1774 case 'g':
1775 if (general_operand (op, VOIDmode))
1776 result = 1;
1777 break;
1778
1779 default:
1780 /* For all other letters, we first check for a register class,
1781 otherwise it is an EXTRA_CONSTRAINT. */
1782 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1783 {
1784 case 'r':
1785 if (GET_MODE (op) == BLKmode)
1786 break;
1787 if (register_operand (op, VOIDmode))
1788 result = 1;
1789 }
1790 #ifdef EXTRA_CONSTRAINT_STR
1791 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1792 /* Every memory operand can be reloaded to fit. */
1793 result = result || memory_operand (op, VOIDmode);
1794 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1795 /* Every address operand can be reloaded to fit. */
1796 result = result || address_operand (op, VOIDmode);
1797 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1798 result = 1;
1799 #endif
1800 break;
1801 }
1802 len = CONSTRAINT_LEN (c, constraint);
1803 do
1804 constraint++;
1805 while (--len && *constraint);
1806 if (len)
1807 return 0;
1808 }
1809
1810 return result;
1811 }
1812 \f
1813 /* Given an rtx *P, if it is a sum containing an integer constant term,
1814 return the location (type rtx *) of the pointer to that constant term.
1815 Otherwise, return a null pointer. */
1816
1817 rtx *
1818 find_constant_term_loc (rtx *p)
1819 {
1820 rtx *tem;
1821 enum rtx_code code = GET_CODE (*p);
1822
1823 /* If *P IS such a constant term, P is its location. */
1824
1825 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1826 || code == CONST)
1827 return p;
1828
1829 /* Otherwise, if not a sum, it has no constant term. */
1830
1831 if (GET_CODE (*p) != PLUS)
1832 return 0;
1833
1834 /* If one of the summands is constant, return its location. */
1835
1836 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1837 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1838 return p;
1839
1840 /* Otherwise, check each summand for containing a constant term. */
1841
1842 if (XEXP (*p, 0) != 0)
1843 {
1844 tem = find_constant_term_loc (&XEXP (*p, 0));
1845 if (tem != 0)
1846 return tem;
1847 }
1848
1849 if (XEXP (*p, 1) != 0)
1850 {
1851 tem = find_constant_term_loc (&XEXP (*p, 1));
1852 if (tem != 0)
1853 return tem;
1854 }
1855
1856 return 0;
1857 }
1858 \f
1859 /* Return 1 if OP is a memory reference
1860 whose address contains no side effects
1861 and remains valid after the addition
1862 of a positive integer less than the
1863 size of the object being referenced.
1864
1865 We assume that the original address is valid and do not check it.
1866
1867 This uses strict_memory_address_p as a subroutine, so
1868 don't use it before reload. */
1869
1870 int
1871 offsettable_memref_p (rtx op)
1872 {
1873 return ((MEM_P (op))
1874 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1875 }
1876
1877 /* Similar, but don't require a strictly valid mem ref:
1878 consider pseudo-regs valid as index or base regs. */
1879
1880 int
1881 offsettable_nonstrict_memref_p (rtx op)
1882 {
1883 return ((MEM_P (op))
1884 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1885 }
1886
1887 /* Return 1 if Y is a memory address which contains no side effects
1888 and would remain valid after the addition of a positive integer
1889 less than the size of that mode.
1890
1891 We assume that the original address is valid and do not check it.
1892 We do check that it is valid for narrower modes.
1893
1894 If STRICTP is nonzero, we require a strictly valid address,
1895 for the sake of use in reload.c. */
1896
1897 int
1898 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1899 {
1900 enum rtx_code ycode = GET_CODE (y);
1901 rtx z;
1902 rtx y1 = y;
1903 rtx *y2;
1904 int (*addressp) (enum machine_mode, rtx) =
1905 (strictp ? strict_memory_address_p : memory_address_p);
1906 unsigned int mode_sz = GET_MODE_SIZE (mode);
1907
1908 if (CONSTANT_ADDRESS_P (y))
1909 return 1;
1910
1911 /* Adjusting an offsettable address involves changing to a narrower mode.
1912 Make sure that's OK. */
1913
1914 if (mode_dependent_address_p (y))
1915 return 0;
1916
1917 /* ??? How much offset does an offsettable BLKmode reference need?
1918 Clearly that depends on the situation in which it's being used.
1919 However, the current situation in which we test 0xffffffff is
1920 less than ideal. Caveat user. */
1921 if (mode_sz == 0)
1922 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1923
1924 /* If the expression contains a constant term,
1925 see if it remains valid when max possible offset is added. */
1926
1927 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1928 {
1929 int good;
1930
1931 y1 = *y2;
1932 *y2 = plus_constant (*y2, mode_sz - 1);
1933 /* Use QImode because an odd displacement may be automatically invalid
1934 for any wider mode. But it should be valid for a single byte. */
1935 good = (*addressp) (QImode, y);
1936
1937 /* In any case, restore old contents of memory. */
1938 *y2 = y1;
1939 return good;
1940 }
1941
1942 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1943 return 0;
1944
1945 /* The offset added here is chosen as the maximum offset that
1946 any instruction could need to add when operating on something
1947 of the specified mode. We assume that if Y and Y+c are
1948 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1949 go inside a LO_SUM here, so we do so as well. */
1950 if (GET_CODE (y) == LO_SUM
1951 && mode != BLKmode
1952 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1953 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1954 plus_constant (XEXP (y, 1), mode_sz - 1));
1955 else
1956 z = plus_constant (y, mode_sz - 1);
1957
1958 /* Use QImode because an odd displacement may be automatically invalid
1959 for any wider mode. But it should be valid for a single byte. */
1960 return (*addressp) (QImode, z);
1961 }
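
/* Illustrative sketch (not part of the original file): a typical caller
   checks offsettability before using adjust_address to access part of a
   wider MEM.  The helper name and the DImode/SImode choice below are
   hypothetical, just to show the shape of the check.  */
#if 0
static rtx
example_access_high_part (rtx mem)
{
  /* MEM is assumed to be a DImode memory reference.  If its address stays
     valid when up to GET_MODE_SIZE (DImode) - 1 is added, it is safe to
     form a narrower reference at a byte offset inside it.  */
  if (offsettable_memref_p (mem))
    return adjust_address (mem, SImode, GET_MODE_SIZE (SImode));
  return NULL_RTX;
}
#endif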
1962
1963 /* Return 1 if ADDR is an address-expression whose effect depends
1964 on the mode of the memory reference it is used in.
1965
1966 Autoincrement addressing is a typical example of mode-dependence
1967 because the amount of the increment depends on the mode. */
1968
1969 int
1970 mode_dependent_address_p (rtx addr)
1971 {
1972 /* Auto-increment addressing with anything other than post_modify
1973 or pre_modify always introduces a mode dependency. Catch such
1974 cases now instead of deferring to the target. */
1975 if (GET_CODE (addr) == PRE_INC
1976 || GET_CODE (addr) == POST_INC
1977 || GET_CODE (addr) == PRE_DEC
1978 || GET_CODE (addr) == POST_DEC)
1979 return 1;
1980
1981 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1982 return 0;
1983 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1984 win: ATTRIBUTE_UNUSED_LABEL
1985 return 1;
1986 }
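
/* Illustrative sketch (not part of the original file): an auto-increment
   address is the canonical mode-dependent case, because the increment
   amount is the size of the access mode.  The register number is
   hypothetical.  */
#if 0
static void
example_mode_dependent_address (void)
{
  rtx base = gen_rtx_REG (Pmode, 1);
  rtx autoinc = gen_rtx_POST_INC (Pmode, base);

  gcc_assert (mode_dependent_address_p (autoinc));
  /* A plain register address is not mode-dependent on typical targets.  */
  gcc_assert (!mode_dependent_address_p (base));
}
#endif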
1987 \f
1988 /* Like extract_insn, but save the extracted insn and don't extract it again
1989 when called again for the same insn, expecting that recog_data still contains
1990 the valid information. This is used primarily by the gen_attr infrastructure,
1991 which often extracts the same insn again and again. */
1992 void
1993 extract_insn_cached (rtx insn)
1994 {
1995 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1996 return;
1997 extract_insn (insn);
1998 recog_data.insn = insn;
1999 }
2000
2001 /* Do cached extract_insn, constrain_operands and complain about failures.
2002 Used by insn_attrtab. */
2003 void
2004 extract_constrain_insn_cached (rtx insn)
2005 {
2006 extract_insn_cached (insn);
2007 if (which_alternative == -1
2008 && !constrain_operands (reload_completed))
2009 fatal_insn_not_found (insn);
2010 }
2011
2012 /* Do cached constrain_operands and return the result without complaining. */
2013 int
2014 constrain_operands_cached (int strict)
2015 {
2016 if (which_alternative == -1)
2017 return constrain_operands (strict);
2018 else
2019 return 1;
2020 }
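
/* Illustrative sketch (not part of the original file): attribute-style code
   normally goes through the cached entry points so that repeated queries on
   the same insn do not re-extract it.  The helper name is hypothetical.  */
#if 0
static int
example_query_alternative (rtx insn)
{
  /* Fills recog_data and sets which_alternative, aborting via
     fatal_insn_not_found if the insn cannot be recognized.  */
  extract_constrain_insn_cached (insn);
  return which_alternative;
}
#endif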
2021 \f
2022 /* Analyze INSN and fill in recog_data. */
2023
2024 void
2025 extract_insn (rtx insn)
2026 {
2027 int i;
2028 int icode;
2029 int noperands;
2030 rtx body = PATTERN (insn);
2031
2032 recog_data.n_operands = 0;
2033 recog_data.n_alternatives = 0;
2034 recog_data.n_dups = 0;
2035
2036 switch (GET_CODE (body))
2037 {
2038 case USE:
2039 case CLOBBER:
2040 case ASM_INPUT:
2041 case ADDR_VEC:
2042 case ADDR_DIFF_VEC:
2043 case VAR_LOCATION:
2044 return;
2045
2046 case SET:
2047 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2048 goto asm_insn;
2049 else
2050 goto normal_insn;
2051 case PARALLEL:
2052 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2053 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2054 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2055 goto asm_insn;
2056 else
2057 goto normal_insn;
2058 case ASM_OPERANDS:
2059 asm_insn:
2060 recog_data.n_operands = noperands = asm_noperands (body);
2061 if (noperands >= 0)
2062 {
2063 /* This insn is an `asm' with operands. */
2064
2065 /* expand_asm_operands makes sure there aren't too many operands. */
2066 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2067
2068 /* Now get the operand values and constraints out of the insn. */
2069 decode_asm_operands (body, recog_data.operand,
2070 recog_data.operand_loc,
2071 recog_data.constraints,
2072 recog_data.operand_mode, NULL);
2073 if (noperands > 0)
2074 {
2075 const char *p = recog_data.constraints[0];
2076 recog_data.n_alternatives = 1;
2077 while (*p)
2078 recog_data.n_alternatives += (*p++ == ',');
2079 }
2080 break;
2081 }
2082 fatal_insn_not_found (insn);
2083
2084 default:
2085 normal_insn:
2086 /* Ordinary insn: recognize it, get the operands via insn_extract
2087 and get the constraints. */
2088
2089 icode = recog_memoized (insn);
2090 if (icode < 0)
2091 fatal_insn_not_found (insn);
2092
2093 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2094 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2095 recog_data.n_dups = insn_data[icode].n_dups;
2096
2097 insn_extract (insn);
2098
2099 for (i = 0; i < noperands; i++)
2100 {
2101 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2102 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2103 /* A VOIDmode match_operand gets its mode from the real operand. */
2104 if (recog_data.operand_mode[i] == VOIDmode)
2105 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2106 }
2107 }
2108 for (i = 0; i < noperands; i++)
2109 recog_data.operand_type[i]
2110 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2111 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2112 : OP_IN);
2113
2114 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2115
2116 if (INSN_CODE (insn) < 0)
2117 for (i = 0; i < recog_data.n_alternatives; i++)
2118 recog_data.alternative_enabled_p[i] = true;
2119 else
2120 {
2121 recog_data.insn = insn;
2122 for (i = 0; i < recog_data.n_alternatives; i++)
2123 {
2124 which_alternative = i;
2125 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2126 }
2127 }
2128
2129 recog_data.insn = NULL;
2130 which_alternative = -1;
2131 }
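
/* Illustrative sketch (not part of the original file): after extract_insn,
   the operands, their modes and constraint strings are available in
   recog_data.  The dump helper below is hypothetical.  */
#if 0
static void
example_dump_operands (FILE *file, rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (file, "operand %d: mode %s, constraint \"%s\", type %s\n",
	     i,
	     GET_MODE_NAME (recog_data.operand_mode[i]),
	     recog_data.constraints[i],
	     recog_data.operand_type[i] == OP_OUT ? "out"
	     : recog_data.operand_type[i] == OP_INOUT ? "inout" : "in");
}
#endif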
2132
2133 /* After calling extract_insn, you can use this function to extract some
2134 information from the constraint strings into a more usable form.
2135 The collected data is stored in recog_op_alt. */
2136 void
2137 preprocess_constraints (void)
2138 {
2139 int i;
2140
2141 for (i = 0; i < recog_data.n_operands; i++)
2142 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2143 * sizeof (struct operand_alternative)));
2144
2145 for (i = 0; i < recog_data.n_operands; i++)
2146 {
2147 int j;
2148 struct operand_alternative *op_alt;
2149 const char *p = recog_data.constraints[i];
2150
2151 op_alt = recog_op_alt[i];
2152
2153 for (j = 0; j < recog_data.n_alternatives; j++)
2154 {
2155 op_alt[j].cl = NO_REGS;
2156 op_alt[j].constraint = p;
2157 op_alt[j].matches = -1;
2158 op_alt[j].matched = -1;
2159
2160 if (!recog_data.alternative_enabled_p[j])
2161 {
2162 p = skip_alternative (p);
2163 continue;
2164 }
2165
2166 if (*p == '\0' || *p == ',')
2167 {
2168 op_alt[j].anything_ok = 1;
2169 continue;
2170 }
2171
2172 for (;;)
2173 {
2174 char c = *p;
2175 if (c == '#')
2176 do
2177 c = *++p;
2178 while (c != ',' && c != '\0');
2179 if (c == ',' || c == '\0')
2180 {
2181 p++;
2182 break;
2183 }
2184
2185 switch (c)
2186 {
2187 case '=': case '+': case '*': case '%':
2188 case 'E': case 'F': case 'G': case 'H':
2189 case 's': case 'i': case 'n':
2190 case 'I': case 'J': case 'K': case 'L':
2191 case 'M': case 'N': case 'O': case 'P':
2192 /* These don't say anything we care about. */
2193 break;
2194
2195 case '?':
2196 op_alt[j].reject += 6;
2197 break;
2198 case '!':
2199 op_alt[j].reject += 600;
2200 break;
2201 case '&':
2202 op_alt[j].earlyclobber = 1;
2203 break;
2204
2205 case '0': case '1': case '2': case '3': case '4':
2206 case '5': case '6': case '7': case '8': case '9':
2207 {
2208 char *end;
2209 op_alt[j].matches = strtoul (p, &end, 10);
2210 recog_op_alt[op_alt[j].matches][j].matched = i;
2211 p = end;
2212 }
2213 continue;
2214
2215 case TARGET_MEM_CONSTRAINT:
2216 op_alt[j].memory_ok = 1;
2217 break;
2218 case '<':
2219 op_alt[j].decmem_ok = 1;
2220 break;
2221 case '>':
2222 op_alt[j].incmem_ok = 1;
2223 break;
2224 case 'V':
2225 op_alt[j].nonoffmem_ok = 1;
2226 break;
2227 case 'o':
2228 op_alt[j].offmem_ok = 1;
2229 break;
2230 case 'X':
2231 op_alt[j].anything_ok = 1;
2232 break;
2233
2234 case 'p':
2235 op_alt[j].is_address = 1;
2236 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2237 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2238 break;
2239
2240 case 'g':
2241 case 'r':
2242 op_alt[j].cl =
2243 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2244 break;
2245
2246 default:
2247 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2248 {
2249 op_alt[j].memory_ok = 1;
2250 break;
2251 }
2252 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2253 {
2254 op_alt[j].is_address = 1;
2255 op_alt[j].cl
2256 = (reg_class_subunion
2257 [(int) op_alt[j].cl]
2258 [(int) base_reg_class (VOIDmode, ADDRESS,
2259 SCRATCH)]);
2260 break;
2261 }
2262
2263 op_alt[j].cl
2264 = (reg_class_subunion
2265 [(int) op_alt[j].cl]
2266 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2267 break;
2268 }
2269 p += CONSTRAINT_LEN (c, p);
2270 }
2271 }
2272 }
2273 }
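
/* Illustrative sketch (not part of the original file): after
   preprocess_constraints, recog_op_alt[i][j] summarizes what operand I may
   be in alternative J without re-parsing the constraint string.  The helper
   name is hypothetical.  */
#if 0
static bool
example_operand_allows_memory (int opno, int alt)
{
  const struct operand_alternative *oa = &recog_op_alt[opno][alt];

  /* True if this alternative accepts a memory operand directly (possibly
     only an offsettable one), or accepts anything at all.  */
  return oa->memory_ok || oa->offmem_ok || oa->anything_ok;
}
#endif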
2274
2275 /* Check the operands of an insn against the insn's operand constraints
2276 and return 1 if they are valid.
2277 The information about the insn's operands, constraints, operand modes
2278 etc. is obtained from the global variables set up by extract_insn.
2279
2280 WHICH_ALTERNATIVE is set to a number which indicates which
2281 alternative of constraints was matched: 0 for the first alternative,
2282 1 for the next, etc.
2283
2284 In addition, when two operands are required to match
2285 and it happens that the output operand is (reg) while the
2286 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2287 make the output operand look like the input.
2288 This is because the output operand is the one the template will print.
2289
2290 This is used in final, just before printing the assembler code and by
2291 the routines that determine an insn's attribute.
2292
2293 If STRICT is a positive nonzero value, it means that we have been
2294 called after reload has been completed. In that case, we must
2295 do all checks strictly. If it is zero, it means that we have been called
2296 before reload has completed. In that case, we first try to see if we can
2297 find an alternative that matches strictly. If not, we try again, this
2298 time assuming that reload will fix up the insn. This provides a "best
2299 guess" for the alternative and is used to compute attributes of insns prior
2300 to reload. A negative value of STRICT is used for this internal call. */
2301
2302 struct funny_match
2303 {
2304 int this_op, other;
2305 };
2306
2307 int
2308 constrain_operands (int strict)
2309 {
2310 const char *constraints[MAX_RECOG_OPERANDS];
2311 int matching_operands[MAX_RECOG_OPERANDS];
2312 int earlyclobber[MAX_RECOG_OPERANDS];
2313 int c;
2314
2315 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2316 int funny_match_index;
2317
2318 which_alternative = 0;
2319 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2320 return 1;
2321
2322 for (c = 0; c < recog_data.n_operands; c++)
2323 {
2324 constraints[c] = recog_data.constraints[c];
2325 matching_operands[c] = -1;
2326 }
2327
2328 do
2329 {
2330 int seen_earlyclobber_at = -1;
2331 int opno;
2332 int lose = 0;
2333 funny_match_index = 0;
2334
2335 if (!recog_data.alternative_enabled_p[which_alternative])
2336 {
2337 int i;
2338
2339 for (i = 0; i < recog_data.n_operands; i++)
2340 constraints[i] = skip_alternative (constraints[i]);
2341
2342 which_alternative++;
2343 continue;
2344 }
2345
2346 for (opno = 0; opno < recog_data.n_operands; opno++)
2347 {
2348 rtx op = recog_data.operand[opno];
2349 enum machine_mode mode = GET_MODE (op);
2350 const char *p = constraints[opno];
2351 int offset = 0;
2352 int win = 0;
2353 int val;
2354 int len;
2355
2356 earlyclobber[opno] = 0;
2357
2358 /* A unary operator may be accepted by the predicate, but it
2359 is irrelevant for matching constraints. */
2360 if (UNARY_P (op))
2361 op = XEXP (op, 0);
2362
2363 if (GET_CODE (op) == SUBREG)
2364 {
2365 if (REG_P (SUBREG_REG (op))
2366 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2367 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2368 GET_MODE (SUBREG_REG (op)),
2369 SUBREG_BYTE (op),
2370 GET_MODE (op));
2371 op = SUBREG_REG (op);
2372 }
2373
2374 /* An empty constraint or empty alternative
2375 allows anything which matched the pattern. */
2376 if (*p == 0 || *p == ',')
2377 win = 1;
2378
2379 do
2380 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2381 {
2382 case '\0':
2383 len = 0;
2384 break;
2385 case ',':
2386 c = '\0';
2387 break;
2388
2389 case '?': case '!': case '*': case '%':
2390 case '=': case '+':
2391 break;
2392
2393 case '#':
2394 /* Ignore rest of this alternative as far as
2395 constraint checking is concerned. */
2396 do
2397 p++;
2398 while (*p && *p != ',');
2399 len = 0;
2400 break;
2401
2402 case '&':
2403 earlyclobber[opno] = 1;
2404 if (seen_earlyclobber_at < 0)
2405 seen_earlyclobber_at = opno;
2406 break;
2407
2408 case '0': case '1': case '2': case '3': case '4':
2409 case '5': case '6': case '7': case '8': case '9':
2410 {
2411 /* This operand must be the same as a previous one.
2412 This kind of constraint is used for instructions such
2413 as add when they take only two operands.
2414
2415 Note that the lower-numbered operand is passed first.
2416
2417 If we are not testing strictly, assume that this
2418 constraint will be satisfied. */
2419
2420 char *end;
2421 int match;
2422
2423 match = strtoul (p, &end, 10);
2424 p = end;
2425
2426 if (strict < 0)
2427 val = 1;
2428 else
2429 {
2430 rtx op1 = recog_data.operand[match];
2431 rtx op2 = recog_data.operand[opno];
2432
2433 /* A unary operator may be accepted by the predicate,
2434 but it is irrelevant for matching constraints. */
2435 if (UNARY_P (op1))
2436 op1 = XEXP (op1, 0);
2437 if (UNARY_P (op2))
2438 op2 = XEXP (op2, 0);
2439
2440 val = operands_match_p (op1, op2);
2441 }
2442
2443 matching_operands[opno] = match;
2444 matching_operands[match] = opno;
2445
2446 if (val != 0)
2447 win = 1;
2448
2449 /* If output is *x and input is *--x, arrange later
2450 to change the output to *--x as well, since the
2451 output op is the one that will be printed. */
2452 if (val == 2 && strict > 0)
2453 {
2454 funny_match[funny_match_index].this_op = opno;
2455 funny_match[funny_match_index++].other = match;
2456 }
2457 }
2458 len = 0;
2459 break;
2460
2461 case 'p':
2462 /* p is used for address_operands. When we are called by
2463 gen_reload, no one will have checked that the address is
2464 strictly valid, i.e., that all pseudos requiring hard regs
2465 have gotten them. */
2466 if (strict <= 0
2467 || (strict_memory_address_p (recog_data.operand_mode[opno],
2468 op)))
2469 win = 1;
2470 break;
2471
2472 /* No need to check general_operand again;
2473 it was done in insn-recog.c. Well, except that reload
2474 doesn't check the validity of its replacements, but
2475 that should only matter when there's a bug. */
2476 case 'g':
2477 /* Anything goes unless it is a REG and really has a hard reg
2478 but the hard reg is not in the class GENERAL_REGS. */
2479 if (REG_P (op))
2480 {
2481 if (strict < 0
2482 || GENERAL_REGS == ALL_REGS
2483 || (reload_in_progress
2484 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2485 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2486 win = 1;
2487 }
2488 else if (strict < 0 || general_operand (op, mode))
2489 win = 1;
2490 break;
2491
2492 case 'X':
2493 /* This is used for a MATCH_SCRATCH in the cases when
2494 we don't actually need anything. So anything goes
2495 any time. */
2496 win = 1;
2497 break;
2498
2499 case TARGET_MEM_CONSTRAINT:
2500 /* Memory operands must be valid, to the extent
2501 required by STRICT. */
2502 if (MEM_P (op))
2503 {
2504 if (strict > 0
2505 && !strict_memory_address_p (GET_MODE (op),
2506 XEXP (op, 0)))
2507 break;
2508 if (strict == 0
2509 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2510 break;
2511 win = 1;
2512 }
2513 /* Before reload, accept what reload can turn into mem. */
2514 else if (strict < 0 && CONSTANT_P (op))
2515 win = 1;
2516 /* During reload, accept a pseudo */
2517 else if (reload_in_progress && REG_P (op)
2518 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2519 win = 1;
2520 break;
2521
2522 case '<':
2523 if (MEM_P (op)
2524 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2525 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2526 win = 1;
2527 break;
2528
2529 case '>':
2530 if (MEM_P (op)
2531 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2532 || GET_CODE (XEXP (op, 0)) == POST_INC))
2533 win = 1;
2534 break;
2535
2536 case 'E':
2537 case 'F':
2538 if (GET_CODE (op) == CONST_DOUBLE
2539 || (GET_CODE (op) == CONST_VECTOR
2540 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2541 win = 1;
2542 break;
2543
2544 case 'G':
2545 case 'H':
2546 if (GET_CODE (op) == CONST_DOUBLE
2547 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2548 win = 1;
2549 break;
2550
2551 case 's':
2552 if (CONST_INT_P (op)
2553 || (GET_CODE (op) == CONST_DOUBLE
2554 && GET_MODE (op) == VOIDmode))
2555 break;
2556 case 'i':
2557 if (CONSTANT_P (op))
2558 win = 1;
2559 break;
2560
2561 case 'n':
2562 if (CONST_INT_P (op)
2563 || (GET_CODE (op) == CONST_DOUBLE
2564 && GET_MODE (op) == VOIDmode))
2565 win = 1;
2566 break;
2567
2568 case 'I':
2569 case 'J':
2570 case 'K':
2571 case 'L':
2572 case 'M':
2573 case 'N':
2574 case 'O':
2575 case 'P':
2576 if (CONST_INT_P (op)
2577 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2578 win = 1;
2579 break;
2580
2581 case 'V':
2582 if (MEM_P (op)
2583 && ((strict > 0 && ! offsettable_memref_p (op))
2584 || (strict < 0
2585 && !(CONSTANT_P (op) || MEM_P (op)))
2586 || (reload_in_progress
2587 && !(REG_P (op)
2588 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2589 win = 1;
2590 break;
2591
2592 case 'o':
2593 if ((strict > 0 && offsettable_memref_p (op))
2594 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2595 /* Before reload, accept what reload can handle. */
2596 || (strict < 0
2597 && (CONSTANT_P (op) || MEM_P (op)))
2598 /* During reload, accept a pseudo */
2599 || (reload_in_progress && REG_P (op)
2600 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2601 win = 1;
2602 break;
2603
2604 default:
2605 {
2606 enum reg_class cl;
2607
2608 cl = (c == 'r'
2609 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2610 if (cl != NO_REGS)
2611 {
2612 if (strict < 0
2613 || (strict == 0
2614 && REG_P (op)
2615 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2616 || (strict == 0 && GET_CODE (op) == SCRATCH)
2617 || (REG_P (op)
2618 && reg_fits_class_p (op, cl, offset, mode)))
2619 win = 1;
2620 }
2621 #ifdef EXTRA_CONSTRAINT_STR
2622 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2623 win = 1;
2624
2625 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2626 /* Every memory operand can be reloaded to fit. */
2627 && ((strict < 0 && MEM_P (op))
2628 /* Before reload, accept what reload can turn
2629 into mem. */
2630 || (strict < 0 && CONSTANT_P (op))
2631 /* During reload, accept a pseudo */
2632 || (reload_in_progress && REG_P (op)
2633 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2634 win = 1;
2635 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2636 /* Every address operand can be reloaded to fit. */
2637 && strict < 0)
2638 win = 1;
2639 #endif
2640 break;
2641 }
2642 }
2643 while (p += len, c);
2644
2645 constraints[opno] = p;
2646 /* If this operand did not win somehow,
2647 this alternative loses. */
2648 if (! win)
2649 lose = 1;
2650 }
2651 /* This alternative won; the operands are ok.
2652 Change whichever operands this alternative says to change. */
2653 if (! lose)
2654 {
2655 int opno, eopno;
2656
2657 /* See if any earlyclobber operand conflicts with some other
2658 operand. */
2659
2660 if (strict > 0 && seen_earlyclobber_at >= 0)
2661 for (eopno = seen_earlyclobber_at;
2662 eopno < recog_data.n_operands;
2663 eopno++)
2664 /* Ignore earlyclobber operands now in memory,
2665 because we would often report failure when we have
2666 two memory operands, one of which was formerly a REG. */
2667 if (earlyclobber[eopno]
2668 && REG_P (recog_data.operand[eopno]))
2669 for (opno = 0; opno < recog_data.n_operands; opno++)
2670 if ((MEM_P (recog_data.operand[opno])
2671 || recog_data.operand_type[opno] != OP_OUT)
2672 && opno != eopno
2673 /* Ignore things like match_operator operands. */
2674 && *recog_data.constraints[opno] != 0
2675 && ! (matching_operands[opno] == eopno
2676 && operands_match_p (recog_data.operand[opno],
2677 recog_data.operand[eopno]))
2678 && ! safe_from_earlyclobber (recog_data.operand[opno],
2679 recog_data.operand[eopno]))
2680 lose = 1;
2681
2682 if (! lose)
2683 {
2684 while (--funny_match_index >= 0)
2685 {
2686 recog_data.operand[funny_match[funny_match_index].other]
2687 = recog_data.operand[funny_match[funny_match_index].this_op];
2688 }
2689
2690 return 1;
2691 }
2692 }
2693
2694 which_alternative++;
2695 }
2696 while (which_alternative < recog_data.n_alternatives);
2697
2698 which_alternative = -1;
2699 /* If we are about to reject this, but we are not to test strictly,
2700 try a very loose test. Only return failure if it fails also. */
2701 if (strict == 0)
2702 return constrain_operands (-1);
2703 else
2704 return 0;
2705 }
2706
2707 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2708 is a hard reg in class CLASS when its regno is offset by OFFSET
2709 and changed to mode MODE.
2710 If REG occupies multiple hard regs, all of them must be in CLASS. */
2711
2712 int
2713 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2714 enum machine_mode mode)
2715 {
2716 int regno = REGNO (operand);
2717
2718 if (cl == NO_REGS)
2719 return 0;
2720
2721 return (regno < FIRST_PSEUDO_REGISTER
2722 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2723 mode, regno + offset));
2724 }
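
/* Illustrative sketch (not part of the original file): reg_fits_class_p
   answers questions such as "would this hard register, accessed in MODE,
   still lie entirely within CLASS?".  The register number is
   hypothetical.  */
#if 0
static void
example_reg_fits_class (void)
{
  rtx reg = gen_rtx_REG (SImode, 0);	/* assume hard register 0 exists */

  /* With a zero offset this checks that register 0, and any additional
     hard registers an SImode value would occupy, are all in GENERAL_REGS,
     i.e. the operand strictly satisfies an 'r'-style constraint.  */
  if (reg_fits_class_p (reg, GENERAL_REGS, 0, SImode))
    ;
}
#endif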
2725 \f
2726 /* Split single instruction. Helper function for split_all_insns and
2727 split_all_insns_noflow. Return last insn in the sequence if successful,
2728 or NULL if unsuccessful. */
2729
2730 static rtx
2731 split_insn (rtx insn)
2732 {
2733 /* Split insns here to get max fine-grain parallelism. */
2734 rtx first = PREV_INSN (insn);
2735 rtx last = try_split (PATTERN (insn), insn, 1);
2736 rtx insn_set, last_set, note;
2737
2738 if (last == insn)
2739 return NULL_RTX;
2740
2741 /* If the original instruction was a single set that was known to be
2742 equivalent to a constant, see if we can say the same about the last
2743 instruction in the split sequence. The two instructions must set
2744 the same destination. */
2745 insn_set = single_set (insn);
2746 if (insn_set)
2747 {
2748 last_set = single_set (last);
2749 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2750 {
2751 note = find_reg_equal_equiv_note (insn);
2752 if (note && CONSTANT_P (XEXP (note, 0)))
2753 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2754 else if (CONSTANT_P (SET_SRC (insn_set)))
2755 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2756 }
2757 }
2758
2759 /* try_split returns the NOTE that INSN became. */
2760 SET_INSN_DELETED (insn);
2761
2762 /* ??? Coddle to md files that generate subregs in post-reload
2763 splitters instead of computing the proper hard register. */
2764 if (reload_completed && first != last)
2765 {
2766 first = NEXT_INSN (first);
2767 for (;;)
2768 {
2769 if (INSN_P (first))
2770 cleanup_subreg_operands (first);
2771 if (first == last)
2772 break;
2773 first = NEXT_INSN (first);
2774 }
2775 }
2776
2777 return last;
2778 }
2779
2780 /* Split all insns in the function. */
2781
2782 void
2783 split_all_insns (void)
2784 {
2785 sbitmap blocks;
2786 bool changed;
2787 basic_block bb;
2788
2789 blocks = sbitmap_alloc (last_basic_block);
2790 sbitmap_zero (blocks);
2791 changed = false;
2792
2793 FOR_EACH_BB_REVERSE (bb)
2794 {
2795 rtx insn, next;
2796 bool finish = false;
2797
2798 rtl_profile_for_bb (bb);
2799 for (insn = BB_HEAD (bb); !finish ; insn = next)
2800 {
2801 /* Can't use `next_real_insn' because that might go across
2802 CODE_LABELS and short-out basic blocks. */
2803 next = NEXT_INSN (insn);
2804 finish = (insn == BB_END (bb));
2805 if (INSN_P (insn))
2806 {
2807 rtx set = single_set (insn);
2808
2809 /* Don't split no-op move insns. These should silently
2810 disappear later in final. Splitting such insns would
2811 break the code that handles LIBCALL blocks. */
2812 if (set && set_noop_p (set))
2813 {
2814 /* Nops get in the way while scheduling, so delete them
2815 now if register allocation has already been done. It
2816 is too risky to try to do this before register
2817 allocation, and there are unlikely to be very many
2818 nops then anyways. */
2819 if (reload_completed)
2820 delete_insn_and_edges (insn);
2821 }
2822 else
2823 {
2824 rtx last = split_insn (insn);
2825 if (last)
2826 {
2827 /* The split sequence may include a barrier, but the
2828 BB boundary we are interested in will be set to the
2829 previous one. */
2830
2831 while (BARRIER_P (last))
2832 last = PREV_INSN (last);
2833 SET_BIT (blocks, bb->index);
2834 changed = true;
2835 }
2836 }
2837 }
2838 }
2839 }
2840
2841 default_rtl_profile ();
2842 if (changed)
2843 find_many_sub_basic_blocks (blocks);
2844
2845 #ifdef ENABLE_CHECKING
2846 verify_flow_info ();
2847 #endif
2848
2849 sbitmap_free (blocks);
2850 }
2851
2852 /* Same as split_all_insns, but do not expect CFG to be available.
2853 Used by machine dependent reorg passes. */
2854
2855 unsigned int
2856 split_all_insns_noflow (void)
2857 {
2858 rtx next, insn;
2859
2860 for (insn = get_insns (); insn; insn = next)
2861 {
2862 next = NEXT_INSN (insn);
2863 if (INSN_P (insn))
2864 {
2865 /* Don't split no-op move insns. These should silently
2866 disappear later in final. Splitting such insns would
2867 break the code that handles LIBCALL blocks. */
2868 rtx set = single_set (insn);
2869 if (set && set_noop_p (set))
2870 {
2871 /* Nops get in the way while scheduling, so delete them
2872 now if register allocation has already been done. It
2873 is too risky to try to do this before register
2874 allocation, and there are unlikely to be very many
2875 nops then anyways.
2876
2877 ??? Should we use delete_insn when the CFG isn't valid? */
2878 if (reload_completed)
2879 delete_insn_and_edges (insn);
2880 }
2881 else
2882 split_insn (insn);
2883 }
2884 }
2885 return 0;
2886 }
2887 \f
2888 #ifdef HAVE_peephole2
2889 struct peep2_insn_data
2890 {
2891 rtx insn;
2892 regset live_before;
2893 };
2894
2895 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2896 static int peep2_current;
2897 /* The number of instructions available to match a peep2. */
2898 int peep2_current_count;
2899
2900 /* A non-insn marker indicating the last insn of the block.
2901 The live_before regset for this element is correct, indicating
2902 DF_LIVE_OUT for the block. */
2903 #define PEEP2_EOB pc_rtx
2904
2905 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2906 does not exist. Used by the recognizer to find the next insn to match
2907 in a multi-insn pattern. */
2908
2909 rtx
2910 peep2_next_insn (int n)
2911 {
2912 gcc_assert (n <= peep2_current_count);
2913
2914 n += peep2_current;
2915 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2916 n -= MAX_INSNS_PER_PEEP2 + 1;
2917
2918 return peep2_insn_data[n].insn;
2919 }
2920
2921 /* Return true if REGNO is dead before the Nth non-note insn
2922 after `current'. */
2923
2924 int
2925 peep2_regno_dead_p (int ofs, int regno)
2926 {
2927 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2928
2929 ofs += peep2_current;
2930 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2931 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2932
2933 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2934
2935 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2936 }
2937
2938 /* Similarly for a REG. */
2939
2940 int
2941 peep2_reg_dead_p (int ofs, rtx reg)
2942 {
2943 int regno, n;
2944
2945 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2946
2947 ofs += peep2_current;
2948 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2949 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2950
2951 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2952
2953 regno = REGNO (reg);
2954 n = hard_regno_nregs[regno][GET_MODE (reg)];
2955 while (--n >= 0)
2956 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2957 return 0;
2958 return 1;
2959 }
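
/* Illustrative sketch (not part of the original file): the condition of a
   define_peephole2 is compiled into code that queries the helpers above,
   roughly as sketched here.  The operand array and the indices are
   hypothetical.  */
#if 0
static bool
example_peephole_condition (rtx operands[])
{
  /* Accept the match only if the register captured in operand 0 is dead
     before the third insn (offset 2) of the matched window.  */
  return peep2_reg_dead_p (2, operands[0]) != 0;
}
#endif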
2960
2961 /* Try to find a hard register of mode MODE, matching the register class in
2962 CLASS_STR, which is available at the beginning of the insn at peephole
2963 window index FROM and remains available until the end of the insn at
2964 window index TO. FROM and TO are offsets from the current peephole
2965 position, in the same sense as for peep2_next_insn.
2966 Registers that already have bits set in REG_SET will not be considered.
2967
2968 If an appropriate register is available, it will be returned and the
2969 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2970 returned. */
2971
2972 rtx
2973 peep2_find_free_register (int from, int to, const char *class_str,
2974 enum machine_mode mode, HARD_REG_SET *reg_set)
2975 {
2976 static int search_ofs;
2977 enum reg_class cl;
2978 HARD_REG_SET live;
2979 int i;
2980
2981 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2982 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2983
2984 from += peep2_current;
2985 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2986 from -= MAX_INSNS_PER_PEEP2 + 1;
2987 to += peep2_current;
2988 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2989 to -= MAX_INSNS_PER_PEEP2 + 1;
2990
2991 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2992 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2993
2994 while (from != to)
2995 {
2996 HARD_REG_SET this_live;
2997
2998 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2999 from = 0;
3000 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3001 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3002 IOR_HARD_REG_SET (live, this_live);
3003 }
3004
3005 cl = (class_str[0] == 'r' ? GENERAL_REGS
3006 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3007
3008 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3009 {
3010 int raw_regno, regno, success, j;
3011
3012 /* Distribute the free registers as much as possible. */
3013 raw_regno = search_ofs + i;
3014 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3015 raw_regno -= FIRST_PSEUDO_REGISTER;
3016 #ifdef REG_ALLOC_ORDER
3017 regno = reg_alloc_order[raw_regno];
3018 #else
3019 regno = raw_regno;
3020 #endif
3021
3022 /* Don't allocate fixed registers. */
3023 if (fixed_regs[regno])
3024 continue;
3025 /* Don't allocate global registers. */
3026 if (global_regs[regno])
3027 continue;
3028 /* Make sure the register is of the right class. */
3029 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3030 continue;
3031 /* And can support the mode we need. */
3032 if (! HARD_REGNO_MODE_OK (regno, mode))
3033 continue;
3034 /* And that we don't create an extra save/restore. */
3035 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3036 continue;
3037 if (! targetm.hard_regno_scratch_ok (regno))
3038 continue;
3039
3040 /* And we don't clobber traceback for noreturn functions. */
3041 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3042 && (! reload_completed || frame_pointer_needed))
3043 continue;
3044
3045 success = 1;
3046 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3047 {
3048 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3049 || TEST_HARD_REG_BIT (live, regno + j))
3050 {
3051 success = 0;
3052 break;
3053 }
3054 }
3055 if (success)
3056 {
3057 add_to_hard_reg_set (reg_set, mode, regno);
3058
3059 /* Start the next search with the next register. */
3060 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3061 raw_regno = 0;
3062 search_ofs = raw_regno;
3063
3064 return gen_rtx_REG (mode, regno);
3065 }
3066 }
3067
3068 search_ofs = 0;
3069 return NULL_RTX;
3070 }
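
/* Illustrative sketch (not part of the original file): this is roughly how
   code generated from a (match_scratch ...) in a define_peephole2 asks for
   a free register; the variable names are hypothetical.  */
#if 0
static rtx
example_find_scratch (void)
{
  HARD_REG_SET regs_allocated;

  CLEAR_HARD_REG_SET (regs_allocated);
  /* Ask for a GENERAL_REGS register in SImode that is free from the first
     matched insn (index 0) through the end of the second (index 1).  */
  return peep2_find_free_register (0, 1, "r", SImode, &regs_allocated);
}
#endif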
3071
3072 /* Forget all currently tracked instructions; remember only the current
3073 LIVE regset. */
3074
3075 static void
3076 peep2_reinit_state (regset live)
3077 {
3078 int i;
3079
3080 /* Indicate that all slots except the last hold invalid data. */
3081 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3082 peep2_insn_data[i].insn = NULL_RTX;
3083 peep2_current_count = 0;
3084
3085 /* Indicate that the last slot contains live_after data. */
3086 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3087 peep2_current = MAX_INSNS_PER_PEEP2;
3088
3089 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3090 }
3091
3092 /* Perform the peephole2 optimization pass. */
3093
3094 static void
3095 peephole2_optimize (void)
3096 {
3097 rtx insn, prev;
3098 bitmap live;
3099 int i;
3100 basic_block bb;
3101 bool do_cleanup_cfg = false;
3102 bool do_rebuild_jump_labels = false;
3103
3104 df_set_flags (DF_LR_RUN_DCE);
3105 df_analyze ();
3106
3107 /* Initialize the regsets we're going to use. */
3108 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3109 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3110 live = BITMAP_ALLOC (&reg_obstack);
3111
3112 FOR_EACH_BB_REVERSE (bb)
3113 {
3114 rtl_profile_for_bb (bb);
3115
3116 /* Start up propagation. */
3117 bitmap_copy (live, DF_LR_OUT (bb));
3118 df_simulate_initialize_backwards (bb, live);
3119 peep2_reinit_state (live);
3120
3121 for (insn = BB_END (bb); ; insn = prev)
3122 {
3123 prev = PREV_INSN (insn);
3124 if (NONDEBUG_INSN_P (insn))
3125 {
3126 rtx attempt, before_try, x;
3127 int match_len;
3128 rtx note;
3129 bool was_call = false;
3130
3131 /* Record this insn. */
3132 if (--peep2_current < 0)
3133 peep2_current = MAX_INSNS_PER_PEEP2;
3134 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3135 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3136 peep2_current_count++;
3137 peep2_insn_data[peep2_current].insn = insn;
3138 df_simulate_one_insn_backwards (bb, insn, live);
3139 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3140
3141 if (RTX_FRAME_RELATED_P (insn))
3142 {
3143 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3144 substitution would lose the
3145 REG_FRAME_RELATED_EXPR that is attached. */
3146 peep2_reinit_state (live);
3147 attempt = NULL;
3148 }
3149 else
3150 /* Match the peephole. */
3151 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3152
3153 if (attempt != NULL)
3154 {
3155 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3156 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3157 cfg-related call notes. */
3158 for (i = 0; i <= match_len; ++i)
3159 {
3160 int j;
3161 rtx old_insn, new_insn, note;
3162
3163 j = i + peep2_current;
3164 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3165 j -= MAX_INSNS_PER_PEEP2 + 1;
3166 old_insn = peep2_insn_data[j].insn;
3167 if (!CALL_P (old_insn))
3168 continue;
3169 was_call = true;
3170
3171 new_insn = attempt;
3172 while (new_insn != NULL_RTX)
3173 {
3174 if (CALL_P (new_insn))
3175 break;
3176 new_insn = NEXT_INSN (new_insn);
3177 }
3178
3179 gcc_assert (new_insn != NULL_RTX);
3180
3181 CALL_INSN_FUNCTION_USAGE (new_insn)
3182 = CALL_INSN_FUNCTION_USAGE (old_insn);
3183
3184 for (note = REG_NOTES (old_insn);
3185 note;
3186 note = XEXP (note, 1))
3187 switch (REG_NOTE_KIND (note))
3188 {
3189 case REG_NORETURN:
3190 case REG_SETJMP:
3191 add_reg_note (new_insn, REG_NOTE_KIND (note),
3192 XEXP (note, 0));
3193 break;
3194 default:
3195 /* Discard all other reg notes. */
3196 break;
3197 }
3198
3199 /* Croak if there is another call in the sequence. */
3200 while (++i <= match_len)
3201 {
3202 j = i + peep2_current;
3203 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3204 j -= MAX_INSNS_PER_PEEP2 + 1;
3205 old_insn = peep2_insn_data[j].insn;
3206 gcc_assert (!CALL_P (old_insn));
3207 }
3208 break;
3209 }
3210
3211 i = match_len + peep2_current;
3212 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3213 i -= MAX_INSNS_PER_PEEP2 + 1;
3214
3215 note = find_reg_note (peep2_insn_data[i].insn,
3216 REG_EH_REGION, NULL_RTX);
3217
3218 /* Replace the old sequence with the new. */
3219 attempt = emit_insn_after_setloc (attempt,
3220 peep2_insn_data[i].insn,
3221 INSN_LOCATOR (peep2_insn_data[i].insn));
3222 before_try = PREV_INSN (insn);
3223 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3224
3225 /* Re-insert the EH_REGION notes. */
3226 if (note || (was_call && nonlocal_goto_handler_labels))
3227 {
3228 edge eh_edge;
3229 edge_iterator ei;
3230
3231 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3232 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3233 break;
3234
3235 if (note)
3236 copy_reg_eh_region_note_backward (note, attempt,
3237 before_try);
3238
3239 if (eh_edge)
3240 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3241 if (x != BB_END (bb)
3242 && (can_throw_internal (x)
3243 || can_nonlocal_goto (x)))
3244 {
3245 edge nfte, nehe;
3246 int flags;
3247
3248 nfte = split_block (bb, x);
3249 flags = (eh_edge->flags
3250 & (EDGE_EH | EDGE_ABNORMAL));
3251 if (CALL_P (x))
3252 flags |= EDGE_ABNORMAL_CALL;
3253 nehe = make_edge (nfte->src, eh_edge->dest,
3254 flags);
3255
3256 nehe->probability = eh_edge->probability;
3257 nfte->probability
3258 = REG_BR_PROB_BASE - nehe->probability;
3259
3260 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3261 bb = nfte->src;
3262 eh_edge = nehe;
3263 }
3264
3265 /* A possibly trapping insn may have been turned into a
3266 non-trapping one; zap any outgoing edges that became dead. */
3267 do_cleanup_cfg |= purge_dead_edges (bb);
3268 }
3269
3270 #ifdef HAVE_conditional_execution
3271 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3272 peep2_insn_data[i].insn = NULL_RTX;
3273 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3274 peep2_current_count = 0;
3275 #else
3276 /* Back up lifetime information past the end of the
3277 newly created sequence. */
3278 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3279 i = 0;
3280 bitmap_copy (live, peep2_insn_data[i].live_before);
3281
3282 /* Update life information for the new sequence. */
3283 x = attempt;
3284 do
3285 {
3286 if (INSN_P (x))
3287 {
3288 if (--i < 0)
3289 i = MAX_INSNS_PER_PEEP2;
3290 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3291 && peep2_insn_data[i].insn == NULL_RTX)
3292 peep2_current_count++;
3293 peep2_insn_data[i].insn = x;
3294 df_insn_rescan (x);
3295 df_simulate_one_insn_backwards (bb, x, live);
3296 bitmap_copy (peep2_insn_data[i].live_before, live);
3297 }
3298 x = PREV_INSN (x);
3299 }
3300 while (x != prev);
3301
3302 peep2_current = i;
3303 #endif
3304
3305 /* If we generated a jump instruction, it won't have
3306 JUMP_LABEL set. Recompute after we're done. */
3307 for (x = attempt; x != before_try; x = PREV_INSN (x))
3308 if (JUMP_P (x))
3309 {
3310 do_rebuild_jump_labels = true;
3311 break;
3312 }
3313 }
3314 }
3315
3316 if (insn == BB_HEAD (bb))
3317 break;
3318 }
3319 }
3320
3321 default_rtl_profile ();
3322 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3323 BITMAP_FREE (peep2_insn_data[i].live_before);
3324 BITMAP_FREE (live);
3325 if (do_rebuild_jump_labels)
3326 rebuild_jump_labels (get_insns ());
3327 }
3328 #endif /* HAVE_peephole2 */
3329
3330 /* Common predicates for use with define_bypass. */
3331
3332 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3333 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3334 must be either a single_set or a PARALLEL with SETs inside. */
3335
3336 int
3337 store_data_bypass_p (rtx out_insn, rtx in_insn)
3338 {
3339 rtx out_set, in_set;
3340 rtx out_pat, in_pat;
3341 rtx out_exp, in_exp;
3342 int i, j;
3343
3344 in_set = single_set (in_insn);
3345 if (in_set)
3346 {
3347 if (!MEM_P (SET_DEST (in_set)))
3348 return false;
3349
3350 out_set = single_set (out_insn);
3351 if (out_set)
3352 {
3353 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3354 return false;
3355 }
3356 else
3357 {
3358 out_pat = PATTERN (out_insn);
3359
3360 if (GET_CODE (out_pat) != PARALLEL)
3361 return false;
3362
3363 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3364 {
3365 out_exp = XVECEXP (out_pat, 0, i);
3366
3367 if (GET_CODE (out_exp) == CLOBBER)
3368 continue;
3369
3370 gcc_assert (GET_CODE (out_exp) == SET);
3371
3372 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3373 return false;
3374 }
3375 }
3376 }
3377 else
3378 {
3379 in_pat = PATTERN (in_insn);
3380 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3381
3382 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3383 {
3384 in_exp = XVECEXP (in_pat, 0, i);
3385
3386 if (GET_CODE (in_exp) == CLOBBER)
3387 continue;
3388
3389 gcc_assert (GET_CODE (in_exp) == SET);
3390
3391 if (!MEM_P (SET_DEST (in_exp)))
3392 return false;
3393
3394 out_set = single_set (out_insn);
3395 if (out_set)
3396 {
3397 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3398 return false;
3399 }
3400 else
3401 {
3402 out_pat = PATTERN (out_insn);
3403 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3404
3405 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3406 {
3407 out_exp = XVECEXP (out_pat, 0, j);
3408
3409 if (GET_CODE (out_exp) == CLOBBER)
3410 continue;
3411
3412 gcc_assert (GET_CODE (out_exp) == SET);
3413
3414 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3415 return false;
3416 }
3417 }
3418 }
3419 }
3420
3421 return true;
3422 }
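
/* Illustrative sketch (not part of the original file): the intent is that
   OUT_INSN feeds the *stored value* of IN_INSN, not its address.  The
   register numbers, and the use of make_insn_raw to wrap the patterns,
   are purely for illustration.  */
#if 0
static void
example_store_data_bypass (void)
{
  rtx data = gen_rtx_REG (SImode, 0);	/* value being stored */
  rtx addr = gen_rtx_REG (Pmode, 1);	/* address register */
  rtx out_insn = make_insn_raw (gen_rtx_SET (VOIDmode, data, const1_rtx));
  rtx in_insn = make_insn_raw (gen_rtx_SET (VOIDmode,
					    gen_rtx_MEM (SImode, addr),
					    data));

  /* The dependency is on the stored data (reg 0), which does not appear in
     the store address, so the bypass applies.  */
  gcc_assert (store_data_bypass_p (out_insn, in_insn));
}
#endif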
3423
3424 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3425 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3426 or a multiple set; IN_INSN should be a single_set for the result to be
3427 meaningful, but may be any JUMP or CALL insn for insn categorization. */
3428
3429 int
3430 if_test_bypass_p (rtx out_insn, rtx in_insn)
3431 {
3432 rtx out_set, in_set;
3433
3434 in_set = single_set (in_insn);
3435 if (! in_set)
3436 {
3437 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3438 return false;
3439 }
3440
3441 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3442 return false;
3443 in_set = SET_SRC (in_set);
3444
3445 out_set = single_set (out_insn);
3446 if (out_set)
3447 {
3448 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3449 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3450 return false;
3451 }
3452 else
3453 {
3454 rtx out_pat;
3455 int i;
3456
3457 out_pat = PATTERN (out_insn);
3458 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3459
3460 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3461 {
3462 rtx exp = XVECEXP (out_pat, 0, i);
3463
3464 if (GET_CODE (exp) == CLOBBER)
3465 continue;
3466
3467 gcc_assert (GET_CODE (exp) == SET);
3468
3469 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3470 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3471 return false;
3472 }
3473 }
3474
3475 return true;
3476 }
3477 \f
3478 static bool
3479 gate_handle_peephole2 (void)
3480 {
3481 return (optimize > 0 && flag_peephole2);
3482 }
3483
3484 static unsigned int
3485 rest_of_handle_peephole2 (void)
3486 {
3487 #ifdef HAVE_peephole2
3488 peephole2_optimize ();
3489 #endif
3490 return 0;
3491 }
3492
3493 struct rtl_opt_pass pass_peephole2 =
3494 {
3495 {
3496 RTL_PASS,
3497 "peephole2", /* name */
3498 gate_handle_peephole2, /* gate */
3499 rest_of_handle_peephole2, /* execute */
3500 NULL, /* sub */
3501 NULL, /* next */
3502 0, /* static_pass_number */
3503 TV_PEEPHOLE2, /* tv_id */
3504 0, /* properties_required */
3505 0, /* properties_provided */
3506 0, /* properties_destroyed */
3507 0, /* todo_flags_start */
3508 TODO_df_finish | TODO_verify_rtl_sharing |
3509 TODO_dump_func /* todo_flags_finish */
3510 }
3511 };
3512
3513 static unsigned int
3514 rest_of_handle_split_all_insns (void)
3515 {
3516 split_all_insns ();
3517 return 0;
3518 }
3519
3520 struct rtl_opt_pass pass_split_all_insns =
3521 {
3522 {
3523 RTL_PASS,
3524 "split1", /* name */
3525 NULL, /* gate */
3526 rest_of_handle_split_all_insns, /* execute */
3527 NULL, /* sub */
3528 NULL, /* next */
3529 0, /* static_pass_number */
3530 TV_NONE, /* tv_id */
3531 0, /* properties_required */
3532 0, /* properties_provided */
3533 0, /* properties_destroyed */
3534 0, /* todo_flags_start */
3535 TODO_dump_func /* todo_flags_finish */
3536 }
3537 };
3538
3539 static unsigned int
3540 rest_of_handle_split_after_reload (void)
3541 {
3542 /* If optimizing, then go ahead and split insns now. */
3543 #ifndef STACK_REGS
3544 if (optimize > 0)
3545 #endif
3546 split_all_insns ();
3547 return 0;
3548 }
3549
3550 struct rtl_opt_pass pass_split_after_reload =
3551 {
3552 {
3553 RTL_PASS,
3554 "split2", /* name */
3555 NULL, /* gate */
3556 rest_of_handle_split_after_reload, /* execute */
3557 NULL, /* sub */
3558 NULL, /* next */
3559 0, /* static_pass_number */
3560 TV_NONE, /* tv_id */
3561 0, /* properties_required */
3562 0, /* properties_provided */
3563 0, /* properties_destroyed */
3564 0, /* todo_flags_start */
3565 TODO_dump_func /* todo_flags_finish */
3566 }
3567 };
3568
3569 static bool
3570 gate_handle_split_before_regstack (void)
3571 {
3572 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3573 /* If flow2 creates new instructions which need splitting,
3574 and scheduling after reload is not done, they might not be
3575 split until final, which doesn't allow splitting
3576 when HAVE_ATTR_length is defined. */
3577 # ifdef INSN_SCHEDULING
3578 return (optimize && !flag_schedule_insns_after_reload);
3579 # else
3580 return (optimize);
3581 # endif
3582 #else
3583 return 0;
3584 #endif
3585 }
3586
3587 static unsigned int
3588 rest_of_handle_split_before_regstack (void)
3589 {
3590 split_all_insns ();
3591 return 0;
3592 }
3593
3594 struct rtl_opt_pass pass_split_before_regstack =
3595 {
3596 {
3597 RTL_PASS,
3598 "split3", /* name */
3599 gate_handle_split_before_regstack, /* gate */
3600 rest_of_handle_split_before_regstack, /* execute */
3601 NULL, /* sub */
3602 NULL, /* next */
3603 0, /* static_pass_number */
3604 TV_NONE, /* tv_id */
3605 0, /* properties_required */
3606 0, /* properties_provided */
3607 0, /* properties_destroyed */
3608 0, /* todo_flags_start */
3609 TODO_dump_func /* todo_flags_finish */
3610 }
3611 };
3612
3613 static bool
3614 gate_handle_split_before_sched2 (void)
3615 {
3616 #ifdef INSN_SCHEDULING
3617 return optimize > 0 && flag_schedule_insns_after_reload;
3618 #else
3619 return 0;
3620 #endif
3621 }
3622
3623 static unsigned int
3624 rest_of_handle_split_before_sched2 (void)
3625 {
3626 #ifdef INSN_SCHEDULING
3627 split_all_insns ();
3628 #endif
3629 return 0;
3630 }
3631
3632 struct rtl_opt_pass pass_split_before_sched2 =
3633 {
3634 {
3635 RTL_PASS,
3636 "split4", /* name */
3637 gate_handle_split_before_sched2, /* gate */
3638 rest_of_handle_split_before_sched2, /* execute */
3639 NULL, /* sub */
3640 NULL, /* next */
3641 0, /* static_pass_number */
3642 TV_NONE, /* tv_id */
3643 0, /* properties_required */
3644 0, /* properties_provided */
3645 0, /* properties_destroyed */
3646 0, /* todo_flags_start */
3647 TODO_verify_flow |
3648 TODO_dump_func /* todo_flags_finish */
3649 }
3650 };
3651
3652 /* The placement of the splitting that we do for shorten_branches
3653 depends on whether regstack is used by the target or not. */
3654 static bool
3655 gate_do_final_split (void)
3656 {
3657 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3658 return 1;
3659 #else
3660 return 0;
3661 #endif
3662 }
3663
3664 struct rtl_opt_pass pass_split_for_shorten_branches =
3665 {
3666 {
3667 RTL_PASS,
3668 "split5", /* name */
3669 gate_do_final_split, /* gate */
3670 split_all_insns_noflow, /* execute */
3671 NULL, /* sub */
3672 NULL, /* next */
3673 0, /* static_pass_number */
3674 TV_NONE, /* tv_id */
3675 0, /* properties_required */
3676 0, /* properties_provided */
3677 0, /* properties_destroyed */
3678 0, /* todo_flags_start */
3679 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
3680 }
3681 };