gcc/recog.c  [gcc.git]
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl-error.h"
28 #include "tm_p.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "addresses.h"
35 #include "expr.h"
36 #include "function.h"
37 #include "flags.h"
38 #include "basic-block.h"
39 #include "output.h"
40 #include "reload.h"
41 #include "target.h"
42 #include "timevar.h"
43 #include "tree-pass.h"
44 #include "df.h"
45
46 #ifndef STACK_PUSH_CODE
47 #ifdef STACK_GROWS_DOWNWARD
48 #define STACK_PUSH_CODE PRE_DEC
49 #else
50 #define STACK_PUSH_CODE PRE_INC
51 #endif
52 #endif
53
54 #ifndef STACK_POP_CODE
55 #ifdef STACK_GROWS_DOWNWARD
56 #define STACK_POP_CODE POST_INC
57 #else
58 #define STACK_POP_CODE POST_DEC
59 #endif
60 #endif
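/* For illustration: with a downward-growing stack, a word push is typically
   represented as

     (set (mem:SI (pre_dec (reg sp))) (reg:SI x))

   and the matching pop as

     (set (reg:SI x) (mem:SI (post_inc (reg sp))))

   which is why STACK_PUSH_CODE defaults to PRE_DEC and STACK_POP_CODE to
   POST_INC above (and to PRE_INC/POST_DEC when the stack grows upward).  */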
61
62 #ifndef HAVE_ATTR_enabled
63 static inline bool
64 get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
65 {
66 return true;
67 }
68 #endif
69
70 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
71 static void validate_replace_src_1 (rtx *, void *);
72 static rtx split_insn (rtx);
73
74 /* Nonzero means allow operands to be volatile.
75 This should be 0 if you are generating rtl, such as if you are calling
76 the functions in optabs.c and expmed.c (most of the time).
77 This should be 1 if all valid insns need to be recognized,
78 such as in reginfo.c and final.c and reload.c.
79
80 init_recog and init_recog_no_volatile are responsible for setting this. */
81
82 int volatile_ok;
83
84 struct recog_data recog_data;
85
86 /* Contains a vector of operand_alternative structures for every operand.
87 Set up by preprocess_constraints. */
88 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
89
90 /* On return from `constrain_operands', indicate which alternative
91 was satisfied. */
92
93 int which_alternative;
94
95 /* Nonzero after end of reload pass.
96 Set to 1 or 0 by toplev.c.
97 Controls the significance of (SUBREG (MEM)). */
98
99 int reload_completed;
100
101 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
102 int epilogue_completed;
103
104 /* Initialize data used by the function `recog'.
105 This must be called once in the compilation of a function
106 before any insn recognition may be done in the function. */
107
108 void
109 init_recog_no_volatile (void)
110 {
111 volatile_ok = 0;
112 }
113
114 void
115 init_recog (void)
116 {
117 volatile_ok = 1;
118 }
119
120 \f
121 /* Check that X is an insn-body for an `asm' with operands
122 and that the operands mentioned in it are legitimate. */
123
124 int
125 check_asm_operands (rtx x)
126 {
127 int noperands;
128 rtx *operands;
129 const char **constraints;
130 int i;
131
132 /* Post-reload, be more strict with things. */
133 if (reload_completed)
134 {
135 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
136 extract_insn (make_insn_raw (x));
137 constrain_operands (1);
138 return which_alternative >= 0;
139 }
140
141 noperands = asm_noperands (x);
142 if (noperands < 0)
143 return 0;
144 if (noperands == 0)
145 return 1;
146
147 operands = XALLOCAVEC (rtx, noperands);
148 constraints = XALLOCAVEC (const char *, noperands);
149
150 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
151
152 for (i = 0; i < noperands; i++)
153 {
154 const char *c = constraints[i];
155 if (c[0] == '%')
156 c++;
157 if (! asm_operand_ok (operands[i], c, constraints))
158 return 0;
159 }
160
161 return 1;
162 }
163 \f
164 /* Static data for the next two routines. */
165
166 typedef struct change_t
167 {
168 rtx object;
169 int old_code;
170 rtx *loc;
171 rtx old;
172 bool unshare;
173 } change_t;
174
175 static change_t *changes;
176 static int changes_allocated;
177
178 static int num_changes = 0;
179
180 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
181 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
182 the change is simply made.
183
184 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
185 will be called with the address and mode as parameters. If OBJECT is
186 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
187 the change in place.
188
189 IN_GROUP is nonzero if this is part of a group of changes that must be
190 performed as a group. In that case, the changes will be stored. The
191 function `apply_change_group' will validate and apply the changes.
192
193 If IN_GROUP is zero, this is a single change. Try to recognize the insn
194 or validate the memory reference with the change applied. If the result
195 is not valid for the machine, suppress the change and return zero.
196 Otherwise, perform the change and return 1. */
197
198 static bool
199 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
200 {
201 rtx old = *loc;
202
203 if (old == new_rtx || rtx_equal_p (old, new_rtx))
204 return 1;
205
206 gcc_assert (in_group != 0 || num_changes == 0);
207
208 *loc = new_rtx;
209
210 /* Save the information describing this change. */
211 if (num_changes >= changes_allocated)
212 {
213 if (changes_allocated == 0)
214 /* This value allows for repeated substitutions inside complex
215 indexed addresses, or changes in up to 5 insns. */
216 changes_allocated = MAX_RECOG_OPERANDS * 5;
217 else
218 changes_allocated *= 2;
219
220 changes = XRESIZEVEC (change_t, changes, changes_allocated);
221 }
222
223 changes[num_changes].object = object;
224 changes[num_changes].loc = loc;
225 changes[num_changes].old = old;
226 changes[num_changes].unshare = unshare;
227
228 if (object && !MEM_P (object))
229 {
230 /* Set INSN_CODE to force rerecognition of insn. Save old code in
231 case invalid. */
232 changes[num_changes].old_code = INSN_CODE (object);
233 INSN_CODE (object) = -1;
234 }
235
236 num_changes++;
237
238 /* If we are making a group of changes, return 1. Otherwise, validate the
239 change group we made. */
240
241 if (in_group)
242 return 1;
243 else
244 return apply_change_group ();
245 }
246
 247 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 248    passing false for UNSHARE.  */
249
250 bool
251 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
252 {
253 return validate_change_1 (object, loc, new_rtx, in_group, false);
254 }
255
 256 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 257    passing true for UNSHARE.  */
258
259 bool
260 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
261 {
262 return validate_change_1 (object, loc, new_rtx, in_group, true);
263 }
264
265
266 /* Keep X canonicalized if some changes have made it non-canonical; only
267 modifies the operands of X, not (for example) its code. Simplifications
268 are not the job of this routine.
269
270 Return true if anything was changed. */
271 bool
272 canonicalize_change_group (rtx insn, rtx x)
273 {
274 if (COMMUTATIVE_P (x)
275 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
276 {
277 /* Oops, the caller has made X no longer canonical.
278 Let's redo the changes in the correct order. */
279 rtx tem = XEXP (x, 0);
280 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
281 validate_change (insn, &XEXP (x, 1), tem, 1);
282 return true;
283 }
284 else
285 return false;
286 }
287
288
289 /* This subroutine of apply_change_group verifies whether the changes to INSN
290 were valid; i.e. whether INSN can still be recognized. */
291
292 int
293 insn_invalid_p (rtx insn)
294 {
295 rtx pat = PATTERN (insn);
296 int num_clobbers = 0;
297 /* If we are before reload and the pattern is a SET, see if we can add
298 clobbers. */
299 int icode = recog (pat, insn,
300 (GET_CODE (pat) == SET
301 && ! reload_completed && ! reload_in_progress)
302 ? &num_clobbers : 0);
303 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
304
305
 306   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
307 this is not an asm and the insn wasn't recognized. */
308 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
309 || (!is_asm && icode < 0))
310 return 1;
311
312 /* If we have to add CLOBBERs, fail if we have to add ones that reference
313 hard registers since our callers can't know if they are live or not.
314 Otherwise, add them. */
315 if (num_clobbers > 0)
316 {
317 rtx newpat;
318
319 if (added_clobbers_hard_reg_p (icode))
320 return 1;
321
322 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
323 XVECEXP (newpat, 0, 0) = pat;
324 add_clobbers (newpat, icode);
325 PATTERN (insn) = pat = newpat;
326 }
327
328 /* After reload, verify that all constraints are satisfied. */
329 if (reload_completed)
330 {
331 extract_insn (insn);
332
333 if (! constrain_operands (1))
334 return 1;
335 }
336
337 INSN_CODE (insn) = icode;
338 return 0;
339 }
340
341 /* Return number of changes made and not validated yet. */
342 int
343 num_changes_pending (void)
344 {
345 return num_changes;
346 }
347
348 /* Tentatively apply the changes numbered NUM and up.
349 Return 1 if all changes are valid, zero otherwise. */
350
351 int
352 verify_changes (int num)
353 {
354 int i;
355 rtx last_validated = NULL_RTX;
356
357 /* The changes have been applied and all INSN_CODEs have been reset to force
358 rerecognition.
359
360 The changes are valid if we aren't given an object, or if we are
361 given a MEM and it still is a valid address, or if this is in insn
362 and it is recognized. In the latter case, if reload has completed,
363 we also require that the operands meet the constraints for
364 the insn. */
365
366 for (i = num; i < num_changes; i++)
367 {
368 rtx object = changes[i].object;
369
370 /* If there is no object to test or if it is the same as the one we
371 already tested, ignore it. */
372 if (object == 0 || object == last_validated)
373 continue;
374
375 if (MEM_P (object))
376 {
377 if (! memory_address_addr_space_p (GET_MODE (object),
378 XEXP (object, 0),
379 MEM_ADDR_SPACE (object)))
380 break;
381 }
382 else if (REG_P (changes[i].old)
383 && asm_noperands (PATTERN (object)) > 0
384 && REG_EXPR (changes[i].old) != NULL_TREE
385 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
386 && DECL_REGISTER (REG_EXPR (changes[i].old)))
387 {
388 /* Don't allow changes of hard register operands to inline
389 assemblies if they have been defined as register asm ("x"). */
390 break;
391 }
392 else if (DEBUG_INSN_P (object))
393 continue;
394 else if (insn_invalid_p (object))
395 {
396 rtx pat = PATTERN (object);
397
398 /* Perhaps we couldn't recognize the insn because there were
399 extra CLOBBERs at the end. If so, try to re-recognize
400 without the last CLOBBER (later iterations will cause each of
401 them to be eliminated, in turn). But don't do this if we
402 have an ASM_OPERAND. */
403 if (GET_CODE (pat) == PARALLEL
404 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
405 && asm_noperands (PATTERN (object)) < 0)
406 {
407 rtx newpat;
408
409 if (XVECLEN (pat, 0) == 2)
410 newpat = XVECEXP (pat, 0, 0);
411 else
412 {
413 int j;
414
415 newpat
416 = gen_rtx_PARALLEL (VOIDmode,
417 rtvec_alloc (XVECLEN (pat, 0) - 1));
418 for (j = 0; j < XVECLEN (newpat, 0); j++)
419 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
420 }
421
422 /* Add a new change to this group to replace the pattern
423 with this new pattern. Then consider this change
424 as having succeeded. The change we added will
425 cause the entire call to fail if things remain invalid.
426
427 Note that this can lose if a later change than the one
428 we are processing specified &XVECEXP (PATTERN (object), 0, X)
429 but this shouldn't occur. */
430
431 validate_change (object, &PATTERN (object), newpat, 1);
432 continue;
433 }
434 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
435 || GET_CODE (pat) == VAR_LOCATION)
436 /* If this insn is a CLOBBER or USE, it is always valid, but is
437 never recognized. */
438 continue;
439 else
440 break;
441 }
442 last_validated = object;
443 }
444
445 return (i == num_changes);
446 }
447
448 /* A group of changes has previously been issued with validate_change
449 and verified with verify_changes. Call df_insn_rescan for each of
450 the insn changed and clear num_changes. */
451
452 void
453 confirm_change_group (void)
454 {
455 int i;
456 rtx last_object = NULL;
457
458 for (i = 0; i < num_changes; i++)
459 {
460 rtx object = changes[i].object;
461
462 if (changes[i].unshare)
463 *changes[i].loc = copy_rtx (*changes[i].loc);
464
 465       /* Avoid unnecessary rescanning when multiple changes to the same
 466          instruction are made.  */
467 if (object)
468 {
469 if (object != last_object && last_object && INSN_P (last_object))
470 df_insn_rescan (last_object);
471 last_object = object;
472 }
473 }
474
475 if (last_object && INSN_P (last_object))
476 df_insn_rescan (last_object);
477 num_changes = 0;
478 }
479
480 /* Apply a group of changes previously issued with `validate_change'.
481 If all changes are valid, call confirm_change_group and return 1,
482 otherwise, call cancel_changes and return 0. */
483
484 int
485 apply_change_group (void)
486 {
487 if (verify_changes (0))
488 {
489 confirm_change_group ();
490 return 1;
491 }
492 else
493 {
494 cancel_changes (0);
495 return 0;
496 }
497 }
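/* For illustration, the typical calling pattern for the change-group
   machinery is roughly:

     validate_change (insn, &SET_SRC (pat), new_src, 1);
     validate_change (insn, &XEXP (x, 1), new_op1, 1);
     if (! apply_change_group ())
       ... all queued changes have been backed out ...

   Passing 1 for IN_GROUP only queues each change; apply_change_group then
   either confirms the whole group or cancels it as a unit.  */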
498
499
500 /* Return the number of changes so far in the current group. */
501
502 int
503 num_validated_changes (void)
504 {
505 return num_changes;
506 }
507
508 /* Retract the changes numbered NUM and up. */
509
510 void
511 cancel_changes (int num)
512 {
513 int i;
514
515 /* Back out all the changes. Do this in the opposite order in which
516 they were made. */
517 for (i = num_changes - 1; i >= num; i--)
518 {
519 *changes[i].loc = changes[i].old;
520 if (changes[i].object && !MEM_P (changes[i].object))
521 INSN_CODE (changes[i].object) = changes[i].old_code;
522 }
523 num_changes = num;
524 }
525
526 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
527 rtx. */
528
529 static void
530 simplify_while_replacing (rtx *loc, rtx to, rtx object,
531 enum machine_mode op0_mode)
532 {
533 rtx x = *loc;
534 enum rtx_code code = GET_CODE (x);
535 rtx new_rtx;
536
537 if (SWAPPABLE_OPERANDS_P (x)
538 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
539 {
540 validate_unshare_change (object, loc,
541 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
542 : swap_condition (code),
543 GET_MODE (x), XEXP (x, 1),
544 XEXP (x, 0)), 1);
545 x = *loc;
546 code = GET_CODE (x);
547 }
548
549 switch (code)
550 {
551 case PLUS:
552 /* If we have a PLUS whose second operand is now a CONST_INT, use
553 simplify_gen_binary to try to simplify it.
554 ??? We may want later to remove this, once simplification is
555 separated from this function. */
556 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
557 validate_change (object, loc,
558 simplify_gen_binary
559 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
560 break;
561 case MINUS:
562 if (CONST_INT_P (XEXP (x, 1))
563 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
564 validate_change (object, loc,
565 simplify_gen_binary
566 (PLUS, GET_MODE (x), XEXP (x, 0),
567 simplify_gen_unary (NEG,
568 GET_MODE (x), XEXP (x, 1),
569 GET_MODE (x))), 1);
570 break;
571 case ZERO_EXTEND:
572 case SIGN_EXTEND:
573 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
574 {
575 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
576 op0_mode);
577 /* If any of the above failed, substitute in something that
578 we know won't be recognized. */
579 if (!new_rtx)
580 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
581 validate_change (object, loc, new_rtx, 1);
582 }
583 break;
584 case SUBREG:
585 /* All subregs possible to simplify should be simplified. */
586 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
587 SUBREG_BYTE (x));
588
589 /* Subregs of VOIDmode operands are incorrect. */
590 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
591 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
592 if (new_rtx)
593 validate_change (object, loc, new_rtx, 1);
594 break;
595 case ZERO_EXTRACT:
596 case SIGN_EXTRACT:
597 /* If we are replacing a register with memory, try to change the memory
598 to be the mode required for memory in extract operations (this isn't
599 likely to be an insertion operation; if it was, nothing bad will
600 happen, we might just fail in some cases). */
601
602 if (MEM_P (XEXP (x, 0))
603 && CONST_INT_P (XEXP (x, 1))
604 && CONST_INT_P (XEXP (x, 2))
605 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
606 && !MEM_VOLATILE_P (XEXP (x, 0)))
607 {
608 enum machine_mode wanted_mode = VOIDmode;
609 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
610 int pos = INTVAL (XEXP (x, 2));
611
612 if (GET_CODE (x) == ZERO_EXTRACT)
613 {
614 enum machine_mode new_mode
615 = mode_for_extraction (EP_extzv, 1);
616 if (new_mode != MAX_MACHINE_MODE)
617 wanted_mode = new_mode;
618 }
619 else if (GET_CODE (x) == SIGN_EXTRACT)
620 {
621 enum machine_mode new_mode
622 = mode_for_extraction (EP_extv, 1);
623 if (new_mode != MAX_MACHINE_MODE)
624 wanted_mode = new_mode;
625 }
626
627 /* If we have a narrower mode, we can do something. */
628 if (wanted_mode != VOIDmode
629 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
630 {
631 int offset = pos / BITS_PER_UNIT;
632 rtx newmem;
633
634 /* If the bytes and bits are counted differently, we
635 must adjust the offset. */
636 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
637 offset =
638 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
639 offset);
640
641 pos %= GET_MODE_BITSIZE (wanted_mode);
642
643 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
644
645 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
646 validate_change (object, &XEXP (x, 0), newmem, 1);
647 }
648 }
649
650 break;
651
652 default:
653 break;
654 }
655 }
656
657 /* Replace every occurrence of FROM in X with TO. Mark each change with
658 validate_change passing OBJECT. */
659
660 static void
661 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
662 bool simplify)
663 {
664 int i, j;
665 const char *fmt;
666 rtx x = *loc;
667 enum rtx_code code;
668 enum machine_mode op0_mode = VOIDmode;
669 int prev_changes = num_changes;
670
671 if (!x)
672 return;
673
674 code = GET_CODE (x);
675 fmt = GET_RTX_FORMAT (code);
676 if (fmt[0] == 'e')
677 op0_mode = GET_MODE (XEXP (x, 0));
678
679 /* X matches FROM if it is the same rtx or they are both referring to the
680 same register in the same mode. Avoid calling rtx_equal_p unless the
681 operands look similar. */
682
683 if (x == from
684 || (REG_P (x) && REG_P (from)
685 && GET_MODE (x) == GET_MODE (from)
686 && REGNO (x) == REGNO (from))
687 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
688 && rtx_equal_p (x, from)))
689 {
690 validate_unshare_change (object, loc, to, 1);
691 return;
692 }
693
 694   /* Call ourselves recursively to perform the replacements.
695 We must not replace inside already replaced expression, otherwise we
696 get infinite recursion for replacements like (reg X)->(subreg (reg X))
697 done by regmove, so we must special case shared ASM_OPERANDS. */
698
699 if (GET_CODE (x) == PARALLEL)
700 {
701 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
702 {
703 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
704 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
705 {
706 /* Verify that operands are really shared. */
707 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
708 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
709 (x, 0, j))));
710 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
711 from, to, object, simplify);
712 }
713 else
714 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
715 simplify);
716 }
717 }
718 else
719 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
720 {
721 if (fmt[i] == 'e')
722 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
723 else if (fmt[i] == 'E')
724 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
725 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
726 simplify);
727 }
728
729 /* If we didn't substitute, there is nothing more to do. */
730 if (num_changes == prev_changes)
731 return;
732
733 /* Allow substituted expression to have different mode. This is used by
734 regmove to change mode of pseudo register. */
735 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
736 op0_mode = GET_MODE (XEXP (x, 0));
737
738 /* Do changes needed to keep rtx consistent. Don't do any other
739 simplifications, as it is not our job. */
740 if (simplify)
741 simplify_while_replacing (loc, to, object, op0_mode);
742 }
743
744 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
745 with TO. After all changes have been made, validate by seeing
746 if INSN is still valid. */
747
748 int
749 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
750 {
751 validate_replace_rtx_1 (loc, from, to, insn, true);
752 return apply_change_group ();
753 }
754
755 /* Try replacing every occurrence of FROM in INSN with TO. After all
756 changes have been made, validate by seeing if INSN is still valid. */
757
758 int
759 validate_replace_rtx (rtx from, rtx to, rtx insn)
760 {
761 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
762 return apply_change_group ();
763 }
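/* For illustration, a pass that wants to rewrite every use of one register
   within a single insn might do something like:

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ... INSN was successfully re-recognized with NEW_REG ...

   Because validate_replace_rtx ends with apply_change_group, the
   substitution either takes effect completely or is undone completely.  */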
764
765 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
766 is a part of INSN. After all changes have been made, validate by seeing if
767 INSN is still valid.
768 validate_replace_rtx (from, to, insn) is equivalent to
769 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
770
771 int
772 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
773 {
774 validate_replace_rtx_1 (where, from, to, insn, true);
775 return apply_change_group ();
776 }
777
778 /* Same as above, but do not simplify rtx afterwards. */
779 int
780 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
781 rtx insn)
782 {
783 validate_replace_rtx_1 (where, from, to, insn, false);
784 return apply_change_group ();
785
786 }
787
788 /* Try replacing every occurrence of FROM in INSN with TO. This also
789 will replace in REG_EQUAL and REG_EQUIV notes. */
790
791 void
792 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
793 {
794 rtx note;
795 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
796 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
797 if (REG_NOTE_KIND (note) == REG_EQUAL
798 || REG_NOTE_KIND (note) == REG_EQUIV)
799 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
800 }
801
802 /* Function called by note_uses to replace used subexpressions. */
803 struct validate_replace_src_data
804 {
805 rtx from; /* Old RTX */
806 rtx to; /* New RTX */
807 rtx insn; /* Insn in which substitution is occurring. */
808 };
809
810 static void
811 validate_replace_src_1 (rtx *x, void *data)
812 {
813 struct validate_replace_src_data *d
814 = (struct validate_replace_src_data *) data;
815
816 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
817 }
818
819 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
820 SET_DESTs. */
821
822 void
823 validate_replace_src_group (rtx from, rtx to, rtx insn)
824 {
825 struct validate_replace_src_data d;
826
827 d.from = from;
828 d.to = to;
829 d.insn = insn;
830 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
831 }
832
 833 /* Try to simplify INSN.
834 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
835 pattern and return true if something was simplified. */
836
837 bool
838 validate_simplify_insn (rtx insn)
839 {
840 int i;
841 rtx pat = NULL;
842 rtx newpat = NULL;
843
844 pat = PATTERN (insn);
845
846 if (GET_CODE (pat) == SET)
847 {
848 newpat = simplify_rtx (SET_SRC (pat));
849 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
850 validate_change (insn, &SET_SRC (pat), newpat, 1);
851 newpat = simplify_rtx (SET_DEST (pat));
852 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
853 validate_change (insn, &SET_DEST (pat), newpat, 1);
854 }
855 else if (GET_CODE (pat) == PARALLEL)
856 for (i = 0; i < XVECLEN (pat, 0); i++)
857 {
858 rtx s = XVECEXP (pat, 0, i);
859
860 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
861 {
862 newpat = simplify_rtx (SET_SRC (s));
863 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
864 validate_change (insn, &SET_SRC (s), newpat, 1);
865 newpat = simplify_rtx (SET_DEST (s));
866 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
867 validate_change (insn, &SET_DEST (s), newpat, 1);
868 }
869 }
870 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
871 }
872 \f
873 #ifdef HAVE_cc0
874 /* Return 1 if the insn using CC0 set by INSN does not contain
875 any ordered tests applied to the condition codes.
876 EQ and NE tests do not count. */
877
878 int
879 next_insn_tests_no_inequality (rtx insn)
880 {
881 rtx next = next_cc0_user (insn);
882
883 /* If there is no next insn, we have to take the conservative choice. */
884 if (next == 0)
885 return 0;
886
887 return (INSN_P (next)
888 && ! inequality_comparisons_p (PATTERN (next)));
889 }
890 #endif
891 \f
892 /* Return 1 if OP is a valid general operand for machine mode MODE.
893 This is either a register reference, a memory reference,
894 or a constant. In the case of a memory reference, the address
895 is checked for general validity for the target machine.
896
897 Register and memory references must have mode MODE in order to be valid,
898 but some constants have no machine mode and are valid for any mode.
899
900 If MODE is VOIDmode, OP is checked for validity for whatever mode
901 it has.
902
903 The main use of this function is as a predicate in match_operand
904 expressions in the machine description.
905
906 For an explanation of this function's behavior for registers of
907 class NO_REGS, see the comment for `register_operand'. */
908
909 int
910 general_operand (rtx op, enum machine_mode mode)
911 {
912 enum rtx_code code = GET_CODE (op);
913
914 if (mode == VOIDmode)
915 mode = GET_MODE (op);
916
917 /* Don't accept CONST_INT or anything similar
918 if the caller wants something floating. */
919 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
920 && GET_MODE_CLASS (mode) != MODE_INT
921 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
922 return 0;
923
924 if (CONST_INT_P (op)
925 && mode != VOIDmode
926 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
927 return 0;
928
929 if (CONSTANT_P (op))
930 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
931 || mode == VOIDmode)
932 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
933 && LEGITIMATE_CONSTANT_P (op));
934
935 /* Except for certain constants with VOIDmode, already checked for,
936 OP's mode must match MODE if MODE specifies a mode. */
937
938 if (GET_MODE (op) != mode)
939 return 0;
940
941 if (code == SUBREG)
942 {
943 rtx sub = SUBREG_REG (op);
944
945 #ifdef INSN_SCHEDULING
946 /* On machines that have insn scheduling, we want all memory
 947          references to be explicit, so outlaw paradoxical SUBREGs.
948 However, we must allow them after reload so that they can
949 get cleaned up by cleanup_subreg_operands. */
950 if (!reload_completed && MEM_P (sub)
951 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
952 return 0;
953 #endif
954 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
 955          may result in an incorrect reference.  We should simplify all valid
956 subregs of MEM anyway. But allow this after reload because we
957 might be called from cleanup_subreg_operands.
958
959 ??? This is a kludge. */
960 if (!reload_completed && SUBREG_BYTE (op) != 0
961 && MEM_P (sub))
962 return 0;
963
964 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
965 create such rtl, and we must reject it. */
966 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
967 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
968 return 0;
969
970 op = sub;
971 code = GET_CODE (op);
972 }
973
974 if (code == REG)
975 /* A register whose class is NO_REGS is not a general operand. */
976 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
977 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
978
979 if (code == MEM)
980 {
981 rtx y = XEXP (op, 0);
982
983 if (! volatile_ok && MEM_VOLATILE_P (op))
984 return 0;
985
986 /* Use the mem's mode, since it will be reloaded thus. */
987 if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
988 return 1;
989 }
990
991 return 0;
992 }
993 \f
994 /* Return 1 if OP is a valid memory address for a memory reference
995 of mode MODE.
996
997 The main use of this function is as a predicate in match_operand
998 expressions in the machine description. */
999
1000 int
1001 address_operand (rtx op, enum machine_mode mode)
1002 {
1003 return memory_address_p (mode, op);
1004 }
1005
1006 /* Return 1 if OP is a register reference of mode MODE.
1007 If MODE is VOIDmode, accept a register in any mode.
1008
1009 The main use of this function is as a predicate in match_operand
1010 expressions in the machine description.
1011
1012 As a special exception, registers whose class is NO_REGS are
1013 not accepted by `register_operand'. The reason for this change
1014 is to allow the representation of special architecture artifacts
1015 (such as a condition code register) without extending the rtl
1016 definitions. Since registers of class NO_REGS cannot be used
1017 as registers in any case where register classes are examined,
1018 it is most consistent to keep this function from accepting them. */
1019
1020 int
1021 register_operand (rtx op, enum machine_mode mode)
1022 {
1023 if (GET_MODE (op) != mode && mode != VOIDmode)
1024 return 0;
1025
1026 if (GET_CODE (op) == SUBREG)
1027 {
1028 rtx sub = SUBREG_REG (op);
1029
1030 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1031 because it is guaranteed to be reloaded into one.
1032 Just make sure the MEM is valid in itself.
1033 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1034 but currently it does result from (SUBREG (REG)...) where the
1035 reg went on the stack.) */
1036 if (! reload_completed && MEM_P (sub))
1037 return general_operand (op, mode);
1038
1039 #ifdef CANNOT_CHANGE_MODE_CLASS
1040 if (REG_P (sub)
1041 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1042 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1043 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1044 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1045 return 0;
1046 #endif
1047
1048 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1049 create such rtl, and we must reject it. */
1050 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1051 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1052 return 0;
1053
1054 op = sub;
1055 }
1056
1057 /* We don't consider registers whose class is NO_REGS
1058 to be a register operand. */
1059 return (REG_P (op)
1060 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1061 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1062 }
1063
1064 /* Return 1 for a register in Pmode; ignore the tested mode. */
1065
1066 int
1067 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1068 {
1069 return register_operand (op, Pmode);
1070 }
1071
1072 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1073 or a hard register. */
1074
1075 int
1076 scratch_operand (rtx op, enum machine_mode mode)
1077 {
1078 if (GET_MODE (op) != mode && mode != VOIDmode)
1079 return 0;
1080
1081 return (GET_CODE (op) == SCRATCH
1082 || (REG_P (op)
1083 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1084 }
1085
1086 /* Return 1 if OP is a valid immediate operand for mode MODE.
1087
1088 The main use of this function is as a predicate in match_operand
1089 expressions in the machine description. */
1090
1091 int
1092 immediate_operand (rtx op, enum machine_mode mode)
1093 {
1094 /* Don't accept CONST_INT or anything similar
1095 if the caller wants something floating. */
1096 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1097 && GET_MODE_CLASS (mode) != MODE_INT
1098 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1099 return 0;
1100
1101 if (CONST_INT_P (op)
1102 && mode != VOIDmode
1103 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1104 return 0;
1105
1106 return (CONSTANT_P (op)
1107 && (GET_MODE (op) == mode || mode == VOIDmode
1108 || GET_MODE (op) == VOIDmode)
1109 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1110 && LEGITIMATE_CONSTANT_P (op));
1111 }
1112
1113 /* Returns 1 if OP is an operand that is a CONST_INT. */
1114
1115 int
1116 const_int_operand (rtx op, enum machine_mode mode)
1117 {
1118 if (!CONST_INT_P (op))
1119 return 0;
1120
1121 if (mode != VOIDmode
1122 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1123 return 0;
1124
1125 return 1;
1126 }
1127
1128 /* Returns 1 if OP is an operand that is a constant integer or constant
1129 floating-point number. */
1130
1131 int
1132 const_double_operand (rtx op, enum machine_mode mode)
1133 {
1134 /* Don't accept CONST_INT or anything similar
1135 if the caller wants something floating. */
1136 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1137 && GET_MODE_CLASS (mode) != MODE_INT
1138 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1139 return 0;
1140
1141 return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
1142 && (mode == VOIDmode || GET_MODE (op) == mode
1143 || GET_MODE (op) == VOIDmode));
1144 }
1145
1146 /* Return 1 if OP is a general operand that is not an immediate operand. */
1147
1148 int
1149 nonimmediate_operand (rtx op, enum machine_mode mode)
1150 {
1151 return (general_operand (op, mode) && ! CONSTANT_P (op));
1152 }
1153
1154 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1155
1156 int
1157 nonmemory_operand (rtx op, enum machine_mode mode)
1158 {
1159 if (CONSTANT_P (op))
1160 return immediate_operand (op, mode);
1161
1162 if (GET_MODE (op) != mode && mode != VOIDmode)
1163 return 0;
1164
1165 if (GET_CODE (op) == SUBREG)
1166 {
1167 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1168 because it is guaranteed to be reloaded into one.
1169 Just make sure the MEM is valid in itself.
1170 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1171 but currently it does result from (SUBREG (REG)...) where the
1172 reg went on the stack.) */
1173 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1174 return general_operand (op, mode);
1175 op = SUBREG_REG (op);
1176 }
1177
1178 /* We don't consider registers whose class is NO_REGS
1179 to be a register operand. */
1180 return (REG_P (op)
1181 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1182 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1183 }
1184
1185 /* Return 1 if OP is a valid operand that stands for pushing a
1186 value of mode MODE onto the stack.
1187
1188 The main use of this function is as a predicate in match_operand
1189 expressions in the machine description. */
1190
1191 int
1192 push_operand (rtx op, enum machine_mode mode)
1193 {
1194 unsigned int rounded_size = GET_MODE_SIZE (mode);
1195
1196 #ifdef PUSH_ROUNDING
1197 rounded_size = PUSH_ROUNDING (rounded_size);
1198 #endif
1199
1200 if (!MEM_P (op))
1201 return 0;
1202
1203 if (mode != VOIDmode && GET_MODE (op) != mode)
1204 return 0;
1205
1206 op = XEXP (op, 0);
1207
1208 if (rounded_size == GET_MODE_SIZE (mode))
1209 {
1210 if (GET_CODE (op) != STACK_PUSH_CODE)
1211 return 0;
1212 }
1213 else
1214 {
1215 if (GET_CODE (op) != PRE_MODIFY
1216 || GET_CODE (XEXP (op, 1)) != PLUS
1217 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1218 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1219 #ifdef STACK_GROWS_DOWNWARD
1220 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1221 #else
1222 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1223 #endif
1224 )
1225 return 0;
1226 }
1227
1228 return XEXP (op, 0) == stack_pointer_rtx;
1229 }
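/* For illustration, with STACK_PUSH_CODE == PRE_DEC the operand accepted
   above has the form

     (mem:M (pre_dec (reg sp)))

   whereas a push whose PUSH_ROUNDING size differs from GET_MODE_SIZE (M)
   instead uses the PRE_MODIFY form

     (mem:M (pre_modify (reg sp) (plus (reg sp) (const_int -N))))

   where N is the rounded size (negated here because the stack grows
   downward in this configuration).  */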
1230
1231 /* Return 1 if OP is a valid operand that stands for popping a
1232 value of mode MODE off the stack.
1233
1234 The main use of this function is as a predicate in match_operand
1235 expressions in the machine description. */
1236
1237 int
1238 pop_operand (rtx op, enum machine_mode mode)
1239 {
1240 if (!MEM_P (op))
1241 return 0;
1242
1243 if (mode != VOIDmode && GET_MODE (op) != mode)
1244 return 0;
1245
1246 op = XEXP (op, 0);
1247
1248 if (GET_CODE (op) != STACK_POP_CODE)
1249 return 0;
1250
1251 return XEXP (op, 0) == stack_pointer_rtx;
1252 }
1253
1254 /* Return 1 if ADDR is a valid memory address
1255 for mode MODE in address space AS. */
1256
1257 int
1258 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1259 rtx addr, addr_space_t as)
1260 {
1261 #ifdef GO_IF_LEGITIMATE_ADDRESS
1262 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1263 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1264 return 0;
1265
1266 win:
1267 return 1;
1268 #else
1269 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1270 #endif
1271 }
1272
1273 /* Return 1 if OP is a valid memory reference with mode MODE,
1274 including a valid address.
1275
1276 The main use of this function is as a predicate in match_operand
1277 expressions in the machine description. */
1278
1279 int
1280 memory_operand (rtx op, enum machine_mode mode)
1281 {
1282 rtx inner;
1283
1284 if (! reload_completed)
1285 /* Note that no SUBREG is a memory operand before end of reload pass,
1286 because (SUBREG (MEM...)) forces reloading into a register. */
1287 return MEM_P (op) && general_operand (op, mode);
1288
1289 if (mode != VOIDmode && GET_MODE (op) != mode)
1290 return 0;
1291
1292 inner = op;
1293 if (GET_CODE (inner) == SUBREG)
1294 inner = SUBREG_REG (inner);
1295
1296 return (MEM_P (inner) && general_operand (op, mode));
1297 }
1298
1299 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1300 that is, a memory reference whose address is a general_operand. */
1301
1302 int
1303 indirect_operand (rtx op, enum machine_mode mode)
1304 {
1305 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1306 if (! reload_completed
1307 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1308 {
1309 int offset = SUBREG_BYTE (op);
1310 rtx inner = SUBREG_REG (op);
1311
1312 if (mode != VOIDmode && GET_MODE (op) != mode)
1313 return 0;
1314
1315 /* The only way that we can have a general_operand as the resulting
1316 address is if OFFSET is zero and the address already is an operand
1317 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1318 operand. */
1319
1320 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1321 || (GET_CODE (XEXP (inner, 0)) == PLUS
1322 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1323 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1324 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1325 }
1326
1327 return (MEM_P (op)
1328 && memory_operand (op, mode)
1329 && general_operand (XEXP (op, 0), Pmode));
1330 }
1331
1332 /* Return 1 if this is an ordered comparison operator (not including
1333 ORDERED and UNORDERED). */
1334
1335 int
1336 ordered_comparison_operator (rtx op, enum machine_mode mode)
1337 {
1338 if (mode != VOIDmode && GET_MODE (op) != mode)
1339 return false;
1340 switch (GET_CODE (op))
1341 {
1342 case EQ:
1343 case NE:
1344 case LT:
1345 case LTU:
1346 case LE:
1347 case LEU:
1348 case GT:
1349 case GTU:
1350 case GE:
1351 case GEU:
1352 return true;
1353 default:
1354 return false;
1355 }
1356 }
1357
1358 /* Return 1 if this is a comparison operator. This allows the use of
1359 MATCH_OPERATOR to recognize all the branch insns. */
1360
1361 int
1362 comparison_operator (rtx op, enum machine_mode mode)
1363 {
1364 return ((mode == VOIDmode || GET_MODE (op) == mode)
1365 && COMPARISON_P (op));
1366 }
1367 \f
1368 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1369
1370 rtx
1371 extract_asm_operands (rtx body)
1372 {
1373 rtx tmp;
1374 switch (GET_CODE (body))
1375 {
1376 case ASM_OPERANDS:
1377 return body;
1378
1379 case SET:
1380 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1381 tmp = SET_SRC (body);
1382 if (GET_CODE (tmp) == ASM_OPERANDS)
1383 return tmp;
1384 break;
1385
1386 case PARALLEL:
1387 tmp = XVECEXP (body, 0, 0);
1388 if (GET_CODE (tmp) == ASM_OPERANDS)
1389 return tmp;
1390 if (GET_CODE (tmp) == SET)
1391 {
1392 tmp = SET_SRC (tmp);
1393 if (GET_CODE (tmp) == ASM_OPERANDS)
1394 return tmp;
1395 }
1396 break;
1397
1398 default:
1399 break;
1400 }
1401 return NULL;
1402 }
1403
1404 /* If BODY is an insn body that uses ASM_OPERANDS,
1405 return the number of operands (both input and output) in the insn.
1406 Otherwise return -1. */
1407
1408 int
1409 asm_noperands (const_rtx body)
1410 {
1411 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1412 int n_sets = 0;
1413
1414 if (asm_op == NULL)
1415 return -1;
1416
1417 if (GET_CODE (body) == SET)
1418 n_sets = 1;
1419 else if (GET_CODE (body) == PARALLEL)
1420 {
1421 int i;
1422 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1423 {
1424 /* Multiple output operands, or 1 output plus some clobbers:
1425 body is
1426 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1427 /* Count backwards through CLOBBERs to determine number of SETs. */
1428 for (i = XVECLEN (body, 0); i > 0; i--)
1429 {
1430 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1431 break;
1432 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1433 return -1;
1434 }
1435
1436 /* N_SETS is now number of output operands. */
1437 n_sets = i;
1438
1439 /* Verify that all the SETs we have
1440 came from a single original asm_operands insn
1441 (so that invalid combinations are blocked). */
1442 for (i = 0; i < n_sets; i++)
1443 {
1444 rtx elt = XVECEXP (body, 0, i);
1445 if (GET_CODE (elt) != SET)
1446 return -1;
1447 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1448 return -1;
1449 /* If these ASM_OPERANDS rtx's came from different original insns
1450 then they aren't allowed together. */
1451 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1452 != ASM_OPERANDS_INPUT_VEC (asm_op))
1453 return -1;
1454 }
1455 }
1456 else
1457 {
1458 /* 0 outputs, but some clobbers:
1459 body is [(asm_operands ...) (clobber (reg ...))...]. */
1460 /* Make sure all the other parallel things really are clobbers. */
1461 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1462 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1463 return -1;
1464 }
1465 }
1466
1467 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1468 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1469 }
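/* For illustration: for an asm statement with two outputs, three inputs and
   one clobber, BODY is a PARALLEL of two SETs followed by a CLOBBER, the
   backwards scan above finds n_sets == 2, and asm_noperands returns
   3 + 0 + 2 = 5.  */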
1470
1471 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1472 copy its operands (both input and output) into the vector OPERANDS,
1473 the locations of the operands within the insn into the vector OPERAND_LOCS,
1474 and the constraints for the operands into CONSTRAINTS.
1475 Write the modes of the operands into MODES.
1476 Return the assembler-template.
1477
1478 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1479 we don't store that info. */
1480
1481 const char *
1482 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1483 const char **constraints, enum machine_mode *modes,
1484 location_t *loc)
1485 {
1486 int nbase = 0, n, i;
1487 rtx asmop;
1488
1489 switch (GET_CODE (body))
1490 {
1491 case ASM_OPERANDS:
1492 /* Zero output asm: BODY is (asm_operands ...). */
1493 asmop = body;
1494 break;
1495
1496 case SET:
1497 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1498 asmop = SET_SRC (body);
1499
1500 /* The output is in the SET.
1501 Its constraint is in the ASM_OPERANDS itself. */
1502 if (operands)
1503 operands[0] = SET_DEST (body);
1504 if (operand_locs)
1505 operand_locs[0] = &SET_DEST (body);
1506 if (constraints)
1507 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1508 if (modes)
1509 modes[0] = GET_MODE (SET_DEST (body));
1510 nbase = 1;
1511 break;
1512
1513 case PARALLEL:
1514 {
1515 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1516
1517 asmop = XVECEXP (body, 0, 0);
1518 if (GET_CODE (asmop) == SET)
1519 {
1520 asmop = SET_SRC (asmop);
1521
1522 /* At least one output, plus some CLOBBERs. The outputs are in
1523 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1524 for (i = 0; i < nparallel; i++)
1525 {
1526 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1527 break; /* Past last SET */
1528 if (operands)
1529 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1530 if (operand_locs)
1531 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1532 if (constraints)
1533 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1534 if (modes)
1535 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1536 }
1537 nbase = i;
1538 }
1539 break;
1540 }
1541
1542 default:
1543 gcc_unreachable ();
1544 }
1545
1546 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1547 for (i = 0; i < n; i++)
1548 {
1549 if (operand_locs)
1550 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1551 if (operands)
1552 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1553 if (constraints)
1554 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1555 if (modes)
1556 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1557 }
1558 nbase += n;
1559
1560 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1561 for (i = 0; i < n; i++)
1562 {
1563 if (operand_locs)
1564 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1565 if (operands)
1566 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1567 if (constraints)
1568 constraints[nbase + i] = "";
1569 if (modes)
1570 modes[nbase + i] = Pmode;
1571 }
1572
1573 if (loc)
1574 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1575
1576 return ASM_OPERANDS_TEMPLATE (asmop);
1577 }
1578
1579 /* Check if an asm_operand matches its constraints.
1580 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1581
1582 int
1583 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1584 {
1585 int result = 0;
1586 #ifdef AUTO_INC_DEC
1587 bool incdec_ok = false;
1588 #endif
1589
1590 /* Use constrain_operands after reload. */
1591 gcc_assert (!reload_completed);
1592
1593 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1594 many alternatives as required to match the other operands. */
1595 if (*constraint == '\0')
1596 result = 1;
1597
1598 while (*constraint)
1599 {
1600 char c = *constraint;
1601 int len;
1602 switch (c)
1603 {
1604 case ',':
1605 constraint++;
1606 continue;
1607 case '=':
1608 case '+':
1609 case '*':
1610 case '%':
1611 case '!':
1612 case '#':
1613 case '&':
1614 case '?':
1615 break;
1616
1617 case '0': case '1': case '2': case '3': case '4':
1618 case '5': case '6': case '7': case '8': case '9':
 1619	  /* If the caller provided a constraints pointer, look up
 1620	     the matching constraint.  Otherwise, our caller should have
1621 given us the proper matching constraint, but we can't
1622 actually fail the check if they didn't. Indicate that
1623 results are inconclusive. */
1624 if (constraints)
1625 {
1626 char *end;
1627 unsigned long match;
1628
1629 match = strtoul (constraint, &end, 10);
1630 if (!result)
1631 result = asm_operand_ok (op, constraints[match], NULL);
1632 constraint = (const char *) end;
1633 }
1634 else
1635 {
1636 do
1637 constraint++;
1638 while (ISDIGIT (*constraint));
1639 if (! result)
1640 result = -1;
1641 }
1642 continue;
1643
1644 case 'p':
1645 if (address_operand (op, VOIDmode))
1646 result = 1;
1647 break;
1648
1649 case TARGET_MEM_CONSTRAINT:
1650 case 'V': /* non-offsettable */
1651 if (memory_operand (op, VOIDmode))
1652 result = 1;
1653 break;
1654
1655 case 'o': /* offsettable */
1656 if (offsettable_nonstrict_memref_p (op))
1657 result = 1;
1658 break;
1659
1660 case '<':
1661 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1662 excepting those that expand_call created. Further, on some
1663 machines which do not have generalized auto inc/dec, an inc/dec
1664 is not a memory_operand.
1665
1666 Match any memory and hope things are resolved after reload. */
1667
1668 if (MEM_P (op)
1669 && (1
1670 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1671 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1672 result = 1;
1673 #ifdef AUTO_INC_DEC
1674 incdec_ok = true;
1675 #endif
1676 break;
1677
1678 case '>':
1679 if (MEM_P (op)
1680 && (1
1681 || GET_CODE (XEXP (op, 0)) == PRE_INC
1682 || GET_CODE (XEXP (op, 0)) == POST_INC))
1683 result = 1;
1684 #ifdef AUTO_INC_DEC
1685 incdec_ok = true;
1686 #endif
1687 break;
1688
1689 case 'E':
1690 case 'F':
1691 if (GET_CODE (op) == CONST_DOUBLE
1692 || (GET_CODE (op) == CONST_VECTOR
1693 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1694 result = 1;
1695 break;
1696
1697 case 'G':
1698 if (GET_CODE (op) == CONST_DOUBLE
1699 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1700 result = 1;
1701 break;
1702 case 'H':
1703 if (GET_CODE (op) == CONST_DOUBLE
1704 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1705 result = 1;
1706 break;
1707
1708 case 's':
1709 if (CONST_INT_P (op)
1710 || (GET_CODE (op) == CONST_DOUBLE
1711 && GET_MODE (op) == VOIDmode))
1712 break;
1713 /* Fall through. */
1714
1715 case 'i':
1716 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1717 result = 1;
1718 break;
1719
1720 case 'n':
1721 if (CONST_INT_P (op)
1722 || (GET_CODE (op) == CONST_DOUBLE
1723 && GET_MODE (op) == VOIDmode))
1724 result = 1;
1725 break;
1726
1727 case 'I':
1728 if (CONST_INT_P (op)
1729 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1730 result = 1;
1731 break;
1732 case 'J':
1733 if (CONST_INT_P (op)
1734 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1735 result = 1;
1736 break;
1737 case 'K':
1738 if (CONST_INT_P (op)
1739 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1740 result = 1;
1741 break;
1742 case 'L':
1743 if (CONST_INT_P (op)
1744 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1745 result = 1;
1746 break;
1747 case 'M':
1748 if (CONST_INT_P (op)
1749 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1750 result = 1;
1751 break;
1752 case 'N':
1753 if (CONST_INT_P (op)
1754 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1755 result = 1;
1756 break;
1757 case 'O':
1758 if (CONST_INT_P (op)
1759 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1760 result = 1;
1761 break;
1762 case 'P':
1763 if (CONST_INT_P (op)
1764 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1765 result = 1;
1766 break;
1767
1768 case 'X':
1769 result = 1;
1770 break;
1771
1772 case 'g':
1773 if (general_operand (op, VOIDmode))
1774 result = 1;
1775 break;
1776
1777 default:
1778 /* For all other letters, we first check for a register class,
1779 otherwise it is an EXTRA_CONSTRAINT. */
1780 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1781 {
1782 case 'r':
1783 if (GET_MODE (op) == BLKmode)
1784 break;
1785 if (register_operand (op, VOIDmode))
1786 result = 1;
1787 }
1788 #ifdef EXTRA_CONSTRAINT_STR
1789 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1790 /* Every memory operand can be reloaded to fit. */
1791 result = result || memory_operand (op, VOIDmode);
1792 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1793 /* Every address operand can be reloaded to fit. */
1794 result = result || address_operand (op, VOIDmode);
1795 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1796 result = 1;
1797 #endif
1798 break;
1799 }
1800 len = CONSTRAINT_LEN (c, constraint);
1801 do
1802 constraint++;
1803 while (--len && *constraint);
1804 if (len)
1805 return 0;
1806 }
1807
1808 #ifdef AUTO_INC_DEC
1809 /* For operands without < or > constraints reject side-effects. */
1810 if (!incdec_ok && result && MEM_P (op))
1811 switch (GET_CODE (XEXP (op, 0)))
1812 {
1813 case PRE_INC:
1814 case POST_INC:
1815 case PRE_DEC:
1816 case POST_DEC:
1817 case PRE_MODIFY:
1818 case POST_MODIFY:
1819 return 0;
1820 default:
1821 break;
1822 }
1823 #endif
1824
1825 return result;
1826 }
1827 \f
1828 /* Given an rtx *P, if it is a sum containing an integer constant term,
1829 return the location (type rtx *) of the pointer to that constant term.
1830 Otherwise, return a null pointer. */
1831
1832 rtx *
1833 find_constant_term_loc (rtx *p)
1834 {
1835 rtx *tem;
1836 enum rtx_code code = GET_CODE (*p);
1837
1838 /* If *P IS such a constant term, P is its location. */
1839
1840 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1841 || code == CONST)
1842 return p;
1843
1844 /* Otherwise, if not a sum, it has no constant term. */
1845
1846 if (GET_CODE (*p) != PLUS)
1847 return 0;
1848
1849 /* If one of the summands is constant, return its location. */
1850
1851 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1852 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1853 return p;
1854
1855 /* Otherwise, check each summand for containing a constant term. */
1856
1857 if (XEXP (*p, 0) != 0)
1858 {
1859 tem = find_constant_term_loc (&XEXP (*p, 0));
1860 if (tem != 0)
1861 return tem;
1862 }
1863
1864 if (XEXP (*p, 1) != 0)
1865 {
1866 tem = find_constant_term_loc (&XEXP (*p, 1));
1867 if (tem != 0)
1868 return tem;
1869 }
1870
1871 return 0;
1872 }
1873 \f
1874 /* Return 1 if OP is a memory reference
1875 whose address contains no side effects
1876 and remains valid after the addition
1877 of a positive integer less than the
1878 size of the object being referenced.
1879
1880 We assume that the original address is valid and do not check it.
1881
1882 This uses strict_memory_address_p as a subroutine, so
1883 don't use it before reload. */
1884
1885 int
1886 offsettable_memref_p (rtx op)
1887 {
1888 return ((MEM_P (op))
1889 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1890 MEM_ADDR_SPACE (op)));
1891 }
1892
1893 /* Similar, but don't require a strictly valid mem ref:
1894 consider pseudo-regs valid as index or base regs. */
1895
1896 int
1897 offsettable_nonstrict_memref_p (rtx op)
1898 {
1899 return ((MEM_P (op))
1900 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1901 MEM_ADDR_SPACE (op)));
1902 }
1903
1904 /* Return 1 if Y is a memory address which contains no side effects
1905 and would remain valid for address space AS after the addition of
1906 a positive integer less than the size of that mode.
1907
1908 We assume that the original address is valid and do not check it.
1909 We do check that it is valid for narrower modes.
1910
1911 If STRICTP is nonzero, we require a strictly valid address,
1912 for the sake of use in reload.c. */
1913
1914 int
1915 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1916 addr_space_t as)
1917 {
1918 enum rtx_code ycode = GET_CODE (y);
1919 rtx z;
1920 rtx y1 = y;
1921 rtx *y2;
1922 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1923 (strictp ? strict_memory_address_addr_space_p
1924 : memory_address_addr_space_p);
1925 unsigned int mode_sz = GET_MODE_SIZE (mode);
1926
1927 if (CONSTANT_ADDRESS_P (y))
1928 return 1;
1929
1930 /* Adjusting an offsettable address involves changing to a narrower mode.
1931 Make sure that's OK. */
1932
1933 if (mode_dependent_address_p (y))
1934 return 0;
1935
1936 /* ??? How much offset does an offsettable BLKmode reference need?
1937 Clearly that depends on the situation in which it's being used.
1938 However, the current situation in which we test 0xffffffff is
1939 less than ideal. Caveat user. */
1940 if (mode_sz == 0)
1941 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1942
1943 /* If the expression contains a constant term,
1944 see if it remains valid when max possible offset is added. */
1945
1946 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1947 {
1948 int good;
1949
1950 y1 = *y2;
1951 *y2 = plus_constant (*y2, mode_sz - 1);
1952 /* Use QImode because an odd displacement may be automatically invalid
1953 for any wider mode. But it should be valid for a single byte. */
1954 good = (*addressp) (QImode, y, as);
1955
1956 /* In any case, restore old contents of memory. */
1957 *y2 = y1;
1958 return good;
1959 }
1960
1961 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1962 return 0;
1963
1964 /* The offset added here is chosen as the maximum offset that
1965 any instruction could need to add when operating on something
1966 of the specified mode. We assume that if Y and Y+c are
1967 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1968 go inside a LO_SUM here, so we do so as well. */
1969 if (GET_CODE (y) == LO_SUM
1970 && mode != BLKmode
1971 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1972 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1973 plus_constant (XEXP (y, 1), mode_sz - 1));
1974 else
1975 z = plus_constant (y, mode_sz - 1);
1976
1977 /* Use QImode because an odd displacement may be automatically invalid
1978 for any wider mode. But it should be valid for a single byte. */
1979 return (*addressp) (QImode, z, as);
1980 }
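/* Worked example (illustrative, not part of the original source): for a
   4-byte SImode access whose address is (plus (reg) (const_int 8)), the
   code above asks whether (plus (reg) (const_int 11)), the largest
   in-object displacement, is still a valid QImode address.  */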
1981
1982 /* Return 1 if ADDR is an address-expression whose effect depends
1983 on the mode of the memory reference it is used in.
1984
1985 Autoincrement addressing is a typical example of mode-dependence
1986 because the amount of the increment depends on the mode. */
1987
1988 bool
1989 mode_dependent_address_p (rtx addr)
1990 {
1991 /* Auto-increment addressing with anything other than post_modify
1992 or pre_modify always introduces a mode dependency. Catch such
1993 cases now instead of deferring to the target. */
1994 if (GET_CODE (addr) == PRE_INC
1995 || GET_CODE (addr) == POST_INC
1996 || GET_CODE (addr) == PRE_DEC
1997 || GET_CODE (addr) == POST_DEC)
1998 return true;
1999
2000 return targetm.mode_dependent_address_p (addr);
2001 }
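/* For example (illustrative, not part of the original source):
   (post_inc (reg)) bumps the register by GET_MODE_SIZE of the enclosing
   MEM, so the same address rtx means different things in SImode and in
   DImode and is therefore mode-dependent.  */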
2002 \f
2003 /* Like extract_insn, but save the insn extracted and don't extract it
2004 again when called for the same insn, on the assumption that recog_data
2005 still contains valid information. This is used primarily by the gen_attr
2006 infrastructure, which often extracts the same insn repeatedly. */
2007 void
2008 extract_insn_cached (rtx insn)
2009 {
2010 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2011 return;
2012 extract_insn (insn);
2013 recog_data.insn = insn;
2014 }
2015
2016 /* Do cached extract_insn, constrain_operands and complain about failures.
2017 Used by insn_attrtab. */
2018 void
2019 extract_constrain_insn_cached (rtx insn)
2020 {
2021 extract_insn_cached (insn);
2022 if (which_alternative == -1
2023 && !constrain_operands (reload_completed))
2024 fatal_insn_not_found (insn);
2025 }
2026
2027 /* Do cached constrain_operands and complain about failures. */
2028 int
2029 constrain_operands_cached (int strict)
2030 {
2031 if (which_alternative == -1)
2032 return constrain_operands (strict);
2033 else
2034 return 1;
2035 }
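/* Illustrative sketch, not part of the original recog.c: a typical consumer
   of the cached interface above (output or attribute code) looks roughly
   like the function below.  The function itself is hypothetical; the calls
   and data structures are the ones defined in this file.  */
#if 0
static void
example_dump_output_operands (rtx insn)
{
  int i;

  /* Extract operands and pick the matching constraint alternative.  */
  extract_constrain_insn_cached (insn);

  /* which_alternative now indexes the alternative that matched;
     recog_data describes the operands.  */
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT)
      debug_rtx (recog_data.operand[i]);
}
#endif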
2036 \f
2037 /* Analyze INSN and fill in recog_data. */
2038
2039 void
2040 extract_insn (rtx insn)
2041 {
2042 int i;
2043 int icode;
2044 int noperands;
2045 rtx body = PATTERN (insn);
2046
2047 recog_data.n_operands = 0;
2048 recog_data.n_alternatives = 0;
2049 recog_data.n_dups = 0;
2050 recog_data.is_asm = false;
2051
2052 switch (GET_CODE (body))
2053 {
2054 case USE:
2055 case CLOBBER:
2056 case ASM_INPUT:
2057 case ADDR_VEC:
2058 case ADDR_DIFF_VEC:
2059 case VAR_LOCATION:
2060 return;
2061
2062 case SET:
2063 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2064 goto asm_insn;
2065 else
2066 goto normal_insn;
2067 case PARALLEL:
2068 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2069 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2070 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2071 goto asm_insn;
2072 else
2073 goto normal_insn;
2074 case ASM_OPERANDS:
2075 asm_insn:
2076 recog_data.n_operands = noperands = asm_noperands (body);
2077 if (noperands >= 0)
2078 {
2079 /* This insn is an `asm' with operands. */
2080
2081 /* expand_asm_operands makes sure there aren't too many operands. */
2082 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2083
2084 /* Now get the operand values and constraints out of the insn. */
2085 decode_asm_operands (body, recog_data.operand,
2086 recog_data.operand_loc,
2087 recog_data.constraints,
2088 recog_data.operand_mode, NULL);
2089 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2090 if (noperands > 0)
2091 {
2092 const char *p = recog_data.constraints[0];
2093 recog_data.n_alternatives = 1;
2094 while (*p)
2095 recog_data.n_alternatives += (*p++ == ',');
2096 }
2097 recog_data.is_asm = true;
2098 break;
2099 }
2100 fatal_insn_not_found (insn);
2101
2102 default:
2103 normal_insn:
2104 /* Ordinary insn: recognize it, get the operands via insn_extract
2105 and get the constraints. */
2106
2107 icode = recog_memoized (insn);
2108 if (icode < 0)
2109 fatal_insn_not_found (insn);
2110
2111 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2112 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2113 recog_data.n_dups = insn_data[icode].n_dups;
2114
2115 insn_extract (insn);
2116
2117 for (i = 0; i < noperands; i++)
2118 {
2119 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2120 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2121 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2122 /* VOIDmode match_operands get their mode from the real operand. */
2123 if (recog_data.operand_mode[i] == VOIDmode)
2124 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2125 }
2126 }
2127 for (i = 0; i < noperands; i++)
2128 recog_data.operand_type[i]
2129 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2130 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2131 : OP_IN);
2132
2133 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2134
2135 if (INSN_CODE (insn) < 0)
2136 for (i = 0; i < recog_data.n_alternatives; i++)
2137 recog_data.alternative_enabled_p[i] = true;
2138 else
2139 {
2140 recog_data.insn = insn;
2141 for (i = 0; i < recog_data.n_alternatives; i++)
2142 {
2143 which_alternative = i;
2144 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2145 }
2146 }
2147
2148 recog_data.insn = NULL;
2149 which_alternative = -1;
2150 }
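/* Editorial note: recog_data.operand_loc[i] points at the operand inside
   PATTERN (INSN), so a pass can rewrite operand I in place with something
   like validate_change (insn, recog_data.operand_loc[i], new_rtx, 0),
   using the validation routines defined earlier in this file; new_rtx is a
   hypothetical replacement expression.  */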
2151
2152 /* After calling extract_insn, you can use this function to extract some
2153 information from the constraint strings into a more usable form.
2154 The collected data is stored in recog_op_alt. */
2155 void
2156 preprocess_constraints (void)
2157 {
2158 int i;
2159
2160 for (i = 0; i < recog_data.n_operands; i++)
2161 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2162 * sizeof (struct operand_alternative)));
2163
2164 for (i = 0; i < recog_data.n_operands; i++)
2165 {
2166 int j;
2167 struct operand_alternative *op_alt;
2168 const char *p = recog_data.constraints[i];
2169
2170 op_alt = recog_op_alt[i];
2171
2172 for (j = 0; j < recog_data.n_alternatives; j++)
2173 {
2174 op_alt[j].cl = NO_REGS;
2175 op_alt[j].constraint = p;
2176 op_alt[j].matches = -1;
2177 op_alt[j].matched = -1;
2178
2179 if (!recog_data.alternative_enabled_p[j])
2180 {
2181 p = skip_alternative (p);
2182 continue;
2183 }
2184
2185 if (*p == '\0' || *p == ',')
2186 {
2187 op_alt[j].anything_ok = 1;
2188 continue;
2189 }
2190
2191 for (;;)
2192 {
2193 char c = *p;
2194 if (c == '#')
2195 do
2196 c = *++p;
2197 while (c != ',' && c != '\0');
2198 if (c == ',' || c == '\0')
2199 {
2200 p++;
2201 break;
2202 }
2203
2204 switch (c)
2205 {
2206 case '=': case '+': case '*': case '%':
2207 case 'E': case 'F': case 'G': case 'H':
2208 case 's': case 'i': case 'n':
2209 case 'I': case 'J': case 'K': case 'L':
2210 case 'M': case 'N': case 'O': case 'P':
2211 /* These don't say anything we care about. */
2212 break;
2213
2214 case '?':
2215 op_alt[j].reject += 6;
2216 break;
2217 case '!':
2218 op_alt[j].reject += 600;
2219 break;
2220 case '&':
2221 op_alt[j].earlyclobber = 1;
2222 break;
2223
2224 case '0': case '1': case '2': case '3': case '4':
2225 case '5': case '6': case '7': case '8': case '9':
2226 {
2227 char *end;
2228 op_alt[j].matches = strtoul (p, &end, 10);
2229 recog_op_alt[op_alt[j].matches][j].matched = i;
2230 p = end;
2231 }
2232 continue;
2233
2234 case TARGET_MEM_CONSTRAINT:
2235 op_alt[j].memory_ok = 1;
2236 break;
2237 case '<':
2238 op_alt[j].decmem_ok = 1;
2239 break;
2240 case '>':
2241 op_alt[j].incmem_ok = 1;
2242 break;
2243 case 'V':
2244 op_alt[j].nonoffmem_ok = 1;
2245 break;
2246 case 'o':
2247 op_alt[j].offmem_ok = 1;
2248 break;
2249 case 'X':
2250 op_alt[j].anything_ok = 1;
2251 break;
2252
2253 case 'p':
2254 op_alt[j].is_address = 1;
2255 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2256 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2257 break;
2258
2259 case 'g':
2260 case 'r':
2261 op_alt[j].cl =
2262 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2263 break;
2264
2265 default:
2266 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2267 {
2268 op_alt[j].memory_ok = 1;
2269 break;
2270 }
2271 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2272 {
2273 op_alt[j].is_address = 1;
2274 op_alt[j].cl
2275 = (reg_class_subunion
2276 [(int) op_alt[j].cl]
2277 [(int) base_reg_class (VOIDmode, ADDRESS,
2278 SCRATCH)]);
2279 break;
2280 }
2281
2282 op_alt[j].cl
2283 = (reg_class_subunion
2284 [(int) op_alt[j].cl]
2285 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2286 break;
2287 }
2288 p += CONSTRAINT_LEN (c, p);
2289 }
2290 }
2291 }
2292 }
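/* Illustrative sketch, not part of the original recog.c: how a pass might
   use recog_op_alt after the two calls above to ask which register class
   operand OP may use in alternative ALT.  The helper is hypothetical.  */
#if 0
static enum reg_class
example_operand_class (rtx insn, int op, int alt)
{
  extract_insn (insn);
  preprocess_constraints ();

  if (recog_op_alt[op][alt].anything_ok)
    return ALL_REGS;
  return recog_op_alt[op][alt].cl;
}
#endif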
2293
2294 /* Check the operands of an insn against the insn's operand constraints
2295 and return 1 if they are valid.
2296 The information about the insn's operands, constraints, operand modes
2297 etc. is obtained from the global variables set up by extract_insn.
2298
2299 WHICH_ALTERNATIVE is set to a number which indicates which
2300 alternative of constraints was matched: 0 for the first alternative,
2301 1 for the next, etc.
2302
2303 In addition, when two operands are required to match
2304 and it happens that the output operand is (reg) while the
2305 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2306 make the output operand look like the input.
2307 This is because the output operand is the one the template will print.
2308
2309 This is used in final, just before printing the assembler code and by
2310 the routines that determine an insn's attribute.
2311
2312 If STRICT is a positive nonzero value, it means that we have been
2313 called after reload has been completed. In that case, we must
2314 do all checks strictly. If it is zero, it means that we have been called
2315 before reload has completed. In that case, we first try to see if we can
2316 find an alternative that matches strictly. If not, we try again, this
2317 time assuming that reload will fix up the insn. This provides a "best
2318 guess" for the alternative and is used to compute attributes of insns prior
2319 to reload. A negative value of STRICT is used for this internal call. */
2320
2321 struct funny_match
2322 {
2323 int this_op, other;
2324 };
2325
2326 int
2327 constrain_operands (int strict)
2328 {
2329 const char *constraints[MAX_RECOG_OPERANDS];
2330 int matching_operands[MAX_RECOG_OPERANDS];
2331 int earlyclobber[MAX_RECOG_OPERANDS];
2332 int c;
2333
2334 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2335 int funny_match_index;
2336
2337 which_alternative = 0;
2338 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2339 return 1;
2340
2341 for (c = 0; c < recog_data.n_operands; c++)
2342 {
2343 constraints[c] = recog_data.constraints[c];
2344 matching_operands[c] = -1;
2345 }
2346
2347 do
2348 {
2349 int seen_earlyclobber_at = -1;
2350 int opno;
2351 int lose = 0;
2352 funny_match_index = 0;
2353
2354 if (!recog_data.alternative_enabled_p[which_alternative])
2355 {
2356 int i;
2357
2358 for (i = 0; i < recog_data.n_operands; i++)
2359 constraints[i] = skip_alternative (constraints[i]);
2360
2361 which_alternative++;
2362 continue;
2363 }
2364
2365 for (opno = 0; opno < recog_data.n_operands; opno++)
2366 {
2367 rtx op = recog_data.operand[opno];
2368 enum machine_mode mode = GET_MODE (op);
2369 const char *p = constraints[opno];
2370 int offset = 0;
2371 int win = 0;
2372 int val;
2373 int len;
2374
2375 earlyclobber[opno] = 0;
2376
2377 /* A unary operator may be accepted by the predicate, but it
2378 is irrelevant for matching constraints. */
2379 if (UNARY_P (op))
2380 op = XEXP (op, 0);
2381
2382 if (GET_CODE (op) == SUBREG)
2383 {
2384 if (REG_P (SUBREG_REG (op))
2385 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2386 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2387 GET_MODE (SUBREG_REG (op)),
2388 SUBREG_BYTE (op),
2389 GET_MODE (op));
2390 op = SUBREG_REG (op);
2391 }
2392
2393 /* An empty constraint or empty alternative
2394 allows anything which matched the pattern. */
2395 if (*p == 0 || *p == ',')
2396 win = 1;
2397
2398 do
2399 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2400 {
2401 case '\0':
2402 len = 0;
2403 break;
2404 case ',':
2405 c = '\0';
2406 break;
2407
2408 case '?': case '!': case '*': case '%':
2409 case '=': case '+':
2410 break;
2411
2412 case '#':
2413 /* Ignore rest of this alternative as far as
2414 constraint checking is concerned. */
2415 do
2416 p++;
2417 while (*p && *p != ',');
2418 len = 0;
2419 break;
2420
2421 case '&':
2422 earlyclobber[opno] = 1;
2423 if (seen_earlyclobber_at < 0)
2424 seen_earlyclobber_at = opno;
2425 break;
2426
2427 case '0': case '1': case '2': case '3': case '4':
2428 case '5': case '6': case '7': case '8': case '9':
2429 {
2430 /* This operand must be the same as a previous one.
2431 This kind of constraint is used for instructions such
2432 as add when they take only two operands.
2433
2434 Note that the lower-numbered operand is passed first.
2435
2436 If we are not testing strictly, assume that this
2437 constraint will be satisfied. */
2438
2439 char *end;
2440 int match;
2441
2442 match = strtoul (p, &end, 10);
2443 p = end;
2444
2445 if (strict < 0)
2446 val = 1;
2447 else
2448 {
2449 rtx op1 = recog_data.operand[match];
2450 rtx op2 = recog_data.operand[opno];
2451
2452 /* A unary operator may be accepted by the predicate,
2453 but it is irrelevant for matching constraints. */
2454 if (UNARY_P (op1))
2455 op1 = XEXP (op1, 0);
2456 if (UNARY_P (op2))
2457 op2 = XEXP (op2, 0);
2458
2459 val = operands_match_p (op1, op2);
2460 }
2461
2462 matching_operands[opno] = match;
2463 matching_operands[match] = opno;
2464
2465 if (val != 0)
2466 win = 1;
2467
2468 /* If output is *x and input is *--x, arrange later
2469 to change the output to *--x as well, since the
2470 output op is the one that will be printed. */
2471 if (val == 2 && strict > 0)
2472 {
2473 funny_match[funny_match_index].this_op = opno;
2474 funny_match[funny_match_index++].other = match;
2475 }
2476 }
2477 len = 0;
2478 break;
2479
2480 case 'p':
2481 /* p is used for address_operands. When we are called by
2482 gen_reload, no one will have checked that the address is
2483 strictly valid, i.e., that all pseudos requiring hard regs
2484 have gotten them. */
2485 if (strict <= 0
2486 || (strict_memory_address_p (recog_data.operand_mode[opno],
2487 op)))
2488 win = 1;
2489 break;
2490
2491 /* No need to check general_operand again;
2492 it was done in insn-recog.c. Well, except that reload
2493 doesn't check the validity of its replacements, but
2494 that should only matter when there's a bug. */
2495 case 'g':
2496 /* Anything goes unless it is a REG and really has a hard reg
2497 but the hard reg is not in the class GENERAL_REGS. */
2498 if (REG_P (op))
2499 {
2500 if (strict < 0
2501 || GENERAL_REGS == ALL_REGS
2502 || (reload_in_progress
2503 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2504 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2505 win = 1;
2506 }
2507 else if (strict < 0 || general_operand (op, mode))
2508 win = 1;
2509 break;
2510
2511 case 'X':
2512 /* This is used for a MATCH_SCRATCH in the cases when
2513 we don't actually need anything. So anything goes
2514 any time. */
2515 win = 1;
2516 break;
2517
2518 case TARGET_MEM_CONSTRAINT:
2519 /* Memory operands must be valid, to the extent
2520 required by STRICT. */
2521 if (MEM_P (op))
2522 {
2523 if (strict > 0
2524 && !strict_memory_address_addr_space_p
2525 (GET_MODE (op), XEXP (op, 0),
2526 MEM_ADDR_SPACE (op)))
2527 break;
2528 if (strict == 0
2529 && !memory_address_addr_space_p
2530 (GET_MODE (op), XEXP (op, 0),
2531 MEM_ADDR_SPACE (op)))
2532 break;
2533 win = 1;
2534 }
2535 /* Before reload, accept what reload can turn into mem. */
2536 else if (strict < 0 && CONSTANT_P (op))
2537 win = 1;
2538 /* During reload, accept a pseudo */
2539 else if (reload_in_progress && REG_P (op)
2540 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2541 win = 1;
2542 break;
2543
2544 case '<':
2545 if (MEM_P (op)
2546 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2547 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2548 win = 1;
2549 break;
2550
2551 case '>':
2552 if (MEM_P (op)
2553 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2554 || GET_CODE (XEXP (op, 0)) == POST_INC))
2555 win = 1;
2556 break;
2557
2558 case 'E':
2559 case 'F':
2560 if (GET_CODE (op) == CONST_DOUBLE
2561 || (GET_CODE (op) == CONST_VECTOR
2562 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2563 win = 1;
2564 break;
2565
2566 case 'G':
2567 case 'H':
2568 if (GET_CODE (op) == CONST_DOUBLE
2569 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2570 win = 1;
2571 break;
2572
2573 case 's':
2574 if (CONST_INT_P (op)
2575 || (GET_CODE (op) == CONST_DOUBLE
2576 && GET_MODE (op) == VOIDmode))
2577 break;
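/* A numeric constant was rejected above; for any other operand,
   fall through to the 'i' test.  */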
2578 case 'i':
2579 if (CONSTANT_P (op))
2580 win = 1;
2581 break;
2582
2583 case 'n':
2584 if (CONST_INT_P (op)
2585 || (GET_CODE (op) == CONST_DOUBLE
2586 && GET_MODE (op) == VOIDmode))
2587 win = 1;
2588 break;
2589
2590 case 'I':
2591 case 'J':
2592 case 'K':
2593 case 'L':
2594 case 'M':
2595 case 'N':
2596 case 'O':
2597 case 'P':
2598 if (CONST_INT_P (op)
2599 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2600 win = 1;
2601 break;
2602
2603 case 'V':
2604 if (MEM_P (op)
2605 && ((strict > 0 && ! offsettable_memref_p (op))
2606 || (strict < 0
2607 && !(CONSTANT_P (op) || MEM_P (op)))
2608 || (reload_in_progress
2609 && !(REG_P (op)
2610 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2611 win = 1;
2612 break;
2613
2614 case 'o':
2615 if ((strict > 0 && offsettable_memref_p (op))
2616 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2617 /* Before reload, accept what reload can handle. */
2618 || (strict < 0
2619 && (CONSTANT_P (op) || MEM_P (op)))
2620 /* During reload, accept a pseudo */
2621 || (reload_in_progress && REG_P (op)
2622 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2623 win = 1;
2624 break;
2625
2626 default:
2627 {
2628 enum reg_class cl;
2629
2630 cl = (c == 'r'
2631 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2632 if (cl != NO_REGS)
2633 {
2634 if (strict < 0
2635 || (strict == 0
2636 && REG_P (op)
2637 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2638 || (strict == 0 && GET_CODE (op) == SCRATCH)
2639 || (REG_P (op)
2640 && reg_fits_class_p (op, cl, offset, mode)))
2641 win = 1;
2642 }
2643 #ifdef EXTRA_CONSTRAINT_STR
2644 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2645 win = 1;
2646
2647 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2648 /* Every memory operand can be reloaded to fit. */
2649 && ((strict < 0 && MEM_P (op))
2650 /* Before reload, accept what reload can turn
2651 into mem. */
2652 || (strict < 0 && CONSTANT_P (op))
2653 /* During reload, accept a pseudo */
2654 || (reload_in_progress && REG_P (op)
2655 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2656 win = 1;
2657 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2658 /* Every address operand can be reloaded to fit. */
2659 && strict < 0)
2660 win = 1;
2661 #endif
2662 break;
2663 }
2664 }
2665 while (p += len, c);
2666
2667 constraints[opno] = p;
2668 /* If this operand did not win somehow,
2669 this alternative loses. */
2670 if (! win)
2671 lose = 1;
2672 }
2673 /* This alternative won; the operands are ok.
2674 Change whichever operands this alternative says to change. */
2675 if (! lose)
2676 {
2677 int opno, eopno;
2678
2679 /* See if any earlyclobber operand conflicts with some other
2680 operand. */
2681
2682 if (strict > 0 && seen_earlyclobber_at >= 0)
2683 for (eopno = seen_earlyclobber_at;
2684 eopno < recog_data.n_operands;
2685 eopno++)
2686 /* Ignore earlyclobber operands now in memory,
2687 because we would often report failure when we have
2688 two memory operands, one of which was formerly a REG. */
2689 if (earlyclobber[eopno]
2690 && REG_P (recog_data.operand[eopno]))
2691 for (opno = 0; opno < recog_data.n_operands; opno++)
2692 if ((MEM_P (recog_data.operand[opno])
2693 || recog_data.operand_type[opno] != OP_OUT)
2694 && opno != eopno
2695 /* Ignore things like match_operator operands. */
2696 && *recog_data.constraints[opno] != 0
2697 && ! (matching_operands[opno] == eopno
2698 && operands_match_p (recog_data.operand[opno],
2699 recog_data.operand[eopno]))
2700 && ! safe_from_earlyclobber (recog_data.operand[opno],
2701 recog_data.operand[eopno]))
2702 lose = 1;
2703
2704 if (! lose)
2705 {
2706 while (--funny_match_index >= 0)
2707 {
2708 recog_data.operand[funny_match[funny_match_index].other]
2709 = recog_data.operand[funny_match[funny_match_index].this_op];
2710 }
2711
2712 #ifdef AUTO_INC_DEC
2713 /* For operands without < or > constraints reject side-effects. */
2714 if (recog_data.is_asm)
2715 {
2716 for (opno = 0; opno < recog_data.n_operands; opno++)
2717 if (MEM_P (recog_data.operand[opno]))
2718 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2719 {
2720 case PRE_INC:
2721 case POST_INC:
2722 case PRE_DEC:
2723 case POST_DEC:
2724 case PRE_MODIFY:
2725 case POST_MODIFY:
2726 if (strchr (recog_data.constraints[opno], '<') == NULL
2727 && strchr (recog_data.constraints[opno], '>')
2728 == NULL)
2729 return 0;
2730 break;
2731 default:
2732 break;
2733 }
2734 }
2735 #endif
2736 return 1;
2737 }
2738 }
2739
2740 which_alternative++;
2741 }
2742 while (which_alternative < recog_data.n_alternatives);
2743
2744 which_alternative = -1;
2745 /* If we are about to reject this, but we are not to test strictly,
2746 try a very loose test. Only return failure if it fails also. */
2747 if (strict == 0)
2748 return constrain_operands (-1);
2749 else
2750 return 0;
2751 }
2752
2753 /* Return true iff OPERAND (assumed to be a REG rtx)
2754 is a hard reg in class CLASS when its regno is offset by OFFSET
2755 and changed to mode MODE.
2756 If REG occupies multiple hard regs, all of them must be in CLASS. */
2757
2758 bool
2759 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2760 enum machine_mode mode)
2761 {
2762 int regno = REGNO (operand);
2763
2764 if (cl == NO_REGS)
2765 return false;
2766
2767 return (HARD_REGISTER_NUM_P (regno)
2768 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2769 mode, regno + offset));
2770 }
2771 \f
2772 /* Split single instruction. Helper function for split_all_insns and
2773 split_all_insns_noflow. Return last insn in the sequence if successful,
2774 or NULL if unsuccessful. */
2775
2776 static rtx
2777 split_insn (rtx insn)
2778 {
2779 /* Split insns here to get max fine-grain parallelism. */
2780 rtx first = PREV_INSN (insn);
2781 rtx last = try_split (PATTERN (insn), insn, 1);
2782 rtx insn_set, last_set, note;
2783
2784 if (last == insn)
2785 return NULL_RTX;
2786
2787 /* If the original instruction was a single set that was known to be
2788 equivalent to a constant, see if we can say the same about the last
2789 instruction in the split sequence. The two instructions must set
2790 the same destination. */
2791 insn_set = single_set (insn);
2792 if (insn_set)
2793 {
2794 last_set = single_set (last);
2795 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2796 {
2797 note = find_reg_equal_equiv_note (insn);
2798 if (note && CONSTANT_P (XEXP (note, 0)))
2799 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2800 else if (CONSTANT_P (SET_SRC (insn_set)))
2801 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2802 }
2803 }
2804
2805 /* try_split returns the NOTE that INSN became. */
2806 SET_INSN_DELETED (insn);
2807
2808 /* ??? Coddle to md files that generate subregs in post-reload
2809 splitters instead of computing the proper hard register. */
2810 if (reload_completed && first != last)
2811 {
2812 first = NEXT_INSN (first);
2813 for (;;)
2814 {
2815 if (INSN_P (first))
2816 cleanup_subreg_operands (first);
2817 if (first == last)
2818 break;
2819 first = NEXT_INSN (first);
2820 }
2821 }
2822
2823 return last;
2824 }
2825
2826 /* Split all insns in the function. */
2827
2828 void
2829 split_all_insns (void)
2830 {
2831 sbitmap blocks;
2832 bool changed;
2833 basic_block bb;
2834
2835 blocks = sbitmap_alloc (last_basic_block);
2836 sbitmap_zero (blocks);
2837 changed = false;
2838
2839 FOR_EACH_BB_REVERSE (bb)
2840 {
2841 rtx insn, next;
2842 bool finish = false;
2843
2844 rtl_profile_for_bb (bb);
2845 for (insn = BB_HEAD (bb); !finish ; insn = next)
2846 {
2847 /* Can't use `next_real_insn' because that might step across
2848 CODE_LABELs and thus skip over basic blocks. */
2849 next = NEXT_INSN (insn);
2850 finish = (insn == BB_END (bb));
2851 if (INSN_P (insn))
2852 {
2853 rtx set = single_set (insn);
2854
2855 /* Don't split no-op move insns. These should silently
2856 disappear later in final. Splitting such insns would
2857 break the code that handles LIBCALL blocks. */
2858 if (set && set_noop_p (set))
2859 {
2860 /* Nops get in the way while scheduling, so delete them
2861 now if register allocation has already been done. It
2862 is too risky to try to do this before register
2863 allocation, and there are unlikely to be very many
2864 nops then anyway. */
2865 if (reload_completed)
2866 delete_insn_and_edges (insn);
2867 }
2868 else
2869 {
2870 if (split_insn (insn))
2871 {
2872 SET_BIT (blocks, bb->index);
2873 changed = true;
2874 }
2875 }
2876 }
2877 }
2878 }
2879
2880 default_rtl_profile ();
2881 if (changed)
2882 find_many_sub_basic_blocks (blocks);
2883
2884 #ifdef ENABLE_CHECKING
2885 verify_flow_info ();
2886 #endif
2887
2888 sbitmap_free (blocks);
2889 }
2890
2891 /* Same as split_all_insns, but do not expect CFG to be available.
2892 Used by machine dependent reorg passes. */
2893
2894 unsigned int
2895 split_all_insns_noflow (void)
2896 {
2897 rtx next, insn;
2898
2899 for (insn = get_insns (); insn; insn = next)
2900 {
2901 next = NEXT_INSN (insn);
2902 if (INSN_P (insn))
2903 {
2904 /* Don't split no-op move insns. These should silently
2905 disappear later in final. Splitting such insns would
2906 break the code that handles LIBCALL blocks. */
2907 rtx set = single_set (insn);
2908 if (set && set_noop_p (set))
2909 {
2910 /* Nops get in the way while scheduling, so delete them
2911 now if register allocation has already been done. It
2912 is too risky to try to do this before register
2913 allocation, and there are unlikely to be very many
2914 nops then anyway.
2915
2916 ??? Should we use delete_insn when the CFG isn't valid? */
2917 if (reload_completed)
2918 delete_insn_and_edges (insn);
2919 }
2920 else
2921 split_insn (insn);
2922 }
2923 }
2924 return 0;
2925 }
2926 \f
2927 #ifdef HAVE_peephole2
2928 struct peep2_insn_data
2929 {
2930 rtx insn;
2931 regset live_before;
2932 };
2933
2934 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2935 static int peep2_current;
2936
2937 static bool peep2_do_rebuild_jump_labels;
2938 static bool peep2_do_cleanup_cfg;
2939
2940 /* The number of instructions available to match a peep2. */
2941 int peep2_current_count;
2942
2943 /* A non-insn marker indicating the last insn of the block.
2944 The live_before regset for this element is correct, indicating
2945 DF_LIVE_OUT for the block. */
2946 #define PEEP2_EOB pc_rtx
2947
2948 /* Wrap N to fit into the peep2_insn_data buffer. */
2949
2950 static int
2951 peep2_buf_position (int n)
2952 {
2953 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2954 n -= MAX_INSNS_PER_PEEP2 + 1;
2955 return n;
2956 }
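/* For example (illustrative, not part of the original source): with
   peep2_current == MAX_INSNS_PER_PEEP2, peep2_buf_position
   (peep2_current + 2) wraps around to 1, the second slot of the
   circular buffer.  */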
2957
2958 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2959 does not exist. Used by the recognizer to find the next insn to match
2960 in a multi-insn pattern. */
2961
2962 rtx
2963 peep2_next_insn (int n)
2964 {
2965 gcc_assert (n <= peep2_current_count);
2966
2967 n = peep2_buf_position (peep2_current + n);
2968
2969 return peep2_insn_data[n].insn;
2970 }
2971
2972 /* Return true if REGNO is dead before the Nth non-note insn
2973 after `current'. */
2974
2975 int
2976 peep2_regno_dead_p (int ofs, int regno)
2977 {
2978 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2979
2980 ofs = peep2_buf_position (peep2_current + ofs);
2981
2982 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2983
2984 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2985 }
2986
2987 /* Similarly for a REG. */
2988
2989 int
2990 peep2_reg_dead_p (int ofs, rtx reg)
2991 {
2992 int regno, n;
2993
2994 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2995
2996 ofs = peep2_buf_position (peep2_current + ofs);
2997
2998 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2999
3000 regno = REGNO (reg);
3001 n = hard_regno_nregs[regno][GET_MODE (reg)];
3002 while (--n >= 0)
3003 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3004 return 0;
3005 return 1;
3006 }
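/* Editorial note: these two predicates are typically called from the C
   condition of a define_peephole2 in a machine description, e.g. something
   like "peep2_reg_dead_p (2, operands[0])" to require that operands[0] be
   dead before the insn at buffer offset 2 of the matched sequence.  */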
3007
3008 /* Try to find a hard register of mode MODE, matching the register class in
3009 CLASS_STR, which is not live at the start of any of the insns at buffer
3010 positions FROM through TO (offsets from the current peephole position,
3011 as for peep2_next_insn), and which is therefore free for use across
3012 that range of matched insns.
3013 Registers that already have bits set in REG_SET will not be considered.
3014
3015 If an appropriate register is available, it will be returned and the
3016 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3017 returned. */
3018
3019 rtx
3020 peep2_find_free_register (int from, int to, const char *class_str,
3021 enum machine_mode mode, HARD_REG_SET *reg_set)
3022 {
3023 static int search_ofs;
3024 enum reg_class cl;
3025 HARD_REG_SET live;
3026 int i;
3027
3028 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3029 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3030
3031 from = peep2_buf_position (peep2_current + from);
3032 to = peep2_buf_position (peep2_current + to);
3033
3034 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3035 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3036
3037 while (from != to)
3038 {
3039 HARD_REG_SET this_live;
3040
3041 from = peep2_buf_position (from + 1);
3042 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3043 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3044 IOR_HARD_REG_SET (live, this_live);
3045 }
3046
3047 cl = (class_str[0] == 'r' ? GENERAL_REGS
3048 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3049
3050 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3051 {
3052 int raw_regno, regno, success, j;
3053
3054 /* Distribute the free registers as much as possible. */
3055 raw_regno = search_ofs + i;
3056 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3057 raw_regno -= FIRST_PSEUDO_REGISTER;
3058 #ifdef REG_ALLOC_ORDER
3059 regno = reg_alloc_order[raw_regno];
3060 #else
3061 regno = raw_regno;
3062 #endif
3063
3064 /* Don't allocate fixed registers. */
3065 if (fixed_regs[regno])
3066 continue;
3067 /* Don't allocate global registers. */
3068 if (global_regs[regno])
3069 continue;
3070 /* Make sure the register is of the right class. */
3071 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3072 continue;
3073 /* And can support the mode we need. */
3074 if (! HARD_REGNO_MODE_OK (regno, mode))
3075 continue;
3076 /* And that we don't create an extra save/restore. */
3077 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3078 continue;
3079 if (! targetm.hard_regno_scratch_ok (regno))
3080 continue;
3081
3082 /* And we don't clobber traceback for noreturn functions. */
3083 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3084 && (! reload_completed || frame_pointer_needed))
3085 continue;
3086
3087 success = 1;
3088 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3089 {
3090 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3091 || TEST_HARD_REG_BIT (live, regno + j))
3092 {
3093 success = 0;
3094 break;
3095 }
3096 }
3097 if (success)
3098 {
3099 add_to_hard_reg_set (reg_set, mode, regno);
3100
3101 /* Start the next search with the next register. */
3102 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3103 raw_regno = 0;
3104 search_ofs = raw_regno;
3105
3106 return gen_rtx_REG (mode, regno);
3107 }
3108 }
3109
3110 search_ofs = 0;
3111 return NULL_RTX;
3112 }
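/* Illustrative sketch, not part of the original recog.c: how the preparation
   statement of a define_peephole2 (or the code generated for a match_scratch)
   might ask for a scratch register.  FAIL is the macro available in that
   generated context; the surrounding peephole is hypothetical.  */
#if 0
{
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  /* Request a general register that is free across the first two
     matched insns.  */
  scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
}
#endif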
3113
3114 /* Forget all currently tracked instructions; remember only the current
3115 LIVE regset. */
3116
3117 static void
3118 peep2_reinit_state (regset live)
3119 {
3120 int i;
3121
3122 /* Indicate that all slots except the last hold invalid data. */
3123 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3124 peep2_insn_data[i].insn = NULL_RTX;
3125 peep2_current_count = 0;
3126
3127 /* Indicate that the last slot contains live_after data. */
3128 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3129 peep2_current = MAX_INSNS_PER_PEEP2;
3130
3131 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3132 }
3133
3134 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3135 starting at INSN. Perform the replacement, removing the old insns and
3136 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3137 if the replacement is rejected. */
3138
3139 static rtx
3140 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3141 {
3142 int i;
3143 rtx last, note, before_try, x;
3144 rtx old_insn, new_insn;
3145 bool was_call = false;
3146
3147 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3148 match more than one insn, or to be split into more than one insn. */
3149 old_insn = peep2_insn_data[peep2_current].insn;
3150 if (RTX_FRAME_RELATED_P (old_insn))
3151 {
3152 bool any_note = false;
3153
3154 if (match_len != 0)
3155 return NULL;
3156
3157 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3158 may be in the stream for the purpose of register allocation. */
3159 if (active_insn_p (attempt))
3160 new_insn = attempt;
3161 else
3162 new_insn = next_active_insn (attempt);
3163 if (next_active_insn (new_insn))
3164 return NULL;
3165
3166 /* We have a 1-1 replacement. Copy over any frame-related info. */
3167 RTX_FRAME_RELATED_P (new_insn) = 1;
3168
3169 /* Allow the backend to fill in a note during the split. */
3170 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3171 switch (REG_NOTE_KIND (note))
3172 {
3173 case REG_FRAME_RELATED_EXPR:
3174 case REG_CFA_DEF_CFA:
3175 case REG_CFA_ADJUST_CFA:
3176 case REG_CFA_OFFSET:
3177 case REG_CFA_REGISTER:
3178 case REG_CFA_EXPRESSION:
3179 case REG_CFA_RESTORE:
3180 case REG_CFA_SET_VDRAP:
3181 any_note = true;
3182 break;
3183 default:
3184 break;
3185 }
3186
3187 /* If the backend didn't supply a note, copy one over. */
3188 if (!any_note)
3189 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3190 switch (REG_NOTE_KIND (note))
3191 {
3192 case REG_FRAME_RELATED_EXPR:
3193 case REG_CFA_DEF_CFA:
3194 case REG_CFA_ADJUST_CFA:
3195 case REG_CFA_OFFSET:
3196 case REG_CFA_REGISTER:
3197 case REG_CFA_EXPRESSION:
3198 case REG_CFA_RESTORE:
3199 case REG_CFA_SET_VDRAP:
3200 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3201 any_note = true;
3202 break;
3203 default:
3204 break;
3205 }
3206
3207 /* If there still isn't a note, make sure the unwind info sees the
3208 same expression as before the split. */
3209 if (!any_note)
3210 {
3211 rtx old_set, new_set;
3212
3213 /* The old insn had better have been simple, or annotated. */
3214 old_set = single_set (old_insn);
3215 gcc_assert (old_set != NULL);
3216
3217 new_set = single_set (new_insn);
3218 if (!new_set || !rtx_equal_p (new_set, old_set))
3219 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3220 }
3221
3222 /* Copy prologue/epilogue status. This is required in order to keep
3223 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3224 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3225 }
3226
3227 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3228 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3229 cfg-related call notes. */
3230 for (i = 0; i <= match_len; ++i)
3231 {
3232 int j;
3233
3234 j = peep2_buf_position (peep2_current + i);
3235 old_insn = peep2_insn_data[j].insn;
3236 if (!CALL_P (old_insn))
3237 continue;
3238 was_call = true;
3239
3240 new_insn = attempt;
3241 while (new_insn != NULL_RTX)
3242 {
3243 if (CALL_P (new_insn))
3244 break;
3245 new_insn = NEXT_INSN (new_insn);
3246 }
3247
3248 gcc_assert (new_insn != NULL_RTX);
3249
3250 CALL_INSN_FUNCTION_USAGE (new_insn)
3251 = CALL_INSN_FUNCTION_USAGE (old_insn);
3252
3253 for (note = REG_NOTES (old_insn);
3254 note;
3255 note = XEXP (note, 1))
3256 switch (REG_NOTE_KIND (note))
3257 {
3258 case REG_NORETURN:
3259 case REG_SETJMP:
3260 add_reg_note (new_insn, REG_NOTE_KIND (note),
3261 XEXP (note, 0));
3262 break;
3263 default:
3264 /* Discard all other reg notes. */
3265 break;
3266 }
3267
3268 /* Croak if there is another call in the sequence. */
3269 while (++i <= match_len)
3270 {
3271 j = peep2_buf_position (peep2_current + i);
3272 old_insn = peep2_insn_data[j].insn;
3273 gcc_assert (!CALL_P (old_insn));
3274 }
3275 break;
3276 }
3277
3278 i = peep2_buf_position (peep2_current + match_len);
3279
3280 note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3281
3282 /* Replace the old sequence with the new. */
3283 last = emit_insn_after_setloc (attempt,
3284 peep2_insn_data[i].insn,
3285 INSN_LOCATOR (peep2_insn_data[i].insn));
3286 before_try = PREV_INSN (insn);
3287 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3288
3289 /* Re-insert the EH_REGION notes. */
3290 if (note || (was_call && nonlocal_goto_handler_labels))
3291 {
3292 edge eh_edge;
3293 edge_iterator ei;
3294
3295 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3296 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3297 break;
3298
3299 if (note)
3300 copy_reg_eh_region_note_backward (note, last, before_try);
3301
3302 if (eh_edge)
3303 for (x = last; x != before_try; x = PREV_INSN (x))
3304 if (x != BB_END (bb)
3305 && (can_throw_internal (x)
3306 || can_nonlocal_goto (x)))
3307 {
3308 edge nfte, nehe;
3309 int flags;
3310
3311 nfte = split_block (bb, x);
3312 flags = (eh_edge->flags
3313 & (EDGE_EH | EDGE_ABNORMAL));
3314 if (CALL_P (x))
3315 flags |= EDGE_ABNORMAL_CALL;
3316 nehe = make_edge (nfte->src, eh_edge->dest,
3317 flags);
3318
3319 nehe->probability = eh_edge->probability;
3320 nfte->probability
3321 = REG_BR_PROB_BASE - nehe->probability;
3322
3323 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3324 bb = nfte->src;
3325 eh_edge = nehe;
3326 }
3327
3328 /* The new sequence may have turned a possibly trapping insn into one
3329 that cannot throw. Zap any now-dead dummy outgoing edges. */
3330 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3331 }
3332
3333 /* If we generated a jump instruction, it won't have
3334 JUMP_LABEL set. Recompute after we're done. */
3335 for (x = last; x != before_try; x = PREV_INSN (x))
3336 if (JUMP_P (x))
3337 {
3338 peep2_do_rebuild_jump_labels = true;
3339 break;
3340 }
3341
3342 return last;
3343 }
3344
3345 /* After performing a replacement in basic block BB, fix up the life
3346 information in our buffer. LAST is the last of the insns that we
3347 emitted as a replacement. PREV is the insn before the start of
3348 the replacement. MATCH_LEN is the number of instructions that were
3349 matched, and which now need to be replaced in the buffer. */
3350
3351 static void
3352 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3353 {
3354 int i = peep2_buf_position (peep2_current + match_len + 1);
3355 rtx x;
3356 regset_head live;
3357
3358 INIT_REG_SET (&live);
3359 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3360
3361 gcc_assert (peep2_current_count >= match_len + 1);
3362 peep2_current_count -= match_len + 1;
3363
3364 x = last;
3365 do
3366 {
3367 if (INSN_P (x))
3368 {
3369 df_insn_rescan (x);
3370 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3371 {
3372 peep2_current_count++;
3373 if (--i < 0)
3374 i = MAX_INSNS_PER_PEEP2;
3375 peep2_insn_data[i].insn = x;
3376 df_simulate_one_insn_backwards (bb, x, &live);
3377 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3378 }
3379 }
3380 x = PREV_INSN (x);
3381 }
3382 while (x != prev);
3383 CLEAR_REG_SET (&live);
3384
3385 peep2_current = i;
3386 }
3387
3388 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3389 Return true if we added it, false otherwise. The caller will try to match
3390 peepholes against the buffer if we return false; otherwise it will try to
3391 add more instructions to the buffer. */
3392
3393 static bool
3394 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3395 {
3396 int pos;
3397
3398 /* Once we have filled the maximum number of insns the buffer can hold,
3399 allow the caller to match the insns against peepholes. We wait until
3400 the buffer is full in case the target has similar peepholes of different
3401 length; we always want to match the longest if possible. */
3402 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3403 return false;
3404
3405 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3406 any other pattern, lest it change the semantics of the frame info. */
3407 if (RTX_FRAME_RELATED_P (insn))
3408 {
3409 /* Let the buffer drain first. */
3410 if (peep2_current_count > 0)
3411 return false;
3412 /* Now the insn will be the only thing in the buffer. */
3413 }
3414
3415 pos = peep2_buf_position (peep2_current + peep2_current_count);
3416 peep2_insn_data[pos].insn = insn;
3417 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3418 peep2_current_count++;
3419
3420 df_simulate_one_insn_forwards (bb, insn, live);
3421 return true;
3422 }
3423
3424 /* Perform the peephole2 optimization pass. */
3425
3426 static void
3427 peephole2_optimize (void)
3428 {
3429 rtx insn;
3430 bitmap live;
3431 int i;
3432 basic_block bb;
3433
3434 peep2_do_cleanup_cfg = false;
3435 peep2_do_rebuild_jump_labels = false;
3436
3437 df_set_flags (DF_LR_RUN_DCE);
3438 df_note_add_problem ();
3439 df_analyze ();
3440
3441 /* Initialize the regsets we're going to use. */
3442 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3443 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3444 live = BITMAP_ALLOC (&reg_obstack);
3445
3446 FOR_EACH_BB_REVERSE (bb)
3447 {
3448 bool past_end = false;
3449 int pos;
3450
3451 rtl_profile_for_bb (bb);
3452
3453 /* Start up propagation. */
3454 bitmap_copy (live, DF_LR_IN (bb));
3455 df_simulate_initialize_forwards (bb, live);
3456 peep2_reinit_state (live);
3457
3458 insn = BB_HEAD (bb);
3459 for (;;)
3460 {
3461 rtx attempt, head;
3462 int match_len;
3463
3464 if (!past_end && !NONDEBUG_INSN_P (insn))
3465 {
3466 next_insn:
3467 insn = NEXT_INSN (insn);
3468 if (insn == NEXT_INSN (BB_END (bb)))
3469 past_end = true;
3470 continue;
3471 }
3472 if (!past_end && peep2_fill_buffer (bb, insn, live))
3473 goto next_insn;
3474
3475 /* If the buffer is empty and there was nothing left to add to it,
3476 we have reached the end of the block. */
3477 if (peep2_current_count == 0)
3478 break;
3479
3480 /* The buffer filled to the current maximum, so try to match. */
3481
3482 pos = peep2_buf_position (peep2_current + peep2_current_count);
3483 peep2_insn_data[pos].insn = PEEP2_EOB;
3484 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3485
3486 /* Match the peephole. */
3487 head = peep2_insn_data[peep2_current].insn;
3488 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3489 if (attempt != NULL)
3490 {
3491 rtx last = peep2_attempt (bb, head, match_len, attempt);
3492 if (last)
3493 {
3494 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3495 continue;
3496 }
3497 }
3498
3499 /* No match: advance the buffer by one insn. */
3500 peep2_current = peep2_buf_position (peep2_current + 1);
3501 peep2_current_count--;
3502 }
3503 }
3504
3505 default_rtl_profile ();
3506 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3507 BITMAP_FREE (peep2_insn_data[i].live_before);
3508 BITMAP_FREE (live);
3509 if (peep2_do_rebuild_jump_labels)
3510 rebuild_jump_labels (get_insns ());
3511 }
3512 #endif /* HAVE_peephole2 */
3513
3514 /* Common predicates for use with define_bypass. */
3515
3516 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3517 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3518 must be either a single_set or a PARALLEL with SETs inside. */
3519
3520 int
3521 store_data_bypass_p (rtx out_insn, rtx in_insn)
3522 {
3523 rtx out_set, in_set;
3524 rtx out_pat, in_pat;
3525 rtx out_exp, in_exp;
3526 int i, j;
3527
3528 in_set = single_set (in_insn);
3529 if (in_set)
3530 {
3531 if (!MEM_P (SET_DEST (in_set)))
3532 return false;
3533
3534 out_set = single_set (out_insn);
3535 if (out_set)
3536 {
3537 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3538 return false;
3539 }
3540 else
3541 {
3542 out_pat = PATTERN (out_insn);
3543
3544 if (GET_CODE (out_pat) != PARALLEL)
3545 return false;
3546
3547 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3548 {
3549 out_exp = XVECEXP (out_pat, 0, i);
3550
3551 if (GET_CODE (out_exp) == CLOBBER)
3552 continue;
3553
3554 gcc_assert (GET_CODE (out_exp) == SET);
3555
3556 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3557 return false;
3558 }
3559 }
3560 }
3561 else
3562 {
3563 in_pat = PATTERN (in_insn);
3564 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3565
3566 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3567 {
3568 in_exp = XVECEXP (in_pat, 0, i);
3569
3570 if (GET_CODE (in_exp) == CLOBBER)
3571 continue;
3572
3573 gcc_assert (GET_CODE (in_exp) == SET);
3574
3575 if (!MEM_P (SET_DEST (in_exp)))
3576 return false;
3577
3578 out_set = single_set (out_insn);
3579 if (out_set)
3580 {
3581 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3582 return false;
3583 }
3584 else
3585 {
3586 out_pat = PATTERN (out_insn);
3587 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3588
3589 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3590 {
3591 out_exp = XVECEXP (out_pat, 0, j);
3592
3593 if (GET_CODE (out_exp) == CLOBBER)
3594 continue;
3595
3596 gcc_assert (GET_CODE (out_exp) == SET);
3597
3598 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3599 return false;
3600 }
3601 }
3602 }
3603 }
3604
3605 return true;
3606 }
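/* Editorial note: this predicate (and if_test_bypass_p below) is meant to be
   named as the optional guard of a define_bypass in a machine description,
   e.g. something like
     (define_bypass 1 "producer_insn" "store_insn" "store_data_bypass_p")
   where the insn reservation names are hypothetical; the bypass latency then
   applies only when the dependence is on the stored data rather than on the
   address.  */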
3607
3608 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3609 condition, and not in the THEN or ELSE branch. OUT_INSN may be a single
3610 set or a PARALLEL of sets; IN_INSN should be a single_set for a meaningful
3611 answer, but for insn categorization it may be any JUMP or CALL insn. */
3612
3613 int
3614 if_test_bypass_p (rtx out_insn, rtx in_insn)
3615 {
3616 rtx out_set, in_set;
3617
3618 in_set = single_set (in_insn);
3619 if (! in_set)
3620 {
3621 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3622 return false;
3623 }
3624
3625 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3626 return false;
3627 in_set = SET_SRC (in_set);
3628
3629 out_set = single_set (out_insn);
3630 if (out_set)
3631 {
3632 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3633 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3634 return false;
3635 }
3636 else
3637 {
3638 rtx out_pat;
3639 int i;
3640
3641 out_pat = PATTERN (out_insn);
3642 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3643
3644 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3645 {
3646 rtx exp = XVECEXP (out_pat, 0, i);
3647
3648 if (GET_CODE (exp) == CLOBBER)
3649 continue;
3650
3651 gcc_assert (GET_CODE (exp) == SET);
3652
3653 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3654 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3655 return false;
3656 }
3657 }
3658
3659 return true;
3660 }
3661 \f
3662 static bool
3663 gate_handle_peephole2 (void)
3664 {
3665 return (optimize > 0 && flag_peephole2);
3666 }
3667
3668 static unsigned int
3669 rest_of_handle_peephole2 (void)
3670 {
3671 #ifdef HAVE_peephole2
3672 peephole2_optimize ();
3673 #endif
3674 return 0;
3675 }
3676
3677 struct rtl_opt_pass pass_peephole2 =
3678 {
3679 {
3680 RTL_PASS,
3681 "peephole2", /* name */
3682 gate_handle_peephole2, /* gate */
3683 rest_of_handle_peephole2, /* execute */
3684 NULL, /* sub */
3685 NULL, /* next */
3686 0, /* static_pass_number */
3687 TV_PEEPHOLE2, /* tv_id */
3688 0, /* properties_required */
3689 0, /* properties_provided */
3690 0, /* properties_destroyed */
3691 0, /* todo_flags_start */
3692 TODO_df_finish | TODO_verify_rtl_sharing |
3693 TODO_dump_func /* todo_flags_finish */
3694 }
3695 };
3696
3697 static unsigned int
3698 rest_of_handle_split_all_insns (void)
3699 {
3700 split_all_insns ();
3701 return 0;
3702 }
3703
3704 struct rtl_opt_pass pass_split_all_insns =
3705 {
3706 {
3707 RTL_PASS,
3708 "split1", /* name */
3709 NULL, /* gate */
3710 rest_of_handle_split_all_insns, /* execute */
3711 NULL, /* sub */
3712 NULL, /* next */
3713 0, /* static_pass_number */
3714 TV_NONE, /* tv_id */
3715 0, /* properties_required */
3716 0, /* properties_provided */
3717 0, /* properties_destroyed */
3718 0, /* todo_flags_start */
3719 TODO_dump_func /* todo_flags_finish */
3720 }
3721 };
3722
3723 static unsigned int
3724 rest_of_handle_split_after_reload (void)
3725 {
3726 /* If optimizing, then go ahead and split insns now. */
3727 #ifndef STACK_REGS
3728 if (optimize > 0)
3729 #endif
3730 split_all_insns ();
3731 return 0;
3732 }
3733
3734 struct rtl_opt_pass pass_split_after_reload =
3735 {
3736 {
3737 RTL_PASS,
3738 "split2", /* name */
3739 NULL, /* gate */
3740 rest_of_handle_split_after_reload, /* execute */
3741 NULL, /* sub */
3742 NULL, /* next */
3743 0, /* static_pass_number */
3744 TV_NONE, /* tv_id */
3745 0, /* properties_required */
3746 0, /* properties_provided */
3747 0, /* properties_destroyed */
3748 0, /* todo_flags_start */
3749 TODO_dump_func /* todo_flags_finish */
3750 }
3751 };
3752
3753 static bool
3754 gate_handle_split_before_regstack (void)
3755 {
3756 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3757 /* If flow2 creates new instructions which need splitting
3758 and scheduling after reload is not done, they might not be
3759 split until final which doesn't allow splitting
3760 if HAVE_ATTR_length. */
3761 # ifdef INSN_SCHEDULING
3762 return (optimize && !flag_schedule_insns_after_reload);
3763 # else
3764 return (optimize);
3765 # endif
3766 #else
3767 return 0;
3768 #endif
3769 }
3770
3771 static unsigned int
3772 rest_of_handle_split_before_regstack (void)
3773 {
3774 split_all_insns ();
3775 return 0;
3776 }
3777
3778 struct rtl_opt_pass pass_split_before_regstack =
3779 {
3780 {
3781 RTL_PASS,
3782 "split3", /* name */
3783 gate_handle_split_before_regstack, /* gate */
3784 rest_of_handle_split_before_regstack, /* execute */
3785 NULL, /* sub */
3786 NULL, /* next */
3787 0, /* static_pass_number */
3788 TV_NONE, /* tv_id */
3789 0, /* properties_required */
3790 0, /* properties_provided */
3791 0, /* properties_destroyed */
3792 0, /* todo_flags_start */
3793 TODO_dump_func /* todo_flags_finish */
3794 }
3795 };
3796
3797 static bool
3798 gate_handle_split_before_sched2 (void)
3799 {
3800 #ifdef INSN_SCHEDULING
3801 return optimize > 0 && flag_schedule_insns_after_reload;
3802 #else
3803 return 0;
3804 #endif
3805 }
3806
3807 static unsigned int
3808 rest_of_handle_split_before_sched2 (void)
3809 {
3810 #ifdef INSN_SCHEDULING
3811 split_all_insns ();
3812 #endif
3813 return 0;
3814 }
3815
3816 struct rtl_opt_pass pass_split_before_sched2 =
3817 {
3818 {
3819 RTL_PASS,
3820 "split4", /* name */
3821 gate_handle_split_before_sched2, /* gate */
3822 rest_of_handle_split_before_sched2, /* execute */
3823 NULL, /* sub */
3824 NULL, /* next */
3825 0, /* static_pass_number */
3826 TV_NONE, /* tv_id */
3827 0, /* properties_required */
3828 0, /* properties_provided */
3829 0, /* properties_destroyed */
3830 0, /* todo_flags_start */
3831 TODO_verify_flow |
3832 TODO_dump_func /* todo_flags_finish */
3833 }
3834 };
3835
3836 /* The placement of the splitting that we do for shorten_branches
3837 depends on whether regstack is used by the target or not. */
3838 static bool
3839 gate_do_final_split (void)
3840 {
3841 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3842 return 1;
3843 #else
3844 return 0;
3845 #endif
3846 }
3847
3848 struct rtl_opt_pass pass_split_for_shorten_branches =
3849 {
3850 {
3851 RTL_PASS,
3852 "split5", /* name */
3853 gate_do_final_split, /* gate */
3854 split_all_insns_noflow, /* execute */
3855 NULL, /* sub */
3856 NULL, /* next */
3857 0, /* static_pass_number */
3858 TV_NONE, /* tv_id */
3859 0, /* properties_required */
3860 0, /* properties_provided */
3861 0, /* properties_destroyed */
3862 0, /* todo_flags_start */
3863 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
3864 }
3865 };