/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}

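/* Illustrative sketch (added for exposition; not part of the original
   file): the usual shape of a caller of this machinery.  Edits are queued
   with IN_GROUP nonzero, then committed or rolled back atomically by
   apply_change_group.  INSN and NEW_SRC are hypothetical placeholders;
   INSN is assumed to be a single SET.  */
#if 0
static bool
try_replace_src_example (rtx insn, rtx new_src)
{
  /* Queue the edit; INSN_CODE is reset so recog will run again.  */
  validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);

  /* Validate the whole group: on success the edit is kept, on failure
     it is backed out and INSN is untouched.  */
  return apply_change_group ();
}
#endif
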

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

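/* Illustrative sketch (added for exposition; not part of the original
   file): propagating a known constant into an insn with
   validate_replace_rtx.  REG and INSN are hypothetical placeholders; the
   call either rewrites every occurrence of REG in INSN and re-recognizes
   it, or cancels all edits and leaves INSN untouched.  */
#if 0
static bool
propagate_constant_example (rtx reg, rtx insn)
{
  /* Replace all uses of REG by the constant 4; apply_change_group inside
     validate_replace_rtx backs everything out on failure.  */
  return validate_replace_rtx (reg, GEN_INT (4), insn);
}
#endif
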
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}


/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use a subreg to store a floating point value in an
             integer mode.  Although the floating point and the integer
             modes need the same number of hard registers, the size of
             the floating point mode can be less than that of the
             integer mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
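
/* Illustrative sketch (added for exposition; not part of the original
   file): how expanders typically use these predicates.  force_reg is the
   real helper from explow.c; OP and the SImode choice are hypothetical.
   If OP wouldn't match a match_operand using general_operand, copy it
   into a pseudo register first.  */
#if 0
static rtx
legitimize_operand_example (rtx op)
{
  if (!general_operand (op, SImode))
    op = force_reg (SImode, op);
  return op;
}
#endif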
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use a subreg to store a floating point value in an
             integer mode.  Although the floating point and the integer
             modes need the same number of hard registers, the size of
             the floating point mode can be less than that of the
             integer mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
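
/* For example (added for exposition), with STACK_GROWS_DOWNWARD and no
   PUSH_ROUNDING slop, a hypothetical SImode push on a 32-bit target would
   match (mem:SI (pre_dec:SI (reg:SI sp))), while a push that must skip
   padding bytes would match
   (mem:SI (pre_modify:SI (reg:SI sp)
                          (plus:SI (reg:SI sp) (const_int -8)))).  */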

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;        /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
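
/* Illustrative sketch (added for exposition; not part of the original
   file): walking the operands of an asm insn.  INSN is a hypothetical
   placeholder assumed to satisfy asm_noperands (PATTERN (insn)) >= 0.  */
#if 0
static void
dump_asm_operands_example (rtx insn)
{
  int nop = asm_noperands (PATTERN (insn));
  rtx *ops = XALLOCAVEC (rtx, nop);
  const char **cons = XALLOCAVEC (const char *, nop);
  const char *templ
    = decode_asm_operands (PATTERN (insn), ops, NULL, cons, NULL, NULL);
  int i;

  fprintf (stderr, "template: %s\n", templ);
  for (i = 0; i < nop; i++)
    fprintf (stderr, "operand %d has constraint \"%s\"\n", i, cons[i]);
}
#endif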

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If the caller provided a constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints, reject side effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
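
/* Illustrative sketch (added for exposition; not part of the original
   file): a hypothetical pre-reload query asking whether OP could satisfy
   the "r" constraint of an asm operand.  */
#if 0
static bool
op_fits_reg_constraint_example (rtx op)
{
  return asm_operand_ok (op, "r", NULL) > 0;
}
#endif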
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
\f
1893 /* Return 1 if OP is a memory reference
1894 whose address contains no side effects
1895 and remains valid after the addition
1896 of a positive integer less than the
1897 size of the object being referenced.
1898
1899 We assume that the original address is valid and do not check it.
1900
1901 This uses strict_memory_address_p as a subroutine, so
1902 don't use it before reload. */
1903
1904 int
1905 offsettable_memref_p (rtx op)
1906 {
1907 return ((MEM_P (op))
1908 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1909 MEM_ADDR_SPACE (op)));
1910 }
1911
1912 /* Similar, but don't require a strictly valid mem ref:
1913 consider pseudo-regs valid as index or base regs. */
1914
1915 int
1916 offsettable_nonstrict_memref_p (rtx op)
1917 {
1918 return ((MEM_P (op))
1919 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1920 MEM_ADDR_SPACE (op)));
1921 }
1922
1923 /* Return 1 if Y is a memory address which contains no side effects
1924 and would remain valid for address space AS after the addition of
1925 a positive integer less than the size of that mode.
1926
1927 We assume that the original address is valid and do not check it.
1928 We do check that it is valid for narrower modes.
1929
1930 If STRICTP is nonzero, we require a strictly valid address,
1931 for the sake of use in reload.c. */
1932
1933 int
1934 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1935 addr_space_t as)
1936 {
1937 enum rtx_code ycode = GET_CODE (y);
1938 rtx z;
1939 rtx y1 = y;
1940 rtx *y2;
1941 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1942 (strictp ? strict_memory_address_addr_space_p
1943 : memory_address_addr_space_p);
1944 unsigned int mode_sz = GET_MODE_SIZE (mode);
1945 #ifdef POINTERS_EXTEND_UNSIGNED
1946 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1947 #endif
1948
1949 if (CONSTANT_ADDRESS_P (y))
1950 return 1;
1951
1952 /* Adjusting an offsettable address involves changing to a narrower mode.
1953 Make sure that's OK. */
1954
1955 if (mode_dependent_address_p (y, as))
1956 return 0;
1957
1958 /* ??? How much offset does an offsettable BLKmode reference need?
1959 Clearly that depends on the situation in which it's being used.
1960 However, the current situation in which we test 0xffffffff is
1961 less than ideal. Caveat user. */
1962 if (mode_sz == 0)
1963 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1964
1965 /* If the expression contains a constant term,
1966 see if it remains valid when max possible offset is added. */
1967
1968 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1969 {
1970 int good;
1971
1972 y1 = *y2;
1973 *y2 = plus_constant (GET_MODE (y), *y2, mode_sz - 1);
1974 /* Use QImode because an odd displacement may be automatically invalid
1975 for any wider mode. But it should be valid for a single byte. */
1976 good = (*addressp) (QImode, y, as);
1977
1978 /* In any case, restore old contents of memory. */
1979 *y2 = y1;
1980 return good;
1981 }
1982
1983 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1984 return 0;
1985
1986 /* The offset added here is chosen as the maximum offset that
1987 any instruction could need to add when operating on something
1988 of the specified mode. We assume that if Y and Y+c are
1989 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1990 go inside a LO_SUM here, so we do so as well. */
1991 if (GET_CODE (y) == LO_SUM
1992 && mode != BLKmode
1993 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1994 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1995 plus_constant (GET_MODE (y), XEXP (y, 1),
1996 mode_sz - 1));
1997 #ifdef POINTERS_EXTEND_UNSIGNED
1998 /* Likewise for a ZERO_EXTEND from pointer_mode. */
1999 else if (POINTERS_EXTEND_UNSIGNED > 0
2000 && GET_CODE (y) == ZERO_EXTEND
2001 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2002 z = gen_rtx_ZERO_EXTEND (GET_MODE (y),
2003 plus_constant (pointer_mode, XEXP (y, 0),
2004 mode_sz - 1));
2005 #endif
2006 else
2007 z = plus_constant (GET_MODE (y), y, mode_sz - 1);
2008
2009 /* Use QImode because an odd displacement may be automatically invalid
2010 for any wider mode. But it should be valid for a single byte. */
2011 return (*addressp) (QImode, z, as);
2012 }
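/* Concrete illustration (hypothetical helper, for exposition only): for a
   4-byte reference to (plus (reg) (const_int 100)), the code above swaps
   in const_int 103 and asks whether the result is still a valid QImode
   address; if so, every byte of the SImode access is reachable. */
#if 0
static bool
example_offsettable_p (rtx reg)
{
  rtx addr = gen_rtx_PLUS (Pmode, reg, GEN_INT (100));
  rtx mem = gen_rtx_MEM (SImode, addr);
  return offsettable_nonstrict_memref_p (mem) != 0;
}
#endif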
2013
2014 /* Return true if ADDR is an address-expression whose effect depends
2015 on the mode of the memory reference it is used in.
2016
2017 ADDRSPACE is the address space associated with the address.
2018
2019 Autoincrement addressing is a typical example of mode-dependence
2020 because the amount of the increment depends on the mode. */
2021
2022 bool
2023 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2024 {
2025 /* Auto-increment addressing with anything other than post_modify
2026 or pre_modify always introduces a mode dependency. Catch such
2027 cases now instead of deferring to the target. */
2028 if (GET_CODE (addr) == PRE_INC
2029 || GET_CODE (addr) == POST_INC
2030 || GET_CODE (addr) == PRE_DEC
2031 || GET_CODE (addr) == POST_DEC)
2032 return true;
2033
2034 return targetm.mode_dependent_address_p (addr, addrspace);
2035 }
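/* For instance (illustrative only): a POST_INC address advances the base
   register by the size of the access, so the same address rtx means
   different things in QImode and SImode and must be flagged here. */
#if 0
static void
example_mode_dependent (void)
{
  rtx base = gen_rtx_REG (Pmode, 1);	/* Hypothetical hard register.  */
  rtx addr = gen_rtx_POST_INC (Pmode, base);
  gcc_assert (mode_dependent_address_p (addr, ADDR_SPACE_GENERIC));
}
#endif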
2036 \f
2037 /* Like extract_insn, but save the insn extracted and don't extract it
2038 again when called again for the same insn, expecting that recog_data
2039 still contains the valid information. This is used primarily by the
2040 gen_attr infrastructure, which often extracts the same insn repeatedly. */
2041 void
2042 extract_insn_cached (rtx insn)
2043 {
2044 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2045 return;
2046 extract_insn (insn);
2047 recog_data.insn = insn;
2048 }
2049
2050 /* Do cached extract_insn, constrain_operands and complain about failures.
2051 Used by insn_attrtab. */
2052 void
2053 extract_constrain_insn_cached (rtx insn)
2054 {
2055 extract_insn_cached (insn);
2056 if (which_alternative == -1
2057 && !constrain_operands (reload_completed))
2058 fatal_insn_not_found (insn);
2059 }
2060
2061 /* Do cached constrain_operands and complain about failures. */
2062 int
2063 constrain_operands_cached (int strict)
2064 {
2065 if (which_alternative == -1)
2066 return constrain_operands (strict);
2067 else
2068 return 1;
2069 }
2070 \f
2071 /* Analyze INSN and fill in recog_data. */
2072
2073 void
2074 extract_insn (rtx insn)
2075 {
2076 int i;
2077 int icode;
2078 int noperands;
2079 rtx body = PATTERN (insn);
2080
2081 recog_data.n_operands = 0;
2082 recog_data.n_alternatives = 0;
2083 recog_data.n_dups = 0;
2084 recog_data.is_asm = false;
2085
2086 switch (GET_CODE (body))
2087 {
2088 case USE:
2089 case CLOBBER:
2090 case ASM_INPUT:
2091 case ADDR_VEC:
2092 case ADDR_DIFF_VEC:
2093 case VAR_LOCATION:
2094 return;
2095
2096 case SET:
2097 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2098 goto asm_insn;
2099 else
2100 goto normal_insn;
2101 case PARALLEL:
2102 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2103 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2104 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2105 goto asm_insn;
2106 else
2107 goto normal_insn;
2108 case ASM_OPERANDS:
2109 asm_insn:
2110 recog_data.n_operands = noperands = asm_noperands (body);
2111 if (noperands >= 0)
2112 {
2113 /* This insn is an `asm' with operands. */
2114
2115 /* expand_asm_operands makes sure there aren't too many operands. */
2116 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2117
2118 /* Now get the operand values and constraints out of the insn. */
2119 decode_asm_operands (body, recog_data.operand,
2120 recog_data.operand_loc,
2121 recog_data.constraints,
2122 recog_data.operand_mode, NULL);
2123 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2124 if (noperands > 0)
2125 {
2126 const char *p = recog_data.constraints[0];
2127 recog_data.n_alternatives = 1;
2128 while (*p)
2129 recog_data.n_alternatives += (*p++ == ',');
2130 }
2131 recog_data.is_asm = true;
2132 break;
2133 }
2134 fatal_insn_not_found (insn);
2135
2136 default:
2137 normal_insn:
2138 /* Ordinary insn: recognize it, get the operands via insn_extract
2139 and get the constraints. */
2140
2141 icode = recog_memoized (insn);
2142 if (icode < 0)
2143 fatal_insn_not_found (insn);
2144
2145 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2146 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2147 recog_data.n_dups = insn_data[icode].n_dups;
2148
2149 insn_extract (insn);
2150
2151 for (i = 0; i < noperands; i++)
2152 {
2153 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2154 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2155 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2156 /* A VOIDmode match_operand gets its mode from the real operand. */
2157 if (recog_data.operand_mode[i] == VOIDmode)
2158 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2159 }
2160 }
2161 for (i = 0; i < noperands; i++)
2162 recog_data.operand_type[i]
2163 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2164 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2165 : OP_IN);
2166
2167 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2168
2169 if (INSN_CODE (insn) < 0)
2170 for (i = 0; i < recog_data.n_alternatives; i++)
2171 recog_data.alternative_enabled_p[i] = true;
2172 else
2173 {
2174 recog_data.insn = insn;
2175 for (i = 0; i < recog_data.n_alternatives; i++)
2176 {
2177 which_alternative = i;
2178 recog_data.alternative_enabled_p[i]
2179 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2180 }
2181 }
2182
2183 recog_data.insn = NULL;
2184 which_alternative = -1;
2185 }
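/* Sketch of a typical consumer (not part of GCC proper): once extract_insn
   has filled in recog_data, a pass can walk the operands directly. */
#if 0
static void
dump_insn_operands (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (stderr, "operand %d: constraint \"%s\", %s\n", i,
	     recog_data.constraints[i],
	     recog_data.operand_type[i] == OP_IN ? "input"
	     : recog_data.operand_type[i] == OP_OUT ? "output" : "in/out");
}
#endif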
2186
2187 /* After calling extract_insn, you can use this function to extract some
2188 information from the constraint strings into a more usable form.
2189 The collected data is stored in recog_op_alt. */
2190 void
2191 preprocess_constraints (void)
2192 {
2193 int i;
2194
2195 for (i = 0; i < recog_data.n_operands; i++)
2196 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2197 * sizeof (struct operand_alternative)));
2198
2199 for (i = 0; i < recog_data.n_operands; i++)
2200 {
2201 int j;
2202 struct operand_alternative *op_alt;
2203 const char *p = recog_data.constraints[i];
2204
2205 op_alt = recog_op_alt[i];
2206
2207 for (j = 0; j < recog_data.n_alternatives; j++)
2208 {
2209 op_alt[j].cl = NO_REGS;
2210 op_alt[j].constraint = p;
2211 op_alt[j].matches = -1;
2212 op_alt[j].matched = -1;
2213
2214 if (!recog_data.alternative_enabled_p[j])
2215 {
2216 p = skip_alternative (p);
2217 continue;
2218 }
2219
2220 if (*p == '\0' || *p == ',')
2221 {
2222 op_alt[j].anything_ok = 1;
2223 continue;
2224 }
2225
2226 for (;;)
2227 {
2228 char c = *p;
2229 if (c == '#')
2230 do
2231 c = *++p;
2232 while (c != ',' && c != '\0');
2233 if (c == ',' || c == '\0')
2234 {
2235 p++;
2236 break;
2237 }
2238
2239 switch (c)
2240 {
2241 case '=': case '+': case '*': case '%':
2242 case 'E': case 'F': case 'G': case 'H':
2243 case 's': case 'i': case 'n':
2244 case 'I': case 'J': case 'K': case 'L':
2245 case 'M': case 'N': case 'O': case 'P':
2246 /* These don't say anything we care about. */
2247 break;
2248
2249 case '?':
2250 op_alt[j].reject += 6;
2251 break;
2252 case '!':
2253 op_alt[j].reject += 600;
2254 break;
2255 case '&':
2256 op_alt[j].earlyclobber = 1;
2257 break;
2258
2259 case '0': case '1': case '2': case '3': case '4':
2260 case '5': case '6': case '7': case '8': case '9':
2261 {
2262 char *end;
2263 op_alt[j].matches = strtoul (p, &end, 10);
2264 recog_op_alt[op_alt[j].matches][j].matched = i;
2265 p = end;
2266 }
2267 continue;
2268
2269 case TARGET_MEM_CONSTRAINT:
2270 op_alt[j].memory_ok = 1;
2271 break;
2272 case '<':
2273 op_alt[j].decmem_ok = 1;
2274 break;
2275 case '>':
2276 op_alt[j].incmem_ok = 1;
2277 break;
2278 case 'V':
2279 op_alt[j].nonoffmem_ok = 1;
2280 break;
2281 case 'o':
2282 op_alt[j].offmem_ok = 1;
2283 break;
2284 case 'X':
2285 op_alt[j].anything_ok = 1;
2286 break;
2287
2288 case 'p':
2289 op_alt[j].is_address = 1;
2290 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2291 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2292 ADDRESS, SCRATCH)];
2293 break;
2294
2295 case 'g':
2296 case 'r':
2297 op_alt[j].cl =
2298 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2299 break;
2300
2301 default:
2302 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2303 {
2304 op_alt[j].memory_ok = 1;
2305 break;
2306 }
2307 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2308 {
2309 op_alt[j].is_address = 1;
2310 op_alt[j].cl
2311 = (reg_class_subunion
2312 [(int) op_alt[j].cl]
2313 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2314 ADDRESS, SCRATCH)]);
2315 break;
2316 }
2317
2318 op_alt[j].cl
2319 = (reg_class_subunion
2320 [(int) op_alt[j].cl]
2321 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2322 break;
2323 }
2324 p += CONSTRAINT_LEN (c, p);
2325 }
2326 }
2327 }
2328 }
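/* Sketch of a typical consumer (illustrative only): after extract_insn and
   preprocess_constraints, recog_op_alt answers constraint questions
   without re-parsing the strings. */
#if 0
static bool
operand_allows_memory_p (int opno)
{
  int alt;

  for (alt = 0; alt < recog_data.n_alternatives; alt++)
    if (recog_op_alt[opno][alt].memory_ok
	|| recog_op_alt[opno][alt].offmem_ok
	|| recog_op_alt[opno][alt].anything_ok)
      return true;
  return false;
}
#endif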
2329
2330 /* Check the operands of an insn against the insn's operand constraints
2331 and return 1 if they are valid.
2332 The information about the insn's operands, constraints, operand modes
2333 etc. is obtained from the global variables set up by extract_insn.
2334
2335 WHICH_ALTERNATIVE is set to a number which indicates which
2336 alternative of constraints was matched: 0 for the first alternative,
2337 1 for the next, etc.
2338
2339 In addition, when two operands are required to match
2340 and it happens that the output operand is (reg) while the
2341 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2342 make the output operand look like the input.
2343 This is because the output operand is the one the template will print.
2344
2345 This is used in final, just before printing the assembler code and by
2346 the routines that determine an insn's attribute.
2347
2348 If STRICT is positive, it means that we have been
2349 called after reload has been completed. In that case, we must
2350 do all checks strictly. If it is zero, it means that we have been called
2351 before reload has completed. In that case, we first try to see if we can
2352 find an alternative that matches strictly. If not, we try again, this
2353 time assuming that reload will fix up the insn. This provides a "best
2354 guess" for the alternative and is used to compute attributes of insns prior
2355 to reload. A negative value of STRICT is used for this internal call. */
2356
2357 struct funny_match
2358 {
2359 int this_op, other;
2360 };
2361
2362 int
2363 constrain_operands (int strict)
2364 {
2365 const char *constraints[MAX_RECOG_OPERANDS];
2366 int matching_operands[MAX_RECOG_OPERANDS];
2367 int earlyclobber[MAX_RECOG_OPERANDS];
2368 int c;
2369
2370 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2371 int funny_match_index;
2372
2373 which_alternative = 0;
2374 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2375 return 1;
2376
2377 for (c = 0; c < recog_data.n_operands; c++)
2378 {
2379 constraints[c] = recog_data.constraints[c];
2380 matching_operands[c] = -1;
2381 }
2382
2383 do
2384 {
2385 int seen_earlyclobber_at = -1;
2386 int opno;
2387 int lose = 0;
2388 funny_match_index = 0;
2389
2390 if (!recog_data.alternative_enabled_p[which_alternative])
2391 {
2392 int i;
2393
2394 for (i = 0; i < recog_data.n_operands; i++)
2395 constraints[i] = skip_alternative (constraints[i]);
2396
2397 which_alternative++;
2398 continue;
2399 }
2400
2401 for (opno = 0; opno < recog_data.n_operands; opno++)
2402 {
2403 rtx op = recog_data.operand[opno];
2404 enum machine_mode mode = GET_MODE (op);
2405 const char *p = constraints[opno];
2406 int offset = 0;
2407 int win = 0;
2408 int val;
2409 int len;
2410
2411 earlyclobber[opno] = 0;
2412
2413 /* A unary operator may be accepted by the predicate, but it
2414 is irrelevant for matching constraints. */
2415 if (UNARY_P (op))
2416 op = XEXP (op, 0);
2417
2418 if (GET_CODE (op) == SUBREG)
2419 {
2420 if (REG_P (SUBREG_REG (op))
2421 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2422 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2423 GET_MODE (SUBREG_REG (op)),
2424 SUBREG_BYTE (op),
2425 GET_MODE (op));
2426 op = SUBREG_REG (op);
2427 }
2428
2429 /* An empty constraint or empty alternative
2430 allows anything which matched the pattern. */
2431 if (*p == 0 || *p == ',')
2432 win = 1;
2433
2434 do
2435 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2436 {
2437 case '\0':
2438 len = 0;
2439 break;
2440 case ',':
2441 c = '\0';
2442 break;
2443
2444 case '?': case '!': case '*': case '%':
2445 case '=': case '+':
2446 break;
2447
2448 case '#':
2449 /* Ignore rest of this alternative as far as
2450 constraint checking is concerned. */
2451 do
2452 p++;
2453 while (*p && *p != ',');
2454 len = 0;
2455 break;
2456
2457 case '&':
2458 earlyclobber[opno] = 1;
2459 if (seen_earlyclobber_at < 0)
2460 seen_earlyclobber_at = opno;
2461 break;
2462
2463 case '0': case '1': case '2': case '3': case '4':
2464 case '5': case '6': case '7': case '8': case '9':
2465 {
2466 /* This operand must be the same as a previous one.
2467 This kind of constraint is used for instructions such
2468 as add when they take only two operands.
2469
2470 Note that the lower-numbered operand is passed first.
2471
2472 If we are not testing strictly, assume that this
2473 constraint will be satisfied. */
2474
2475 char *end;
2476 int match;
2477
2478 match = strtoul (p, &end, 10);
2479 p = end;
2480
2481 if (strict < 0)
2482 val = 1;
2483 else
2484 {
2485 rtx op1 = recog_data.operand[match];
2486 rtx op2 = recog_data.operand[opno];
2487
2488 /* A unary operator may be accepted by the predicate,
2489 but it is irrelevant for matching constraints. */
2490 if (UNARY_P (op1))
2491 op1 = XEXP (op1, 0);
2492 if (UNARY_P (op2))
2493 op2 = XEXP (op2, 0);
2494
2495 val = operands_match_p (op1, op2);
2496 }
2497
2498 matching_operands[opno] = match;
2499 matching_operands[match] = opno;
2500
2501 if (val != 0)
2502 win = 1;
2503
2504 /* If output is *x and input is *--x, arrange later
2505 to change the output to *--x as well, since the
2506 output op is the one that will be printed. */
2507 if (val == 2 && strict > 0)
2508 {
2509 funny_match[funny_match_index].this_op = opno;
2510 funny_match[funny_match_index++].other = match;
2511 }
2512 }
2513 len = 0;
2514 break;
2515
2516 case 'p':
2517 /* p is used for address_operands. When we are called by
2518 gen_reload, no one will have checked that the address is
2519 strictly valid, i.e., that all pseudos requiring hard regs
2520 have gotten them. */
2521 if (strict <= 0
2522 || (strict_memory_address_p (recog_data.operand_mode[opno],
2523 op)))
2524 win = 1;
2525 break;
2526
2527 /* No need to check general_operand again;
2528 it was done in insn-recog.c. Well, except that reload
2529 doesn't check the validity of its replacements, but
2530 that should only matter when there's a bug. */
2531 case 'g':
2532 /* Anything goes unless it is a REG and really has a hard reg
2533 but the hard reg is not in the class GENERAL_REGS. */
2534 if (REG_P (op))
2535 {
2536 if (strict < 0
2537 || GENERAL_REGS == ALL_REGS
2538 || (reload_in_progress
2539 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2540 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2541 win = 1;
2542 }
2543 else if (strict < 0 || general_operand (op, mode))
2544 win = 1;
2545 break;
2546
2547 case 'X':
2548 /* This is used for a MATCH_SCRATCH in the cases when
2549 we don't actually need anything. So anything goes
2550 any time. */
2551 win = 1;
2552 break;
2553
2554 case TARGET_MEM_CONSTRAINT:
2555 /* Memory operands must be valid, to the extent
2556 required by STRICT. */
2557 if (MEM_P (op))
2558 {
2559 if (strict > 0
2560 && !strict_memory_address_addr_space_p
2561 (GET_MODE (op), XEXP (op, 0),
2562 MEM_ADDR_SPACE (op)))
2563 break;
2564 if (strict == 0
2565 && !memory_address_addr_space_p
2566 (GET_MODE (op), XEXP (op, 0),
2567 MEM_ADDR_SPACE (op)))
2568 break;
2569 win = 1;
2570 }
2571 /* Before reload, accept what reload can turn into mem. */
2572 else if (strict < 0 && CONSTANT_P (op))
2573 win = 1;
2574 /* During reload, accept a pseudo. */
2575 else if (reload_in_progress && REG_P (op)
2576 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2577 win = 1;
2578 break;
2579
2580 case '<':
2581 if (MEM_P (op)
2582 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2583 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2584 win = 1;
2585 break;
2586
2587 case '>':
2588 if (MEM_P (op)
2589 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2590 || GET_CODE (XEXP (op, 0)) == POST_INC))
2591 win = 1;
2592 break;
2593
2594 case 'E':
2595 case 'F':
2596 if (CONST_DOUBLE_AS_FLOAT_P (op)
2597 || (GET_CODE (op) == CONST_VECTOR
2598 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2599 win = 1;
2600 break;
2601
2602 case 'G':
2603 case 'H':
2604 if (CONST_DOUBLE_AS_FLOAT_P (op)
2605 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2606 win = 1;
2607 break;
2608
2609 case 's':
2610 if (CONST_SCALAR_INT_P (op))
2611 break; /* 's' rejects bare integers; otherwise fall through to 'i'. */
2612 case 'i':
2613 if (CONSTANT_P (op))
2614 win = 1;
2615 break;
2616
2617 case 'n':
2618 if (CONST_SCALAR_INT_P (op))
2619 win = 1;
2620 break;
2621
2622 case 'I':
2623 case 'J':
2624 case 'K':
2625 case 'L':
2626 case 'M':
2627 case 'N':
2628 case 'O':
2629 case 'P':
2630 if (CONST_INT_P (op)
2631 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2632 win = 1;
2633 break;
2634
2635 case 'V':
2636 if (MEM_P (op)
2637 && ((strict > 0 && ! offsettable_memref_p (op))
2638 || (strict < 0
2639 && !(CONSTANT_P (op) || MEM_P (op)))
2640 || (reload_in_progress
2641 && !(REG_P (op)
2642 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2643 win = 1;
2644 break;
2645
2646 case 'o':
2647 if ((strict > 0 && offsettable_memref_p (op))
2648 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2649 /* Before reload, accept what reload can handle. */
2650 || (strict < 0
2651 && (CONSTANT_P (op) || MEM_P (op)))
2652 /* During reload, accept a pseudo. */
2653 || (reload_in_progress && REG_P (op)
2654 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2655 win = 1;
2656 break;
2657
2658 default:
2659 {
2660 enum reg_class cl;
2661
2662 cl = (c == 'r'
2663 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2664 if (cl != NO_REGS)
2665 {
2666 if (strict < 0
2667 || (strict == 0
2668 && REG_P (op)
2669 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2670 || (strict == 0 && GET_CODE (op) == SCRATCH)
2671 || (REG_P (op)
2672 && reg_fits_class_p (op, cl, offset, mode)))
2673 win = 1;
2674 }
2675 #ifdef EXTRA_CONSTRAINT_STR
2676 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2677 win = 1;
2678
2679 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2680 /* Every memory operand can be reloaded to fit. */
2681 && ((strict < 0 && MEM_P (op))
2682 /* Before reload, accept what reload can turn
2683 into mem. */
2684 || (strict < 0 && CONSTANT_P (op))
2685 /* During reload, accept a pseudo. */
2686 || (reload_in_progress && REG_P (op)
2687 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2688 win = 1;
2689 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2690 /* Every address operand can be reloaded to fit. */
2691 && strict < 0)
2692 win = 1;
2693 /* Cater to architectures like IA-64 that define extra memory
2694 constraints without using define_memory_constraint. */
2695 else if (reload_in_progress
2696 && REG_P (op)
2697 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2698 && reg_renumber[REGNO (op)] < 0
2699 && reg_equiv_mem (REGNO (op)) != 0
2700 && EXTRA_CONSTRAINT_STR
2701 (reg_equiv_mem (REGNO (op)), c, p))
2702 win = 1;
2703 #endif
2704 break;
2705 }
2706 }
2707 while (p += len, c);
2708
2709 constraints[opno] = p;
2710 /* If this operand did not win somehow,
2711 this alternative loses. */
2712 if (! win)
2713 lose = 1;
2714 }
2715 /* This alternative won; the operands are ok.
2716 Change whichever operands this alternative says to change. */
2717 if (! lose)
2718 {
2719 int opno, eopno;
2720
2721 /* See if any earlyclobber operand conflicts with some other
2722 operand. */
2723
2724 if (strict > 0 && seen_earlyclobber_at >= 0)
2725 for (eopno = seen_earlyclobber_at;
2726 eopno < recog_data.n_operands;
2727 eopno++)
2728 /* Ignore earlyclobber operands now in memory,
2729 because we would often report failure when we have
2730 two memory operands, one of which was formerly a REG. */
2731 if (earlyclobber[eopno]
2732 && REG_P (recog_data.operand[eopno]))
2733 for (opno = 0; opno < recog_data.n_operands; opno++)
2734 if ((MEM_P (recog_data.operand[opno])
2735 || recog_data.operand_type[opno] != OP_OUT)
2736 && opno != eopno
2737 /* Ignore things like match_operator operands. */
2738 && *recog_data.constraints[opno] != 0
2739 && ! (matching_operands[opno] == eopno
2740 && operands_match_p (recog_data.operand[opno],
2741 recog_data.operand[eopno]))
2742 && ! safe_from_earlyclobber (recog_data.operand[opno],
2743 recog_data.operand[eopno]))
2744 lose = 1;
2745
2746 if (! lose)
2747 {
2748 while (--funny_match_index >= 0)
2749 {
2750 recog_data.operand[funny_match[funny_match_index].other]
2751 = recog_data.operand[funny_match[funny_match_index].this_op];
2752 }
2753
2754 #ifdef AUTO_INC_DEC
2755 /* For operands without < or > constraints reject side-effects. */
2756 if (recog_data.is_asm)
2757 {
2758 for (opno = 0; opno < recog_data.n_operands; opno++)
2759 if (MEM_P (recog_data.operand[opno]))
2760 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2761 {
2762 case PRE_INC:
2763 case POST_INC:
2764 case PRE_DEC:
2765 case POST_DEC:
2766 case PRE_MODIFY:
2767 case POST_MODIFY:
2768 if (strchr (recog_data.constraints[opno], '<') == NULL
2769 && strchr (recog_data.constraints[opno], '>')
2770 == NULL)
2771 return 0;
2772 break;
2773 default:
2774 break;
2775 }
2776 }
2777 #endif
2778 return 1;
2779 }
2780 }
2781
2782 which_alternative++;
2783 }
2784 while (which_alternative < recog_data.n_alternatives);
2785
2786 which_alternative = -1;
2787 /* If we are about to reject this, but we are not to test strictly,
2788 try a very loose test. Only return failure if it fails also. */
2789 if (strict == 0)
2790 return constrain_operands (-1);
2791 else
2792 return 0;
2793 }
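/* Sketch of the calling convention (mirrors extract_constrain_insn_cached
   above): passing reload_completed selects the strict regime after reload
   and the lenient 0/-1 ladder before it. */
#if 0
static void
check_insn_constraints (rtx insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif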
2794
2795 /* Return true iff OPERAND (assumed to be a REG rtx)
2796 is a hard reg in class CL when its regno is offset by OFFSET
2797 and changed to mode MODE.
2798 If REG occupies multiple hard regs, all of them must be in CLASS. */
2799
2800 bool
2801 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2802 enum machine_mode mode)
2803 {
2804 unsigned int regno = REGNO (operand);
2805
2806 if (cl == NO_REGS)
2807 return false;
2808
2809 /* Regno must not be a pseudo register. Offset may be negative. */
2810 return (HARD_REGISTER_NUM_P (regno)
2811 && HARD_REGISTER_NUM_P (regno + offset)
2812 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2813 regno + offset));
2814 }
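/* E.g. (illustrative only): ask whether hard register 1, viewed in DImode,
   still lies entirely within GENERAL_REGS. */
#if 0
static bool
example_reg_fits (void)
{
  rtx r = gen_rtx_REG (DImode, 1);
  return reg_fits_class_p (r, GENERAL_REGS, 0, DImode);
}
#endif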
2815 \f
2816 /* Split a single instruction. Helper function for split_all_insns and
2817 split_all_insns_noflow. Return last insn in the sequence if successful,
2818 or NULL if unsuccessful. */
2819
2820 static rtx
2821 split_insn (rtx insn)
2822 {
2823 /* Split insns here to get max fine-grain parallelism. */
2824 rtx first = PREV_INSN (insn);
2825 rtx last = try_split (PATTERN (insn), insn, 1);
2826 rtx insn_set, last_set, note;
2827
2828 if (last == insn)
2829 return NULL_RTX;
2830
2831 /* If the original instruction was a single set that was known to be
2832 equivalent to a constant, see if we can say the same about the last
2833 instruction in the split sequence. The two instructions must set
2834 the same destination. */
2835 insn_set = single_set (insn);
2836 if (insn_set)
2837 {
2838 last_set = single_set (last);
2839 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2840 {
2841 note = find_reg_equal_equiv_note (insn);
2842 if (note && CONSTANT_P (XEXP (note, 0)))
2843 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2844 else if (CONSTANT_P (SET_SRC (insn_set)))
2845 set_unique_reg_note (last, REG_EQUAL,
2846 copy_rtx (SET_SRC (insn_set)));
2847 }
2848 }
2849
2850 /* try_split returns the NOTE that INSN became. */
2851 SET_INSN_DELETED (insn);
2852
2853 /* ??? Coddle to md files that generate subregs in post-reload
2854 splitters instead of computing the proper hard register. */
2855 if (reload_completed && first != last)
2856 {
2857 first = NEXT_INSN (first);
2858 for (;;)
2859 {
2860 if (INSN_P (first))
2861 cleanup_subreg_operands (first);
2862 if (first == last)
2863 break;
2864 first = NEXT_INSN (first);
2865 }
2866 }
2867
2868 return last;
2869 }
2870
2871 /* Split all insns in the function. */
2872
2873 void
2874 split_all_insns (void)
2875 {
2876 sbitmap blocks;
2877 bool changed;
2878 basic_block bb;
2879
2880 blocks = sbitmap_alloc (last_basic_block);
2881 bitmap_clear (blocks);
2882 changed = false;
2883
2884 FOR_EACH_BB_REVERSE (bb)
2885 {
2886 rtx insn, next;
2887 bool finish = false;
2888
2889 rtl_profile_for_bb (bb);
2890 for (insn = BB_HEAD (bb); !finish ; insn = next)
2891 {
2892 /* Can't use `next_real_insn' because that might go across
2893 CODE_LABELS and short-out basic blocks. */
2894 next = NEXT_INSN (insn);
2895 finish = (insn == BB_END (bb));
2896 if (INSN_P (insn))
2897 {
2898 rtx set = single_set (insn);
2899
2900 /* Don't split no-op move insns. These should silently
2901 disappear later in final. Splitting such insns would
2902 break the code that handles LIBCALL blocks. */
2903 if (set && set_noop_p (set))
2904 {
2905 /* Nops get in the way while scheduling, so delete them
2906 now if register allocation has already been done. It
2907 is too risky to try to do this before register
2908 allocation, and there are unlikely to be very many
2909 nops then anyway. */
2910 if (reload_completed)
2911 delete_insn_and_edges (insn);
2912 }
2913 else
2914 {
2915 if (split_insn (insn))
2916 {
2917 bitmap_set_bit (blocks, bb->index);
2918 changed = true;
2919 }
2920 }
2921 }
2922 }
2923 }
2924
2925 default_rtl_profile ();
2926 if (changed)
2927 find_many_sub_basic_blocks (blocks);
2928
2929 #ifdef ENABLE_CHECKING
2930 verify_flow_info ();
2931 #endif
2932
2933 sbitmap_free (blocks);
2934 }
2935
2936 /* Same as split_all_insns, but do not expect CFG to be available.
2937 Used by machine dependent reorg passes. */
2938
2939 unsigned int
2940 split_all_insns_noflow (void)
2941 {
2942 rtx next, insn;
2943
2944 for (insn = get_insns (); insn; insn = next)
2945 {
2946 next = NEXT_INSN (insn);
2947 if (INSN_P (insn))
2948 {
2949 /* Don't split no-op move insns. These should silently
2950 disappear later in final. Splitting such insns would
2951 break the code that handles LIBCALL blocks. */
2952 rtx set = single_set (insn);
2953 if (set && set_noop_p (set))
2954 {
2955 /* Nops get in the way while scheduling, so delete them
2956 now if register allocation has already been done. It
2957 is too risky to try to do this before register
2958 allocation, and there are unlikely to be very many
2959 nops then anyway.
2960
2961 ??? Should we use delete_insn when the CFG isn't valid? */
2962 if (reload_completed)
2963 delete_insn_and_edges (insn);
2964 }
2965 else
2966 split_insn (insn);
2967 }
2968 }
2969 return 0;
2970 }
2971 \f
2972 #ifdef HAVE_peephole2
2973 struct peep2_insn_data
2974 {
2975 rtx insn;
2976 regset live_before;
2977 };
2978
2979 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2980 static int peep2_current;
2981
2982 static bool peep2_do_rebuild_jump_labels;
2983 static bool peep2_do_cleanup_cfg;
2984
2985 /* The number of instructions available to match a peep2. */
2986 int peep2_current_count;
2987
2988 /* A non-insn marker indicating the last insn of the block.
2989 The live_before regset for this element is correct, indicating
2990 DF_LIVE_OUT for the block. */
2991 #define PEEP2_EOB pc_rtx
2992
2993 /* Wrap N to fit into the peep2_insn_data buffer. */
2994
2995 static int
2996 peep2_buf_position (int n)
2997 {
2998 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2999 n -= MAX_INSNS_PER_PEEP2 + 1;
3000 return n;
3001 }
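/* The buffer has MAX_INSNS_PER_PEEP2 + 1 slots and indexes never run more
   than one lap ahead, so a single subtraction suffices; the sketch below is
   the equivalent but slower modulo formulation. */
#if 0
static int
peep2_buf_position_mod (int n)
{
  return n % (MAX_INSNS_PER_PEEP2 + 1);
}
#endif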
3002
3003 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3004 does not exist. Used by the recognizer to find the next insn to match
3005 in a multi-insn pattern. */
3006
3007 rtx
3008 peep2_next_insn (int n)
3009 {
3010 gcc_assert (n <= peep2_current_count);
3011
3012 n = peep2_buf_position (peep2_current + n);
3013
3014 return peep2_insn_data[n].insn;
3015 }
3016
3017 /* Return true if REGNO is dead before the Nth non-note insn
3018 after `current'. */
3019
3020 int
3021 peep2_regno_dead_p (int ofs, int regno)
3022 {
3023 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3024
3025 ofs = peep2_buf_position (peep2_current + ofs);
3026
3027 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3028
3029 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3030 }
3031
3032 /* Similarly for a REG. */
3033
3034 int
3035 peep2_reg_dead_p (int ofs, rtx reg)
3036 {
3037 int regno, n;
3038
3039 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3040
3041 ofs = peep2_buf_position (peep2_current + ofs);
3042
3043 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3044
3045 regno = REGNO (reg);
3046 n = hard_regno_nregs[regno][GET_MODE (reg)];
3047 while (--n >= 0)
3048 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3049 return 0;
3050 return 1;
3051 }
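/* Typical use (sketch): the C condition of a define_peephole2 in a machine
   description asks whether a matched register dies inside the window, e.g.

     peep2_reg_dead_p (2, operands[0])

   meaning "operands[0] is dead before the second non-note insn after the
   current one". */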
3052
3053 /* Try to find a hard register of mode MODE, matching the register class in
3054 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3055 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3056 in which case the only condition is that the register must be available
3057 before CURRENT_INSN.
3058 Registers that already have bits set in REG_SET will not be considered.
3059
3060 If an appropriate register is available, it will be returned and the
3061 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3062 returned. */
3063
3064 rtx
3065 peep2_find_free_register (int from, int to, const char *class_str,
3066 enum machine_mode mode, HARD_REG_SET *reg_set)
3067 {
3068 static int search_ofs;
3069 enum reg_class cl;
3070 HARD_REG_SET live;
3071 df_ref *def_rec;
3072 int i;
3073
3074 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3075 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3076
3077 from = peep2_buf_position (peep2_current + from);
3078 to = peep2_buf_position (peep2_current + to);
3079
3080 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3081 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3082
3083 while (from != to)
3084 {
3085 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3086
3087 /* Don't use registers set or clobbered by the insn. */
3088 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3089 *def_rec; def_rec++)
3090 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3091
3092 from = peep2_buf_position (from + 1);
3093 }
3094
3095 cl = (class_str[0] == 'r' ? GENERAL_REGS
3096 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3097
3098 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3099 {
3100 int raw_regno, regno, success, j;
3101
3102 /* Distribute the free registers as much as possible. */
3103 raw_regno = search_ofs + i;
3104 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3105 raw_regno -= FIRST_PSEUDO_REGISTER;
3106 #ifdef REG_ALLOC_ORDER
3107 regno = reg_alloc_order[raw_regno];
3108 #else
3109 regno = raw_regno;
3110 #endif
3111
3112 /* Don't allocate fixed registers. */
3113 if (fixed_regs[regno])
3114 continue;
3115 /* Don't allocate global registers. */
3116 if (global_regs[regno])
3117 continue;
3118 /* Make sure the register is of the right class. */
3119 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3120 continue;
3121 /* And can support the mode we need. */
3122 if (! HARD_REGNO_MODE_OK (regno, mode))
3123 continue;
3124 /* And that we don't create an extra save/restore. */
3125 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3126 continue;
3127 if (! targetm.hard_regno_scratch_ok (regno))
3128 continue;
3129
3130 /* And we don't clobber traceback for noreturn functions. */
3131 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3132 && (! reload_completed || frame_pointer_needed))
3133 continue;
3134
3135 success = 1;
3136 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3137 {
3138 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3139 || TEST_HARD_REG_BIT (live, regno + j))
3140 {
3141 success = 0;
3142 break;
3143 }
3144 }
3145 if (success)
3146 {
3147 add_to_hard_reg_set (reg_set, mode, regno);
3148
3149 /* Start the next search with the next register. */
3150 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3151 raw_regno = 0;
3152 search_ofs = raw_regno;
3153
3154 return gen_rtx_REG (mode, regno);
3155 }
3156 }
3157
3158 search_ofs = 0;
3159 return NULL_RTX;
3160 }
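/* Typical use (sketch): peephole2 generator code in a machine description
   grabs a scratch register over the matched window and bails out when none
   is free (FAIL is only meaningful inside an MD template). */
#if 0
{
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
}
#endif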
3161
3162 /* Forget all currently tracked instructions; remember only the
3163 current LIVE regset. */
3164
3165 static void
3166 peep2_reinit_state (regset live)
3167 {
3168 int i;
3169
3170 /* Indicate that all slots except the last hold invalid data. */
3171 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3172 peep2_insn_data[i].insn = NULL_RTX;
3173 peep2_current_count = 0;
3174
3175 /* Indicate that the last slot contains live_after data. */
3176 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3177 peep2_current = MAX_INSNS_PER_PEEP2;
3178
3179 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3180 }
3181
3182 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3183 starting at INSN. Perform the replacement, removing the old insns and
3184 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3185 if the replacement is rejected. */
3186
3187 static rtx
3188 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3189 {
3190 int i;
3191 rtx last, eh_note, as_note, before_try, x;
3192 rtx old_insn, new_insn;
3193 bool was_call = false;
3194
3195 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3196 match more than one insn, or to be split into more than one insn. */
3197 old_insn = peep2_insn_data[peep2_current].insn;
3198 if (RTX_FRAME_RELATED_P (old_insn))
3199 {
3200 bool any_note = false;
3201 rtx note;
3202
3203 if (match_len != 0)
3204 return NULL;
3205
3206 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3207 may be in the stream for the purpose of register allocation. */
3208 if (active_insn_p (attempt))
3209 new_insn = attempt;
3210 else
3211 new_insn = next_active_insn (attempt);
3212 if (next_active_insn (new_insn))
3213 return NULL;
3214
3215 /* We have a 1-1 replacement. Copy over any frame-related info. */
3216 RTX_FRAME_RELATED_P (new_insn) = 1;
3217
3218 /* Allow the backend to fill in a note during the split. */
3219 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3220 switch (REG_NOTE_KIND (note))
3221 {
3222 case REG_FRAME_RELATED_EXPR:
3223 case REG_CFA_DEF_CFA:
3224 case REG_CFA_ADJUST_CFA:
3225 case REG_CFA_OFFSET:
3226 case REG_CFA_REGISTER:
3227 case REG_CFA_EXPRESSION:
3228 case REG_CFA_RESTORE:
3229 case REG_CFA_SET_VDRAP:
3230 any_note = true;
3231 break;
3232 default:
3233 break;
3234 }
3235
3236 /* If the backend didn't supply a note, copy one over. */
3237 if (!any_note)
3238 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3239 switch (REG_NOTE_KIND (note))
3240 {
3241 case REG_FRAME_RELATED_EXPR:
3242 case REG_CFA_DEF_CFA:
3243 case REG_CFA_ADJUST_CFA:
3244 case REG_CFA_OFFSET:
3245 case REG_CFA_REGISTER:
3246 case REG_CFA_EXPRESSION:
3247 case REG_CFA_RESTORE:
3248 case REG_CFA_SET_VDRAP:
3249 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3250 any_note = true;
3251 break;
3252 default:
3253 break;
3254 }
3255
3256 /* If there still isn't a note, make sure the unwind info sees the
3257 same expression as before the split. */
3258 if (!any_note)
3259 {
3260 rtx old_set, new_set;
3261
3262 /* The old insn had better have been simple, or annotated. */
3263 old_set = single_set (old_insn);
3264 gcc_assert (old_set != NULL);
3265
3266 new_set = single_set (new_insn);
3267 if (!new_set || !rtx_equal_p (new_set, old_set))
3268 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3269 }
3270
3271 /* Copy prologue/epilogue status. This is required in order to keep
3272 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3273 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3274 }
3275
3276 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3277 in ATTEMPT and copy our CALL_INSN_FUNCTION_USAGE and other
3278 cfg-related call notes. */
3279 for (i = 0; i <= match_len; ++i)
3280 {
3281 int j;
3282 rtx note;
3283
3284 j = peep2_buf_position (peep2_current + i);
3285 old_insn = peep2_insn_data[j].insn;
3286 if (!CALL_P (old_insn))
3287 continue;
3288 was_call = true;
3289
3290 new_insn = attempt;
3291 while (new_insn != NULL_RTX)
3292 {
3293 if (CALL_P (new_insn))
3294 break;
3295 new_insn = NEXT_INSN (new_insn);
3296 }
3297
3298 gcc_assert (new_insn != NULL_RTX);
3299
3300 CALL_INSN_FUNCTION_USAGE (new_insn)
3301 = CALL_INSN_FUNCTION_USAGE (old_insn);
3302
3303 for (note = REG_NOTES (old_insn);
3304 note;
3305 note = XEXP (note, 1))
3306 switch (REG_NOTE_KIND (note))
3307 {
3308 case REG_NORETURN:
3309 case REG_SETJMP:
3310 case REG_TM:
3311 add_reg_note (new_insn, REG_NOTE_KIND (note),
3312 XEXP (note, 0));
3313 break;
3314 default:
3315 /* Discard all other reg notes. */
3316 break;
3317 }
3318
3319 /* Croak if there is another call in the sequence. */
3320 while (++i <= match_len)
3321 {
3322 j = peep2_buf_position (peep2_current + i);
3323 old_insn = peep2_insn_data[j].insn;
3324 gcc_assert (!CALL_P (old_insn));
3325 }
3326 break;
3327 }
3328
3329 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3330 move those notes over to the new sequence. */
3331 as_note = NULL;
3332 for (i = match_len; i >= 0; --i)
3333 {
3334 int j = peep2_buf_position (peep2_current + i);
3335 old_insn = peep2_insn_data[j].insn;
3336
3337 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3338 if (as_note)
3339 break;
3340 }
3341
3342 i = peep2_buf_position (peep2_current + match_len);
3343 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3344
3345 /* Replace the old sequence with the new. */
3346 last = emit_insn_after_setloc (attempt,
3347 peep2_insn_data[i].insn,
3348 INSN_LOCATION (peep2_insn_data[i].insn));
3349 before_try = PREV_INSN (insn);
3350 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3351
3352 /* Re-insert the EH_REGION notes. */
3353 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3354 {
3355 edge eh_edge;
3356 edge_iterator ei;
3357
3358 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3359 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3360 break;
3361
3362 if (eh_note)
3363 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3364
3365 if (eh_edge)
3366 for (x = last; x != before_try; x = PREV_INSN (x))
3367 if (x != BB_END (bb)
3368 && (can_throw_internal (x)
3369 || can_nonlocal_goto (x)))
3370 {
3371 edge nfte, nehe;
3372 int flags;
3373
3374 nfte = split_block (bb, x);
3375 flags = (eh_edge->flags
3376 & (EDGE_EH | EDGE_ABNORMAL));
3377 if (CALL_P (x))
3378 flags |= EDGE_ABNORMAL_CALL;
3379 nehe = make_edge (nfte->src, eh_edge->dest,
3380 flags);
3381
3382 nehe->probability = eh_edge->probability;
3383 nfte->probability
3384 = REG_BR_PROB_BASE - nehe->probability;
3385
3386 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3387 bb = nfte->src;
3388 eh_edge = nehe;
3389 }
3390
3391 /* The replacement may have turned a possibly trapping insn
3392 into a non-trapping one. Zap any dummy outgoing edges. */
3393 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3394 }
3395
3396 /* Re-insert the ARGS_SIZE notes. */
3397 if (as_note)
3398 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3399
3400 /* If we generated a jump instruction, it won't have
3401 JUMP_LABEL set. Recompute after we're done. */
3402 for (x = last; x != before_try; x = PREV_INSN (x))
3403 if (JUMP_P (x))
3404 {
3405 peep2_do_rebuild_jump_labels = true;
3406 break;
3407 }
3408
3409 return last;
3410 }
3411
3412 /* After performing a replacement in basic block BB, fix up the life
3413 information in our buffer. LAST is the last of the insns that we
3414 emitted as a replacement. PREV is the insn before the start of
3415 the replacement. MATCH_LEN is the number of instructions that were
3416 matched, and which now need to be replaced in the buffer. */
3417
3418 static void
3419 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3420 {
3421 int i = peep2_buf_position (peep2_current + match_len + 1);
3422 rtx x;
3423 regset_head live;
3424
3425 INIT_REG_SET (&live);
3426 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3427
3428 gcc_assert (peep2_current_count >= match_len + 1);
3429 peep2_current_count -= match_len + 1;
3430
3431 x = last;
3432 do
3433 {
3434 if (INSN_P (x))
3435 {
3436 df_insn_rescan (x);
3437 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3438 {
3439 peep2_current_count++;
3440 if (--i < 0)
3441 i = MAX_INSNS_PER_PEEP2;
3442 peep2_insn_data[i].insn = x;
3443 df_simulate_one_insn_backwards (bb, x, &live);
3444 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3445 }
3446 }
3447 x = PREV_INSN (x);
3448 }
3449 while (x != prev);
3450 CLEAR_REG_SET (&live);
3451
3452 peep2_current = i;
3453 }
3454
3455 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3456 Return true if we added it, false otherwise. The caller will try to match
3457 peepholes against the buffer if we return false; otherwise it will try to
3458 add more instructions to the buffer. */
3459
3460 static bool
3461 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3462 {
3463 int pos;
3464
3465 /* Once we have filled the maximum number of insns the buffer can hold,
3466 allow the caller to match the insns against peepholes. We wait until
3467 the buffer is full in case the target has similar peepholes of different
3468 length; we always want to match the longest if possible. */
3469 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3470 return false;
3471
3472 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3473 any other pattern, lest it change the semantics of the frame info. */
3474 if (RTX_FRAME_RELATED_P (insn))
3475 {
3476 /* Let the buffer drain first. */
3477 if (peep2_current_count > 0)
3478 return false;
3479 /* Now the insn will be the only thing in the buffer. */
3480 }
3481
3482 pos = peep2_buf_position (peep2_current + peep2_current_count);
3483 peep2_insn_data[pos].insn = insn;
3484 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3485 peep2_current_count++;
3486
3487 df_simulate_one_insn_forwards (bb, insn, live);
3488 return true;
3489 }
3490
3491 /* Perform the peephole2 optimization pass. */
3492
3493 static void
3494 peephole2_optimize (void)
3495 {
3496 rtx insn;
3497 bitmap live;
3498 int i;
3499 basic_block bb;
3500
3501 peep2_do_cleanup_cfg = false;
3502 peep2_do_rebuild_jump_labels = false;
3503
3504 df_set_flags (DF_LR_RUN_DCE);
3505 df_note_add_problem ();
3506 df_analyze ();
3507
3508 /* Initialize the regsets we're going to use. */
3509 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3510 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3511 live = BITMAP_ALLOC (&reg_obstack);
3512
3513 FOR_EACH_BB_REVERSE (bb)
3514 {
3515 bool past_end = false;
3516 int pos;
3517
3518 rtl_profile_for_bb (bb);
3519
3520 /* Start up propagation. */
3521 bitmap_copy (live, DF_LR_IN (bb));
3522 df_simulate_initialize_forwards (bb, live);
3523 peep2_reinit_state (live);
3524
3525 insn = BB_HEAD (bb);
3526 for (;;)
3527 {
3528 rtx attempt, head;
3529 int match_len;
3530
3531 if (!past_end && !NONDEBUG_INSN_P (insn))
3532 {
3533 next_insn:
3534 insn = NEXT_INSN (insn);
3535 if (insn == NEXT_INSN (BB_END (bb)))
3536 past_end = true;
3537 continue;
3538 }
3539 if (!past_end && peep2_fill_buffer (bb, insn, live))
3540 goto next_insn;
3541
3542 /* If we did not fill an empty buffer, it signals the end of the
3543 block. */
3544 if (peep2_current_count == 0)
3545 break;
3546
3547 /* The buffer filled to the current maximum, so try to match. */
3548
3549 pos = peep2_buf_position (peep2_current + peep2_current_count);
3550 peep2_insn_data[pos].insn = PEEP2_EOB;
3551 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3552
3553 /* Match the peephole. */
3554 head = peep2_insn_data[peep2_current].insn;
3555 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3556 if (attempt != NULL)
3557 {
3558 rtx last = peep2_attempt (bb, head, match_len, attempt);
3559 if (last)
3560 {
3561 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3562 continue;
3563 }
3564 }
3565
3566 /* No match: advance the buffer by one insn. */
3567 peep2_current = peep2_buf_position (peep2_current + 1);
3568 peep2_current_count--;
3569 }
3570 }
3571
3572 default_rtl_profile ();
3573 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3574 BITMAP_FREE (peep2_insn_data[i].live_before);
3575 BITMAP_FREE (live);
3576 if (peep2_do_rebuild_jump_labels)
3577 rebuild_jump_labels (get_insns ());
3578 }
3579 #endif /* HAVE_peephole2 */
3580
3581 /* Common predicates for use with define_bypass. */
3582
3583 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3584 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3585 must be either a single_set or a PARALLEL with SETs inside. */
3586
3587 int
3588 store_data_bypass_p (rtx out_insn, rtx in_insn)
3589 {
3590 rtx out_set, in_set;
3591 rtx out_pat, in_pat;
3592 rtx out_exp, in_exp;
3593 int i, j;
3594
3595 in_set = single_set (in_insn);
3596 if (in_set)
3597 {
3598 if (!MEM_P (SET_DEST (in_set)))
3599 return false;
3600
3601 out_set = single_set (out_insn);
3602 if (out_set)
3603 {
3604 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3605 return false;
3606 }
3607 else
3608 {
3609 out_pat = PATTERN (out_insn);
3610
3611 if (GET_CODE (out_pat) != PARALLEL)
3612 return false;
3613
3614 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3615 {
3616 out_exp = XVECEXP (out_pat, 0, i);
3617
3618 if (GET_CODE (out_exp) == CLOBBER)
3619 continue;
3620
3621 gcc_assert (GET_CODE (out_exp) == SET);
3622
3623 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3624 return false;
3625 }
3626 }
3627 }
3628 else
3629 {
3630 in_pat = PATTERN (in_insn);
3631 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3632
3633 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3634 {
3635 in_exp = XVECEXP (in_pat, 0, i);
3636
3637 if (GET_CODE (in_exp) == CLOBBER)
3638 continue;
3639
3640 gcc_assert (GET_CODE (in_exp) == SET);
3641
3642 if (!MEM_P (SET_DEST (in_exp)))
3643 return false;
3644
3645 out_set = single_set (out_insn);
3646 if (out_set)
3647 {
3648 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3649 return false;
3650 }
3651 else
3652 {
3653 out_pat = PATTERN (out_insn);
3654 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3655
3656 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3657 {
3658 out_exp = XVECEXP (out_pat, 0, j);
3659
3660 if (GET_CODE (out_exp) == CLOBBER)
3661 continue;
3662
3663 gcc_assert (GET_CODE (out_exp) == SET);
3664
3665 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3666 return false;
3667 }
3668 }
3669 }
3670 }
3671
3672 return true;
3673 }
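/* Concrete illustration (hypothetical insns): with
     OUT_INSN: (set (reg:SI 1) (plus:SI (reg:SI 2) (reg:SI 3)))
     IN_INSN:  (set (mem:SI (reg:SI 4)) (reg:SI 1))
   the dependency is on the stored data (reg 1), so the predicate holds;
   had reg 1 appeared inside the address instead, it would not. */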
3674
3675 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3676 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3677 or multiple set; IN_INSN should be a single_set for the result to be valid,
3678 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3679
3680 int
3681 if_test_bypass_p (rtx out_insn, rtx in_insn)
3682 {
3683 rtx out_set, in_set;
3684
3685 in_set = single_set (in_insn);
3686 if (! in_set)
3687 {
3688 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3689 return false;
3690 }
3691
3692 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3693 return false;
3694 in_set = SET_SRC (in_set);
3695
3696 out_set = single_set (out_insn);
3697 if (out_set)
3698 {
3699 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3700 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3701 return false;
3702 }
3703 else
3704 {
3705 rtx out_pat;
3706 int i;
3707
3708 out_pat = PATTERN (out_insn);
3709 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3710
3711 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3712 {
3713 rtx exp = XVECEXP (out_pat, 0, i);
3714
3715 if (GET_CODE (exp) == CLOBBER)
3716 continue;
3717
3718 gcc_assert (GET_CODE (exp) == SET);
3719
3720 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3721 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3722 return false;
3723 }
3724 }
3725
3726 return true;
3727 }
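/* Illustration (hypothetical insns): with
     OUT_INSN: (set (reg:SI 1) (reg:SI 2))
     IN_INSN:  (set (pc) (if_then_else (eq (reg:SI 1) (const_int 0))
                                       (label_ref 23) (pc)))
   reg 1 feeds only the condition, not the THEN or ELSE arm, so the
   predicate holds. */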
3728 \f
3729 static bool
3730 gate_handle_peephole2 (void)
3731 {
3732 return (optimize > 0 && flag_peephole2);
3733 }
3734
3735 static unsigned int
3736 rest_of_handle_peephole2 (void)
3737 {
3738 #ifdef HAVE_peephole2
3739 peephole2_optimize ();
3740 #endif
3741 return 0;
3742 }
3743
3744 struct rtl_opt_pass pass_peephole2 =
3745 {
3746 {
3747 RTL_PASS,
3748 "peephole2", /* name */
3749 OPTGROUP_NONE, /* optinfo_flags */
3750 gate_handle_peephole2, /* gate */
3751 rest_of_handle_peephole2, /* execute */
3752 NULL, /* sub */
3753 NULL, /* next */
3754 0, /* static_pass_number */
3755 TV_PEEPHOLE2, /* tv_id */
3756 0, /* properties_required */
3757 0, /* properties_provided */
3758 0, /* properties_destroyed */
3759 0, /* todo_flags_start */
3760 TODO_df_finish | TODO_verify_rtl_sharing |
3761 0 /* todo_flags_finish */
3762 }
3763 };
3764
3765 static unsigned int
3766 rest_of_handle_split_all_insns (void)
3767 {
3768 split_all_insns ();
3769 return 0;
3770 }
3771
3772 struct rtl_opt_pass pass_split_all_insns =
3773 {
3774 {
3775 RTL_PASS,
3776 "split1", /* name */
3777 OPTGROUP_NONE, /* optinfo_flags */
3778 NULL, /* gate */
3779 rest_of_handle_split_all_insns, /* execute */
3780 NULL, /* sub */
3781 NULL, /* next */
3782 0, /* static_pass_number */
3783 TV_NONE, /* tv_id */
3784 0, /* properties_required */
3785 0, /* properties_provided */
3786 0, /* properties_destroyed */
3787 0, /* todo_flags_start */
3788 0 /* todo_flags_finish */
3789 }
3790 };
3791
3792 static unsigned int
3793 rest_of_handle_split_after_reload (void)
3794 {
3795 /* If optimizing, then go ahead and split insns now. */
3796 #ifndef STACK_REGS
3797 if (optimize > 0)
3798 #endif
3799 split_all_insns ();
3800 return 0;
3801 }
3802
3803 struct rtl_opt_pass pass_split_after_reload =
3804 {
3805 {
3806 RTL_PASS,
3807 "split2", /* name */
3808 OPTGROUP_NONE, /* optinfo_flags */
3809 NULL, /* gate */
3810 rest_of_handle_split_after_reload, /* execute */
3811 NULL, /* sub */
3812 NULL, /* next */
3813 0, /* static_pass_number */
3814 TV_NONE, /* tv_id */
3815 0, /* properties_required */
3816 0, /* properties_provided */
3817 0, /* properties_destroyed */
3818 0, /* todo_flags_start */
3819 0 /* todo_flags_finish */
3820 }
3821 };
3822
3823 static bool
3824 gate_handle_split_before_regstack (void)
3825 {
3826 #if HAVE_ATTR_length && defined (STACK_REGS)
3827 /* If flow2 creates new instructions which need splitting, and
3828 scheduling after reload is not done, they might not be split
3829 until final, which does not allow splitting when
3830 HAVE_ATTR_length is defined. */
3831 # ifdef INSN_SCHEDULING
3832 return (optimize && !flag_schedule_insns_after_reload);
3833 # else
3834 return (optimize);
3835 # endif
3836 #else
3837 return 0;
3838 #endif
3839 }
3840
3841 static unsigned int
3842 rest_of_handle_split_before_regstack (void)
3843 {
3844 split_all_insns ();
3845 return 0;
3846 }
3847
3848 struct rtl_opt_pass pass_split_before_regstack =
3849 {
3850 {
3851 RTL_PASS,
3852 "split3", /* name */
3853 OPTGROUP_NONE, /* optinfo_flags */
3854 gate_handle_split_before_regstack, /* gate */
3855 rest_of_handle_split_before_regstack, /* execute */
3856 NULL, /* sub */
3857 NULL, /* next */
3858 0, /* static_pass_number */
3859 TV_NONE, /* tv_id */
3860 0, /* properties_required */
3861 0, /* properties_provided */
3862 0, /* properties_destroyed */
3863 0, /* todo_flags_start */
3864 0 /* todo_flags_finish */
3865 }
3866 };
3867
3868 static bool
3869 gate_handle_split_before_sched2 (void)
3870 {
3871 #ifdef INSN_SCHEDULING
3872 return optimize > 0 && flag_schedule_insns_after_reload;
3873 #else
3874 return 0;
3875 #endif
3876 }
3877
3878 static unsigned int
3879 rest_of_handle_split_before_sched2 (void)
3880 {
3881 #ifdef INSN_SCHEDULING
3882 split_all_insns ();
3883 #endif
3884 return 0;
3885 }
3886
3887 struct rtl_opt_pass pass_split_before_sched2 =
3888 {
3889 {
3890 RTL_PASS,
3891 "split4", /* name */
3892 OPTGROUP_NONE, /* optinfo_flags */
3893 gate_handle_split_before_sched2, /* gate */
3894 rest_of_handle_split_before_sched2, /* execute */
3895 NULL, /* sub */
3896 NULL, /* next */
3897 0, /* static_pass_number */
3898 TV_NONE, /* tv_id */
3899 0, /* properties_required */
3900 0, /* properties_provided */
3901 0, /* properties_destroyed */
3902 0, /* todo_flags_start */
3903 TODO_verify_flow /* todo_flags_finish */
3904 }
3905 };
3906
3907 /* The placement of the splitting that we do for shorten_branches
3908 depends on whether regstack is used by the target or not. */
3909 static bool
3910 gate_do_final_split (void)
3911 {
3912 #if HAVE_ATTR_length && !defined (STACK_REGS)
3913 return 1;
3914 #else
3915 return 0;
3916 #endif
3917 }
3918
3919 struct rtl_opt_pass pass_split_for_shorten_branches =
3920 {
3921 {
3922 RTL_PASS,
3923 "split5", /* name */
3924 OPTGROUP_NONE, /* optinfo_flags */
3925 gate_do_final_split, /* gate */
3926 split_all_insns_noflow, /* execute */
3927 NULL, /* sub */
3928 NULL, /* next */
3929 0, /* static_pass_number */
3930 TV_NONE, /* tv_id */
3931 0, /* properties_required */
3932 0, /* properties_provided */
3933 0, /* properties_destroyed */
3934 0, /* todo_flags_start */
3935 TODO_verify_rtl_sharing /* todo_flags_finish */
3936 }
3937 };