gcc/recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl-error.h"
27 #include "tm_p.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 #include "hard-reg-set.h"
31 #include "recog.h"
32 #include "regs.h"
33 #include "addresses.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "basic-block.h"
38 #include "reload.h"
39 #include "target.h"
40 #include "tree-pass.h"
41 #include "df.h"
42 #include "insn-codes.h"
43
44 #ifndef STACK_PUSH_CODE
45 #ifdef STACK_GROWS_DOWNWARD
46 #define STACK_PUSH_CODE PRE_DEC
47 #else
48 #define STACK_PUSH_CODE PRE_INC
49 #endif
50 #endif
51
52 #ifndef STACK_POP_CODE
53 #ifdef STACK_GROWS_DOWNWARD
54 #define STACK_POP_CODE POST_INC
55 #else
56 #define STACK_POP_CODE POST_DEC
57 #endif
58 #endif
59
60 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
63
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in reginfo.c and final.c and reload.c.
69
70 init_recog and init_recog_no_volatile are responsible for setting this. */
71
72 int volatile_ok;
73
74 struct recog_data_d recog_data;
75
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
82
83 int which_alternative;
84
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
88
89 int reload_completed;
90
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
93
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
97
98 void
99 init_recog_no_volatile (void)
100 {
101 volatile_ok = 0;
102 }
103
104 void
105 init_recog (void)
106 {
107 volatile_ok = 1;
108 }
109
110 \f
111 /* Return true if labels in asm operands BODY are LABEL_REFs. */
112
113 static bool
114 asm_labels_ok (rtx body)
115 {
116 rtx asmop;
117 int i;
118
119 asmop = extract_asm_operands (body);
120 if (asmop == NULL_RTX)
121 return true;
122
123 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
124 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
125 return false;
126
127 return true;
128 }
129
130 /* Check that X is an insn-body for an `asm' with operands
131 and that the operands mentioned in it are legitimate. */
132
133 int
134 check_asm_operands (rtx x)
135 {
136 int noperands;
137 rtx *operands;
138 const char **constraints;
139 int i;
140
141 if (!asm_labels_ok (x))
142 return 0;
143
144 /* Post-reload, be more strict with things. */
145 if (reload_completed)
146 {
147 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
148 extract_insn (make_insn_raw (x));
149 constrain_operands (1);
150 return which_alternative >= 0;
151 }
152
153 noperands = asm_noperands (x);
154 if (noperands < 0)
155 return 0;
156 if (noperands == 0)
157 return 1;
158
159 operands = XALLOCAVEC (rtx, noperands);
160 constraints = XALLOCAVEC (const char *, noperands);
161
162 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
163
164 for (i = 0; i < noperands; i++)
165 {
166 const char *c = constraints[i];
167 if (c[0] == '%')
168 c++;
169 if (! asm_operand_ok (operands[i], c, constraints))
170 return 0;
171 }
172
173 return 1;
174 }
175 \f
176 /* Static data for the next two routines. */
177
178 typedef struct change_t
179 {
180 rtx object;
181 int old_code;
182 rtx *loc;
183 rtx old;
184 bool unshare;
185 } change_t;
186
187 static change_t *changes;
188 static int changes_allocated;
189
190 static int num_changes = 0;
191
192 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
193 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
194 the change is simply made.
195
196 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
197 will be called with the address and mode as parameters. If OBJECT is
198 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
199 the change in place.
200
201 IN_GROUP is nonzero if this is part of a group of changes that must be
202 performed as a group. In that case, the changes will be stored. The
203 function `apply_change_group' will validate and apply the changes.
204
205 If IN_GROUP is zero, this is a single change. Try to recognize the insn
206 or validate the memory reference with the change applied. If the result
207 is not valid for the machine, suppress the change and return zero.
208 Otherwise, perform the change and return 1. */
209
210 static bool
211 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
212 {
213 rtx old = *loc;
214
215 if (old == new_rtx || rtx_equal_p (old, new_rtx))
216 return 1;
217
218 gcc_assert (in_group != 0 || num_changes == 0);
219
220 *loc = new_rtx;
221
222 /* Save the information describing this change. */
223 if (num_changes >= changes_allocated)
224 {
225 if (changes_allocated == 0)
226 /* This value allows for repeated substitutions inside complex
227 indexed addresses, or changes in up to 5 insns. */
228 changes_allocated = MAX_RECOG_OPERANDS * 5;
229 else
230 changes_allocated *= 2;
231
232 changes = XRESIZEVEC (change_t, changes, changes_allocated);
233 }
234
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
238 changes[num_changes].unshare = unshare;
239
240 if (object && !MEM_P (object))
241 {
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
246 }
247
248 num_changes++;
249
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
252
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
257 }
258
259 /* Wrapper for validate_change_1 without the UNSHARE argument; UNSHARE
260 defaults to false. */
261
262 bool
263 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
264 {
265 return validate_change_1 (object, loc, new_rtx, in_group, false);
266 }
267
268 /* Wrapper for validate_change_1 without the UNSHARE argument; UNSHARE
269 defaults to true. */
270
271 bool
272 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
273 {
274 return validate_change_1 (object, loc, new_rtx, in_group, true);
275 }
276
277
278 /* Keep X canonicalized if some changes have made it non-canonical; only
279 modifies the operands of X, not (for example) its code. Simplifications
280 are not the job of this routine.
281
282 Return true if anything was changed. */
283 bool
284 canonicalize_change_group (rtx insn, rtx x)
285 {
286 if (COMMUTATIVE_P (x)
287 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
288 {
289 /* Oops, the caller has made X no longer canonical.
290 Let's redo the changes in the correct order. */
291 rtx tem = XEXP (x, 0);
292 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
293 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
294 return true;
295 }
296 else
297 return false;
298 }
299
300
301 /* This subroutine of apply_change_group verifies whether the changes to INSN
302 were valid; i.e. whether INSN can still be recognized.
303
304 If IN_GROUP is true, clobbers that have to be added in order to
305 match the instruction will be added to the current change group.
306 Otherwise the changes will take effect immediately. */
307
308 int
309 insn_invalid_p (rtx insn, bool in_group)
310 {
311 rtx pat = PATTERN (insn);
312 int num_clobbers = 0;
313 /* If we are before reload and the pattern is a SET, see if we can add
314 clobbers. */
315 int icode = recog (pat, insn,
316 (GET_CODE (pat) == SET
317 && ! reload_completed && ! reload_in_progress)
318 ? &num_clobbers : 0);
319 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
320
321
322 /* If this is an asm and the operands aren't legal, then fail. Likewise if
323 this is not an asm and the insn wasn't recognized. */
324 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
325 || (!is_asm && icode < 0))
326 return 1;
327
328 /* If we have to add CLOBBERs, fail if we have to add ones that reference
329 hard registers since our callers can't know if they are live or not.
330 Otherwise, add them. */
331 if (num_clobbers > 0)
332 {
333 rtx newpat;
334
335 if (added_clobbers_hard_reg_p (icode))
336 return 1;
337
338 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
339 XVECEXP (newpat, 0, 0) = pat;
340 add_clobbers (newpat, icode);
341 if (in_group)
342 validate_change (insn, &PATTERN (insn), newpat, 1);
343 else
344 PATTERN (insn) = pat = newpat;
345 }
346
347 /* After reload, verify that all constraints are satisfied. */
348 if (reload_completed)
349 {
350 extract_insn (insn);
351
352 if (! constrain_operands (1))
353 return 1;
354 }
355
356 INSN_CODE (insn) = icode;
357 return 0;
358 }
359
360 /* Return number of changes made and not validated yet. */
361 int
362 num_changes_pending (void)
363 {
364 return num_changes;
365 }
366
367 /* Tentatively apply the changes numbered NUM and up.
368 Return 1 if all changes are valid, zero otherwise. */
369
370 int
371 verify_changes (int num)
372 {
373 int i;
374 rtx last_validated = NULL_RTX;
375
376 /* The changes have been applied and all INSN_CODEs have been reset to force
377 rerecognition.
378
379 The changes are valid if we aren't given an object, or if we are
380 given a MEM and it still is a valid address, or if this is an insn
381 and it is recognized. In the latter case, if reload has completed,
382 we also require that the operands meet the constraints for
383 the insn. */
384
385 for (i = num; i < num_changes; i++)
386 {
387 rtx object = changes[i].object;
388
389 /* If there is no object to test or if it is the same as the one we
390 already tested, ignore it. */
391 if (object == 0 || object == last_validated)
392 continue;
393
394 if (MEM_P (object))
395 {
396 if (! memory_address_addr_space_p (GET_MODE (object),
397 XEXP (object, 0),
398 MEM_ADDR_SPACE (object)))
399 break;
400 }
401 else if (/* changes[i].old might be zero, e.g. when putting a
402 REG_FRAME_RELATED_EXPR into a previously empty list. */
403 changes[i].old
404 && REG_P (changes[i].old)
405 && asm_noperands (PATTERN (object)) > 0
406 && REG_EXPR (changes[i].old) != NULL_TREE
407 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
408 && DECL_REGISTER (REG_EXPR (changes[i].old)))
409 {
410 /* Don't allow changes of hard register operands to inline
411 assemblies if they have been defined as register asm ("x"). */
412 break;
413 }
414 else if (DEBUG_INSN_P (object))
415 continue;
416 else if (insn_invalid_p (object, true))
417 {
418 rtx pat = PATTERN (object);
419
420 /* Perhaps we couldn't recognize the insn because there were
421 extra CLOBBERs at the end. If so, try to re-recognize
422 without the last CLOBBER (later iterations will cause each of
423 them to be eliminated, in turn). But don't do this if we
424 have an ASM_OPERAND. */
425 if (GET_CODE (pat) == PARALLEL
426 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
427 && asm_noperands (PATTERN (object)) < 0)
428 {
429 rtx newpat;
430
431 if (XVECLEN (pat, 0) == 2)
432 newpat = XVECEXP (pat, 0, 0);
433 else
434 {
435 int j;
436
437 newpat
438 = gen_rtx_PARALLEL (VOIDmode,
439 rtvec_alloc (XVECLEN (pat, 0) - 1));
440 for (j = 0; j < XVECLEN (newpat, 0); j++)
441 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
442 }
443
444 /* Add a new change to this group to replace the pattern
445 with this new pattern. Then consider this change
446 as having succeeded. The change we added will
447 cause the entire call to fail if things remain invalid.
448
449 Note that this can lose if a later change than the one
450 we are processing specified &XVECEXP (PATTERN (object), 0, X)
451 but this shouldn't occur. */
452
453 validate_change (object, &PATTERN (object), newpat, 1);
454 continue;
455 }
456 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
457 || GET_CODE (pat) == VAR_LOCATION)
458 /* If this insn is a CLOBBER or USE, it is always valid, but is
459 never recognized. */
460 continue;
461 else
462 break;
463 }
464 last_validated = object;
465 }
466
467 return (i == num_changes);
468 }
469
470 /* A group of changes has previously been issued with validate_change
471 and verified with verify_changes. Call df_insn_rescan for each of
472 the insns changed and clear num_changes. */
473
474 void
475 confirm_change_group (void)
476 {
477 int i;
478 rtx last_object = NULL;
479
480 for (i = 0; i < num_changes; i++)
481 {
482 rtx object = changes[i].object;
483
484 if (changes[i].unshare)
485 *changes[i].loc = copy_rtx (*changes[i].loc);
486
487 /* Avoid unnecessary rescanning when multiple changes to the same
488 instruction are made. */
489 if (object)
490 {
491 if (object != last_object && last_object && INSN_P (last_object))
492 df_insn_rescan (last_object);
493 last_object = object;
494 }
495 }
496
497 if (last_object && INSN_P (last_object))
498 df_insn_rescan (last_object);
499 num_changes = 0;
500 }
501
502 /* Apply a group of changes previously issued with `validate_change'.
503 If all changes are valid, call confirm_change_group and return 1,
504 otherwise, call cancel_changes and return 0. */
505
506 int
507 apply_change_group (void)
508 {
509 if (verify_changes (0))
510 {
511 confirm_change_group ();
512 return 1;
513 }
514 else
515 {
516 cancel_changes (0);
517 return 0;
518 }
519 }
520
521
522 /* Return the number of changes so far in the current group. */
523
524 int
525 num_validated_changes (void)
526 {
527 return num_changes;
528 }
529
530 /* Retract the changes numbered NUM and up. */
531
532 void
533 cancel_changes (int num)
534 {
535 int i;
536
537 /* Back out all the changes. Do this in the opposite order in which
538 they were made. */
539 for (i = num_changes - 1; i >= num; i--)
540 {
541 *changes[i].loc = changes[i].old;
542 if (changes[i].object && !MEM_P (changes[i].object))
543 INSN_CODE (changes[i].object) = changes[i].old_code;
544 }
545 num_changes = num;
546 }
547
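
/* Editorial sketch, not part of the original file: a minimal example of the
   change-group API defined above.  INSN is assumed to be an already
   recognized insn and X a two-operand expression inside its pattern; the
   helper name is hypothetical.  */

static bool
example_swap_operands (rtx insn, rtx x)
{
  rtx tem = XEXP (x, 0);

  /* Queue both substitutions with IN_GROUP nonzero, so nothing is
     validated yet.  */
  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
  validate_change (insn, &XEXP (x, 1), tem, 1);

  /* Validate the whole group: confirm it if INSN is still recognized,
     otherwise back every queued change out.  This is what
     apply_change_group does internally.  */
  if (verify_changes (0))
    {
      confirm_change_group ();
      return true;
    }
  cancel_changes (0);
  return false;
}
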
548 /* Reduce conditional compilation elsewhere. */
549 #ifndef HAVE_extv
550 #define HAVE_extv 0
551 #define CODE_FOR_extv CODE_FOR_nothing
552 #endif
553 #ifndef HAVE_extzv
554 #define HAVE_extzv 0
555 #define CODE_FOR_extzv CODE_FOR_nothing
556 #endif
557
558 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
559 rtx. */
560
561 static void
562 simplify_while_replacing (rtx *loc, rtx to, rtx object,
563 enum machine_mode op0_mode)
564 {
565 rtx x = *loc;
566 enum rtx_code code = GET_CODE (x);
567 rtx new_rtx;
568
569 if (SWAPPABLE_OPERANDS_P (x)
570 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
571 {
572 validate_unshare_change (object, loc,
573 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
574 : swap_condition (code),
575 GET_MODE (x), XEXP (x, 1),
576 XEXP (x, 0)), 1);
577 x = *loc;
578 code = GET_CODE (x);
579 }
580
581 switch (code)
582 {
583 case PLUS:
584 /* If we have a PLUS whose second operand is now a CONST_INT, use
585 simplify_gen_binary to try to simplify it.
586 ??? We may want later to remove this, once simplification is
587 separated from this function. */
588 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
589 validate_change (object, loc,
590 simplify_gen_binary
591 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
592 break;
593 case MINUS:
594 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
595 validate_change (object, loc,
596 simplify_gen_binary
597 (PLUS, GET_MODE (x), XEXP (x, 0),
598 simplify_gen_unary (NEG,
599 GET_MODE (x), XEXP (x, 1),
600 GET_MODE (x))), 1);
601 break;
602 case ZERO_EXTEND:
603 case SIGN_EXTEND:
604 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
605 {
606 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
607 op0_mode);
608 /* If any of the above failed, substitute in something that
609 we know won't be recognized. */
610 if (!new_rtx)
611 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
612 validate_change (object, loc, new_rtx, 1);
613 }
614 break;
615 case SUBREG:
616 /* All subregs possible to simplify should be simplified. */
617 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
618 SUBREG_BYTE (x));
619
620 /* Subregs of VOIDmode operands are incorrect. */
621 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
622 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
623 if (new_rtx)
624 validate_change (object, loc, new_rtx, 1);
625 break;
626 case ZERO_EXTRACT:
627 case SIGN_EXTRACT:
628 /* If we are replacing a register with memory, try to change the memory
629 to be the mode required for memory in extract operations (this isn't
630 likely to be an insertion operation; if it was, nothing bad will
631 happen, we might just fail in some cases). */
632
633 if (MEM_P (XEXP (x, 0))
634 && CONST_INT_P (XEXP (x, 1))
635 && CONST_INT_P (XEXP (x, 2))
636 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
637 MEM_ADDR_SPACE (XEXP (x, 0)))
638 && !MEM_VOLATILE_P (XEXP (x, 0)))
639 {
640 enum machine_mode wanted_mode = VOIDmode;
641 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
642 int pos = INTVAL (XEXP (x, 2));
643
644 if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
645 {
646 wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
647 if (wanted_mode == VOIDmode)
648 wanted_mode = word_mode;
649 }
650 else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
651 {
652 wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
653 if (wanted_mode == VOIDmode)
654 wanted_mode = word_mode;
655 }
656
657 /* If we have a narrower mode, we can do something. */
658 if (wanted_mode != VOIDmode
659 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
660 {
661 int offset = pos / BITS_PER_UNIT;
662 rtx newmem;
663
664 /* If the bytes and bits are counted differently, we
665 must adjust the offset. */
666 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
667 offset =
668 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
669 offset);
670
671 gcc_assert (GET_MODE_PRECISION (wanted_mode)
672 == GET_MODE_BITSIZE (wanted_mode));
673 pos %= GET_MODE_BITSIZE (wanted_mode);
674
675 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
676
677 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
678 validate_change (object, &XEXP (x, 0), newmem, 1);
679 }
680 }
681
682 break;
683
684 default:
685 break;
686 }
687 }
688
689 /* Replace every occurrence of FROM in X with TO. Mark each change with
690 validate_change passing OBJECT. */
691
692 static void
693 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
694 bool simplify)
695 {
696 int i, j;
697 const char *fmt;
698 rtx x = *loc;
699 enum rtx_code code;
700 enum machine_mode op0_mode = VOIDmode;
701 int prev_changes = num_changes;
702
703 if (!x)
704 return;
705
706 code = GET_CODE (x);
707 fmt = GET_RTX_FORMAT (code);
708 if (fmt[0] == 'e')
709 op0_mode = GET_MODE (XEXP (x, 0));
710
711 /* X matches FROM if it is the same rtx or they are both referring to the
712 same register in the same mode. Avoid calling rtx_equal_p unless the
713 operands look similar. */
714
715 if (x == from
716 || (REG_P (x) && REG_P (from)
717 && GET_MODE (x) == GET_MODE (from)
718 && REGNO (x) == REGNO (from))
719 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
720 && rtx_equal_p (x, from)))
721 {
722 validate_unshare_change (object, loc, to, 1);
723 return;
724 }
725
726 /* Call ourselves recursively to perform the replacements.
727 We must not replace inside an already replaced expression, otherwise we
728 get infinite recursion for replacements like (reg X)->(subreg (reg X)),
729 so we must special-case shared ASM_OPERANDS. */
730
731 if (GET_CODE (x) == PARALLEL)
732 {
733 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
734 {
735 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
736 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
737 {
738 /* Verify that operands are really shared. */
739 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
740 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
741 (x, 0, j))));
742 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
743 from, to, object, simplify);
744 }
745 else
746 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
747 simplify);
748 }
749 }
750 else
751 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
752 {
753 if (fmt[i] == 'e')
754 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
755 else if (fmt[i] == 'E')
756 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
757 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
758 simplify);
759 }
760
761 /* If we didn't substitute, there is nothing more to do. */
762 if (num_changes == prev_changes)
763 return;
764
765 /* ??? The regmove is no more, so is this aberration still necessary? */
766 /* Allow substituted expression to have different mode. This is used by
767 regmove to change mode of pseudo register. */
768 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
769 op0_mode = GET_MODE (XEXP (x, 0));
770
771 /* Do changes needed to keep rtx consistent. Don't do any other
772 simplifications, as it is not our job. */
773 if (simplify)
774 simplify_while_replacing (loc, to, object, op0_mode);
775 }
776
777 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
778 with TO. After all changes have been made, validate by seeing
779 if INSN is still valid. */
780
781 int
782 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
783 {
784 validate_replace_rtx_1 (loc, from, to, insn, true);
785 return apply_change_group ();
786 }
787
788 /* Try replacing every occurrence of FROM in INSN with TO. After all
789 changes have been made, validate by seeing if INSN is still valid. */
790
791 int
792 validate_replace_rtx (rtx from, rtx to, rtx insn)
793 {
794 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
795 return apply_change_group ();
796 }
797
798 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
799 is a part of INSN. After all changes have been made, validate by seeing if
800 INSN is still valid.
801 validate_replace_rtx (from, to, insn) is equivalent to
802 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
803
804 int
805 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
806 {
807 validate_replace_rtx_1 (where, from, to, insn, true);
808 return apply_change_group ();
809 }
810
811 /* Same as above, but do not simplify rtx afterwards. */
812 int
813 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
814 rtx insn)
815 {
816 validate_replace_rtx_1 (where, from, to, insn, false);
817 return apply_change_group ();
818
819 }
820
821 /* Try replacing every occurrence of FROM in INSN with TO. This also
822 will replace in REG_EQUAL and REG_EQUIV notes. */
823
824 void
825 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
826 {
827 rtx note;
828 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
829 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
830 if (REG_NOTE_KIND (note) == REG_EQUAL
831 || REG_NOTE_KIND (note) == REG_EQUIV)
832 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
833 }
834
835 /* Function called by note_uses to replace used subexpressions. */
836 struct validate_replace_src_data
837 {
838 rtx from; /* Old RTX */
839 rtx to; /* New RTX */
840 rtx insn; /* Insn in which substitution is occurring. */
841 };
842
843 static void
844 validate_replace_src_1 (rtx *x, void *data)
845 {
846 struct validate_replace_src_data *d
847 = (struct validate_replace_src_data *) data;
848
849 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
850 }
851
852 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
853 SET_DESTs. */
854
855 void
856 validate_replace_src_group (rtx from, rtx to, rtx insn)
857 {
858 struct validate_replace_src_data d;
859
860 d.from = from;
861 d.to = to;
862 d.insn = insn;
863 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
864 }
865
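
/* Editorial sketch, not part of the original file: typical use of the
   replacement helpers above.  INSN, REG and CST are hypothetical rtxes.
   The substitution is attempted throughout INSN's pattern and its
   REG_EQUAL/REG_EQUIV notes, and kept only if INSN remains recognizable.  */

static bool
example_substitute_constant (rtx insn, rtx reg, rtx cst)
{
  /* Queue the replacements as a group ...  */
  validate_replace_rtx_group (reg, cst, insn);

  /* ... then validate and apply them; on failure everything is undone.  */
  return apply_change_group () != 0;
}
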
866 /* Try to simplify INSN.
867 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
868 pattern and return true if something was simplified. */
869
870 bool
871 validate_simplify_insn (rtx insn)
872 {
873 int i;
874 rtx pat = NULL;
875 rtx newpat = NULL;
876
877 pat = PATTERN (insn);
878
879 if (GET_CODE (pat) == SET)
880 {
881 newpat = simplify_rtx (SET_SRC (pat));
882 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
883 validate_change (insn, &SET_SRC (pat), newpat, 1);
884 newpat = simplify_rtx (SET_DEST (pat));
885 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
886 validate_change (insn, &SET_DEST (pat), newpat, 1);
887 }
888 else if (GET_CODE (pat) == PARALLEL)
889 for (i = 0; i < XVECLEN (pat, 0); i++)
890 {
891 rtx s = XVECEXP (pat, 0, i);
892
893 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
894 {
895 newpat = simplify_rtx (SET_SRC (s));
896 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
897 validate_change (insn, &SET_SRC (s), newpat, 1);
898 newpat = simplify_rtx (SET_DEST (s));
899 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
900 validate_change (insn, &SET_DEST (s), newpat, 1);
901 }
902 }
903 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
904 }
905 \f
906 #ifdef HAVE_cc0
907 /* Return 1 if the insn using CC0 set by INSN does not contain
908 any ordered tests applied to the condition codes.
909 EQ and NE tests do not count. */
910
911 int
912 next_insn_tests_no_inequality (rtx insn)
913 {
914 rtx next = next_cc0_user (insn);
915
916 /* If there is no next insn, we have to take the conservative choice. */
917 if (next == 0)
918 return 0;
919
920 return (INSN_P (next)
921 && ! inequality_comparisons_p (PATTERN (next)));
922 }
923 #endif
924 \f
925 /* Return 1 if OP is a valid general operand for machine mode MODE.
926 This is either a register reference, a memory reference,
927 or a constant. In the case of a memory reference, the address
928 is checked for general validity for the target machine.
929
930 Register and memory references must have mode MODE in order to be valid,
931 but some constants have no machine mode and are valid for any mode.
932
933 If MODE is VOIDmode, OP is checked for validity for whatever mode
934 it has.
935
936 The main use of this function is as a predicate in match_operand
937 expressions in the machine description. */
938
939 int
940 general_operand (rtx op, enum machine_mode mode)
941 {
942 enum rtx_code code = GET_CODE (op);
943
944 if (mode == VOIDmode)
945 mode = GET_MODE (op);
946
947 /* Don't accept CONST_INT or anything similar
948 if the caller wants something floating. */
949 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
950 && GET_MODE_CLASS (mode) != MODE_INT
951 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
952 return 0;
953
954 if (CONST_INT_P (op)
955 && mode != VOIDmode
956 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
957 return 0;
958
959 if (CONSTANT_P (op))
960 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
961 || mode == VOIDmode)
962 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
963 && targetm.legitimate_constant_p (mode == VOIDmode
964 ? GET_MODE (op)
965 : mode, op));
966
967 /* Except for certain constants with VOIDmode, already checked for,
968 OP's mode must match MODE if MODE specifies a mode. */
969
970 if (GET_MODE (op) != mode)
971 return 0;
972
973 if (code == SUBREG)
974 {
975 rtx sub = SUBREG_REG (op);
976
977 #ifdef INSN_SCHEDULING
978 /* On machines that have insn scheduling, we want all memory
979 references to be explicit, so outlaw paradoxical SUBREGs.
980 However, we must allow them after reload so that they can
981 get cleaned up by cleanup_subreg_operands. */
982 if (!reload_completed && MEM_P (sub)
983 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
984 return 0;
985 #endif
986 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
987 may result in an incorrect reference. We should simplify all valid
988 subregs of MEM anyway. But allow this after reload because we
989 might be called from cleanup_subreg_operands.
990
991 ??? This is a kludge. */
992 if (!reload_completed && SUBREG_BYTE (op) != 0
993 && MEM_P (sub))
994 return 0;
995
996 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
997 create such rtl, and we must reject it. */
998 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
999 /* LRA can use subreg to store a floating point value in an
1000 integer mode. Although the floating point and the
1001 integer modes need the same number of hard registers, the
1002 size of floating point mode can be less than the integer
1003 mode. */
1004 && ! lra_in_progress
1005 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1006 return 0;
1007
1008 op = sub;
1009 code = GET_CODE (op);
1010 }
1011
1012 if (code == REG)
1013 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1014 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1015
1016 if (code == MEM)
1017 {
1018 rtx y = XEXP (op, 0);
1019
1020 if (! volatile_ok && MEM_VOLATILE_P (op))
1021 return 0;
1022
1023 /* Use the mem's mode, since it will be reloaded thus. */
1024 if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1025 return 1;
1026 }
1027
1028 return 0;
1029 }
1030 \f
1031 /* Return 1 if OP is a valid memory address for a memory reference
1032 of mode MODE.
1033
1034 The main use of this function is as a predicate in match_operand
1035 expressions in the machine description. */
1036
1037 int
1038 address_operand (rtx op, enum machine_mode mode)
1039 {
1040 return memory_address_p (mode, op);
1041 }
1042
1043 /* Return 1 if OP is a register reference of mode MODE.
1044 If MODE is VOIDmode, accept a register in any mode.
1045
1046 The main use of this function is as a predicate in match_operand
1047 expressions in the machine description. */
1048
1049 int
1050 register_operand (rtx op, enum machine_mode mode)
1051 {
1052 if (GET_MODE (op) != mode && mode != VOIDmode)
1053 return 0;
1054
1055 if (GET_CODE (op) == SUBREG)
1056 {
1057 rtx sub = SUBREG_REG (op);
1058
1059 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1060 because it is guaranteed to be reloaded into one.
1061 Just make sure the MEM is valid in itself.
1062 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1063 but currently it does result from (SUBREG (REG)...) where the
1064 reg went on the stack.) */
1065 if (! reload_completed && MEM_P (sub))
1066 return general_operand (op, mode);
1067
1068 #ifdef CANNOT_CHANGE_MODE_CLASS
1069 if (REG_P (sub)
1070 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1071 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1072 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1073 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1074 /* LRA can generate some invalid SUBREGS just for matched
1075 operand reload presentation. LRA needs to treat them as
1076 valid. */
1077 && ! LRA_SUBREG_P (op))
1078 return 0;
1079 #endif
1080
1081 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1082 create such rtl, and we must reject it. */
1083 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1084 /* LRA can use subreg to store a floating point value in an
1085 integer mode. Although the floating point and the
1086 integer modes need the same number of hard registers, the
1087 size of floating point mode can be less than the integer
1088 mode. */
1089 && ! lra_in_progress
1090 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1091 return 0;
1092
1093 op = sub;
1094 }
1095
1096 return (REG_P (op)
1097 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1098 || in_hard_reg_set_p (operand_reg_set,
1099 GET_MODE (op), REGNO (op))));
1100 }
1101
1102 /* Return 1 for a register in Pmode; ignore the tested mode. */
1103
1104 int
1105 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1106 {
1107 return register_operand (op, Pmode);
1108 }
1109
1110 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1111 or a hard register. */
1112
1113 int
1114 scratch_operand (rtx op, enum machine_mode mode)
1115 {
1116 if (GET_MODE (op) != mode && mode != VOIDmode)
1117 return 0;
1118
1119 return (GET_CODE (op) == SCRATCH
1120 || (REG_P (op)
1121 && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
1122 }
1123
1124 /* Return 1 if OP is a valid immediate operand for mode MODE.
1125
1126 The main use of this function is as a predicate in match_operand
1127 expressions in the machine description. */
1128
1129 int
1130 immediate_operand (rtx op, enum machine_mode mode)
1131 {
1132 /* Don't accept CONST_INT or anything similar
1133 if the caller wants something floating. */
1134 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1135 && GET_MODE_CLASS (mode) != MODE_INT
1136 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1137 return 0;
1138
1139 if (CONST_INT_P (op)
1140 && mode != VOIDmode
1141 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1142 return 0;
1143
1144 return (CONSTANT_P (op)
1145 && (GET_MODE (op) == mode || mode == VOIDmode
1146 || GET_MODE (op) == VOIDmode)
1147 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1148 && targetm.legitimate_constant_p (mode == VOIDmode
1149 ? GET_MODE (op)
1150 : mode, op));
1151 }
1152
1153 /* Returns 1 if OP is an operand that is a CONST_INT. */
1154
1155 int
1156 const_int_operand (rtx op, enum machine_mode mode)
1157 {
1158 if (!CONST_INT_P (op))
1159 return 0;
1160
1161 if (mode != VOIDmode
1162 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1163 return 0;
1164
1165 return 1;
1166 }
1167
1168 /* Returns 1 if OP is an operand that is a constant integer or constant
1169 floating-point number. */
1170
1171 int
1172 const_double_operand (rtx op, enum machine_mode mode)
1173 {
1174 /* Don't accept CONST_INT or anything similar
1175 if the caller wants something floating. */
1176 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1177 && GET_MODE_CLASS (mode) != MODE_INT
1178 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1179 return 0;
1180
1181 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1182 && (mode == VOIDmode || GET_MODE (op) == mode
1183 || GET_MODE (op) == VOIDmode));
1184 }
1185
1186 /* Return 1 if OP is a general operand that is not an immediate operand. */
1187
1188 int
1189 nonimmediate_operand (rtx op, enum machine_mode mode)
1190 {
1191 return (general_operand (op, mode) && ! CONSTANT_P (op));
1192 }
1193
1194 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1195
1196 int
1197 nonmemory_operand (rtx op, enum machine_mode mode)
1198 {
1199 if (CONSTANT_P (op))
1200 return immediate_operand (op, mode);
1201
1202 if (GET_MODE (op) != mode && mode != VOIDmode)
1203 return 0;
1204
1205 if (GET_CODE (op) == SUBREG)
1206 {
1207 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1208 because it is guaranteed to be reloaded into one.
1209 Just make sure the MEM is valid in itself.
1210 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1211 but currently it does result from (SUBREG (REG)...) where the
1212 reg went on the stack.) */
1213 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1214 return general_operand (op, mode);
1215 op = SUBREG_REG (op);
1216 }
1217
1218 return (REG_P (op)
1219 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1220 || in_hard_reg_set_p (operand_reg_set,
1221 GET_MODE (op), REGNO (op))));
1222 }
1223
1224 /* Return 1 if OP is a valid operand that stands for pushing a
1225 value of mode MODE onto the stack.
1226
1227 The main use of this function is as a predicate in match_operand
1228 expressions in the machine description. */
1229
1230 int
1231 push_operand (rtx op, enum machine_mode mode)
1232 {
1233 unsigned int rounded_size = GET_MODE_SIZE (mode);
1234
1235 #ifdef PUSH_ROUNDING
1236 rounded_size = PUSH_ROUNDING (rounded_size);
1237 #endif
1238
1239 if (!MEM_P (op))
1240 return 0;
1241
1242 if (mode != VOIDmode && GET_MODE (op) != mode)
1243 return 0;
1244
1245 op = XEXP (op, 0);
1246
1247 if (rounded_size == GET_MODE_SIZE (mode))
1248 {
1249 if (GET_CODE (op) != STACK_PUSH_CODE)
1250 return 0;
1251 }
1252 else
1253 {
1254 if (GET_CODE (op) != PRE_MODIFY
1255 || GET_CODE (XEXP (op, 1)) != PLUS
1256 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1257 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1258 #ifdef STACK_GROWS_DOWNWARD
1259 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1260 #else
1261 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1262 #endif
1263 )
1264 return 0;
1265 }
1266
1267 return XEXP (op, 0) == stack_pointer_rtx;
1268 }
1269
1270 /* Return 1 if OP is a valid operand that stands for popping a
1271 value of mode MODE off the stack.
1272
1273 The main use of this function is as a predicate in match_operand
1274 expressions in the machine description. */
1275
1276 int
1277 pop_operand (rtx op, enum machine_mode mode)
1278 {
1279 if (!MEM_P (op))
1280 return 0;
1281
1282 if (mode != VOIDmode && GET_MODE (op) != mode)
1283 return 0;
1284
1285 op = XEXP (op, 0);
1286
1287 if (GET_CODE (op) != STACK_POP_CODE)
1288 return 0;
1289
1290 return XEXP (op, 0) == stack_pointer_rtx;
1291 }
1292
1293 /* Return 1 if ADDR is a valid memory address
1294 for mode MODE in address space AS. */
1295
1296 int
1297 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
1298 rtx addr, addr_space_t as)
1299 {
1300 #ifdef GO_IF_LEGITIMATE_ADDRESS
1301 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1302 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1303 return 0;
1304
1305 win:
1306 return 1;
1307 #else
1308 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1309 #endif
1310 }
1311
1312 /* Return 1 if OP is a valid memory reference with mode MODE,
1313 including a valid address.
1314
1315 The main use of this function is as a predicate in match_operand
1316 expressions in the machine description. */
1317
1318 int
1319 memory_operand (rtx op, enum machine_mode mode)
1320 {
1321 rtx inner;
1322
1323 if (! reload_completed)
1324 /* Note that no SUBREG is a memory operand before end of reload pass,
1325 because (SUBREG (MEM...)) forces reloading into a register. */
1326 return MEM_P (op) && general_operand (op, mode);
1327
1328 if (mode != VOIDmode && GET_MODE (op) != mode)
1329 return 0;
1330
1331 inner = op;
1332 if (GET_CODE (inner) == SUBREG)
1333 inner = SUBREG_REG (inner);
1334
1335 return (MEM_P (inner) && general_operand (op, mode));
1336 }
1337
1338 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1339 that is, a memory reference whose address is a general_operand. */
1340
1341 int
1342 indirect_operand (rtx op, enum machine_mode mode)
1343 {
1344 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1345 if (! reload_completed
1346 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1347 {
1348 int offset = SUBREG_BYTE (op);
1349 rtx inner = SUBREG_REG (op);
1350
1351 if (mode != VOIDmode && GET_MODE (op) != mode)
1352 return 0;
1353
1354 /* The only way that we can have a general_operand as the resulting
1355 address is if OFFSET is zero and the address already is an operand
1356 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1357 operand. */
1358
1359 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1360 || (GET_CODE (XEXP (inner, 0)) == PLUS
1361 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1362 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1363 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1364 }
1365
1366 return (MEM_P (op)
1367 && memory_operand (op, mode)
1368 && general_operand (XEXP (op, 0), Pmode));
1369 }
1370
1371 /* Return 1 if this is an ordered comparison operator (not including
1372 ORDERED and UNORDERED). */
1373
1374 int
1375 ordered_comparison_operator (rtx op, enum machine_mode mode)
1376 {
1377 if (mode != VOIDmode && GET_MODE (op) != mode)
1378 return false;
1379 switch (GET_CODE (op))
1380 {
1381 case EQ:
1382 case NE:
1383 case LT:
1384 case LTU:
1385 case LE:
1386 case LEU:
1387 case GT:
1388 case GTU:
1389 case GE:
1390 case GEU:
1391 return true;
1392 default:
1393 return false;
1394 }
1395 }
1396
1397 /* Return 1 if this is a comparison operator. This allows the use of
1398 MATCH_OPERATOR to recognize all the branch insns. */
1399
1400 int
1401 comparison_operator (rtx op, enum machine_mode mode)
1402 {
1403 return ((mode == VOIDmode || GET_MODE (op) == mode)
1404 && COMPARISON_P (op));
1405 }
1406 \f
1407 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1408
1409 rtx
1410 extract_asm_operands (rtx body)
1411 {
1412 rtx tmp;
1413 switch (GET_CODE (body))
1414 {
1415 case ASM_OPERANDS:
1416 return body;
1417
1418 case SET:
1419 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1420 tmp = SET_SRC (body);
1421 if (GET_CODE (tmp) == ASM_OPERANDS)
1422 return tmp;
1423 break;
1424
1425 case PARALLEL:
1426 tmp = XVECEXP (body, 0, 0);
1427 if (GET_CODE (tmp) == ASM_OPERANDS)
1428 return tmp;
1429 if (GET_CODE (tmp) == SET)
1430 {
1431 tmp = SET_SRC (tmp);
1432 if (GET_CODE (tmp) == ASM_OPERANDS)
1433 return tmp;
1434 }
1435 break;
1436
1437 default:
1438 break;
1439 }
1440 return NULL;
1441 }
1442
1443 /* If BODY is an insn body that uses ASM_OPERANDS,
1444 return the number of operands (both input and output) in the insn.
1445 Otherwise return -1. */
1446
1447 int
1448 asm_noperands (const_rtx body)
1449 {
1450 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1451 int n_sets = 0;
1452
1453 if (asm_op == NULL)
1454 return -1;
1455
1456 if (GET_CODE (body) == SET)
1457 n_sets = 1;
1458 else if (GET_CODE (body) == PARALLEL)
1459 {
1460 int i;
1461 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1462 {
1463 /* Multiple output operands, or 1 output plus some clobbers:
1464 body is
1465 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1466 /* Count backwards through CLOBBERs to determine number of SETs. */
1467 for (i = XVECLEN (body, 0); i > 0; i--)
1468 {
1469 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1470 break;
1471 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1472 return -1;
1473 }
1474
1475 /* N_SETS is now number of output operands. */
1476 n_sets = i;
1477
1478 /* Verify that all the SETs we have
1479 came from a single original asm_operands insn
1480 (so that invalid combinations are blocked). */
1481 for (i = 0; i < n_sets; i++)
1482 {
1483 rtx elt = XVECEXP (body, 0, i);
1484 if (GET_CODE (elt) != SET)
1485 return -1;
1486 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1487 return -1;
1488 /* If these ASM_OPERANDS rtx's came from different original insns
1489 then they aren't allowed together. */
1490 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1491 != ASM_OPERANDS_INPUT_VEC (asm_op))
1492 return -1;
1493 }
1494 }
1495 else
1496 {
1497 /* 0 outputs, but some clobbers:
1498 body is [(asm_operands ...) (clobber (reg ...))...]. */
1499 /* Make sure all the other parallel things really are clobbers. */
1500 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1501 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1502 return -1;
1503 }
1504 }
1505
1506 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1507 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1508 }
1509
1510 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1511 copy its operands (both input and output) into the vector OPERANDS,
1512 the locations of the operands within the insn into the vector OPERAND_LOCS,
1513 and the constraints for the operands into CONSTRAINTS.
1514 Write the modes of the operands into MODES.
1515 Return the assembler-template.
1516
1517 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1518 we don't store that info. */
1519
1520 const char *
1521 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1522 const char **constraints, enum machine_mode *modes,
1523 location_t *loc)
1524 {
1525 int nbase = 0, n, i;
1526 rtx asmop;
1527
1528 switch (GET_CODE (body))
1529 {
1530 case ASM_OPERANDS:
1531 /* Zero output asm: BODY is (asm_operands ...). */
1532 asmop = body;
1533 break;
1534
1535 case SET:
1536 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1537 asmop = SET_SRC (body);
1538
1539 /* The output is in the SET.
1540 Its constraint is in the ASM_OPERANDS itself. */
1541 if (operands)
1542 operands[0] = SET_DEST (body);
1543 if (operand_locs)
1544 operand_locs[0] = &SET_DEST (body);
1545 if (constraints)
1546 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1547 if (modes)
1548 modes[0] = GET_MODE (SET_DEST (body));
1549 nbase = 1;
1550 break;
1551
1552 case PARALLEL:
1553 {
1554 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1555
1556 asmop = XVECEXP (body, 0, 0);
1557 if (GET_CODE (asmop) == SET)
1558 {
1559 asmop = SET_SRC (asmop);
1560
1561 /* At least one output, plus some CLOBBERs. The outputs are in
1562 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1563 for (i = 0; i < nparallel; i++)
1564 {
1565 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1566 break; /* Past last SET */
1567 if (operands)
1568 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1569 if (operand_locs)
1570 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1571 if (constraints)
1572 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1573 if (modes)
1574 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1575 }
1576 nbase = i;
1577 }
1578 break;
1579 }
1580
1581 default:
1582 gcc_unreachable ();
1583 }
1584
1585 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1586 for (i = 0; i < n; i++)
1587 {
1588 if (operand_locs)
1589 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1590 if (operands)
1591 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1592 if (constraints)
1593 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1594 if (modes)
1595 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1596 }
1597 nbase += n;
1598
1599 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1600 for (i = 0; i < n; i++)
1601 {
1602 if (operand_locs)
1603 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1604 if (operands)
1605 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1606 if (constraints)
1607 constraints[nbase + i] = "";
1608 if (modes)
1609 modes[nbase + i] = Pmode;
1610 }
1611
1612 if (loc)
1613 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1614
1615 return ASM_OPERANDS_TEMPLATE (asmop);
1616 }
1617
1618 /* Check if an asm_operand matches its constraints.
1619 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1620
1621 int
1622 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1623 {
1624 int result = 0;
1625 #ifdef AUTO_INC_DEC
1626 bool incdec_ok = false;
1627 #endif
1628
1629 /* Use constrain_operands after reload. */
1630 gcc_assert (!reload_completed);
1631
1632 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1633 many alternatives as required to match the other operands. */
1634 if (*constraint == '\0')
1635 result = 1;
1636
1637 while (*constraint)
1638 {
1639 char c = *constraint;
1640 int len;
1641 switch (c)
1642 {
1643 case ',':
1644 constraint++;
1645 continue;
1646 case '=':
1647 case '+':
1648 case '*':
1649 case '%':
1650 case '!':
1651 case '#':
1652 case '&':
1653 case '?':
1654 break;
1655
1656 case '0': case '1': case '2': case '3': case '4':
1657 case '5': case '6': case '7': case '8': case '9':
1658 /* If caller provided constraints pointer, look up
1659 the matching constraint. Otherwise, our caller should have
1660 given us the proper matching constraint, but we can't
1661 actually fail the check if they didn't. Indicate that
1662 results are inconclusive. */
1663 if (constraints)
1664 {
1665 char *end;
1666 unsigned long match;
1667
1668 match = strtoul (constraint, &end, 10);
1669 if (!result)
1670 result = asm_operand_ok (op, constraints[match], NULL);
1671 constraint = (const char *) end;
1672 }
1673 else
1674 {
1675 do
1676 constraint++;
1677 while (ISDIGIT (*constraint));
1678 if (! result)
1679 result = -1;
1680 }
1681 continue;
1682
1683 case 'p':
1684 if (address_operand (op, VOIDmode))
1685 result = 1;
1686 break;
1687
1688 case TARGET_MEM_CONSTRAINT:
1689 case 'V': /* non-offsettable */
1690 if (memory_operand (op, VOIDmode))
1691 result = 1;
1692 break;
1693
1694 case 'o': /* offsettable */
1695 if (offsettable_nonstrict_memref_p (op))
1696 result = 1;
1697 break;
1698
1699 case '<':
1700 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1701 excepting those that expand_call created. Further, on some
1702 machines which do not have generalized auto inc/dec, an inc/dec
1703 is not a memory_operand.
1704
1705 Match any memory and hope things are resolved after reload. */
1706
1707 if (MEM_P (op)
1708 && (1
1709 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1710 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1711 result = 1;
1712 #ifdef AUTO_INC_DEC
1713 incdec_ok = true;
1714 #endif
1715 break;
1716
1717 case '>':
1718 if (MEM_P (op)
1719 && (1
1720 || GET_CODE (XEXP (op, 0)) == PRE_INC
1721 || GET_CODE (XEXP (op, 0)) == POST_INC))
1722 result = 1;
1723 #ifdef AUTO_INC_DEC
1724 incdec_ok = true;
1725 #endif
1726 break;
1727
1728 case 'E':
1729 case 'F':
1730 if (CONST_DOUBLE_AS_FLOAT_P (op)
1731 || (GET_CODE (op) == CONST_VECTOR
1732 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1733 result = 1;
1734 break;
1735
1736 case 'G':
1737 if (CONST_DOUBLE_AS_FLOAT_P (op)
1738 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1739 result = 1;
1740 break;
1741 case 'H':
1742 if (CONST_DOUBLE_AS_FLOAT_P (op)
1743 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1744 result = 1;
1745 break;
1746
1747 case 's':
1748 if (CONST_SCALAR_INT_P (op))
1749 break;
1750 /* Fall through. */
1751
1752 case 'i':
1753 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1754 result = 1;
1755 break;
1756
1757 case 'n':
1758 if (CONST_SCALAR_INT_P (op))
1759 result = 1;
1760 break;
1761
1762 case 'I':
1763 if (CONST_INT_P (op)
1764 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1765 result = 1;
1766 break;
1767 case 'J':
1768 if (CONST_INT_P (op)
1769 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1770 result = 1;
1771 break;
1772 case 'K':
1773 if (CONST_INT_P (op)
1774 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1775 result = 1;
1776 break;
1777 case 'L':
1778 if (CONST_INT_P (op)
1779 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1780 result = 1;
1781 break;
1782 case 'M':
1783 if (CONST_INT_P (op)
1784 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1785 result = 1;
1786 break;
1787 case 'N':
1788 if (CONST_INT_P (op)
1789 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1790 result = 1;
1791 break;
1792 case 'O':
1793 if (CONST_INT_P (op)
1794 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1795 result = 1;
1796 break;
1797 case 'P':
1798 if (CONST_INT_P (op)
1799 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1800 result = 1;
1801 break;
1802
1803 case 'X':
1804 result = 1;
1805 break;
1806
1807 case 'g':
1808 if (general_operand (op, VOIDmode))
1809 result = 1;
1810 break;
1811
1812 default:
1813 /* For all other letters, we first check for a register class,
1814 otherwise it is an EXTRA_CONSTRAINT. */
1815 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1816 {
1817 case 'r':
1818 if (GET_MODE (op) == BLKmode)
1819 break;
1820 if (register_operand (op, VOIDmode))
1821 result = 1;
1822 }
1823 #ifdef EXTRA_CONSTRAINT_STR
1824 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
1825 /* Every memory operand can be reloaded to fit. */
1826 result = result || memory_operand (op, VOIDmode);
1827 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
1828 /* Every address operand can be reloaded to fit. */
1829 result = result || address_operand (op, VOIDmode);
1830 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1831 result = 1;
1832 #endif
1833 break;
1834 }
1835 len = CONSTRAINT_LEN (c, constraint);
1836 do
1837 constraint++;
1838 while (--len && *constraint);
1839 if (len)
1840 return 0;
1841 }
1842
1843 #ifdef AUTO_INC_DEC
1844 /* For operands without < or > constraints reject side-effects. */
1845 if (!incdec_ok && result && MEM_P (op))
1846 switch (GET_CODE (XEXP (op, 0)))
1847 {
1848 case PRE_INC:
1849 case POST_INC:
1850 case PRE_DEC:
1851 case POST_DEC:
1852 case PRE_MODIFY:
1853 case POST_MODIFY:
1854 return 0;
1855 default:
1856 break;
1857 }
1858 #endif
1859
1860 return result;
1861 }
1862 \f
1863 /* Given an rtx *P, if it is a sum containing an integer constant term,
1864 return the location (type rtx *) of the pointer to that constant term.
1865 Otherwise, return a null pointer. */
1866
1867 rtx *
1868 find_constant_term_loc (rtx *p)
1869 {
1870 rtx *tem;
1871 enum rtx_code code = GET_CODE (*p);
1872
1873 /* If *P IS such a constant term, P is its location. */
1874
1875 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1876 || code == CONST)
1877 return p;
1878
1879 /* Otherwise, if not a sum, it has no constant term. */
1880
1881 if (GET_CODE (*p) != PLUS)
1882 return 0;
1883
1884 /* If one of the summands is constant, return its location. */
1885
1886 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1887 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1888 return p;
1889
1890 /* Otherwise, check each summand for containing a constant term. */
1891
1892 if (XEXP (*p, 0) != 0)
1893 {
1894 tem = find_constant_term_loc (&XEXP (*p, 0));
1895 if (tem != 0)
1896 return tem;
1897 }
1898
1899 if (XEXP (*p, 1) != 0)
1900 {
1901 tem = find_constant_term_loc (&XEXP (*p, 1));
1902 if (tem != 0)
1903 return tem;
1904 }
1905
1906 return 0;
1907 }
1908 \f
1909 /* Return 1 if OP is a memory reference
1910 whose address contains no side effects
1911 and remains valid after the addition
1912 of a positive integer less than the
1913 size of the object being referenced.
1914
1915 We assume that the original address is valid and do not check it.
1916
1917 This uses strict_memory_address_p as a subroutine, so
1918 don't use it before reload. */
1919
1920 int
1921 offsettable_memref_p (rtx op)
1922 {
1923 return ((MEM_P (op))
1924 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1925 MEM_ADDR_SPACE (op)));
1926 }
1927
1928 /* Similar, but don't require a strictly valid mem ref:
1929 consider pseudo-regs valid as index or base regs. */
1930
1931 int
1932 offsettable_nonstrict_memref_p (rtx op)
1933 {
1934 return ((MEM_P (op))
1935 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1936 MEM_ADDR_SPACE (op)));
1937 }
1938
1939 /* Return 1 if Y is a memory address which contains no side effects
1940 and would remain valid for address space AS after the addition of
1941 a positive integer less than the size of that mode.
1942
1943 We assume that the original address is valid and do not check it.
1944 We do check that it is valid for narrower modes.
1945
1946 If STRICTP is nonzero, we require a strictly valid address,
1947 for the sake of use in reload.c. */
1948
1949 int
1950 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1951 addr_space_t as)
1952 {
1953 enum rtx_code ycode = GET_CODE (y);
1954 rtx z;
1955 rtx y1 = y;
1956 rtx *y2;
1957 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1958 (strictp ? strict_memory_address_addr_space_p
1959 : memory_address_addr_space_p);
1960 unsigned int mode_sz = GET_MODE_SIZE (mode);
1961
1962 if (CONSTANT_ADDRESS_P (y))
1963 return 1;
1964
1965 /* Adjusting an offsettable address involves changing to a narrower mode.
1966 Make sure that's OK. */
1967
1968 if (mode_dependent_address_p (y, as))
1969 return 0;
1970
1971 enum machine_mode address_mode = GET_MODE (y);
1972 if (address_mode == VOIDmode)
1973 address_mode = targetm.addr_space.address_mode (as);
1974 #ifdef POINTERS_EXTEND_UNSIGNED
1975 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1976 #endif
1977
1978 /* ??? How much offset does an offsettable BLKmode reference need?
1979 Clearly that depends on the situation in which it's being used.
1980 However, the current situation in which we test 0xffffffff is
1981 less than ideal. Caveat user. */
1982 if (mode_sz == 0)
1983 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1984
1985 /* If the expression contains a constant term,
1986 see if it remains valid when max possible offset is added. */
1987
1988 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1989 {
1990 int good;
1991
1992 y1 = *y2;
1993 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1994 /* Use QImode because an odd displacement may be automatically invalid
1995 for any wider mode. But it should be valid for a single byte. */
1996 good = (*addressp) (QImode, y, as);
1997
1998 /* In any case, restore old contents of memory. */
1999 *y2 = y1;
2000 return good;
2001 }
2002
2003 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2004 return 0;
2005
2006 /* The offset added here is chosen as the maximum offset that
2007 any instruction could need to add when operating on something
2008 of the specified mode. We assume that if Y and Y+c are
2009 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2010 go inside a LO_SUM here, so we do so as well. */
2011 if (GET_CODE (y) == LO_SUM
2012 && mode != BLKmode
2013 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2014 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2015 plus_constant (address_mode, XEXP (y, 1),
2016 mode_sz - 1));
2017 #ifdef POINTERS_EXTEND_UNSIGNED
2018 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2019 else if (POINTERS_EXTEND_UNSIGNED > 0
2020 && GET_CODE (y) == ZERO_EXTEND
2021 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2022 z = gen_rtx_ZERO_EXTEND (address_mode,
2023 plus_constant (pointer_mode, XEXP (y, 0),
2024 mode_sz - 1));
2025 #endif
2026 else
2027 z = plus_constant (address_mode, y, mode_sz - 1);
2028
2029 /* Use QImode because an odd displacement may be automatically invalid
2030 for any wider mode. But it should be valid for a single byte. */
2031 return (*addressp) (QImode, z, as);
2032 }
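
/* Editor's illustrative sketch, not part of the original file: a MEM is
   "offsettable" when every byte it covers can still be addressed after
   adding a small constant; e.g. for a 4-byte access the code above asks
   whether the address plus 3 is still valid as a QImode address. The
   hypothetical helper below picks the strict or non-strict test depending
   on whether reload has completed. */

static int
example_mem_is_offsettable (rtx mem)
{
  /* Before reload, pseudo registers may still appear in addresses, so the
     non-strict variant must be used; afterwards every address should
     already be strictly valid. */
  return (reload_completed
          ? offsettable_memref_p (mem)
          : offsettable_nonstrict_memref_p (mem));
}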
2033
2034 /* Return 1 if ADDR is an address-expression whose effect depends
2035 on the mode of the memory reference it is used in.
2036
2037 ADDRSPACE is the address space associated with the address.
2038
2039 Autoincrement addressing is a typical example of mode-dependence
2040 because the amount of the increment depends on the mode. */
2041
2042 bool
2043 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2044 {
2045 /* Auto-increment addressing with anything other than post_modify
2046 or pre_modify always introduces a mode dependency. Catch such
2047 cases now instead of deferring to the target. */
2048 if (GET_CODE (addr) == PRE_INC
2049 || GET_CODE (addr) == POST_INC
2050 || GET_CODE (addr) == PRE_DEC
2051 || GET_CODE (addr) == POST_DEC)
2052 return true;
2053
2054 return targetm.mode_dependent_address_p (addr, addrspace);
2055 }
2056 \f
2057 /* Like extract_insn, but save the extracted insn so that a later call for
2058 the same insn can reuse the still-valid contents of recog_data instead of
2059 extracting again. This is used primarily by the gen_attr infrastructure,
2060 which tends to extract the same insn over and over. */
2061 void
2062 extract_insn_cached (rtx insn)
2063 {
2064 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2065 return;
2066 extract_insn (insn);
2067 recog_data.insn = insn;
2068 }
2069
2070 /* Do cached extract_insn, constrain_operands and complain about failures.
2071 Used by insn_attrtab. */
2072 void
2073 extract_constrain_insn_cached (rtx insn)
2074 {
2075 extract_insn_cached (insn);
2076 if (which_alternative == -1
2077 && !constrain_operands (reload_completed))
2078 fatal_insn_not_found (insn);
2079 }
2080
2081 /* Do cached constrain_operands and complain about failures. */
2082 int
2083 constrain_operands_cached (int strict)
2084 {
2085 if (which_alternative == -1)
2086 return constrain_operands (strict);
2087 else
2088 return 1;
2089 }
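
/* Editor's illustrative sketch, not part of the original file: the typical
   calling sequence for code (such as attribute evaluation) that needs both
   the operands and the matched constraint alternative of an insn. The
   helper name is hypothetical. */

static int
example_count_output_operands (rtx insn)
{
  int i, count = 0;

  /* Fills recog_data and which_alternative, reusing the cached data when
     INSN has already been extracted. */
  extract_constrain_insn_cached (insn);

  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] != OP_IN)
      count++;
  return count;
}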
2090 \f
2091 /* Analyze INSN and fill in recog_data. */
2092
2093 void
2094 extract_insn (rtx insn)
2095 {
2096 int i;
2097 int icode;
2098 int noperands;
2099 rtx body = PATTERN (insn);
2100
2101 recog_data.n_operands = 0;
2102 recog_data.n_alternatives = 0;
2103 recog_data.n_dups = 0;
2104 recog_data.is_asm = false;
2105
2106 switch (GET_CODE (body))
2107 {
2108 case USE:
2109 case CLOBBER:
2110 case ASM_INPUT:
2111 case ADDR_VEC:
2112 case ADDR_DIFF_VEC:
2113 case VAR_LOCATION:
2114 return;
2115
2116 case SET:
2117 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2118 goto asm_insn;
2119 else
2120 goto normal_insn;
2121 case PARALLEL:
2122 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2123 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2124 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2125 goto asm_insn;
2126 else
2127 goto normal_insn;
2128 case ASM_OPERANDS:
2129 asm_insn:
2130 recog_data.n_operands = noperands = asm_noperands (body);
2131 if (noperands >= 0)
2132 {
2133 /* This insn is an `asm' with operands. */
2134
2135 /* expand_asm_operands makes sure there aren't too many operands. */
2136 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2137
2138 /* Now get the operand values and constraints out of the insn. */
2139 decode_asm_operands (body, recog_data.operand,
2140 recog_data.operand_loc,
2141 recog_data.constraints,
2142 recog_data.operand_mode, NULL);
2143 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2144 if (noperands > 0)
2145 {
2146 const char *p = recog_data.constraints[0];
2147 recog_data.n_alternatives = 1;
2148 while (*p)
2149 recog_data.n_alternatives += (*p++ == ',');
2150 }
2151 recog_data.is_asm = true;
2152 break;
2153 }
2154 fatal_insn_not_found (insn);
2155
2156 default:
2157 normal_insn:
2158 /* Ordinary insn: recognize it, get the operands via insn_extract
2159 and get the constraints. */
2160
2161 icode = recog_memoized (insn);
2162 if (icode < 0)
2163 fatal_insn_not_found (insn);
2164
2165 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2166 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2167 recog_data.n_dups = insn_data[icode].n_dups;
2168
2169 insn_extract (insn);
2170
2171 for (i = 0; i < noperands; i++)
2172 {
2173 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2174 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2175 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2176 /* A VOIDmode match_operand gets its mode from the real operand. */
2177 if (recog_data.operand_mode[i] == VOIDmode)
2178 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2179 }
2180 }
2181 for (i = 0; i < noperands; i++)
2182 recog_data.operand_type[i]
2183 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2184 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2185 : OP_IN);
2186
2187 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2188
2189 if (INSN_CODE (insn) < 0)
2190 for (i = 0; i < recog_data.n_alternatives; i++)
2191 recog_data.alternative_enabled_p[i] = true;
2192 else
2193 {
2194 recog_data.insn = insn;
2195 for (i = 0; i < recog_data.n_alternatives; i++)
2196 {
2197 which_alternative = i;
2198 recog_data.alternative_enabled_p[i]
2199 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2200 }
2201 }
2202
2203 recog_data.insn = NULL;
2204 which_alternative = -1;
2205 }
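
/* Editor's illustrative sketch, not part of the original file: after
   extract_insn, the operands, their constraint strings and their modes are
   all available through recog_data. Both the helper name and the dump
   format below are hypothetical. */

static void
example_dump_operands (FILE *file, rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (file, "operand %d: mode %s, constraint \"%s\"\n",
             i, GET_MODE_NAME (recog_data.operand_mode[i]),
             recog_data.constraints[i]);
}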
2206
2207 /* After calling extract_insn, you can use this function to extract some
2208 information from the constraint strings into a more usable form.
2209 The collected data is stored in recog_op_alt. */
2210 void
2211 preprocess_constraints (void)
2212 {
2213 int i;
2214
2215 for (i = 0; i < recog_data.n_operands; i++)
2216 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2217 * sizeof (struct operand_alternative)));
2218
2219 for (i = 0; i < recog_data.n_operands; i++)
2220 {
2221 int j;
2222 struct operand_alternative *op_alt;
2223 const char *p = recog_data.constraints[i];
2224
2225 op_alt = recog_op_alt[i];
2226
2227 for (j = 0; j < recog_data.n_alternatives; j++)
2228 {
2229 op_alt[j].cl = NO_REGS;
2230 op_alt[j].constraint = p;
2231 op_alt[j].matches = -1;
2232 op_alt[j].matched = -1;
2233
2234 if (!recog_data.alternative_enabled_p[j])
2235 {
2236 p = skip_alternative (p);
2237 continue;
2238 }
2239
2240 if (*p == '\0' || *p == ',')
2241 {
2242 op_alt[j].anything_ok = 1;
2243 continue;
2244 }
2245
2246 for (;;)
2247 {
2248 char c = *p;
2249 if (c == '#')
2250 do
2251 c = *++p;
2252 while (c != ',' && c != '\0');
2253 if (c == ',' || c == '\0')
2254 {
2255 p++;
2256 break;
2257 }
2258
2259 switch (c)
2260 {
2261 case '=': case '+': case '*': case '%':
2262 case 'E': case 'F': case 'G': case 'H':
2263 case 's': case 'i': case 'n':
2264 case 'I': case 'J': case 'K': case 'L':
2265 case 'M': case 'N': case 'O': case 'P':
2266 /* These don't say anything we care about. */
2267 break;
2268
2269 case '?':
2270 op_alt[j].reject += 6;
2271 break;
2272 case '!':
2273 op_alt[j].reject += 600;
2274 break;
2275 case '&':
2276 op_alt[j].earlyclobber = 1;
2277 break;
2278
2279 case '0': case '1': case '2': case '3': case '4':
2280 case '5': case '6': case '7': case '8': case '9':
2281 {
2282 char *end;
2283 op_alt[j].matches = strtoul (p, &end, 10);
2284 recog_op_alt[op_alt[j].matches][j].matched = i;
2285 p = end;
2286 }
2287 continue;
2288
2289 case TARGET_MEM_CONSTRAINT:
2290 op_alt[j].memory_ok = 1;
2291 break;
2292 case '<':
2293 op_alt[j].decmem_ok = 1;
2294 break;
2295 case '>':
2296 op_alt[j].incmem_ok = 1;
2297 break;
2298 case 'V':
2299 op_alt[j].nonoffmem_ok = 1;
2300 break;
2301 case 'o':
2302 op_alt[j].offmem_ok = 1;
2303 break;
2304 case 'X':
2305 op_alt[j].anything_ok = 1;
2306 break;
2307
2308 case 'p':
2309 op_alt[j].is_address = 1;
2310 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2311 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2312 ADDRESS, SCRATCH)];
2313 break;
2314
2315 case 'g':
2316 case 'r':
2317 op_alt[j].cl =
2318 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2319 break;
2320
2321 default:
2322 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2323 {
2324 op_alt[j].memory_ok = 1;
2325 break;
2326 }
2327 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2328 {
2329 op_alt[j].is_address = 1;
2330 op_alt[j].cl
2331 = (reg_class_subunion
2332 [(int) op_alt[j].cl]
2333 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2334 ADDRESS, SCRATCH)]);
2335 break;
2336 }
2337
2338 op_alt[j].cl
2339 = (reg_class_subunion
2340 [(int) op_alt[j].cl]
2341 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2342 break;
2343 }
2344 p += CONSTRAINT_LEN (c, p);
2345 }
2346 }
2347 }
2348 }
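
/* Editor's illustrative sketch, not part of the original file: once
   preprocess_constraints has run, per-alternative properties can be read
   from recog_op_alt instead of re-parsing the constraint strings. The
   helper name is hypothetical. */

static int
example_alternative_allows_memory (int opno, int alt)
{
  const struct operand_alternative *op_alt = &recog_op_alt[opno][alt];

  /* Accept either an explicit memory constraint ('m'/'o' style) or an
     "anything goes" alternative such as 'X'. */
  return op_alt->memory_ok || op_alt->offmem_ok || op_alt->anything_ok;
}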
2349
2350 /* Check the operands of an insn against the insn's operand constraints
2351 and return 1 if they are valid.
2352 The information about the insn's operands, constraints, operand modes
2353 etc. is obtained from the global variables set up by extract_insn.
2354
2355 WHICH_ALTERNATIVE is set to a number which indicates which
2356 alternative of constraints was matched: 0 for the first alternative,
2357 1 for the next, etc.
2358
2359 In addition, when two operands are required to match
2360 and it happens that the output operand is (reg) while the
2361 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2362 make the output operand look like the input.
2363 This is because the output operand is the one the template will print.
2364
2365 This is used in final, just before printing the assembler code and by
2366 the routines that determine an insn's attribute.
2367
2368 If STRICT is a positive nonzero value, it means that we have been
2369 called after reload has been completed. In that case, we must
2370 do all checks strictly. If it is zero, it means that we have been called
2371 before reload has completed. In that case, we first try to see if we can
2372 find an alternative that matches strictly. If not, we try again, this
2373 time assuming that reload will fix up the insn. This provides a "best
2374 guess" for the alternative and is used to compute attributes of insns prior
2375 to reload. A negative value of STRICT is used for this internal call. */
2376
2377 struct funny_match
2378 {
2379 int this_op, other;
2380 };
2381
2382 int
2383 constrain_operands (int strict)
2384 {
2385 const char *constraints[MAX_RECOG_OPERANDS];
2386 int matching_operands[MAX_RECOG_OPERANDS];
2387 int earlyclobber[MAX_RECOG_OPERANDS];
2388 int c;
2389
2390 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2391 int funny_match_index;
2392
2393 which_alternative = 0;
2394 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2395 return 1;
2396
2397 for (c = 0; c < recog_data.n_operands; c++)
2398 {
2399 constraints[c] = recog_data.constraints[c];
2400 matching_operands[c] = -1;
2401 }
2402
2403 do
2404 {
2405 int seen_earlyclobber_at = -1;
2406 int opno;
2407 int lose = 0;
2408 funny_match_index = 0;
2409
2410 if (!recog_data.alternative_enabled_p[which_alternative])
2411 {
2412 int i;
2413
2414 for (i = 0; i < recog_data.n_operands; i++)
2415 constraints[i] = skip_alternative (constraints[i]);
2416
2417 which_alternative++;
2418 continue;
2419 }
2420
2421 for (opno = 0; opno < recog_data.n_operands; opno++)
2422 {
2423 rtx op = recog_data.operand[opno];
2424 enum machine_mode mode = GET_MODE (op);
2425 const char *p = constraints[opno];
2426 int offset = 0;
2427 int win = 0;
2428 int val;
2429 int len;
2430
2431 earlyclobber[opno] = 0;
2432
2433 /* A unary operator may be accepted by the predicate, but it
2434 is irrelevant for matching constraints. */
2435 if (UNARY_P (op))
2436 op = XEXP (op, 0);
2437
2438 if (GET_CODE (op) == SUBREG)
2439 {
2440 if (REG_P (SUBREG_REG (op))
2441 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2442 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2443 GET_MODE (SUBREG_REG (op)),
2444 SUBREG_BYTE (op),
2445 GET_MODE (op));
2446 op = SUBREG_REG (op);
2447 }
2448
2449 /* An empty constraint or empty alternative
2450 allows anything which matched the pattern. */
2451 if (*p == 0 || *p == ',')
2452 win = 1;
2453
2454 do
2455 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2456 {
2457 case '\0':
2458 len = 0;
2459 break;
2460 case ',':
2461 c = '\0';
2462 break;
2463
2464 case '?': case '!': case '*': case '%':
2465 case '=': case '+':
2466 break;
2467
2468 case '#':
2469 /* Ignore rest of this alternative as far as
2470 constraint checking is concerned. */
2471 do
2472 p++;
2473 while (*p && *p != ',');
2474 len = 0;
2475 break;
2476
2477 case '&':
2478 earlyclobber[opno] = 1;
2479 if (seen_earlyclobber_at < 0)
2480 seen_earlyclobber_at = opno;
2481 break;
2482
2483 case '0': case '1': case '2': case '3': case '4':
2484 case '5': case '6': case '7': case '8': case '9':
2485 {
2486 /* This operand must be the same as a previous one.
2487 This kind of constraint is used for instructions such
2488 as add when they take only two operands.
2489
2490 Note that the lower-numbered operand is passed first.
2491
2492 If we are not testing strictly, assume that this
2493 constraint will be satisfied. */
2494
2495 char *end;
2496 int match;
2497
2498 match = strtoul (p, &end, 10);
2499 p = end;
2500
2501 if (strict < 0)
2502 val = 1;
2503 else
2504 {
2505 rtx op1 = recog_data.operand[match];
2506 rtx op2 = recog_data.operand[opno];
2507
2508 /* A unary operator may be accepted by the predicate,
2509 but it is irrelevant for matching constraints. */
2510 if (UNARY_P (op1))
2511 op1 = XEXP (op1, 0);
2512 if (UNARY_P (op2))
2513 op2 = XEXP (op2, 0);
2514
2515 val = operands_match_p (op1, op2);
2516 }
2517
2518 matching_operands[opno] = match;
2519 matching_operands[match] = opno;
2520
2521 if (val != 0)
2522 win = 1;
2523
2524 /* If output is *x and input is *--x, arrange later
2525 to change the output to *--x as well, since the
2526 output op is the one that will be printed. */
2527 if (val == 2 && strict > 0)
2528 {
2529 funny_match[funny_match_index].this_op = opno;
2530 funny_match[funny_match_index++].other = match;
2531 }
2532 }
2533 len = 0;
2534 break;
2535
2536 case 'p':
2537 /* p is used for address_operands. When we are called by
2538 gen_reload, no one will have checked that the address is
2539 strictly valid, i.e., that all pseudos requiring hard regs
2540 have gotten them. */
2541 if (strict <= 0
2542 || (strict_memory_address_p (recog_data.operand_mode[opno],
2543 op)))
2544 win = 1;
2545 break;
2546
2547 /* No need to check general_operand again;
2548 it was done in insn-recog.c. Well, except that reload
2549 doesn't check the validity of its replacements, but
2550 that should only matter when there's a bug. */
2551 case 'g':
2552 /* Anything goes unless it is a REG and really has a hard reg
2553 but the hard reg is not in the class GENERAL_REGS. */
2554 if (REG_P (op))
2555 {
2556 if (strict < 0
2557 || GENERAL_REGS == ALL_REGS
2558 || (reload_in_progress
2559 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2560 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2561 win = 1;
2562 }
2563 else if (strict < 0 || general_operand (op, mode))
2564 win = 1;
2565 break;
2566
2567 case 'X':
2568 /* This is used for a MATCH_SCRATCH in the cases when
2569 we don't actually need anything. So anything goes
2570 any time. */
2571 win = 1;
2572 break;
2573
2574 case TARGET_MEM_CONSTRAINT:
2575 /* Memory operands must be valid, to the extent
2576 required by STRICT. */
2577 if (MEM_P (op))
2578 {
2579 if (strict > 0
2580 && !strict_memory_address_addr_space_p
2581 (GET_MODE (op), XEXP (op, 0),
2582 MEM_ADDR_SPACE (op)))
2583 break;
2584 if (strict == 0
2585 && !memory_address_addr_space_p
2586 (GET_MODE (op), XEXP (op, 0),
2587 MEM_ADDR_SPACE (op)))
2588 break;
2589 win = 1;
2590 }
2591 /* Before reload, accept what reload can turn into mem. */
2592 else if (strict < 0 && CONSTANT_P (op))
2593 win = 1;
2594 /* During reload, accept a pseudo */
2595 else if (reload_in_progress && REG_P (op)
2596 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2597 win = 1;
2598 break;
2599
2600 case '<':
2601 if (MEM_P (op)
2602 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2603 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2604 win = 1;
2605 break;
2606
2607 case '>':
2608 if (MEM_P (op)
2609 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2610 || GET_CODE (XEXP (op, 0)) == POST_INC))
2611 win = 1;
2612 break;
2613
2614 case 'E':
2615 case 'F':
2616 if (CONST_DOUBLE_AS_FLOAT_P (op)
2617 || (GET_CODE (op) == CONST_VECTOR
2618 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2619 win = 1;
2620 break;
2621
2622 case 'G':
2623 case 'H':
2624 if (CONST_DOUBLE_AS_FLOAT_P (op)
2625 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2626 win = 1;
2627 break;
2628
2629 case 's':
2630 if (CONST_SCALAR_INT_P (op))
2631 break;
2632 case 'i':
2633 if (CONSTANT_P (op))
2634 win = 1;
2635 break;
2636
2637 case 'n':
2638 if (CONST_SCALAR_INT_P (op))
2639 win = 1;
2640 break;
2641
2642 case 'I':
2643 case 'J':
2644 case 'K':
2645 case 'L':
2646 case 'M':
2647 case 'N':
2648 case 'O':
2649 case 'P':
2650 if (CONST_INT_P (op)
2651 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2652 win = 1;
2653 break;
2654
2655 case 'V':
2656 if (MEM_P (op)
2657 && ((strict > 0 && ! offsettable_memref_p (op))
2658 || (strict < 0
2659 && !(CONSTANT_P (op) || MEM_P (op)))
2660 || (reload_in_progress
2661 && !(REG_P (op)
2662 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2663 win = 1;
2664 break;
2665
2666 case 'o':
2667 if ((strict > 0 && offsettable_memref_p (op))
2668 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2669 /* Before reload, accept what reload can handle. */
2670 || (strict < 0
2671 && (CONSTANT_P (op) || MEM_P (op)))
2672 /* During reload, accept a pseudo */
2673 || (reload_in_progress && REG_P (op)
2674 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2675 win = 1;
2676 break;
2677
2678 default:
2679 {
2680 enum reg_class cl;
2681
2682 cl = (c == 'r'
2683 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2684 if (cl != NO_REGS)
2685 {
2686 if (strict < 0
2687 || (strict == 0
2688 && REG_P (op)
2689 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2690 || (strict == 0 && GET_CODE (op) == SCRATCH)
2691 || (REG_P (op)
2692 && reg_fits_class_p (op, cl, offset, mode)))
2693 win = 1;
2694 }
2695 #ifdef EXTRA_CONSTRAINT_STR
2696 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2697 win = 1;
2698
2699 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2700 /* Every memory operand can be reloaded to fit. */
2701 && ((strict < 0 && MEM_P (op))
2702 /* Before reload, accept what reload can turn
2703 into mem. */
2704 || (strict < 0 && CONSTANT_P (op))
2705 /* During reload, accept a pseudo */
2706 || (reload_in_progress && REG_P (op)
2707 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2708 win = 1;
2709 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2710 /* Every address operand can be reloaded to fit. */
2711 && strict < 0)
2712 win = 1;
2713 /* Cater to architectures like IA-64 that define extra memory
2714 constraints without using define_memory_constraint. */
2715 else if (reload_in_progress
2716 && REG_P (op)
2717 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2718 && reg_renumber[REGNO (op)] < 0
2719 && reg_equiv_mem (REGNO (op)) != 0
2720 && EXTRA_CONSTRAINT_STR
2721 (reg_equiv_mem (REGNO (op)), c, p))
2722 win = 1;
2723 #endif
2724 break;
2725 }
2726 }
2727 while (p += len, c);
2728
2729 constraints[opno] = p;
2730 /* If this operand did not win somehow,
2731 this alternative loses. */
2732 if (! win)
2733 lose = 1;
2734 }
2735 /* This alternative won; the operands are ok.
2736 Change whichever operands this alternative says to change. */
2737 if (! lose)
2738 {
2739 int opno, eopno;
2740
2741 /* See if any earlyclobber operand conflicts with some other
2742 operand. */
2743
2744 if (strict > 0 && seen_earlyclobber_at >= 0)
2745 for (eopno = seen_earlyclobber_at;
2746 eopno < recog_data.n_operands;
2747 eopno++)
2748 /* Ignore earlyclobber operands now in memory,
2749 because we would often report failure when we have
2750 two memory operands, one of which was formerly a REG. */
2751 if (earlyclobber[eopno]
2752 && REG_P (recog_data.operand[eopno]))
2753 for (opno = 0; opno < recog_data.n_operands; opno++)
2754 if ((MEM_P (recog_data.operand[opno])
2755 || recog_data.operand_type[opno] != OP_OUT)
2756 && opno != eopno
2757 /* Ignore things like match_operator operands. */
2758 && *recog_data.constraints[opno] != 0
2759 && ! (matching_operands[opno] == eopno
2760 && operands_match_p (recog_data.operand[opno],
2761 recog_data.operand[eopno]))
2762 && ! safe_from_earlyclobber (recog_data.operand[opno],
2763 recog_data.operand[eopno]))
2764 lose = 1;
2765
2766 if (! lose)
2767 {
2768 while (--funny_match_index >= 0)
2769 {
2770 recog_data.operand[funny_match[funny_match_index].other]
2771 = recog_data.operand[funny_match[funny_match_index].this_op];
2772 }
2773
2774 #ifdef AUTO_INC_DEC
2775 /* For operands without < or > constraints reject side-effects. */
2776 if (recog_data.is_asm)
2777 {
2778 for (opno = 0; opno < recog_data.n_operands; opno++)
2779 if (MEM_P (recog_data.operand[opno]))
2780 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2781 {
2782 case PRE_INC:
2783 case POST_INC:
2784 case PRE_DEC:
2785 case POST_DEC:
2786 case PRE_MODIFY:
2787 case POST_MODIFY:
2788 if (strchr (recog_data.constraints[opno], '<') == NULL
2789 && strchr (recog_data.constraints[opno], '>')
2790 == NULL)
2791 return 0;
2792 break;
2793 default:
2794 break;
2795 }
2796 }
2797 #endif
2798 return 1;
2799 }
2800 }
2801
2802 which_alternative++;
2803 }
2804 while (which_alternative < recog_data.n_alternatives);
2805
2806 which_alternative = -1;
2807 /* If we are about to reject this, but we are not to test strictly,
2808 try a very loose test. Only return failure if it fails also. */
2809 if (strict == 0)
2810 return constrain_operands (-1);
2811 else
2812 return 0;
2813 }
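
/* Editor's illustrative sketch, not part of the original file: the usual
   extract-then-constrain sequence. Passing reload_completed as STRICT makes
   the check lenient before reload, when reload can still fix operands up,
   and fully strict afterwards. The helper name is hypothetical. */

static int
example_insn_constraints_satisfied (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed);
}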
2814
2815 /* Return true iff OPERAND (assumed to be a REG rtx)
2816 is a hard reg in class CLASS when its regno is offset by OFFSET
2817 and changed to mode MODE.
2818 If REG occupies multiple hard regs, all of them must be in CLASS. */
2819
2820 bool
2821 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2822 enum machine_mode mode)
2823 {
2824 unsigned int regno = REGNO (operand);
2825
2826 if (cl == NO_REGS)
2827 return false;
2828
2829 /* Regno must not be a pseudo register. Offset may be negative. */
2830 return (HARD_REGISTER_NUM_P (regno)
2831 && HARD_REGISTER_NUM_P (regno + offset)
2832 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2833 regno + offset));
2834 }
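
/* Editor's illustrative sketch, not part of the original file: a common use
   of reg_fits_class_p is to ask whether a hard register, accessed directly
   rather than through a SUBREG (hence offset 0), lies entirely within a
   register class. The helper name is hypothetical. */

static bool
example_hard_reg_is_general_p (rtx reg)
{
  return (REG_P (reg)
          && HARD_REGISTER_NUM_P (REGNO (reg))
          && reg_fits_class_p (reg, GENERAL_REGS, 0, GET_MODE (reg)));
}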
2835 \f
2836 /* Split a single instruction. Helper function for split_all_insns and
2837 split_all_insns_noflow. Return the last insn in the sequence if successful,
2838 or NULL if unsuccessful. */
2839
2840 static rtx
2841 split_insn (rtx insn)
2842 {
2843 /* Split insns here to get max fine-grain parallelism. */
2844 rtx first = PREV_INSN (insn);
2845 rtx last = try_split (PATTERN (insn), insn, 1);
2846 rtx insn_set, last_set, note;
2847
2848 if (last == insn)
2849 return NULL_RTX;
2850
2851 /* If the original instruction was a single set that was known to be
2852 equivalent to a constant, see if we can say the same about the last
2853 instruction in the split sequence. The two instructions must set
2854 the same destination. */
2855 insn_set = single_set (insn);
2856 if (insn_set)
2857 {
2858 last_set = single_set (last);
2859 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2860 {
2861 note = find_reg_equal_equiv_note (insn);
2862 if (note && CONSTANT_P (XEXP (note, 0)))
2863 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2864 else if (CONSTANT_P (SET_SRC (insn_set)))
2865 set_unique_reg_note (last, REG_EQUAL,
2866 copy_rtx (SET_SRC (insn_set)));
2867 }
2868 }
2869
2870 /* try_split returns the NOTE that INSN became. */
2871 SET_INSN_DELETED (insn);
2872
2873 /* ??? Coddle to md files that generate subregs in post-reload
2874 splitters instead of computing the proper hard register. */
2875 if (reload_completed && first != last)
2876 {
2877 first = NEXT_INSN (first);
2878 for (;;)
2879 {
2880 if (INSN_P (first))
2881 cleanup_subreg_operands (first);
2882 if (first == last)
2883 break;
2884 first = NEXT_INSN (first);
2885 }
2886 }
2887
2888 return last;
2889 }
2890
2891 /* Split all insns in the function. */
2892
2893 void
2894 split_all_insns (void)
2895 {
2896 sbitmap blocks;
2897 bool changed;
2898 basic_block bb;
2899
2900 blocks = sbitmap_alloc (last_basic_block);
2901 bitmap_clear (blocks);
2902 changed = false;
2903
2904 FOR_EACH_BB_REVERSE (bb)
2905 {
2906 rtx insn, next;
2907 bool finish = false;
2908
2909 rtl_profile_for_bb (bb);
2910 for (insn = BB_HEAD (bb); !finish ; insn = next)
2911 {
2912 /* Can't use `next_real_insn' because that might go across
2913 CODE_LABELS and short-out basic blocks. */
2914 next = NEXT_INSN (insn);
2915 finish = (insn == BB_END (bb));
2916 if (INSN_P (insn))
2917 {
2918 rtx set = single_set (insn);
2919
2920 /* Don't split no-op move insns. These should silently
2921 disappear later in final. Splitting such insns would
2922 break the code that handles LIBCALL blocks. */
2923 if (set && set_noop_p (set))
2924 {
2925 /* Nops get in the way while scheduling, so delete them
2926 now if register allocation has already been done. It
2927 is too risky to try to do this before register
2928 allocation, and there are unlikely to be very many
2929 nops then anyways. */
2930 if (reload_completed)
2931 delete_insn_and_edges (insn);
2932 }
2933 else
2934 {
2935 if (split_insn (insn))
2936 {
2937 bitmap_set_bit (blocks, bb->index);
2938 changed = true;
2939 }
2940 }
2941 }
2942 }
2943 }
2944
2945 default_rtl_profile ();
2946 if (changed)
2947 find_many_sub_basic_blocks (blocks);
2948
2949 #ifdef ENABLE_CHECKING
2950 verify_flow_info ();
2951 #endif
2952
2953 sbitmap_free (blocks);
2954 }
2955
2956 /* Same as split_all_insns, but do not expect CFG to be available.
2957 Used by machine dependent reorg passes. */
2958
2959 unsigned int
2960 split_all_insns_noflow (void)
2961 {
2962 rtx next, insn;
2963
2964 for (insn = get_insns (); insn; insn = next)
2965 {
2966 next = NEXT_INSN (insn);
2967 if (INSN_P (insn))
2968 {
2969 /* Don't split no-op move insns. These should silently
2970 disappear later in final. Splitting such insns would
2971 break the code that handles LIBCALL blocks. */
2972 rtx set = single_set (insn);
2973 if (set && set_noop_p (set))
2974 {
2975 /* Nops get in the way while scheduling, so delete them
2976 now if register allocation has already been done. It
2977 is too risky to try to do this before register
2978 allocation, and there are unlikely to be very many
2979 nops then anyways.
2980
2981 ??? Should we use delete_insn when the CFG isn't valid? */
2982 if (reload_completed)
2983 delete_insn_and_edges (insn);
2984 }
2985 else
2986 split_insn (insn);
2987 }
2988 }
2989 return 0;
2990 }
2991 \f
2992 #ifdef HAVE_peephole2
2993 struct peep2_insn_data
2994 {
2995 rtx insn;
2996 regset live_before;
2997 };
2998
2999 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3000 static int peep2_current;
3001
3002 static bool peep2_do_rebuild_jump_labels;
3003 static bool peep2_do_cleanup_cfg;
3004
3005 /* The number of instructions available to match a peep2. */
3006 int peep2_current_count;
3007
3008 /* A non-insn marker indicating the last insn of the block.
3009 The live_before regset for this element is correct, indicating
3010 DF_LIVE_OUT for the block. */
3011 #define PEEP2_EOB pc_rtx
3012
3013 /* Wrap N to fit into the peep2_insn_data buffer. */
3014
3015 static int
3016 peep2_buf_position (int n)
3017 {
3018 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3019 n -= MAX_INSNS_PER_PEEP2 + 1;
3020 return n;
3021 }
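
/* Editor's illustrative sketch, not part of the original file:
   peep2_insn_data is a circular buffer of MAX_INSNS_PER_PEEP2 + 1 slots, so
   an index only ever overshoots by less than one buffer length and a single
   subtraction replaces a full modulo; e.g. if the buffer had 6 slots,
   position 4 plus offset 3 would wrap to slot 1. The helper below is a
   hypothetical illustration of how the callers index the buffer. */

static rtx
example_peep2_slot (int base, int offset)
{
  /* BASE is assumed to be a valid slot index and OFFSET at most one buffer
     length, matching the assertions made by the callers elsewhere. */
  return peep2_insn_data[peep2_buf_position (base + offset)].insn;
}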
3022
3023 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3024 does not exist. Used by the recognizer to find the next insn to match
3025 in a multi-insn pattern. */
3026
3027 rtx
3028 peep2_next_insn (int n)
3029 {
3030 gcc_assert (n <= peep2_current_count);
3031
3032 n = peep2_buf_position (peep2_current + n);
3033
3034 return peep2_insn_data[n].insn;
3035 }
3036
3037 /* Return true if REGNO is dead before the Nth non-note insn
3038 after `current'. */
3039
3040 int
3041 peep2_regno_dead_p (int ofs, int regno)
3042 {
3043 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3044
3045 ofs = peep2_buf_position (peep2_current + ofs);
3046
3047 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3048
3049 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3050 }
3051
3052 /* Similarly for a REG. */
3053
3054 int
3055 peep2_reg_dead_p (int ofs, rtx reg)
3056 {
3057 int regno, n;
3058
3059 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3060
3061 ofs = peep2_buf_position (peep2_current + ofs);
3062
3063 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3064
3065 regno = REGNO (reg);
3066 n = hard_regno_nregs[regno][GET_MODE (reg)];
3067 while (--n >= 0)
3068 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3069 return 0;
3070 return 1;
3071 }
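
/* Editor's illustrative sketch, not part of the original file: these
   predicates are intended for define_peephole2 conditions, e.g. to check
   whether a matched operand dies within the sequence and can therefore be
   reused as a scratch. The helper name is hypothetical. */

static int
example_operand_reusable_p (rtx reg, int ofs)
{
  /* Only hard registers reach peephole2, which runs after reload. */
  return REG_P (reg) && peep2_reg_dead_p (ofs, reg);
}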
3072
3073 /* Regno offset to be used in the register search. */
3074 static int search_ofs;
3075
3076 /* Try to find a hard register of mode MODE, matching the register class in
3077 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3078 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3079 in which case the only condition is that the register must be available
3080 before CURRENT_INSN.
3081 Registers that already have bits set in REG_SET will not be considered.
3082
3083 If an appropriate register is available, it will be returned and the
3084 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3085 returned. */
3086
3087 rtx
3088 peep2_find_free_register (int from, int to, const char *class_str,
3089 enum machine_mode mode, HARD_REG_SET *reg_set)
3090 {
3091 enum reg_class cl;
3092 HARD_REG_SET live;
3093 df_ref *def_rec;
3094 int i;
3095
3096 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3097 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3098
3099 from = peep2_buf_position (peep2_current + from);
3100 to = peep2_buf_position (peep2_current + to);
3101
3102 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3103 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3104
3105 while (from != to)
3106 {
3107 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3108
3109 /* Don't use registers set or clobbered by the insn. */
3110 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3111 *def_rec; def_rec++)
3112 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3113
3114 from = peep2_buf_position (from + 1);
3115 }
3116
3117 cl = (class_str[0] == 'r' ? GENERAL_REGS
3118 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3119
3120 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3121 {
3122 int raw_regno, regno, success, j;
3123
3124 /* Distribute the free registers as much as possible. */
3125 raw_regno = search_ofs + i;
3126 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3127 raw_regno -= FIRST_PSEUDO_REGISTER;
3128 #ifdef REG_ALLOC_ORDER
3129 regno = reg_alloc_order[raw_regno];
3130 #else
3131 regno = raw_regno;
3132 #endif
3133
3134 /* Can it support the mode we need? */
3135 if (! HARD_REGNO_MODE_OK (regno, mode))
3136 continue;
3137
3138 success = 1;
3139 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3140 {
3141 /* Don't allocate fixed registers. */
3142 if (fixed_regs[regno + j])
3143 {
3144 success = 0;
3145 break;
3146 }
3147 /* Don't allocate global registers. */
3148 if (global_regs[regno + j])
3149 {
3150 success = 0;
3151 break;
3152 }
3153 /* Make sure the register is of the right class. */
3154 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3155 {
3156 success = 0;
3157 break;
3158 }
3159 /* And that we don't create an extra save/restore. */
3160 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3161 {
3162 success = 0;
3163 break;
3164 }
3165
3166 if (! targetm.hard_regno_scratch_ok (regno + j))
3167 {
3168 success = 0;
3169 break;
3170 }
3171
3172 /* And we don't clobber traceback for noreturn functions. */
3173 if ((regno + j == FRAME_POINTER_REGNUM
3174 || regno + j == HARD_FRAME_POINTER_REGNUM)
3175 && (! reload_completed || frame_pointer_needed))
3176 {
3177 success = 0;
3178 break;
3179 }
3180
3181 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3182 || TEST_HARD_REG_BIT (live, regno + j))
3183 {
3184 success = 0;
3185 break;
3186 }
3187 }
3188
3189 if (success)
3190 {
3191 add_to_hard_reg_set (reg_set, mode, regno);
3192
3193 /* Start the next search with the next register. */
3194 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3195 raw_regno = 0;
3196 search_ofs = raw_regno;
3197
3198 return gen_rtx_REG (mode, regno);
3199 }
3200 }
3201
3202 search_ofs = 0;
3203 return NULL_RTX;
3204 }
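
/* Editor's illustrative sketch, not part of the original file: a typical
   request made from a define_peephole2 body for a scratch register that is
   free across the whole matched sequence of SEQ_LEN insns. The helper name
   and SEQ_LEN are hypothetical. */

static rtx
example_grab_scratch (int seq_len)
{
  HARD_REG_SET used;

  /* No registers have been handed out yet for this peephole. */
  CLEAR_HARD_REG_SET (used);

  /* Ask for a general register ("r") that is free from the first matched
     insn (offset 0) through the last one; NULL_RTX means the peephole must
     fail. */
  return peep2_find_free_register (0, seq_len - 1, "r", word_mode, &used);
}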
3205
3206 /* Forget all currently tracked instructions, only remember current
3207 LIVE regset. */
3208
3209 static void
3210 peep2_reinit_state (regset live)
3211 {
3212 int i;
3213
3214 /* Indicate that all slots except the last hold invalid data. */
3215 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3216 peep2_insn_data[i].insn = NULL_RTX;
3217 peep2_current_count = 0;
3218
3219 /* Indicate that the last slot contains live_after data. */
3220 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3221 peep2_current = MAX_INSNS_PER_PEEP2;
3222
3223 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3224 }
3225
3226 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3227 starting at INSN. Perform the replacement, removing the old insns and
3228 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3229 if the replacement is rejected. */
3230
3231 static rtx
3232 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3233 {
3234 int i;
3235 rtx last, eh_note, as_note, before_try, x;
3236 rtx old_insn, new_insn;
3237 bool was_call = false;
3238
3239 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3240 match more than one insn, or to be split into more than one insn. */
3241 old_insn = peep2_insn_data[peep2_current].insn;
3242 if (RTX_FRAME_RELATED_P (old_insn))
3243 {
3244 bool any_note = false;
3245 rtx note;
3246
3247 if (match_len != 0)
3248 return NULL;
3249
3250 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3251 may be in the stream for the purpose of register allocation. */
3252 if (active_insn_p (attempt))
3253 new_insn = attempt;
3254 else
3255 new_insn = next_active_insn (attempt);
3256 if (next_active_insn (new_insn))
3257 return NULL;
3258
3259 /* We have a 1-1 replacement. Copy over any frame-related info. */
3260 RTX_FRAME_RELATED_P (new_insn) = 1;
3261
3262 /* Allow the backend to fill in a note during the split. */
3263 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3264 switch (REG_NOTE_KIND (note))
3265 {
3266 case REG_FRAME_RELATED_EXPR:
3267 case REG_CFA_DEF_CFA:
3268 case REG_CFA_ADJUST_CFA:
3269 case REG_CFA_OFFSET:
3270 case REG_CFA_REGISTER:
3271 case REG_CFA_EXPRESSION:
3272 case REG_CFA_RESTORE:
3273 case REG_CFA_SET_VDRAP:
3274 any_note = true;
3275 break;
3276 default:
3277 break;
3278 }
3279
3280 /* If the backend didn't supply a note, copy one over. */
3281 if (!any_note)
3282 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3283 switch (REG_NOTE_KIND (note))
3284 {
3285 case REG_FRAME_RELATED_EXPR:
3286 case REG_CFA_DEF_CFA:
3287 case REG_CFA_ADJUST_CFA:
3288 case REG_CFA_OFFSET:
3289 case REG_CFA_REGISTER:
3290 case REG_CFA_EXPRESSION:
3291 case REG_CFA_RESTORE:
3292 case REG_CFA_SET_VDRAP:
3293 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3294 any_note = true;
3295 break;
3296 default:
3297 break;
3298 }
3299
3300 /* If there still isn't a note, make sure the unwind info sees the
3301 same expression as before the split. */
3302 if (!any_note)
3303 {
3304 rtx old_set, new_set;
3305
3306 /* The old insn had better have been simple, or annotated. */
3307 old_set = single_set (old_insn);
3308 gcc_assert (old_set != NULL);
3309
3310 new_set = single_set (new_insn);
3311 if (!new_set || !rtx_equal_p (new_set, old_set))
3312 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3313 }
3314
3315 /* Copy prologue/epilogue status. This is required in order to keep
3316 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3317 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3318 }
3319
3320 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3321 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3322 cfg-related call notes. */
3323 for (i = 0; i <= match_len; ++i)
3324 {
3325 int j;
3326 rtx note;
3327
3328 j = peep2_buf_position (peep2_current + i);
3329 old_insn = peep2_insn_data[j].insn;
3330 if (!CALL_P (old_insn))
3331 continue;
3332 was_call = true;
3333
3334 new_insn = attempt;
3335 while (new_insn != NULL_RTX)
3336 {
3337 if (CALL_P (new_insn))
3338 break;
3339 new_insn = NEXT_INSN (new_insn);
3340 }
3341
3342 gcc_assert (new_insn != NULL_RTX);
3343
3344 CALL_INSN_FUNCTION_USAGE (new_insn)
3345 = CALL_INSN_FUNCTION_USAGE (old_insn);
3346
3347 for (note = REG_NOTES (old_insn);
3348 note;
3349 note = XEXP (note, 1))
3350 switch (REG_NOTE_KIND (note))
3351 {
3352 case REG_NORETURN:
3353 case REG_SETJMP:
3354 case REG_TM:
3355 add_reg_note (new_insn, REG_NOTE_KIND (note),
3356 XEXP (note, 0));
3357 break;
3358 default:
3359 /* Discard all other reg notes. */
3360 break;
3361 }
3362
3363 /* Croak if there is another call in the sequence. */
3364 while (++i <= match_len)
3365 {
3366 j = peep2_buf_position (peep2_current + i);
3367 old_insn = peep2_insn_data[j].insn;
3368 gcc_assert (!CALL_P (old_insn));
3369 }
3370 break;
3371 }
3372
3373 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3374 move those notes over to the new sequence. */
3375 as_note = NULL;
3376 for (i = match_len; i >= 0; --i)
3377 {
3378 int j = peep2_buf_position (peep2_current + i);
3379 old_insn = peep2_insn_data[j].insn;
3380
3381 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3382 if (as_note)
3383 break;
3384 }
3385
3386 i = peep2_buf_position (peep2_current + match_len);
3387 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3388
3389 /* Replace the old sequence with the new. */
3390 last = emit_insn_after_setloc (attempt,
3391 peep2_insn_data[i].insn,
3392 INSN_LOCATION (peep2_insn_data[i].insn));
3393 before_try = PREV_INSN (insn);
3394 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3395
3396 /* Re-insert the EH_REGION notes. */
3397 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3398 {
3399 edge eh_edge;
3400 edge_iterator ei;
3401
3402 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3403 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3404 break;
3405
3406 if (eh_note)
3407 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3408
3409 if (eh_edge)
3410 for (x = last; x != before_try; x = PREV_INSN (x))
3411 if (x != BB_END (bb)
3412 && (can_throw_internal (x)
3413 || can_nonlocal_goto (x)))
3414 {
3415 edge nfte, nehe;
3416 int flags;
3417
3418 nfte = split_block (bb, x);
3419 flags = (eh_edge->flags
3420 & (EDGE_EH | EDGE_ABNORMAL));
3421 if (CALL_P (x))
3422 flags |= EDGE_ABNORMAL_CALL;
3423 nehe = make_edge (nfte->src, eh_edge->dest,
3424 flags);
3425
3426 nehe->probability = eh_edge->probability;
3427 nfte->probability
3428 = REG_BR_PROB_BASE - nehe->probability;
3429
3430 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3431 bb = nfte->src;
3432 eh_edge = nehe;
3433 }
3434
3435 /* The replacement may have turned a possibly trapping insn into a
3436 non-trapping one. Zap any outgoing edges that are now dead. */
3437 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3438 }
3439
3440 /* Re-insert the ARGS_SIZE notes. */
3441 if (as_note)
3442 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3443
3444 /* If we generated a jump instruction, it won't have
3445 JUMP_LABEL set. Recompute after we're done. */
3446 for (x = last; x != before_try; x = PREV_INSN (x))
3447 if (JUMP_P (x))
3448 {
3449 peep2_do_rebuild_jump_labels = true;
3450 break;
3451 }
3452
3453 return last;
3454 }
3455
3456 /* After performing a replacement in basic block BB, fix up the life
3457 information in our buffer. LAST is the last of the insns that we
3458 emitted as a replacement. PREV is the insn before the start of
3459 the replacement. MATCH_LEN is the number of instructions that were
3460 matched, and which now need to be replaced in the buffer. */
3461
3462 static void
3463 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3464 {
3465 int i = peep2_buf_position (peep2_current + match_len + 1);
3466 rtx x;
3467 regset_head live;
3468
3469 INIT_REG_SET (&live);
3470 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3471
3472 gcc_assert (peep2_current_count >= match_len + 1);
3473 peep2_current_count -= match_len + 1;
3474
3475 x = last;
3476 do
3477 {
3478 if (INSN_P (x))
3479 {
3480 df_insn_rescan (x);
3481 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3482 {
3483 peep2_current_count++;
3484 if (--i < 0)
3485 i = MAX_INSNS_PER_PEEP2;
3486 peep2_insn_data[i].insn = x;
3487 df_simulate_one_insn_backwards (bb, x, &live);
3488 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3489 }
3490 }
3491 x = PREV_INSN (x);
3492 }
3493 while (x != prev);
3494 CLEAR_REG_SET (&live);
3495
3496 peep2_current = i;
3497 }
3498
3499 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3500 Return true if we added it, false otherwise. The caller will try to match
3501 peepholes against the buffer if we return false; otherwise it will try to
3502 add more instructions to the buffer. */
3503
3504 static bool
3505 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3506 {
3507 int pos;
3508
3509 /* Once we have filled the maximum number of insns the buffer can hold,
3510 allow the caller to match the insns against peepholes. We wait until
3511 the buffer is full in case the target has similar peepholes of different
3512 length; we always want to match the longest if possible. */
3513 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3514 return false;
3515
3516 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3517 any other pattern, lest it change the semantics of the frame info. */
3518 if (RTX_FRAME_RELATED_P (insn))
3519 {
3520 /* Let the buffer drain first. */
3521 if (peep2_current_count > 0)
3522 return false;
3523 /* Now the insn will be the only thing in the buffer. */
3524 }
3525
3526 pos = peep2_buf_position (peep2_current + peep2_current_count);
3527 peep2_insn_data[pos].insn = insn;
3528 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3529 peep2_current_count++;
3530
3531 df_simulate_one_insn_forwards (bb, insn, live);
3532 return true;
3533 }
3534
3535 /* Perform the peephole2 optimization pass. */
3536
3537 static void
3538 peephole2_optimize (void)
3539 {
3540 rtx insn;
3541 bitmap live;
3542 int i;
3543 basic_block bb;
3544
3545 peep2_do_cleanup_cfg = false;
3546 peep2_do_rebuild_jump_labels = false;
3547
3548 df_set_flags (DF_LR_RUN_DCE);
3549 df_note_add_problem ();
3550 df_analyze ();
3551
3552 /* Initialize the regsets we're going to use. */
3553 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3554 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3555 search_ofs = 0;
3556 live = BITMAP_ALLOC (&reg_obstack);
3557
3558 FOR_EACH_BB_REVERSE (bb)
3559 {
3560 bool past_end = false;
3561 int pos;
3562
3563 rtl_profile_for_bb (bb);
3564
3565 /* Start up propagation. */
3566 bitmap_copy (live, DF_LR_IN (bb));
3567 df_simulate_initialize_forwards (bb, live);
3568 peep2_reinit_state (live);
3569
3570 insn = BB_HEAD (bb);
3571 for (;;)
3572 {
3573 rtx attempt, head;
3574 int match_len;
3575
3576 if (!past_end && !NONDEBUG_INSN_P (insn))
3577 {
3578 next_insn:
3579 insn = NEXT_INSN (insn);
3580 if (insn == NEXT_INSN (BB_END (bb)))
3581 past_end = true;
3582 continue;
3583 }
3584 if (!past_end && peep2_fill_buffer (bb, insn, live))
3585 goto next_insn;
3586
3587 /* If we did not fill an empty buffer, it signals the end of the
3588 block. */
3589 if (peep2_current_count == 0)
3590 break;
3591
3592 /* The buffer filled to the current maximum, so try to match. */
3593
3594 pos = peep2_buf_position (peep2_current + peep2_current_count);
3595 peep2_insn_data[pos].insn = PEEP2_EOB;
3596 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3597
3598 /* Match the peephole. */
3599 head = peep2_insn_data[peep2_current].insn;
3600 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3601 if (attempt != NULL)
3602 {
3603 rtx last = peep2_attempt (bb, head, match_len, attempt);
3604 if (last)
3605 {
3606 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3607 continue;
3608 }
3609 }
3610
3611 /* No match: advance the buffer by one insn. */
3612 peep2_current = peep2_buf_position (peep2_current + 1);
3613 peep2_current_count--;
3614 }
3615 }
3616
3617 default_rtl_profile ();
3618 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3619 BITMAP_FREE (peep2_insn_data[i].live_before);
3620 BITMAP_FREE (live);
3621 if (peep2_do_rebuild_jump_labels)
3622 rebuild_jump_labels (get_insns ());
3623 }
3624 #endif /* HAVE_peephole2 */
3625
3626 /* Common predicates for use with define_bypass. */
3627
3628 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3629 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3630 must each be either a single_set or a PARALLEL with SETs inside. */
3631
3632 int
3633 store_data_bypass_p (rtx out_insn, rtx in_insn)
3634 {
3635 rtx out_set, in_set;
3636 rtx out_pat, in_pat;
3637 rtx out_exp, in_exp;
3638 int i, j;
3639
3640 in_set = single_set (in_insn);
3641 if (in_set)
3642 {
3643 if (!MEM_P (SET_DEST (in_set)))
3644 return false;
3645
3646 out_set = single_set (out_insn);
3647 if (out_set)
3648 {
3649 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3650 return false;
3651 }
3652 else
3653 {
3654 out_pat = PATTERN (out_insn);
3655
3656 if (GET_CODE (out_pat) != PARALLEL)
3657 return false;
3658
3659 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3660 {
3661 out_exp = XVECEXP (out_pat, 0, i);
3662
3663 if (GET_CODE (out_exp) == CLOBBER)
3664 continue;
3665
3666 gcc_assert (GET_CODE (out_exp) == SET);
3667
3668 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3669 return false;
3670 }
3671 }
3672 }
3673 else
3674 {
3675 in_pat = PATTERN (in_insn);
3676 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3677
3678 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3679 {
3680 in_exp = XVECEXP (in_pat, 0, i);
3681
3682 if (GET_CODE (in_exp) == CLOBBER)
3683 continue;
3684
3685 gcc_assert (GET_CODE (in_exp) == SET);
3686
3687 if (!MEM_P (SET_DEST (in_exp)))
3688 return false;
3689
3690 out_set = single_set (out_insn);
3691 if (out_set)
3692 {
3693 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3694 return false;
3695 }
3696 else
3697 {
3698 out_pat = PATTERN (out_insn);
3699 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3700
3701 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3702 {
3703 out_exp = XVECEXP (out_pat, 0, j);
3704
3705 if (GET_CODE (out_exp) == CLOBBER)
3706 continue;
3707
3708 gcc_assert (GET_CODE (out_exp) == SET);
3709
3710 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3711 return false;
3712 }
3713 }
3714 }
3715 }
3716
3717 return true;
3718 }
3719
3720 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3721 condition, and not in the THEN or ELSE branch. OUT_INSN may be a single or
3722 a multiple set; IN_INSN should be a single_set for the result to be
3723 meaningful, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3724
3725 int
3726 if_test_bypass_p (rtx out_insn, rtx in_insn)
3727 {
3728 rtx out_set, in_set;
3729
3730 in_set = single_set (in_insn);
3731 if (! in_set)
3732 {
3733 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3734 return false;
3735 }
3736
3737 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3738 return false;
3739 in_set = SET_SRC (in_set);
3740
3741 out_set = single_set (out_insn);
3742 if (out_set)
3743 {
3744 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3745 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3746 return false;
3747 }
3748 else
3749 {
3750 rtx out_pat;
3751 int i;
3752
3753 out_pat = PATTERN (out_insn);
3754 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3755
3756 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3757 {
3758 rtx exp = XVECEXP (out_pat, 0, i);
3759
3760 if (GET_CODE (exp) == CLOBBER)
3761 continue;
3762
3763 gcc_assert (GET_CODE (exp) == SET);
3764
3765 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3766 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3767 return false;
3768 }
3769 }
3770
3771 return true;
3772 }
3773 \f
3774 static bool
3775 gate_handle_peephole2 (void)
3776 {
3777 return (optimize > 0 && flag_peephole2);
3778 }
3779
3780 static unsigned int
3781 rest_of_handle_peephole2 (void)
3782 {
3783 #ifdef HAVE_peephole2
3784 peephole2_optimize ();
3785 #endif
3786 return 0;
3787 }
3788
3789 namespace {
3790
3791 const pass_data pass_data_peephole2 =
3792 {
3793 RTL_PASS, /* type */
3794 "peephole2", /* name */
3795 OPTGROUP_NONE, /* optinfo_flags */
3796 true, /* has_gate */
3797 true, /* has_execute */
3798 TV_PEEPHOLE2, /* tv_id */
3799 0, /* properties_required */
3800 0, /* properties_provided */
3801 0, /* properties_destroyed */
3802 0, /* todo_flags_start */
3803 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3804 };
3805
3806 class pass_peephole2 : public rtl_opt_pass
3807 {
3808 public:
3809 pass_peephole2 (gcc::context *ctxt)
3810 : rtl_opt_pass (pass_data_peephole2, ctxt)
3811 {}
3812
3813 /* opt_pass methods: */
3814 /* The epiphany backend creates a second instance of this pass, so we need
3815 a clone method. */
3816 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3817 bool gate () { return gate_handle_peephole2 (); }
3818 unsigned int execute () { return rest_of_handle_peephole2 (); }
3819
3820 }; // class pass_peephole2
3821
3822 } // anon namespace
3823
3824 rtl_opt_pass *
3825 make_pass_peephole2 (gcc::context *ctxt)
3826 {
3827 return new pass_peephole2 (ctxt);
3828 }
3829
3830 static unsigned int
3831 rest_of_handle_split_all_insns (void)
3832 {
3833 split_all_insns ();
3834 return 0;
3835 }
3836
3837 namespace {
3838
3839 const pass_data pass_data_split_all_insns =
3840 {
3841 RTL_PASS, /* type */
3842 "split1", /* name */
3843 OPTGROUP_NONE, /* optinfo_flags */
3844 false, /* has_gate */
3845 true, /* has_execute */
3846 TV_NONE, /* tv_id */
3847 0, /* properties_required */
3848 0, /* properties_provided */
3849 0, /* properties_destroyed */
3850 0, /* todo_flags_start */
3851 0, /* todo_flags_finish */
3852 };
3853
3854 class pass_split_all_insns : public rtl_opt_pass
3855 {
3856 public:
3857 pass_split_all_insns (gcc::context *ctxt)
3858 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3859 {}
3860
3861 /* opt_pass methods: */
3862 /* The epiphany backend creates a second instance of this pass, so
3863 we need a clone method. */
3864 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3865 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3866
3867 }; // class pass_split_all_insns
3868
3869 } // anon namespace
3870
3871 rtl_opt_pass *
3872 make_pass_split_all_insns (gcc::context *ctxt)
3873 {
3874 return new pass_split_all_insns (ctxt);
3875 }
3876
3877 static unsigned int
3878 rest_of_handle_split_after_reload (void)
3879 {
3880 /* If optimizing, then go ahead and split insns now. */
3881 #ifndef STACK_REGS
3882 if (optimize > 0)
3883 #endif
3884 split_all_insns ();
3885 return 0;
3886 }
3887
3888 namespace {
3889
3890 const pass_data pass_data_split_after_reload =
3891 {
3892 RTL_PASS, /* type */
3893 "split2", /* name */
3894 OPTGROUP_NONE, /* optinfo_flags */
3895 false, /* has_gate */
3896 true, /* has_execute */
3897 TV_NONE, /* tv_id */
3898 0, /* properties_required */
3899 0, /* properties_provided */
3900 0, /* properties_destroyed */
3901 0, /* todo_flags_start */
3902 0, /* todo_flags_finish */
3903 };
3904
3905 class pass_split_after_reload : public rtl_opt_pass
3906 {
3907 public:
3908 pass_split_after_reload (gcc::context *ctxt)
3909 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3910 {}
3911
3912 /* opt_pass methods: */
3913 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3914
3915 }; // class pass_split_after_reload
3916
3917 } // anon namespace
3918
3919 rtl_opt_pass *
3920 make_pass_split_after_reload (gcc::context *ctxt)
3921 {
3922 return new pass_split_after_reload (ctxt);
3923 }
3924
3925 static bool
3926 gate_handle_split_before_regstack (void)
3927 {
3928 #if HAVE_ATTR_length && defined (STACK_REGS)
3929 /* If flow2 creates new instructions which need splitting, and
3930 scheduling after reload is not done, they might not be split
3931 until final, which does not allow splitting when
3932 HAVE_ATTR_length is defined. */
3933 # ifdef INSN_SCHEDULING
3934 return (optimize && !flag_schedule_insns_after_reload);
3935 # else
3936 return (optimize);
3937 # endif
3938 #else
3939 return 0;
3940 #endif
3941 }
3942
3943 static unsigned int
3944 rest_of_handle_split_before_regstack (void)
3945 {
3946 split_all_insns ();
3947 return 0;
3948 }
3949
3950 namespace {
3951
3952 const pass_data pass_data_split_before_regstack =
3953 {
3954 RTL_PASS, /* type */
3955 "split3", /* name */
3956 OPTGROUP_NONE, /* optinfo_flags */
3957 true, /* has_gate */
3958 true, /* has_execute */
3959 TV_NONE, /* tv_id */
3960 0, /* properties_required */
3961 0, /* properties_provided */
3962 0, /* properties_destroyed */
3963 0, /* todo_flags_start */
3964 0, /* todo_flags_finish */
3965 };
3966
3967 class pass_split_before_regstack : public rtl_opt_pass
3968 {
3969 public:
3970 pass_split_before_regstack (gcc::context *ctxt)
3971 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3972 {}
3973
3974 /* opt_pass methods: */
3975 bool gate () { return gate_handle_split_before_regstack (); }
3976 unsigned int execute () {
3977 return rest_of_handle_split_before_regstack ();
3978 }
3979
3980 }; // class pass_split_before_regstack
3981
3982 } // anon namespace
3983
3984 rtl_opt_pass *
3985 make_pass_split_before_regstack (gcc::context *ctxt)
3986 {
3987 return new pass_split_before_regstack (ctxt);
3988 }
3989
3990 static bool
3991 gate_handle_split_before_sched2 (void)
3992 {
3993 #ifdef INSN_SCHEDULING
3994 return optimize > 0 && flag_schedule_insns_after_reload;
3995 #else
3996 return 0;
3997 #endif
3998 }
3999
4000 static unsigned int
4001 rest_of_handle_split_before_sched2 (void)
4002 {
4003 #ifdef INSN_SCHEDULING
4004 split_all_insns ();
4005 #endif
4006 return 0;
4007 }
4008
4009 namespace {
4010
4011 const pass_data pass_data_split_before_sched2 =
4012 {
4013 RTL_PASS, /* type */
4014 "split4", /* name */
4015 OPTGROUP_NONE, /* optinfo_flags */
4016 true, /* has_gate */
4017 true, /* has_execute */
4018 TV_NONE, /* tv_id */
4019 0, /* properties_required */
4020 0, /* properties_provided */
4021 0, /* properties_destroyed */
4022 0, /* todo_flags_start */
4023 TODO_verify_flow, /* todo_flags_finish */
4024 };
4025
4026 class pass_split_before_sched2 : public rtl_opt_pass
4027 {
4028 public:
4029 pass_split_before_sched2 (gcc::context *ctxt)
4030 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4031 {}
4032
4033 /* opt_pass methods: */
4034 bool gate () { return gate_handle_split_before_sched2 (); }
4035 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4036
4037 }; // class pass_split_before_sched2
4038
4039 } // anon namespace
4040
4041 rtl_opt_pass *
4042 make_pass_split_before_sched2 (gcc::context *ctxt)
4043 {
4044 return new pass_split_before_sched2 (ctxt);
4045 }
4046
4047 /* The placement of the splitting that we do for shorten_branches
4048 depends on whether regstack is used by the target or not. */
4049 static bool
4050 gate_do_final_split (void)
4051 {
4052 #if HAVE_ATTR_length && !defined (STACK_REGS)
4053 return 1;
4054 #else
4055 return 0;
4056 #endif
4057 }
4058
4059 namespace {
4060
4061 const pass_data pass_data_split_for_shorten_branches =
4062 {
4063 RTL_PASS, /* type */
4064 "split5", /* name */
4065 OPTGROUP_NONE, /* optinfo_flags */
4066 true, /* has_gate */
4067 true, /* has_execute */
4068 TV_NONE, /* tv_id */
4069 0, /* properties_required */
4070 0, /* properties_provided */
4071 0, /* properties_destroyed */
4072 0, /* todo_flags_start */
4073 TODO_verify_rtl_sharing, /* todo_flags_finish */
4074 };
4075
4076 class pass_split_for_shorten_branches : public rtl_opt_pass
4077 {
4078 public:
4079 pass_split_for_shorten_branches (gcc::context *ctxt)
4080 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4081 {}
4082
4083 /* opt_pass methods: */
4084 bool gate () { return gate_do_final_split (); }
4085 unsigned int execute () { return split_all_insns_noflow (); }
4086
4087 }; // class pass_split_for_shorten_branches
4088
4089 } // anon namespace
4090
4091 rtl_opt_pass *
4092 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4093 {
4094 return new pass_split_for_shorten_branches (ctxt);
4095 }