/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

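/* Illustrative usage (a hypothetical caller, not code from this file):
   to rewrite two operands of INSN atomically, queue both edits with
   IN_GROUP nonzero and commit them together; if the modified insn no
   longer matches anything, every queued edit is undone.  NEW0 and NEW1
   stand for replacement rtxes the caller has already built.

     validate_change (insn, &XEXP (SET_SRC (PATTERN (insn)), 0), new0, 1);
     validate_change (insn, &XEXP (SET_SRC (PATTERN (insn)), 1), new1, 1);
     if (! apply_change_group ())
       ;  (INSN is unchanged at this point.)

   A single edit may instead pass IN_GROUP == 0, in which case
   validate_change itself validates and either keeps or reverts it.  */
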
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending ()
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

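/* Illustrative checkpointing sketch (hypothetical caller): record the
   group's depth with num_validated_changes before queueing speculative
   edits, then retract only those edits if a later step fails, leaving
   earlier queued changes in place.

     int checkpoint = num_validated_changes ();
     validate_change (insn, loc, speculative_rtx, 1);
     if (! some_further_test ())
       cancel_changes (checkpoint);

   SPECULATIVE_RTX and some_further_test stand for whatever the caller
   is attempting.  */
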
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

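/* Illustrative call (hypothetical names): replace every use of pseudo
   OLD_REG in INSN with an equivalent NEW_SRC found by some analysis,
   keeping the change only if INSN still matches an insn pattern:

     if (validate_replace_rtx (old_reg, new_src, insn))
       ...  (INSN now uses NEW_SRC throughout.)

   On failure INSN is left exactly as it was.  */
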
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;   /* Old RTX */
  rtx to;     /* New RTX */
  rtx insn;   /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */
int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
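
/* The difference from validate_replace_rtx matters for stores: in a
   hypothetical (set (reg A) (plus (reg B) (reg B))) with FROM == (reg B),
   both source uses are rewritten here, while an occurrence of (reg B)
   as a SET_DEST would be left alone, because note_uses only walks
   locations that are read.  */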
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
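
/* Illustrative use (hypothetical caller, in the spirit of combine):
   test whether the value INSN computes into DEST_REG feeds exactly one
   later insn, and if so patch that single use in place.

     rtx use_insn;
     rtx *usep = find_single_use (dest_reg, insn, &use_insn);
     if (usep && validate_change (use_insn, usep, replacement, 0))
       ...

   REPLACEMENT stands for an rtx the caller knows to be equivalent.  */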
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
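
/* Some concrete cases (hypothetical SImode operands): (reg:SI 60),
   (const_int 42) and (mem:SI (reg:SI 61)) are all general operands on a
   target whose GO_IF_LEGITIMATE_ADDRESS accepts a register address,
   while a volatile MEM is rejected whenever volatile_ok is zero, and a
   paradoxical float subreg is rejected by the checks above.  */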
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

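/* For instance, on a target where the stack grows downward and
   PUSH_ROUNDING is the identity, an SImode push operand has the shape

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a mode whose size the target rounds up must instead appear as
   (mem (pre_modify (reg sp) (plus (reg sp) (const_int -N)))) with N the
   rounded size.  (A sketch; the exact forms are target-dependent.)  */
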
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

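/* Worked example (hypothetical statement): for
   asm ("..." : "=r" (a), "=r" (b) : "g" (c)); the body is a PARALLEL of
   two SETs whose sources share a single ASM_OPERANDS with one input,
   so the scan above finds n_sets == 2 and returns 1 + 2 == 3.  */
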
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

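/* Callers typically size the output arrays with asm_noperands first, as
   check_asm_operands does earlier in this file; a minimal sketch:

     int n = asm_noperands (body);
     rtx *ops = (rtx *) alloca (n * sizeof (rtx));
     const char **cons = (const char **) alloca (n * sizeof (char *));
     decode_asm_operands (body, ops, NULL, cons, NULL);

   Any of the output pointers may be passed as 0 when that particular
   datum is not wanted.  */
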
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            {
              /* Every memory operand can be reloaded to fit.  */
              if (memory_operand (op, VOIDmode))
                result = 1;
            }
          if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            {
              /* Every address operand can be reloaded to fit.  */
              if (address_operand (op, VOIDmode))
                result = 1;
            }
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
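
/* Example: with OP = (const_int 5), the constraint string "rm" yields 0
   (neither alternative can take a constant), while "rI" would yield 1
   on a target whose 'I' range includes 5.  A lone matching-digit
   constraint such as "0" can only produce the inconclusive -1.  */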
1883 \f
1884 /* Given an rtx *P, if it is a sum containing an integer constant term,
1885 return the location (type rtx *) of the pointer to that constant term.
1886 Otherwise, return a null pointer. */
1887
1888 rtx *
1889 find_constant_term_loc (p)
1890 rtx *p;
1891 {
1892 rtx *tem;
1893 enum rtx_code code = GET_CODE (*p);
1894
1895 /* If *P IS such a constant term, P is its location. */
1896
1897 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1898 || code == CONST)
1899 return p;
1900
1901 /* Otherwise, if not a sum, it has no constant term. */
1902
1903 if (GET_CODE (*p) != PLUS)
1904 return 0;
1905
1906 /* If one of the summands is constant, return its location. */
1907
1908 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1909 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1910 return p;
1911
1912 /* Otherwise, check each summand for containing a constant term. */
1913
1914 if (XEXP (*p, 0) != 0)
1915 {
1916 tem = find_constant_term_loc (&XEXP (*p, 0));
1917 if (tem != 0)
1918 return tem;
1919 }
1920
1921 if (XEXP (*p, 1) != 0)
1922 {
1923 tem = find_constant_term_loc (&XEXP (*p, 1));
1924 if (tem != 0)
1925 return tem;
1926 }
1927
1928 return 0;
1929 }
1930 \f
1931 /* Return 1 if OP is a memory reference
1932 whose address contains no side effects
1933 and remains valid after the addition
1934 of a positive integer less than the
1935 size of the object being referenced.
1936
1937 We assume that the original address is valid and do not check it.
1938
1939 This uses strict_memory_address_p as a subroutine, so
1940 don't use it before reload. */
1941
1942 int
1943 offsettable_memref_p (op)
1944 rtx op;
1945 {
1946 return ((GET_CODE (op) == MEM)
1947 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1948 }
1949
1950 /* Similar, but don't require a strictly valid mem ref:
1951 consider pseudo-regs valid as index or base regs. */
1952
1953 int
1954 offsettable_nonstrict_memref_p (op)
1955 rtx op;
1956 {
1957 return ((GET_CODE (op) == MEM)
1958 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1959 }
1960
1961 /* Return 1 if Y is a memory address which contains no side effects
1962 and would remain valid after the addition of a positive integer
1963 less than the size of that mode.
1964
1965 We assume that the original address is valid and do not check it.
1966 We do check that it is valid for narrower modes.
1967
1968 If STRICTP is nonzero, we require a strictly valid address,
1969 for the sake of use in reload.c. */
1970
1971 int
1972 offsettable_address_p (strictp, mode, y)
1973 int strictp;
1974 enum machine_mode mode;
1975 rtx y;
1976 {
1977 enum rtx_code ycode = GET_CODE (y);
1978 rtx z;
1979 rtx y1 = y;
1980 rtx *y2;
1981 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1982 (strictp ? strict_memory_address_p : memory_address_p);
1983 unsigned int mode_sz = GET_MODE_SIZE (mode);
1984
1985 if (CONSTANT_ADDRESS_P (y))
1986 return 1;
1987
1988 /* Adjusting an offsettable address involves changing to a narrower mode.
1989 Make sure that's OK. */
1990
1991 if (mode_dependent_address_p (y))
1992 return 0;
1993
1994 /* ??? How much offset does an offsettable BLKmode reference need?
1995 Clearly that depends on the situation in which it's being used.
1996 However, the current situation in which we test 0xffffffff is
1997 less than ideal. Caveat user. */
1998 if (mode_sz == 0)
1999 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2000
2001 /* If the expression contains a constant term,
2002 see if it remains valid when max possible offset is added. */
2003
2004 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2005 {
2006 int good;
2007
2008 y1 = *y2;
2009 *y2 = plus_constant (*y2, mode_sz - 1);
2010 /* Use QImode because an odd displacement may be automatically invalid
2011 for any wider mode. But it should be valid for a single byte. */
2012 good = (*addressp) (QImode, y);
2013
2014 /* In any case, restore old contents of memory. */
2015 *y2 = y1;
2016 return good;
2017 }
2018
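  /* An address that is itself an autoincrement (rtx class 'a') modifies
     a register, so no displacement can be folded into it.  */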
2019 if (GET_RTX_CLASS (ycode) == 'a')
2020 return 0;
2021
2022 /* The offset added here is chosen as the maximum offset that
2023 any instruction could need to add when operating on something
2024 of the specified mode. We assume that if Y and Y+c are
2025 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2026 go inside a LO_SUM here, so we do so as well. */
2027 if (GET_CODE (y) == LO_SUM
2028 && mode != BLKmode
2029 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2030 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
2031 plus_constant (XEXP (y, 1), mode_sz - 1));
2032 else
2033 z = plus_constant (y, mode_sz - 1);
2034
2035 /* Use QImode because an odd displacement may be automatically invalid
2036 for any wider mode. But it should be valid for a single byte. */
2037 return (*addressp) (QImode, z);
2038 }
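/* Illustrative walk-through of the check above: for MODE == SImode and
   Y == (plus:SI (reg:SI 3) (const_int 8)), mode_sz is 4, so we test
   whether (plus:SI (reg:SI 3) (const_int 11)) is a valid QImode address.
   If the target accepts both displacements, every byte of the SImode
   reference is reachable.  */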
2039
2040 /* Return 1 if ADDR is an address-expression whose effect depends
2041 on the mode of the memory reference it is used in.
2042
2043 Autoincrement addressing is a typical example of mode-dependence
2044 because the amount of the increment depends on the mode. */
2045
2046 int
2047 mode_dependent_address_p (addr)
2048 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2049 {
2050 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2051 return 0;
 /* Label `win' may or may not be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
2053 win: ATTRIBUTE_UNUSED_LABEL
2054 return 1;
2055 }
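/* For example, (post_inc:SI (reg:SI 1)) is mode-dependent: the amount
   added to the register is the GET_MODE_SIZE of the containing MEM, so
   the same address rtx means different things in QImode and SImode.  */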
2056
2057 /* Return 1 if OP is a general operand
   other than a memory ref with a mode-dependent address.  */
2059
2060 int
2061 mode_independent_operand (op, mode)
2062 enum machine_mode mode;
2063 rtx op;
2064 {
2065 rtx addr;
2066
2067 if (! general_operand (op, mode))
2068 return 0;
2069
2070 if (GET_CODE (op) != MEM)
2071 return 1;
2072
2073 addr = XEXP (op, 0);
2074 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2075 return 1;
 /* Label `lose' may or may not be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
2077 lose: ATTRIBUTE_UNUSED_LABEL
2078 return 0;
2079 }
2080 \f
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
2085 void
2086 extract_insn_cached (insn)
2087 rtx insn;
2088 {
2089 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2090 return;
2091 extract_insn (insn);
2092 recog_data.insn = insn;
2093 }
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
2096 void
2097 extract_constrain_insn_cached (insn)
2098 rtx insn;
2099 {
2100 extract_insn_cached (insn);
2101 if (which_alternative == -1
2102 && !constrain_operands (reload_completed))
2103 fatal_insn_not_found (insn);
2104 }
/* Do cached constrain_operands; reuse the alternative already matched
   for this insn, if any, instead of checking the constraints again.  */
2106 int
2107 constrain_operands_cached (strict)
2108 int strict;
2109 {
2110 if (which_alternative == -1)
2111 return constrain_operands (strict);
2112 else
2113 return 1;
2114 }
2115 \f
2116 /* Analyze INSN and fill in recog_data. */
2117
2118 void
2119 extract_insn (insn)
2120 rtx insn;
2121 {
2122 int i;
2123 int icode;
2124 int noperands;
2125 rtx body = PATTERN (insn);
2126
2127 recog_data.insn = NULL;
2128 recog_data.n_operands = 0;
2129 recog_data.n_alternatives = 0;
2130 recog_data.n_dups = 0;
2131 which_alternative = -1;
2132
2133 switch (GET_CODE (body))
2134 {
2135 case USE:
2136 case CLOBBER:
2137 case ASM_INPUT:
2138 case ADDR_VEC:
2139 case ADDR_DIFF_VEC:
2140 return;
2141
2142 case SET:
2143 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2144 goto asm_insn;
2145 else
2146 goto normal_insn;
2147 case PARALLEL:
2148 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2149 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2150 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2151 goto asm_insn;
2152 else
2153 goto normal_insn;
2154 case ASM_OPERANDS:
2155 asm_insn:
2156 recog_data.n_operands = noperands = asm_noperands (body);
2157 if (noperands >= 0)
2158 {
2159 /* This insn is an `asm' with operands. */
2160
2161 /* expand_asm_operands makes sure there aren't too many operands. */
2162 if (noperands > MAX_RECOG_OPERANDS)
2163 abort ();
2164
2165 /* Now get the operand values and constraints out of the insn. */
2166 decode_asm_operands (body, recog_data.operand,
2167 recog_data.operand_loc,
2168 recog_data.constraints,
2169 recog_data.operand_mode);
2170 if (noperands > 0)
2171 {
2172 const char *p = recog_data.constraints[0];
2173 recog_data.n_alternatives = 1;
2174 while (*p)
2175 recog_data.n_alternatives += (*p++ == ',');
2176 }
2177 break;
2178 }
2179 fatal_insn_not_found (insn);
2180
2181 default:
2182 normal_insn:
2183 /* Ordinary insn: recognize it, get the operands via insn_extract
2184 and get the constraints. */
2185
2186 icode = recog_memoized (insn);
2187 if (icode < 0)
2188 fatal_insn_not_found (insn);
2189
2190 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2191 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2192 recog_data.n_dups = insn_data[icode].n_dups;
2193
2194 insn_extract (insn);
2195
2196 for (i = 0; i < noperands; i++)
2197 {
2198 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2199 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* A VOIDmode match_operand gets its mode from its real operand.  */
2201 if (recog_data.operand_mode[i] == VOIDmode)
2202 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2203 }
2204 }
2205 for (i = 0; i < noperands; i++)
2206 recog_data.operand_type[i]
2207 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2208 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2209 : OP_IN);
2210
2211 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2212 abort ();
2213 }
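/* A typical caller walks the operands after extraction, along these
   lines (illustrative sketch; note_operand is a hypothetical per-operand
   handler, not part of recog.c):

	extract_insn (insn);
	for (i = 0; i < recog_data.n_operands; i++)
	  note_operand (recog_data.operand[i],
			recog_data.operand_mode[i],
			recog_data.operand_type[i]);

   Code in reload.c and final.c follows this pattern.  */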
2214
2215 /* After calling extract_insn, you can use this function to extract some
2216 information from the constraint strings into a more usable form.
2217 The collected data is stored in recog_op_alt. */
2218 void
2219 preprocess_constraints ()
2220 {
2221 int i;
2222
2223 memset (recog_op_alt, 0, sizeof recog_op_alt);
2224 for (i = 0; i < recog_data.n_operands; i++)
2225 {
2226 int j;
2227 struct operand_alternative *op_alt;
2228 const char *p = recog_data.constraints[i];
2229
2230 op_alt = recog_op_alt[i];
2231
2232 for (j = 0; j < recog_data.n_alternatives; j++)
2233 {
2234 op_alt[j].class = NO_REGS;
2235 op_alt[j].constraint = p;
2236 op_alt[j].matches = -1;
2237 op_alt[j].matched = -1;
2238
2239 if (*p == '\0' || *p == ',')
2240 {
2241 op_alt[j].anything_ok = 1;
2242 continue;
2243 }
2244
2245 for (;;)
2246 {
2247 char c = *p;
2248 if (c == '#')
2249 do
2250 c = *++p;
2251 while (c != ',' && c != '\0');
2252 if (c == ',' || c == '\0')
2253 {
2254 p++;
2255 break;
2256 }
2257
2258 switch (c)
2259 {
2260 case '=': case '+': case '*': case '%':
2261 case 'E': case 'F': case 'G': case 'H':
2262 case 's': case 'i': case 'n':
2263 case 'I': case 'J': case 'K': case 'L':
2264 case 'M': case 'N': case 'O': case 'P':
2265 /* These don't say anything we care about. */
2266 break;
2267
2268 case '?':
2269 op_alt[j].reject += 6;
2270 break;
2271 case '!':
2272 op_alt[j].reject += 600;
2273 break;
2274 case '&':
2275 op_alt[j].earlyclobber = 1;
2276 break;
2277
2278 case '0': case '1': case '2': case '3': case '4':
2279 case '5': case '6': case '7': case '8': case '9':
2280 {
2281 char *end;
2282 op_alt[j].matches = strtoul (p, &end, 10);
2283 recog_op_alt[op_alt[j].matches][j].matched = i;
2284 p = end;
2285 }
2286 continue;
2287
2288 case 'm':
2289 op_alt[j].memory_ok = 1;
2290 break;
2291 case '<':
2292 op_alt[j].decmem_ok = 1;
2293 break;
2294 case '>':
2295 op_alt[j].incmem_ok = 1;
2296 break;
2297 case 'V':
2298 op_alt[j].nonoffmem_ok = 1;
2299 break;
2300 case 'o':
2301 op_alt[j].offmem_ok = 1;
2302 break;
2303 case 'X':
2304 op_alt[j].anything_ok = 1;
2305 break;
2306
2307 case 'p':
2308 op_alt[j].is_address = 1;
2309 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2310 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2311 break;
2312
2313 case 'g': case 'r':
2314 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2315 break;
2316
2317 default:
2318 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2319 {
2320 op_alt[j].memory_ok = 1;
2321 break;
2322 }
2323 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2324 {
2325 op_alt[j].is_address = 1;
2326 op_alt[j].class
2327 = (reg_class_subunion
2328 [(int) op_alt[j].class]
2329 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2330 break;
2331 }
2332
2333 op_alt[j].class
2334 = (reg_class_subunion
2335 [(int) op_alt[j].class]
2336 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2337 break;
2338 }
2339 p += CONSTRAINT_LEN (c, p);
2340 }
2341 }
2342 }
2343 }
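/* Illustrative sketch of consuming the preprocessed data: after
   extract_insn and preprocess_constraints, a pass can ask whether
   operand I allows memory in alternative J without reparsing the
   constraint string (I, J and MEM_OK are hypothetical here):

	preprocess_constraints ();
	mem_ok = (recog_op_alt[i][j].memory_ok
		  || recog_op_alt[i][j].anything_ok);

   regrename.c makes this kind of query against recog_op_alt.  */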
2344
2345 /* Check the operands of an insn against the insn's operand constraints
2346 and return 1 if they are valid.
2347 The information about the insn's operands, constraints, operand modes
2348 etc. is obtained from the global variables set up by extract_insn.
2349
2350 WHICH_ALTERNATIVE is set to a number which indicates which
2351 alternative of constraints was matched: 0 for the first alternative,
2352 1 for the next, etc.
2353
2354 In addition, when two operands are required to match
2355 and it happens that the output operand is (reg) while the
2356 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2357 make the output operand look like the input.
2358 This is because the output operand is the one the template will print.
2359
2360 This is used in final, just before printing the assembler code and by
2361 the routines that determine an insn's attribute.
2362
2363 If STRICT is a positive nonzero value, it means that we have been
2364 called after reload has been completed. In that case, we must
2365 do all checks strictly. If it is zero, it means that we have been called
2366 before reload has completed. In that case, we first try to see if we can
2367 find an alternative that matches strictly. If not, we try again, this
2368 time assuming that reload will fix up the insn. This provides a "best
2369 guess" for the alternative and is used to compute attributes of insns prior
2370 to reload. A negative value of STRICT is used for this internal call. */
2371
2372 struct funny_match
2373 {
2374 int this, other;
2375 };
2376
2377 int
2378 constrain_operands (strict)
2379 int strict;
2380 {
2381 const char *constraints[MAX_RECOG_OPERANDS];
2382 int matching_operands[MAX_RECOG_OPERANDS];
2383 int earlyclobber[MAX_RECOG_OPERANDS];
2384 int c;
2385
2386 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2387 int funny_match_index;
2388
2389 which_alternative = 0;
2390 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2391 return 1;
2392
2393 for (c = 0; c < recog_data.n_operands; c++)
2394 {
2395 constraints[c] = recog_data.constraints[c];
2396 matching_operands[c] = -1;
2397 }
2398
2399 do
2400 {
2401 int opno;
2402 int lose = 0;
2403 funny_match_index = 0;
2404
2405 for (opno = 0; opno < recog_data.n_operands; opno++)
2406 {
2407 rtx op = recog_data.operand[opno];
2408 enum machine_mode mode = GET_MODE (op);
2409 const char *p = constraints[opno];
2410 int offset = 0;
2411 int win = 0;
2412 int val;
2413 int len;
2414
2415 earlyclobber[opno] = 0;
2416
2417 /* A unary operator may be accepted by the predicate, but it
2418 is irrelevant for matching constraints. */
2419 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2420 op = XEXP (op, 0);
2421
2422 if (GET_CODE (op) == SUBREG)
2423 {
2424 if (GET_CODE (SUBREG_REG (op)) == REG
2425 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2426 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2427 GET_MODE (SUBREG_REG (op)),
2428 SUBREG_BYTE (op),
2429 GET_MODE (op));
2430 op = SUBREG_REG (op);
2431 }
2432
2433 /* An empty constraint or empty alternative
2434 allows anything which matched the pattern. */
2435 if (*p == 0 || *p == ',')
2436 win = 1;
2437
2438 do
2439 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2440 {
2441 case '\0':
2442 len = 0;
2443 break;
2444 case ',':
2445 c = '\0';
2446 break;
2447
2448 case '?': case '!': case '*': case '%':
2449 case '=': case '+':
2450 break;
2451
2452 case '#':
2453 /* Ignore rest of this alternative as far as
2454 constraint checking is concerned. */
2455 do
2456 p++;
2457 while (*p && *p != ',');
2458 len = 0;
2459 break;
2460
2461 case '&':
2462 earlyclobber[opno] = 1;
2463 break;
2464
2465 case '0': case '1': case '2': case '3': case '4':
2466 case '5': case '6': case '7': case '8': case '9':
2467 {
2468 /* This operand must be the same as a previous one.
2469 This kind of constraint is used for instructions such
2470 as add when they take only two operands.
2471
2472 Note that the lower-numbered operand is passed first.
2473
2474 If we are not testing strictly, assume that this
2475 constraint will be satisfied. */
2476
2477 char *end;
2478 int match;
2479
2480 match = strtoul (p, &end, 10);
2481 p = end;
2482
2483 if (strict < 0)
2484 val = 1;
2485 else
2486 {
2487 rtx op1 = recog_data.operand[match];
2488 rtx op2 = recog_data.operand[opno];
2489
2490 /* A unary operator may be accepted by the predicate,
2491 but it is irrelevant for matching constraints. */
2492 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2493 op1 = XEXP (op1, 0);
2494 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2495 op2 = XEXP (op2, 0);
2496
2497 val = operands_match_p (op1, op2);
2498 }
2499
2500 matching_operands[opno] = match;
2501 matching_operands[match] = opno;
2502
2503 if (val != 0)
2504 win = 1;
2505
2506 /* If output is *x and input is *--x, arrange later
2507 to change the output to *--x as well, since the
2508 output op is the one that will be printed. */
2509 if (val == 2 && strict > 0)
2510 {
2511 funny_match[funny_match_index].this = opno;
2512 funny_match[funny_match_index++].other = match;
2513 }
2514 }
2515 len = 0;
2516 break;
2517
2518 case 'p':
	    /* `p' is used for address operands.  When we are called by
	       gen_reload, no one will have checked that the address is
	       strictly valid, i.e., that all pseudos requiring hard regs
	       have gotten them.  */
2523 if (strict <= 0
2524 || (strict_memory_address_p (recog_data.operand_mode[opno],
2525 op)))
2526 win = 1;
2527 break;
2528
2529 /* No need to check general_operand again;
2530 it was done in insn-recog.c. */
2531 case 'g':
2532 /* Anything goes unless it is a REG and really has a hard reg
2533 but the hard reg is not in the class GENERAL_REGS. */
2534 if (strict < 0
2535 || GENERAL_REGS == ALL_REGS
2536 || GET_CODE (op) != REG
2537 || (reload_in_progress
2538 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2539 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2540 win = 1;
2541 break;
2542
2543 case 'X':
2544 /* This is used for a MATCH_SCRATCH in the cases when
2545 we don't actually need anything. So anything goes
2546 any time. */
2547 win = 1;
2548 break;
2549
2550 case 'm':
2551 if (GET_CODE (op) == MEM
2552 /* Before reload, accept what reload can turn into mem. */
2553 || (strict < 0 && CONSTANT_P (op))
		/* During reload, accept a pseudo.  */
2555 || (reload_in_progress && GET_CODE (op) == REG
2556 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2557 win = 1;
2558 break;
2559
2560 case '<':
2561 if (GET_CODE (op) == MEM
2562 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2563 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2564 win = 1;
2565 break;
2566
2567 case '>':
2568 if (GET_CODE (op) == MEM
2569 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2570 || GET_CODE (XEXP (op, 0)) == POST_INC))
2571 win = 1;
2572 break;
2573
2574 case 'E':
2575 case 'F':
2576 if (GET_CODE (op) == CONST_DOUBLE
2577 || (GET_CODE (op) == CONST_VECTOR
2578 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2579 win = 1;
2580 break;
2581
2582 case 'G':
2583 case 'H':
2584 if (GET_CODE (op) == CONST_DOUBLE
2585 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2586 win = 1;
2587 break;
2588
2589 case 's':
2590 if (GET_CODE (op) == CONST_INT
2591 || (GET_CODE (op) == CONST_DOUBLE
2592 && GET_MODE (op) == VOIDmode))
2593 break;
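	      /* Fall through: any other constant is symbolic, so the
		 CONSTANT_P test below also serves `s'.  */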
2594 case 'i':
2595 if (CONSTANT_P (op))
2596 win = 1;
2597 break;
2598
2599 case 'n':
2600 if (GET_CODE (op) == CONST_INT
2601 || (GET_CODE (op) == CONST_DOUBLE
2602 && GET_MODE (op) == VOIDmode))
2603 win = 1;
2604 break;
2605
2606 case 'I':
2607 case 'J':
2608 case 'K':
2609 case 'L':
2610 case 'M':
2611 case 'N':
2612 case 'O':
2613 case 'P':
2614 if (GET_CODE (op) == CONST_INT
2615 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2616 win = 1;
2617 break;
2618
2619 case 'V':
2620 if (GET_CODE (op) == MEM
2621 && ((strict > 0 && ! offsettable_memref_p (op))
2622 || (strict < 0
2623 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2624 || (reload_in_progress
2625 && !(GET_CODE (op) == REG
2626 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2627 win = 1;
2628 break;
2629
2630 case 'o':
2631 if ((strict > 0 && offsettable_memref_p (op))
2632 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2633 /* Before reload, accept what reload can handle. */
2634 || (strict < 0
2635 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
		/* During reload, accept a pseudo.  */
2637 || (reload_in_progress && GET_CODE (op) == REG
2638 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2639 win = 1;
2640 break;
2641
2642 default:
2643 {
2644 enum reg_class class;
2645
2646 class = (c == 'r'
2647 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2648 if (class != NO_REGS)
2649 {
2650 if (strict < 0
2651 || (strict == 0
2652 && GET_CODE (op) == REG
2653 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2654 || (strict == 0 && GET_CODE (op) == SCRATCH)
2655 || (GET_CODE (op) == REG
2656 && reg_fits_class_p (op, class, offset, mode)))
2657 win = 1;
2658 }
2659 #ifdef EXTRA_CONSTRAINT_STR
2660 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2661 win = 1;
2662
2663 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2664 {
2665 /* Every memory operand can be reloaded to fit. */
2666 if (strict < 0 && GET_CODE (op) == MEM)
2667 win = 1;
2668
2669 /* Before reload, accept what reload can turn into mem. */
2670 if (strict < 0 && CONSTANT_P (op))
2671 win = 1;
2672
		    /* During reload, accept a pseudo.  */
2674 if (reload_in_progress && GET_CODE (op) == REG
2675 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2676 win = 1;
2677 }
2678 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2679 {
2680 /* Every address operand can be reloaded to fit. */
2681 if (strict < 0)
2682 win = 1;
2683 }
2684 #endif
2685 break;
2686 }
2687 }
2688 while (p += len, c);
2689
2690 constraints[opno] = p;
2691 /* If this operand did not win somehow,
2692 this alternative loses. */
2693 if (! win)
2694 lose = 1;
2695 }
2696 /* This alternative won; the operands are ok.
2697 Change whichever operands this alternative says to change. */
2698 if (! lose)
2699 {
2700 int opno, eopno;
2701
2702 /* See if any earlyclobber operand conflicts with some other
2703 operand. */
2704
2705 if (strict > 0)
2706 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2707 /* Ignore earlyclobber operands now in memory,
2708 because we would often report failure when we have
2709 two memory operands, one of which was formerly a REG. */
2710 if (earlyclobber[eopno]
2711 && GET_CODE (recog_data.operand[eopno]) == REG)
2712 for (opno = 0; opno < recog_data.n_operands; opno++)
2713 if ((GET_CODE (recog_data.operand[opno]) == MEM
2714 || recog_data.operand_type[opno] != OP_OUT)
2715 && opno != eopno
2716 /* Ignore things like match_operator operands. */
2717 && *recog_data.constraints[opno] != 0
2718 && ! (matching_operands[opno] == eopno
2719 && operands_match_p (recog_data.operand[opno],
2720 recog_data.operand[eopno]))
2721 && ! safe_from_earlyclobber (recog_data.operand[opno],
2722 recog_data.operand[eopno]))
2723 lose = 1;
2724
2725 if (! lose)
2726 {
2727 while (--funny_match_index >= 0)
2728 {
2729 recog_data.operand[funny_match[funny_match_index].other]
2730 = recog_data.operand[funny_match[funny_match_index].this];
2731 }
2732
2733 return 1;
2734 }
2735 }
2736
2737 which_alternative++;
2738 }
2739 while (which_alternative < recog_data.n_alternatives);
2740
2741 which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try again with a very loose test.  Return failure only if that
     also fails.  */
2744 if (strict == 0)
2745 return constrain_operands (-1);
2746 else
2747 return 0;
2748 }
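/* Illustrative sketch of the usual calling sequence (hypothetical
   caller; compare extract_constrain_insn_cached above):

	extract_insn (insn);
	if (! constrain_operands (reload_completed))
	  fatal_insn_not_found (insn);
	alt = which_alternative;

   after which ALT indexes the constraint alternative that matched.  */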
2749
2750 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2751 is a hard reg in class CLASS when its regno is offset by OFFSET
2752 and changed to mode MODE.
2753 If REG occupies multiple hard regs, all of them must be in CLASS. */
2754
2755 int
2756 reg_fits_class_p (operand, class, offset, mode)
2757 rtx operand;
2758 enum reg_class class;
2759 int offset;
2760 enum machine_mode mode;
2761 {
2762 int regno = REGNO (operand);
2763 if (regno < FIRST_PSEUDO_REGISTER
2764 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2765 regno + offset))
2766 {
2767 int sr;
2768 regno += offset;
2769 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2770 sr > 0; sr--)
2771 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2772 regno + sr))
2773 break;
2774 return sr == 0;
2775 }
2776
2777 return 0;
2778 }
2779 \f
/* Split a single instruction.  Helper function for split_all_insns.
   Return the last insn in the sequence if successful, or NULL if
   unsuccessful.  */
2782 static rtx
2783 split_insn (insn)
2784 rtx insn;
2785 {
2786 rtx set;
2787 if (!INSN_P (insn))
2788 ;
2789 /* Don't split no-op move insns. These should silently
2790 disappear later in final. Splitting such insns would
2791 break the code that handles REG_NO_CONFLICT blocks. */
2792
2793 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2794 {
2795 /* Nops get in the way while scheduling, so delete them
2796 now if register allocation has already been done. It
2797 is too risky to try to do this before register
2798 allocation, and there are unlikely to be very many
	 nops then anyway.  */
2800 if (reload_completed)
2801 delete_insn_and_edges (insn);
2802 }
2803 else
2804 {
2805 /* Split insns here to get max fine-grain parallelism. */
2806 rtx first = PREV_INSN (insn);
2807 rtx last = try_split (PATTERN (insn), insn, 1);
2808
2809 if (last != insn)
2810 {
2811 /* try_split returns the NOTE that INSN became. */
2812 PUT_CODE (insn, NOTE);
2813 NOTE_SOURCE_FILE (insn) = 0;
2814 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2815
2816 /* ??? Coddle to md files that generate subregs in post-
2817 reload splitters instead of computing the proper
2818 hard register. */
2819 if (reload_completed && first != last)
2820 {
2821 first = NEXT_INSN (first);
2822 while (1)
2823 {
2824 if (INSN_P (first))
2825 cleanup_subreg_operands (first);
2826 if (first == last)
2827 break;
2828 first = NEXT_INSN (first);
2829 }
2830 }
2831 return last;
2832 }
2833 }
2834 return NULL_RTX;
2835 }
2836 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2837
2838 void
2839 split_all_insns (upd_life)
2840 int upd_life;
2841 {
2842 sbitmap blocks;
2843 bool changed;
2844 basic_block bb;
2845
2846 blocks = sbitmap_alloc (last_basic_block);
2847 sbitmap_zero (blocks);
2848 changed = false;
2849
2850 FOR_EACH_BB_REVERSE (bb)
2851 {
2852 rtx insn, next;
2853 bool finish = false;
2854
2855 for (insn = bb->head; !finish ; insn = next)
2856 {
2857 rtx last;
2858
	  /* Can't use `next_real_insn' because that might skip across
	     CODE_LABELs, and hence across basic block boundaries.  */
2861 next = NEXT_INSN (insn);
2862 finish = (insn == bb->end);
2863 last = split_insn (insn);
2864 if (last)
2865 {
	      /* The split sequence may include a barrier, but the
		 BB boundary we are interested in will be set to the
		 previous insn.  */
2869
2870 while (GET_CODE (last) == BARRIER)
2871 last = PREV_INSN (last);
2872 SET_BIT (blocks, bb->index);
2873 changed = true;
2874 insn = last;
2875 }
2876 }
2877 }
2878
2879 if (changed)
2880 {
2881 int old_last_basic_block = last_basic_block;
2882
2883 find_many_sub_basic_blocks (blocks);
2884
2885 if (old_last_basic_block != last_basic_block && upd_life)
2886 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2887 }
2888
2889 if (changed && upd_life)
2890 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2891 PROP_DEATH_NOTES | PROP_REG_INFO);
2892
2893 #ifdef ENABLE_CHECKING
2894 verify_flow_info ();
2895 #endif
2896
2897 sbitmap_free (blocks);
2898 }
2899
/* Same as split_all_insns, but do not expect the CFG to be available.
   Used by machine-dependent reorg passes.  */
2902
2903 void
2904 split_all_insns_noflow ()
2905 {
2906 rtx next, insn;
2907
2908 for (insn = get_insns (); insn; insn = next)
2909 {
2910 next = NEXT_INSN (insn);
2911 split_insn (insn);
2912 }
2913 return;
2914 }
2915 \f
2916 #ifdef HAVE_peephole2
2917 struct peep2_insn_data
2918 {
2919 rtx insn;
2920 regset live_before;
2921 };
2922
2923 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2924 static int peep2_current;
2925
2926 /* A non-insn marker indicating the last insn of the block.
2927 The live_before regset for this element is correct, indicating
2928 global_live_at_end for the block. */
2929 #define PEEP2_EOB pc_rtx
2930
2931 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2932 does not exist. Used by the recognizer to find the next insn to match
2933 in a multi-insn pattern. */
2934
2935 rtx
2936 peep2_next_insn (n)
2937 int n;
2938 {
2939 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2940 abort ();
2941
2942 n += peep2_current;
2943 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2944 n -= MAX_INSNS_PER_PEEP2 + 1;
2945
2946 if (peep2_insn_data[n].insn == PEEP2_EOB)
2947 return NULL_RTX;
2948 return peep2_insn_data[n].insn;
2949 }
2950
2951 /* Return true if REGNO is dead before the Nth non-note insn
2952 after `current'. */
2953
2954 int
2955 peep2_regno_dead_p (ofs, regno)
2956 int ofs;
2957 int regno;
2958 {
2959 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2960 abort ();
2961
2962 ofs += peep2_current;
2963 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2964 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2965
2966 if (peep2_insn_data[ofs].insn == NULL_RTX)
2967 abort ();
2968
2969 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2970 }
2971
2972 /* Similarly for a REG. */
2973
2974 int
2975 peep2_reg_dead_p (ofs, reg)
2976 int ofs;
2977 rtx reg;
2978 {
2979 int regno, n;
2980
2981 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2982 abort ();
2983
2984 ofs += peep2_current;
2985 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2986 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2987
2988 if (peep2_insn_data[ofs].insn == NULL_RTX)
2989 abort ();
2990
2991 regno = REGNO (reg);
2992 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2993 while (--n >= 0)
2994 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2995 return 0;
2996 return 1;
2997 }
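/* In a machine description, these predicates typically appear in a
   define_peephole2 condition, e.g. (hypothetical fragment):

	(define_peephole2
	  [(set (match_operand:SI 0 "register_operand" "")
		(match_operand:SI 1 "register_operand" ""))
	   (set (match_dup 1)
		(match_dup 0))]
	  "peep2_reg_dead_p (2, operands[1])"
	  [(set (match_dup 0) (match_dup 1))])

   i.e. the copy back into operand 1 can be dropped when operand 1 dies
   at the end of the two-insn window.  */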
2998
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of the FROMth non-note
   insn of the current peephole window and remains available until the end
   of the TOth.  (FROM and TO are offsets from `current', like the argument
   of peep2_next_insn.)
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */
3009
3010 rtx
3011 peep2_find_free_register (from, to, class_str, mode, reg_set)
3012 int from, to;
3013 const char *class_str;
3014 enum machine_mode mode;
3015 HARD_REG_SET *reg_set;
3016 {
3017 static int search_ofs;
3018 enum reg_class class;
3019 HARD_REG_SET live;
3020 int i;
3021
3022 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
3023 abort ();
3024
3025 from += peep2_current;
3026 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3027 from -= MAX_INSNS_PER_PEEP2 + 1;
3028 to += peep2_current;
3029 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3030 to -= MAX_INSNS_PER_PEEP2 + 1;
3031
3032 if (peep2_insn_data[from].insn == NULL_RTX)
3033 abort ();
3034 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3035
3036 while (from != to)
3037 {
3038 HARD_REG_SET this_live;
3039
3040 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3041 from = 0;
3042 if (peep2_insn_data[from].insn == NULL_RTX)
3043 abort ();
3044 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3045 IOR_HARD_REG_SET (live, this_live);
3046 }
3047
3048 class = (class_str[0] == 'r' ? GENERAL_REGS
3049 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3050
3051 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3052 {
3053 int raw_regno, regno, success, j;
3054
3055 /* Distribute the free registers as much as possible. */
3056 raw_regno = search_ofs + i;
3057 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3058 raw_regno -= FIRST_PSEUDO_REGISTER;
3059 #ifdef REG_ALLOC_ORDER
3060 regno = reg_alloc_order[raw_regno];
3061 #else
3062 regno = raw_regno;
3063 #endif
3064
3065 /* Don't allocate fixed registers. */
3066 if (fixed_regs[regno])
3067 continue;
3068 /* Make sure the register is of the right class. */
3069 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3070 continue;
3071 /* And can support the mode we need. */
3072 if (! HARD_REGNO_MODE_OK (regno, mode))
3073 continue;
3074 /* And that we don't create an extra save/restore. */
3075 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3076 continue;
3077 /* And we don't clobber traceback for noreturn functions. */
3078 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3079 && (! reload_completed || frame_pointer_needed))
3080 continue;
3081
3082 success = 1;
3083 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3084 {
3085 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3086 || TEST_HARD_REG_BIT (live, regno + j))
3087 {
3088 success = 0;
3089 break;
3090 }
3091 }
3092 if (success)
3093 {
3094 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3095 SET_HARD_REG_BIT (*reg_set, regno + j);
3096
3097 /* Start the next search with the next register. */
3098 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3099 raw_regno = 0;
3100 search_ofs = raw_regno;
3101
3102 return gen_rtx_REG (mode, regno);
3103 }
3104 }
3105
3106 search_ofs = 0;
3107 return NULL_RTX;
3108 }
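/* The insn-recog.c code generated for a match_scratch calls this
   roughly as follows (illustrative; the variable names and failure
   label are approximations of the generated code):

	HARD_REG_SET _regs_allocated;

	CLEAR_HARD_REG_SET (_regs_allocated);
	if ((operands[2] = peep2_find_free_register (0, 1, "r", SImode,
						     &_regs_allocated))
	    == NULL_RTX)
	  goto fail;

   matching a (match_scratch:SI 2 "r") that must be free across insns 0
   and 1 of the window.  */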
3109
3110 /* Perform the peephole2 optimization pass. */
3111
3112 void
3113 peephole2_optimize (dump_file)
3114 FILE *dump_file ATTRIBUTE_UNUSED;
3115 {
3116 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3117 rtx insn, prev;
3118 regset live;
3119 int i;
3120 basic_block bb;
3121 #ifdef HAVE_conditional_execution
3122 sbitmap blocks;
3123 bool changed;
3124 #endif
3125 bool do_cleanup_cfg = false;
3126 bool do_rebuild_jump_labels = false;
3127
3128 /* Initialize the regsets we're going to use. */
3129 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3130 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3131 live = INITIALIZE_REG_SET (rs_heads[i]);
3132
3133 #ifdef HAVE_conditional_execution
3134 blocks = sbitmap_alloc (last_basic_block);
3135 sbitmap_zero (blocks);
3136 changed = false;
3137 #else
3138 count_or_remove_death_notes (NULL, 1);
3139 #endif
3140
3141 FOR_EACH_BB_REVERSE (bb)
3142 {
3143 struct propagate_block_info *pbi;
3144
      /* Indicate that all slots except the last hold invalid data.  */
3146 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3147 peep2_insn_data[i].insn = NULL_RTX;
3148
3149 /* Indicate that the last slot contains live_after data. */
3150 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3151 peep2_current = MAX_INSNS_PER_PEEP2;
3152
3153 /* Start up propagation. */
3154 COPY_REG_SET (live, bb->global_live_at_end);
3155 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3156
3157 #ifdef HAVE_conditional_execution
3158 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3159 #else
3160 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3161 #endif
3162
3163 for (insn = bb->end; ; insn = prev)
3164 {
3165 prev = PREV_INSN (insn);
3166 if (INSN_P (insn))
3167 {
3168 rtx try, before_try, x;
3169 int match_len;
3170 rtx note;
3171 bool was_call = false;
3172
3173 /* Record this insn. */
3174 if (--peep2_current < 0)
3175 peep2_current = MAX_INSNS_PER_PEEP2;
3176 peep2_insn_data[peep2_current].insn = insn;
3177 propagate_one_insn (pbi, insn);
3178 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3179
3180 /* Match the peephole. */
3181 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3182 if (try != NULL)
3183 {
3184 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3185 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3186 cfg-related call notes. */
3187 for (i = 0; i <= match_len; ++i)
3188 {
3189 int j;
3190 rtx old_insn, new_insn, note;
3191
3192 j = i + peep2_current;
3193 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3194 j -= MAX_INSNS_PER_PEEP2 + 1;
3195 old_insn = peep2_insn_data[j].insn;
3196 if (GET_CODE (old_insn) != CALL_INSN)
3197 continue;
3198 was_call = true;
3199
3200 new_insn = try;
3201 while (new_insn != NULL_RTX)
3202 {
3203 if (GET_CODE (new_insn) == CALL_INSN)
3204 break;
3205 new_insn = NEXT_INSN (new_insn);
3206 }
3207
3208 if (new_insn == NULL_RTX)
3209 abort ();
3210
3211 CALL_INSN_FUNCTION_USAGE (new_insn)
3212 = CALL_INSN_FUNCTION_USAGE (old_insn);
3213
3214 for (note = REG_NOTES (old_insn);
3215 note;
3216 note = XEXP (note, 1))
3217 switch (REG_NOTE_KIND (note))
3218 {
3219 case REG_NORETURN:
3220 case REG_SETJMP:
3221 case REG_ALWAYS_RETURN:
3222 REG_NOTES (new_insn)
3223 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3224 XEXP (note, 0),
3225 REG_NOTES (new_insn));
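			    /* Fall through.  */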
3226 default:
3227 /* Discard all other reg notes. */
3228 break;
3229 }
3230
3231 /* Croak if there is another call in the sequence. */
3232 while (++i <= match_len)
3233 {
3234 j = i + peep2_current;
3235 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3236 j -= MAX_INSNS_PER_PEEP2 + 1;
3237 old_insn = peep2_insn_data[j].insn;
3238 if (GET_CODE (old_insn) == CALL_INSN)
3239 abort ();
3240 }
3241 break;
3242 }
3243
3244 i = match_len + peep2_current;
3245 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3246 i -= MAX_INSNS_PER_PEEP2 + 1;
3247
3248 note = find_reg_note (peep2_insn_data[i].insn,
3249 REG_EH_REGION, NULL_RTX);
3250
3251 /* Replace the old sequence with the new. */
3252 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3253 INSN_SCOPE (peep2_insn_data[i].insn));
3254 before_try = PREV_INSN (insn);
3255 delete_insn_chain (insn, peep2_insn_data[i].insn);
3256
3257 /* Re-insert the EH_REGION notes. */
3258 if (note || (was_call && nonlocal_goto_handler_labels))
3259 {
3260 edge eh_edge;
3261
3262 for (eh_edge = bb->succ; eh_edge
3263 ; eh_edge = eh_edge->succ_next)
3264 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3265 break;
3266
3267 for (x = try ; x != before_try ; x = PREV_INSN (x))
3268 if (GET_CODE (x) == CALL_INSN
3269 || (flag_non_call_exceptions
3270 && may_trap_p (PATTERN (x))
3271 && !find_reg_note (x, REG_EH_REGION, NULL)))
3272 {
3273 if (note)
3274 REG_NOTES (x)
3275 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3276 XEXP (note, 0),
3277 REG_NOTES (x));
3278
3279 if (x != bb->end && eh_edge)
3280 {
3281 edge nfte, nehe;
3282 int flags;
3283
3284 nfte = split_block (bb, x);
3285 flags = (eh_edge->flags
3286 & (EDGE_EH | EDGE_ABNORMAL));
3287 if (GET_CODE (x) == CALL_INSN)
3288 flags |= EDGE_ABNORMAL_CALL;
3289 nehe = make_edge (nfte->src, eh_edge->dest,
3290 flags);
3291
3292 nehe->probability = eh_edge->probability;
3293 nfte->probability
3294 = REG_BR_PROB_BASE - nehe->probability;
3295
3296 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3297 #ifdef HAVE_conditional_execution
3298 SET_BIT (blocks, nfte->dest->index);
3299 changed = true;
3300 #endif
3301 bb = nfte->src;
3302 eh_edge = nehe;
3303 }
3304 }
3305
		  /* The replacement may have turned a possibly trapping
		     insn into a non-trapping one; zap any outgoing edges
		     that are now dummies.  */
3308 do_cleanup_cfg |= purge_dead_edges (bb);
3309 }
3310
3311 #ifdef HAVE_conditional_execution
3312 /* With conditional execution, we cannot back up the
3313 live information so easily, since the conditional
3314 death data structures are not so self-contained.
3315 So record that we've made a modification to this
3316 block and update life information at the end. */
3317 SET_BIT (blocks, bb->index);
3318 changed = true;
3319
3320 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3321 peep2_insn_data[i].insn = NULL_RTX;
3322 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3323 #else
3324 /* Back up lifetime information past the end of the
3325 newly created sequence. */
3326 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3327 i = 0;
3328 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3329
3330 /* Update life information for the new sequence. */
3331 x = try;
3332 do
3333 {
3334 if (INSN_P (x))
3335 {
3336 if (--i < 0)
3337 i = MAX_INSNS_PER_PEEP2;
3338 peep2_insn_data[i].insn = x;
3339 propagate_one_insn (pbi, x);
3340 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3341 }
3342 x = PREV_INSN (x);
3343 }
3344 while (x != prev);
3345
3346 /* ??? Should verify that LIVE now matches what we
3347 had before the new sequence. */
3348
3349 peep2_current = i;
3350 #endif
3351
3352 /* If we generated a jump instruction, it won't have
3353 JUMP_LABEL set. Recompute after we're done. */
3354 for (x = try; x != before_try; x = PREV_INSN (x))
3355 if (GET_CODE (x) == JUMP_INSN)
3356 {
3357 do_rebuild_jump_labels = true;
3358 break;
3359 }
3360 }
3361 }
3362
3363 if (insn == bb->head)
3364 break;
3365 }
3366
3367 free_propagate_block_info (pbi);
3368 }
3369
3370 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3371 FREE_REG_SET (peep2_insn_data[i].live_before);
3372 FREE_REG_SET (live);
3373
3374 if (do_rebuild_jump_labels)
3375 rebuild_jump_labels (get_insns ());
3376
3377 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3378 we've changed global life since exception handlers are no longer
3379 reachable. */
3380 if (do_cleanup_cfg)
3381 {
3382 cleanup_cfg (0);
3383 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3384 }
3385 #ifdef HAVE_conditional_execution
3386 else
3387 {
3388 count_or_remove_death_notes (blocks, 1);
3389 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3390 }
3391 sbitmap_free (blocks);
3392 #endif
3393 }
3394 #endif /* HAVE_peephole2 */
3395
3396 /* Common predicates for use with define_bypass. */
3397
/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s), of the store.  IN_INSN must be a
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs (and possibly CLOBBERs) inside.  */
3402
3403 int
3404 store_data_bypass_p (out_insn, in_insn)
3405 rtx out_insn, in_insn;
3406 {
3407 rtx out_set, in_set;
3408
3409 in_set = single_set (in_insn);
3410 if (! in_set)
3411 abort ();
3412
3413 if (GET_CODE (SET_DEST (in_set)) != MEM)
3414 return false;
3415
3416 out_set = single_set (out_insn);
3417 if (out_set)
3418 {
3419 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3420 return false;
3421 }
3422 else
3423 {
3424 rtx out_pat;
3425 int i;
3426
3427 out_pat = PATTERN (out_insn);
3428 if (GET_CODE (out_pat) != PARALLEL)
3429 abort ();
3430
3431 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3432 {
3433 rtx exp = XVECEXP (out_pat, 0, i);
3434
3435 if (GET_CODE (exp) == CLOBBER)
3436 continue;
3437
3438 if (GET_CODE (exp) != SET)
3439 abort ();
3440
3441 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3442 return false;
3443 }
3444 }
3445
3446 return true;
3447 }
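/* A DFA scheduler description uses this as a define_bypass guard, e.g.
   (hypothetical fragment; the reservation names are made up):

	(define_bypass 1 "foo_alu" "foo_store" "store_data_bypass_p")

   giving a one-cycle latency from an ALU result to a dependent store
   when the result is the store data rather than part of the address.  */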
3448
/* True if the dependency between OUT_INSN and IN_INSN is on the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single set or multiple sets; for a true result IN_INSN must be a
   single_set, but for convenience of insn categorization it may also be any
   JUMP or CALL insn.  */
3453
3454 int
3455 if_test_bypass_p (out_insn, in_insn)
3456 rtx out_insn, in_insn;
3457 {
3458 rtx out_set, in_set;
3459
3460 in_set = single_set (in_insn);
3461 if (! in_set)
3462 {
3463 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3464 return false;
3465 abort ();
3466 }
3467
3468 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3469 return false;
3470 in_set = SET_SRC (in_set);
3471
3472 out_set = single_set (out_insn);
3473 if (out_set)
3474 {
3475 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3476 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3477 return false;
3478 }
3479 else
3480 {
3481 rtx out_pat;
3482 int i;
3483
3484 out_pat = PATTERN (out_insn);
3485 if (GET_CODE (out_pat) != PARALLEL)
3486 abort ();
3487
3488 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3489 {
3490 rtx exp = XVECEXP (out_pat, 0, i);
3491
3492 if (GET_CODE (exp) == CLOBBER)
3493 continue;
3494
3495 if (GET_CODE (exp) != SET)
3496 abort ();
3497
	      /* OUT_SET is null here; check this SET's own destination.  */
	      if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
		  || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3500 return false;
3501 }
3502 }
3503
3504 return true;
3505 }
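/* Likewise, if_test_bypass_p can guard a shorter latency into the test
   of a conditional operation, e.g. (hypothetical fragment; reservation
   names are made up):

	(define_bypass 1 "foo_alu" "foo_cmove" "if_test_bypass_p")

   so the bypass applies only when the ALU result feeds the IF_THEN_ELSE
   condition, not the THEN or ELSE arms.  */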