recog.c (store_data_bypass_p): Handle CLOBBER inside PARALLEL.
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static void validate_replace_src_1	PARAMS ((rtx *, void *));
static rtx split_insn			PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t*) xrealloc (changes,
			      sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
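
/* A minimal usage sketch (editorial illustration, kept under #if 0 in
   the style of the dead code further below; TRY_SWAP_OPERANDS is
   hypothetical, and INSN is assumed to be a single_set whose source
   is a binary operation).  Two dependent replacements are queued with
   IN_GROUP == 1 and then accepted or rejected atomically.  */
#if 0
static int
try_swap_operands (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx a = XEXP (SET_SRC (set), 0);
  rtx b = XEXP (SET_SRC (set), 1);

  /* Nothing is validated yet; both changes are merely recorded.  */
  validate_change (insn, &XEXP (SET_SRC (set), 0), b, 1);
  validate_change (insn, &XEXP (SET_SRC (set), 1), a, 1);

  /* Re-recognize INSN with both changes in place; on failure, every
     queued change is backed out and 0 is returned.  */
  return apply_change_group ();
}
#endif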

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
	if (changes[i].object
	    && INSN_P (changes[i].object)
	    && basic_block_for_insn
	    && ((unsigned int) INSN_UID (changes[i].object)
		< basic_block_for_insn->num_elements)
	    && (bb = BLOCK_FOR_INSN (changes[i].object)))
	  bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
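
/* Sketch of a partial rollback (illustrative only; QUEUE_OR_DISCARD
   and the PROFITABLE test are hypothetical).  `num_validated_changes'
   records a checkpoint so that only changes queued after it are
   retracted; earlier queued changes in the same group survive.  */
#if 0
static void
queue_or_discard (insn, loc, new_rtx, profitable)
     rtx insn, *loc, new_rtx;
     int profitable;
{
  int checkpoint = num_validated_changes ();

  /* Queue the change without validating it.  */
  validate_change (insn, loc, new_rtx, 1);

  /* PROFITABLE stands for some caller-specific test.  */
  if (! profitable)
    /* Retract only the change queued above.  */
    cancel_changes (checkpoint);
}
#endif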

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
  return apply_change_group ();
}
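
/* Illustrative call (ZERO_OUT_SOURCES and FROM_REG are hypothetical):
   replace uses of a pseudo in the sources of INSN with zero; SET_DESTs
   are left alone because note_uses skips them.  */
#if 0
static int
zero_out_sources (insn, from_reg)
     rtx insn, from_reg;
{
  /* Nonzero iff INSN is still recognizable with the constant in place.  */
  return validate_replace_src (from_reg, const0_rtx, insn);
}
#endif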
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
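
/* Combine-style usage sketch (illustrative; SUBSTITUTE_SINGLE_USE and
   VALUE are hypothetical): if DEST is used exactly once, substitute
   VALUE at the single use and delete the now-dead definition.  */
#if 0
static void
substitute_single_use (insn, dest, value)
     rtx insn, dest, value;
{
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, insn, &use_insn);

  if (use_loc != 0
      /* IN_GROUP == 0, so the change is validated immediately.  */
      && validate_change (use_insn, use_loc, value, 0))
    /* The definition of DEST is no longer needed.  */
    delete_insn (insn);
}
#endif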
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (SUBREG_REG (op)) == MEM)
	return 0;

      op = SUBREG_REG (op);
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (SUBREG_REG (op))))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
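
/* For example (illustrative; assumes STACK_GROWS_DOWNWARD and a
   4-byte stack pointer): when PUSH_ROUNDING leaves the size alone,
   a valid SImode push operand is
	(mem:SI (pre_dec:SI (reg:SI sp)))
   whereas a push whose rounded size exceeds GET_MODE_SIZE (mode),
   say an HImode push rounded up to 4 bytes, must have the form
	(mem:HI (pre_modify:SI (reg:SI sp)
			       (plus:SI (reg:SI sp) (const_int -4))))  */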

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
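
/* Illustrative examples of the accepted shapes (the asm statements
   are hypothetical):

     asm ("..." : : "r" (x))
	-> (asm_operands ...); returns 1.
     asm ("..." : "=r" (y) : "r" (x))
	-> (set Y (asm_operands ...)); returns 2.
     asm ("..." : "=r" (y) : "r" (x) : "cc")
	-> (parallel [(set Y (asm_operands ...))
		      (clobber (reg:CC ...))]); returns 2.  */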

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  while (ISDIGIT (*constraint))
	    constraint++;
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
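
/* Minimal sketch of how the three possible answers are consumed
   (illustrative; OPERAND_MAYBE_OK is hypothetical).  Note that
   check_asm_operands above treats the inconclusive -1 answer as
   acceptable, since only a hard 0 means failure.  */
#if 0
static int
operand_maybe_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int ok = asm_operand_ok (op, constraint);

  if (ok > 0)
    return 1;	/* Certainly satisfies the constraint.  */
  if (ok == 0)
    return 0;	/* Certainly does not.  */
  return 1;	/* Matching-digit constraint; inconclusive, so accept.  */
}
#endif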
1828 \f
1829 /* Given an rtx *P, if it is a sum containing an integer constant term,
1830 return the location (type rtx *) of the pointer to that constant term.
1831 Otherwise, return a null pointer. */
1832
1833 rtx *
1834 find_constant_term_loc (p)
1835 rtx *p;
1836 {
1837 rtx *tem;
1838 enum rtx_code code = GET_CODE (*p);
1839
1840 /* If *P IS such a constant term, P is its location. */
1841
1842 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1843 || code == CONST)
1844 return p;
1845
1846 /* Otherwise, if not a sum, it has no constant term. */
1847
1848 if (GET_CODE (*p) != PLUS)
1849 return 0;
1850
1851 /* If one of the summands is constant, return its location. */
1852
1853 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1854 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1855 return p;
1856
1857 /* Otherwise, check each summand for containing a constant term. */
1858
1859 if (XEXP (*p, 0) != 0)
1860 {
1861 tem = find_constant_term_loc (&XEXP (*p, 0));
1862 if (tem != 0)
1863 return tem;
1864 }
1865
1866 if (XEXP (*p, 1) != 0)
1867 {
1868 tem = find_constant_term_loc (&XEXP (*p, 1));
1869 if (tem != 0)
1870 return tem;
1871 }
1872
1873 return 0;
1874 }
1875 \f
1876 /* Return 1 if OP is a memory reference
1877 whose address contains no side effects
1878 and remains valid after the addition
1879 of a positive integer less than the
1880 size of the object being referenced.
1881
1882 We assume that the original address is valid and do not check it.
1883
1884 This uses strict_memory_address_p as a subroutine, so
1885 don't use it before reload. */
1886
1887 int
1888 offsettable_memref_p (op)
1889 rtx op;
1890 {
1891 return ((GET_CODE (op) == MEM)
1892 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1893 }
1894
1895 /* Similar, but don't require a strictly valid mem ref:
1896 consider pseudo-regs valid as index or base regs. */
1897
1898 int
1899 offsettable_nonstrict_memref_p (op)
1900 rtx op;
1901 {
1902 return ((GET_CODE (op) == MEM)
1903 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1904 }
1905
1906 /* Return 1 if Y is a memory address which contains no side effects
1907 and would remain valid after the addition of a positive integer
1908 less than the size of that mode.
1909
1910 We assume that the original address is valid and do not check it.
1911 We do check that it is valid for narrower modes.
1912
1913 If STRICTP is nonzero, we require a strictly valid address,
1914 for the sake of use in reload.c. */
1915
1916 int
1917 offsettable_address_p (strictp, mode, y)
1918 int strictp;
1919 enum machine_mode mode;
1920 rtx y;
1921 {
1922 enum rtx_code ycode = GET_CODE (y);
1923 rtx z;
1924 rtx y1 = y;
1925 rtx *y2;
1926 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1927 (strictp ? strict_memory_address_p : memory_address_p);
1928 unsigned int mode_sz = GET_MODE_SIZE (mode);
1929
1930 if (CONSTANT_ADDRESS_P (y))
1931 return 1;
1932
1933 /* Adjusting an offsettable address involves changing to a narrower mode.
1934 Make sure that's OK. */
1935
1936 if (mode_dependent_address_p (y))
1937 return 0;
1938
1939 /* ??? How much offset does an offsettable BLKmode reference need?
1940 Clearly that depends on the situation in which it's being used.
1941 However, the current situation in which we test 0xffffffff is
1942 less than ideal. Caveat user. */
1943 if (mode_sz == 0)
1944 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1945
1946 /* If the expression contains a constant term,
1947 see if it remains valid when max possible offset is added. */
1948
1949 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1950 {
1951 int good;
1952
1953 y1 = *y2;
1954 *y2 = plus_constant (*y2, mode_sz - 1);
1955 /* Use QImode because an odd displacement may be automatically invalid
1956 for any wider mode. But it should be valid for a single byte. */
1957 good = (*addressp) (QImode, y);
1958
1959 /* In any case, restore old contents of memory. */
1960 *y2 = y1;
1961 return good;
1962 }
1963
1964 if (GET_RTX_CLASS (ycode) == 'a')
1965 return 0;
1966
1967 /* The offset added here is chosen as the maximum offset that
1968 any instruction could need to add when operating on something
1969 of the specified mode. We assume that if Y and Y+c are
1970 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1971 go inside a LO_SUM here, so we do so as well. */
1972 if (GET_CODE (y) == LO_SUM
1973 && mode != BLKmode
1974 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1975 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1976 plus_constant (XEXP (y, 1), mode_sz - 1));
1977 else
1978 z = plus_constant (y, mode_sz - 1);
1979
1980 /* Use QImode because an odd displacement may be automatically invalid
1981 for any wider mode. But it should be valid for a single byte. */
1982 return (*addressp) (QImode, z);
1983 }
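
/* A sketch of the fallback path above: with Y = (reg X) and SImode
   (mode_sz == 4), Z becomes (plus (reg X) (const_int 3)), and Y is
   considered offsettable exactly when Z is a valid QImode address,
   i.e. when every byte of the four-byte reference is addressable.  */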
1984
1985 /* Return 1 if ADDR is an address-expression whose effect depends
1986 on the mode of the memory reference it is used in.
1987
1988 Autoincrement addressing is a typical example of mode-dependence
1989 because the amount of the increment depends on the mode. */
1990
1991 int
1992 mode_dependent_address_p (addr)
1993 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1994 {
1995 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1996 return 0;
1997 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1998 win: ATTRIBUTE_UNUSED_LABEL
1999 return 1;
2000 }
2001
2002 /* Return 1 if OP is a general operand
2003 other than a memory ref with a mode dependent address. */
2004
2005 int
2006 mode_independent_operand (op, mode)
2007 enum machine_mode mode;
2008 rtx op;
2009 {
2010 rtx addr;
2011
2012 if (! general_operand (op, mode))
2013 return 0;
2014
2015 if (GET_CODE (op) != MEM)
2016 return 1;
2017
2018 addr = XEXP (op, 0);
2019 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2020 return 1;
2021 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2022 lose: ATTRIBUTE_UNUSED_LABEL
2023 return 0;
2024 }
2025 \f
2026 /* Like extract_insn, but save the insn extracted and don't extract again
2027 when called again for the same insn, expecting that recog_data still
2028 contains the valid information. This is used primarily by the gen_attr
2029 infrastructure, which often extracts the same insn again and again. */
2030 void
2031 extract_insn_cached (insn)
2032 rtx insn;
2033 {
2034 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2035 return;
2036 extract_insn (insn);
2037 recog_data.insn = insn;
2038 }
2039 /* Do cached extract_insn, constrain_operands and complain about failures.
2040 Used by insn_attrtab. */
2041 void
2042 extract_constrain_insn_cached (insn)
2043 rtx insn;
2044 {
2045 extract_insn_cached (insn);
2046 if (which_alternative == -1
2047 && !constrain_operands (reload_completed))
2048 fatal_insn_not_found (insn);
2049 }
2050 /* Do cached constrain_operands; reuse the result of an earlier successful call if there was one. */
2051 int
2052 constrain_operands_cached (strict)
2053 int strict;
2054 {
2055 if (which_alternative == -1)
2056 return constrain_operands (strict);
2057 else
2058 return 1;
2059 }
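
/* A minimal usage sketch (INSN here is hypothetical), mirroring what
   the generated attribute code does:

	extract_insn_cached (insn);
	if (! constrain_operands_cached (reload_completed))
	  fatal_insn_not_found (insn);

   A second extract_insn_cached call on the same insn reuses the data
   already in recog_data instead of extracting again.  */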
2060 \f
2061 /* Analyze INSN and fill in recog_data. */
2062
2063 void
2064 extract_insn (insn)
2065 rtx insn;
2066 {
2067 int i;
2068 int icode;
2069 int noperands;
2070 rtx body = PATTERN (insn);
2071
2072 recog_data.insn = NULL;
2073 recog_data.n_operands = 0;
2074 recog_data.n_alternatives = 0;
2075 recog_data.n_dups = 0;
2076 which_alternative = -1;
2077
2078 switch (GET_CODE (body))
2079 {
2080 case USE:
2081 case CLOBBER:
2082 case ASM_INPUT:
2083 case ADDR_VEC:
2084 case ADDR_DIFF_VEC:
2085 return;
2086
2087 case SET:
2088 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2089 goto asm_insn;
2090 else
2091 goto normal_insn;
2092 case PARALLEL:
2093 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2094 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2095 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2096 goto asm_insn;
2097 else
2098 goto normal_insn;
2099 case ASM_OPERANDS:
2100 asm_insn:
2101 recog_data.n_operands = noperands = asm_noperands (body);
2102 if (noperands >= 0)
2103 {
2104 /* This insn is an `asm' with operands. */
2105
2106 /* expand_asm_operands makes sure there aren't too many operands. */
2107 if (noperands > MAX_RECOG_OPERANDS)
2108 abort ();
2109
2110 /* Now get the operand values and constraints out of the insn. */
2111 decode_asm_operands (body, recog_data.operand,
2112 recog_data.operand_loc,
2113 recog_data.constraints,
2114 recog_data.operand_mode);
2115 if (noperands > 0)
2116 {
2117 const char *p = recog_data.constraints[0];
2118 recog_data.n_alternatives = 1;
2119 while (*p)
2120 recog_data.n_alternatives += (*p++ == ',');
2121 }
2122 break;
2123 }
2124 fatal_insn_not_found (insn);
2125
2126 default:
2127 normal_insn:
2128 /* Ordinary insn: recognize it, get the operands via insn_extract
2129 and get the constraints. */
2130
2131 icode = recog_memoized (insn);
2132 if (icode < 0)
2133 fatal_insn_not_found (insn);
2134
2135 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2136 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2137 recog_data.n_dups = insn_data[icode].n_dups;
2138
2139 insn_extract (insn);
2140
2141 for (i = 0; i < noperands; i++)
2142 {
2143 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2144 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2145 /* VOIDmode match_operands get their mode from the real operand. */
2146 if (recog_data.operand_mode[i] == VOIDmode)
2147 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2148 }
2149 }
2150 for (i = 0; i < noperands; i++)
2151 recog_data.operand_type[i]
2152 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2153 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2154 : OP_IN);
2155
2156 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2157 abort ();
2158 }
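
/* A hedged example of the result: for a three-operand add insn whose
   constraints are "=r", "%0" and "ri" (hypothetical), the code above
   leaves recog_data.n_operands == 3, operand_type[0] == OP_OUT (from
   the leading '='), operand_type[1] == operand_type[2] == OP_IN, and
   n_alternatives == 1, since the constraints contain no commas.  */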
2159
2160 /* After calling extract_insn, you can use this function to extract some
2161 information from the constraint strings into a more usable form.
2162 The collected data is stored in recog_op_alt. */
2163 void
2164 preprocess_constraints ()
2165 {
2166 int i;
2167
2168 memset (recog_op_alt, 0, sizeof recog_op_alt);
2169 for (i = 0; i < recog_data.n_operands; i++)
2170 {
2171 int j;
2172 struct operand_alternative *op_alt;
2173 const char *p = recog_data.constraints[i];
2174
2175 op_alt = recog_op_alt[i];
2176
2177 for (j = 0; j < recog_data.n_alternatives; j++)
2178 {
2179 op_alt[j].class = NO_REGS;
2180 op_alt[j].constraint = p;
2181 op_alt[j].matches = -1;
2182 op_alt[j].matched = -1;
2183
2184 if (*p == '\0' || *p == ',')
2185 {
2186 op_alt[j].anything_ok = 1;
2187 continue;
2188 }
2189
2190 for (;;)
2191 {
2192 char c = *p++;
2193 if (c == '#')
2194 do
2195 c = *p++;
2196 while (c != ',' && c != '\0');
2197 if (c == ',' || c == '\0')
2198 break;
2199
2200 switch (c)
2201 {
2202 case '=': case '+': case '*': case '%':
2203 case 'E': case 'F': case 'G': case 'H':
2204 case 's': case 'i': case 'n':
2205 case 'I': case 'J': case 'K': case 'L':
2206 case 'M': case 'N': case 'O': case 'P':
2207 /* These don't say anything we care about. */
2208 break;
2209
2210 case '?':
2211 op_alt[j].reject += 6;
2212 break;
2213 case '!':
2214 op_alt[j].reject += 600;
2215 break;
2216 case '&':
2217 op_alt[j].earlyclobber = 1;
2218 break;
2219
2220 case '0': case '1': case '2': case '3': case '4':
2221 case '5': case '6': case '7': case '8': case '9':
2222 {
2223 char *end;
2224 op_alt[j].matches = strtoul (p - 1, &end, 10);
2225 recog_op_alt[op_alt[j].matches][j].matched = i;
2226 p = end;
2227 }
2228 break;
2229
2230 case 'm':
2231 op_alt[j].memory_ok = 1;
2232 break;
2233 case '<':
2234 op_alt[j].decmem_ok = 1;
2235 break;
2236 case '>':
2237 op_alt[j].incmem_ok = 1;
2238 break;
2239 case 'V':
2240 op_alt[j].nonoffmem_ok = 1;
2241 break;
2242 case 'o':
2243 op_alt[j].offmem_ok = 1;
2244 break;
2245 case 'X':
2246 op_alt[j].anything_ok = 1;
2247 break;
2248
2249 case 'p':
2250 op_alt[j].is_address = 1;
2251 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2252 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2253 break;
2254
2255 case 'g': case 'r':
2256 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2257 break;
2258
2259 default:
2260 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2261 break;
2262 }
2263 }
2264 }
2265 }
2266 }
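
/* A hedged example of the parse above: a constraint string "r,m"
   yields two alternatives for its operand, with op_alt[0].class ==
   GENERAL_REGS and op_alt[1].memory_ok == 1, while "=&r" sets
   earlyclobber and class GENERAL_REGS for its single alternative.  */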
2267
2268 /* Check the operands of an insn against the insn's operand constraints
2269 and return 1 if they are valid.
2270 The information about the insn's operands, constraints, operand modes
2271 etc. is obtained from the global variables set up by extract_insn.
2272
2273 WHICH_ALTERNATIVE is set to a number which indicates which
2274 alternative of constraints was matched: 0 for the first alternative,
2275 1 for the next, etc.
2276
2277 In addition, when two operands are required to match
2278 and it happens that the output operand is (reg) while the
2279 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2280 make the output operand look like the input.
2281 This is because the output operand is the one the template will print.
2282
2283 This is used in final, just before printing the assembler code and by
2284 the routines that determine an insn's attribute.
2285
2286 If STRICT is a positive value, it means that we have been
2287 called after reload has been completed. In that case, we must
2288 do all checks strictly. If it is zero, it means that we have been called
2289 before reload has completed. In that case, we first try to see if we can
2290 find an alternative that matches strictly. If not, we try again, this
2291 time assuming that reload will fix up the insn. This provides a "best
2292 guess" for the alternative and is used to compute attributes of insns prior
2293 to reload. A negative value of STRICT is used for this internal call. */
2294
2295 struct funny_match
2296 {
2297 int this, other;
2298 };
2299
2300 int
2301 constrain_operands (strict)
2302 int strict;
2303 {
2304 const char *constraints[MAX_RECOG_OPERANDS];
2305 int matching_operands[MAX_RECOG_OPERANDS];
2306 int earlyclobber[MAX_RECOG_OPERANDS];
2307 int c;
2308
2309 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2310 int funny_match_index;
2311
2312 which_alternative = 0;
2313 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2314 return 1;
2315
2316 for (c = 0; c < recog_data.n_operands; c++)
2317 {
2318 constraints[c] = recog_data.constraints[c];
2319 matching_operands[c] = -1;
2320 }
2321
2322 do
2323 {
2324 int opno;
2325 int lose = 0;
2326 funny_match_index = 0;
2327
2328 for (opno = 0; opno < recog_data.n_operands; opno++)
2329 {
2330 rtx op = recog_data.operand[opno];
2331 enum machine_mode mode = GET_MODE (op);
2332 const char *p = constraints[opno];
2333 int offset = 0;
2334 int win = 0;
2335 int val;
2336
2337 earlyclobber[opno] = 0;
2338
2339 /* A unary operator may be accepted by the predicate, but it
2340 is irrelevant for matching constraints. */
2341 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2342 op = XEXP (op, 0);
2343
2344 if (GET_CODE (op) == SUBREG)
2345 {
2346 if (GET_CODE (SUBREG_REG (op)) == REG
2347 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2348 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2349 GET_MODE (SUBREG_REG (op)),
2350 SUBREG_BYTE (op),
2351 GET_MODE (op));
2352 op = SUBREG_REG (op);
2353 }
2354
2355 /* An empty constraint or empty alternative
2356 allows anything which matched the pattern. */
2357 if (*p == 0 || *p == ',')
2358 win = 1;
2359
2360 while (*p && (c = *p++) != ',')
2361 switch (c)
2362 {
2363 case '?': case '!': case '*': case '%':
2364 case '=': case '+':
2365 break;
2366
2367 case '#':
2368 /* Ignore rest of this alternative as far as
2369 constraint checking is concerned. */
2370 while (*p && *p != ',')
2371 p++;
2372 break;
2373
2374 case '&':
2375 earlyclobber[opno] = 1;
2376 break;
2377
2378 case '0': case '1': case '2': case '3': case '4':
2379 case '5': case '6': case '7': case '8': case '9':
2380 {
2381 /* This operand must be the same as a previous one.
2382 This kind of constraint is used for instructions such
2383 as add when they take only two operands.
2384
2385 Note that the lower-numbered operand is passed first.
2386
2387 If we are not testing strictly, assume that this
2388 constraint will be satisfied. */
2389
2390 char *end;
2391 int match;
2392
2393 match = strtoul (p - 1, &end, 10);
2394 p = end;
2395
2396 if (strict < 0)
2397 val = 1;
2398 else
2399 {
2400 rtx op1 = recog_data.operand[match];
2401 rtx op2 = recog_data.operand[opno];
2402
2403 /* A unary operator may be accepted by the predicate,
2404 but it is irrelevant for matching constraints. */
2405 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2406 op1 = XEXP (op1, 0);
2407 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2408 op2 = XEXP (op2, 0);
2409
2410 val = operands_match_p (op1, op2);
2411 }
2412
2413 matching_operands[opno] = match;
2414 matching_operands[match] = opno;
2415
2416 if (val != 0)
2417 win = 1;
2418
2419 /* If output is *x and input is *--x, arrange later
2420 to change the output to *--x as well, since the
2421 output op is the one that will be printed. */
2422 if (val == 2 && strict > 0)
2423 {
2424 funny_match[funny_match_index].this = opno;
2425 funny_match[funny_match_index++].other = match;
2426 }
2427 }
2428 break;
2429
2430 case 'p':
2431 /* p is used for address_operands. When we are called by
2432 gen_reload, no one will have checked that the address is
2433 strictly valid, i.e., that all pseudos requiring hard regs
2434 have gotten them. */
2435 if (strict <= 0
2436 || (strict_memory_address_p (recog_data.operand_mode[opno],
2437 op)))
2438 win = 1;
2439 break;
2440
2441 /* No need to check general_operand again;
2442 it was done in insn-recog.c. */
2443 case 'g':
2444 /* Anything goes unless it is a REG and really has a hard reg
2445 but the hard reg is not in the class GENERAL_REGS. */
2446 if (strict < 0
2447 || GENERAL_REGS == ALL_REGS
2448 || GET_CODE (op) != REG
2449 || (reload_in_progress
2450 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2451 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2452 win = 1;
2453 break;
2454
2455 case 'X':
2456 /* This is used for a MATCH_SCRATCH in the cases when
2457 we don't actually need anything. So anything goes
2458 any time. */
2459 win = 1;
2460 break;
2461
2462 case 'm':
2463 if (GET_CODE (op) == MEM
2464 /* Before reload, accept what reload can turn into mem. */
2465 || (strict < 0 && CONSTANT_P (op))
2466 /* During reload, accept a pseudo */
2467 || (reload_in_progress && GET_CODE (op) == REG
2468 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2469 win = 1;
2470 break;
2471
2472 case '<':
2473 if (GET_CODE (op) == MEM
2474 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2475 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2476 win = 1;
2477 break;
2478
2479 case '>':
2480 if (GET_CODE (op) == MEM
2481 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2482 || GET_CODE (XEXP (op, 0)) == POST_INC))
2483 win = 1;
2484 break;
2485
2486 case 'E':
2487 case 'F':
2488 if (GET_CODE (op) == CONST_DOUBLE)
2489 win = 1;
2490 break;
2491
2492 case 'G':
2493 case 'H':
2494 if (GET_CODE (op) == CONST_DOUBLE
2495 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2496 win = 1;
2497 break;
2498
2499 case 's':
2500 if (GET_CODE (op) == CONST_INT
2501 || (GET_CODE (op) == CONST_DOUBLE
2502 && GET_MODE (op) == VOIDmode))
2503 break;
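	      /* Fall through: other constants are accepted for 's'
		 by the CONSTANT_P test in the 'i' case below.  */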
2504 case 'i':
2505 if (CONSTANT_P (op))
2506 win = 1;
2507 break;
2508
2509 case 'n':
2510 if (GET_CODE (op) == CONST_INT
2511 || (GET_CODE (op) == CONST_DOUBLE
2512 && GET_MODE (op) == VOIDmode))
2513 win = 1;
2514 break;
2515
2516 case 'I':
2517 case 'J':
2518 case 'K':
2519 case 'L':
2520 case 'M':
2521 case 'N':
2522 case 'O':
2523 case 'P':
2524 if (GET_CODE (op) == CONST_INT
2525 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2526 win = 1;
2527 break;
2528
2529 case 'V':
2530 if (GET_CODE (op) == MEM
2531 && ((strict > 0 && ! offsettable_memref_p (op))
2532 || (strict < 0
2533 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2534 || (reload_in_progress
2535 && !(GET_CODE (op) == REG
2536 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2537 win = 1;
2538 break;
2539
2540 case 'o':
2541 if ((strict > 0 && offsettable_memref_p (op))
2542 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2543 /* Before reload, accept what reload can handle. */
2544 || (strict < 0
2545 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2546 /* During reload, accept a pseudo */
2547 || (reload_in_progress && GET_CODE (op) == REG
2548 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2549 win = 1;
2550 break;
2551
2552 default:
2553 {
2554 enum reg_class class;
2555
2556 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2557 if (class != NO_REGS)
2558 {
2559 if (strict < 0
2560 || (strict == 0
2561 && GET_CODE (op) == REG
2562 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2563 || (strict == 0 && GET_CODE (op) == SCRATCH)
2564 || (GET_CODE (op) == REG
2565 && reg_fits_class_p (op, class, offset, mode)))
2566 win = 1;
2567 }
2568 #ifdef EXTRA_CONSTRAINT
2569 else if (EXTRA_CONSTRAINT (op, c))
2570 win = 1;
2571 #endif
2572 break;
2573 }
2574 }
2575
2576 constraints[opno] = p;
2577 /* If this operand did not win somehow,
2578 this alternative loses. */
2579 if (! win)
2580 lose = 1;
2581 }
2582 /* This alternative won; the operands are ok.
2583 Change whichever operands this alternative says to change. */
2584 if (! lose)
2585 {
2586 int opno, eopno;
2587
2588 /* See if any earlyclobber operand conflicts with some other
2589 operand. */
2590
2591 if (strict > 0)
2592 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2593 /* Ignore earlyclobber operands now in memory,
2594 because we would often report failure when we have
2595 two memory operands, one of which was formerly a REG. */
2596 if (earlyclobber[eopno]
2597 && GET_CODE (recog_data.operand[eopno]) == REG)
2598 for (opno = 0; opno < recog_data.n_operands; opno++)
2599 if ((GET_CODE (recog_data.operand[opno]) == MEM
2600 || recog_data.operand_type[opno] != OP_OUT)
2601 && opno != eopno
2602 /* Ignore things like match_operator operands. */
2603 && *recog_data.constraints[opno] != 0
2604 && ! (matching_operands[opno] == eopno
2605 && operands_match_p (recog_data.operand[opno],
2606 recog_data.operand[eopno]))
2607 && ! safe_from_earlyclobber (recog_data.operand[opno],
2608 recog_data.operand[eopno]))
2609 lose = 1;
2610
2611 if (! lose)
2612 {
2613 while (--funny_match_index >= 0)
2614 {
2615 recog_data.operand[funny_match[funny_match_index].other]
2616 = recog_data.operand[funny_match[funny_match_index].this];
2617 }
2618
2619 return 1;
2620 }
2621 }
2622
2623 which_alternative++;
2624 }
2625 while (which_alternative < recog_data.n_alternatives);
2626
2627 which_alternative = -1;
2628 /* If we are about to reject this, but we are not to test strictly,
2629 try a very loose test. Only return failure if it fails also. */
2630 if (strict == 0)
2631 return constrain_operands (-1);
2632 else
2633 return 0;
2634 }
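
/* A hedged note on the STRICT settings above: with strict == 0 a
   pseudo register (REGNO >= FIRST_PSEUDO_REGISTER) satisfies any
   register-class constraint, on the assumption that reload can give
   it a suitable hard register, while with strict > 0 the operand
   must already be a hard register that passes reg_fits_class_p.  */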
2635
2636 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2637 is a hard reg in class CLASS when its regno is offset by OFFSET
2638 and changed to mode MODE.
2639 If REG occupies multiple hard regs, all of them must be in CLASS. */
2640
2641 int
2642 reg_fits_class_p (operand, class, offset, mode)
2643 rtx operand;
2644 enum reg_class class;
2645 int offset;
2646 enum machine_mode mode;
2647 {
2648 int regno = REGNO (operand);
2649 if (regno < FIRST_PSEUDO_REGISTER
2650 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2651 regno + offset))
2652 {
2653 int sr;
2654 regno += offset;
2655 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2656 sr > 0; sr--)
2657 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2658 regno + sr))
2659 break;
2660 return sr == 0;
2661 }
2662
2663 return 0;
2664 }
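
/* Sketch: for a DImode OPERAND in hard register 3 on a target where
   HARD_REGNO_NREGS (3, DImode) == 2, the loop above also requires
   hard register 4 (after adding OFFSET) to be in CLASS before
   returning nonzero.  */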
2665 \f
2666 /* Split a single instruction. Helper function for split_all_insns.
2667 Return the last insn of the sequence if successful, or NULL if unsuccessful. */
2668 static rtx
2669 split_insn (insn)
2670 rtx insn;
2671 {
2672 rtx set;
2673 if (!INSN_P (insn))
2674 ;
2675 /* Don't split no-op move insns. These should silently
2676 disappear later in final. Splitting such insns would
2677 break the code that handles REG_NO_CONFLICT blocks. */
2678
2679 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2680 {
2681 /* Nops get in the way while scheduling, so delete them
2682 now if register allocation has already been done. It
2683 is too risky to try to do this before register
2684 allocation, and there are unlikely to be very many
2685 nops then anyways. */
2686 if (reload_completed)
2687 delete_insn_and_edges (insn);
2688 }
2689 else
2690 {
2691 /* Split insns here to get max fine-grain parallelism. */
2692 rtx first = PREV_INSN (insn);
2693 rtx last = try_split (PATTERN (insn), insn, 1);
2694
2695 if (last != insn)
2696 {
2697 /* try_split returns the NOTE that INSN became. */
2698 PUT_CODE (insn, NOTE);
2699 NOTE_SOURCE_FILE (insn) = 0;
2700 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2701
2702 /* ??? Coddle to md files that generate subregs in post-
2703 reload splitters instead of computing the proper
2704 hard register. */
2705 if (reload_completed && first != last)
2706 {
2707 first = NEXT_INSN (first);
2708 while (1)
2709 {
2710 if (INSN_P (first))
2711 cleanup_subreg_operands (first);
2712 if (first == last)
2713 break;
2714 first = NEXT_INSN (first);
2715 }
2716 }
2717 return last;
2718 }
2719 }
2720 return NULL_RTX;
2721 }
2722 /* Split all insns in the function. If UPD_LIFE, update life info afterwards. */
2723
2724 void
2725 split_all_insns (upd_life)
2726 int upd_life;
2727 {
2728 sbitmap blocks;
2729 int changed;
2730 int i;
2731
2732 blocks = sbitmap_alloc (n_basic_blocks);
2733 sbitmap_zero (blocks);
2734 changed = 0;
2735
2736 for (i = n_basic_blocks - 1; i >= 0; --i)
2737 {
2738 basic_block bb = BASIC_BLOCK (i);
2739 rtx insn, next;
2740 bool finish = false;
2741
2742 for (insn = bb->head; !finish ; insn = next)
2743 {
2744 rtx last;
2745
2746 /* Can't use `next_real_insn' because that might go across
2747 CODE_LABELS and short-out basic blocks. */
2748 next = NEXT_INSN (insn);
2749 finish = (insn == bb->end);
2750 last = split_insn (insn);
2751 if (last)
2752 {
2753 /* The split sequence may include a barrier, but the
2754 BB boundary we are interested in will be set to the
2755 previous one. */
2756
2757 while (GET_CODE (last) == BARRIER)
2758 last = PREV_INSN (last);
2759 SET_BIT (blocks, i);
2760 changed = 1;
2761 insn = last;
2762 }
2763 }
2764 }
2765
2766 if (changed)
2767 {
2768 find_many_sub_basic_blocks (blocks);
2769 }
2770
2771 if (changed && upd_life)
2772 {
2773 count_or_remove_death_notes (blocks, 1);
2774 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2775 }
2776 #ifdef ENABLE_CHECKING
2777 verify_flow_info ();
2778 #endif
2779
2780 sbitmap_free (blocks);
2781 }
2782
2783 /* Same as split_all_insns, but do not expect CFG to be available.
2784 Used by machine dependent reorg passes. */
2785
2786 void
2787 split_all_insns_noflow ()
2788 {
2789 rtx next, insn;
2790
2791 for (insn = get_insns (); insn; insn = next)
2792 {
2793 next = NEXT_INSN (insn);
2794 split_insn (insn);
2795 }
2796 return;
2797 }
2798 \f
2799 #ifdef HAVE_peephole2
2800 struct peep2_insn_data
2801 {
2802 rtx insn;
2803 regset live_before;
2804 };
2805
2806 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2807 static int peep2_current;
2808
2809 /* A non-insn marker indicating the last insn of the block.
2810 The live_before regset for this element is correct, indicating
2811 global_live_at_end for the block. */
2812 #define PEEP2_EOB pc_rtx
2813
2814 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2815 does not exist. Used by the recognizer to find the next insn to match
2816 in a multi-insn pattern. */
2817
2818 rtx
2819 peep2_next_insn (n)
2820 int n;
2821 {
2822 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2823 abort ();
2824
2825 n += peep2_current;
2826 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2827 n -= MAX_INSNS_PER_PEEP2 + 1;
2828
2829 if (peep2_insn_data[n].insn == PEEP2_EOB)
2830 return NULL_RTX;
2831 return peep2_insn_data[n].insn;
2832 }
2833
2834 /* Return true if REGNO is dead before the Nth non-note insn
2835 after `current'. */
2836
2837 int
2838 peep2_regno_dead_p (ofs, regno)
2839 int ofs;
2840 int regno;
2841 {
2842 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2843 abort ();
2844
2845 ofs += peep2_current;
2846 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2847 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2848
2849 if (peep2_insn_data[ofs].insn == NULL_RTX)
2850 abort ();
2851
2852 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2853 }
2854
2855 /* Similarly for a REG. */
2856
2857 int
2858 peep2_reg_dead_p (ofs, reg)
2859 int ofs;
2860 rtx reg;
2861 {
2862 int regno, n;
2863
2864 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2865 abort ();
2866
2867 ofs += peep2_current;
2868 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2869 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2870
2871 if (peep2_insn_data[ofs].insn == NULL_RTX)
2872 abort ();
2873
2874 regno = REGNO (reg);
2875 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2876 while (--n >= 0)
2877 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2878 return 0;
2879 return 1;
2880 }
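
/* These predicates are typically used in define_peephole2 conditions
   in the machine description, e.g. (a hedged sketch):

	"peep2_reg_dead_p (1, operands[0])"

   which is true when operands[0] is dead after the first insn of the
   matched sequence, i.e. before the insn at offset 1.  */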
2881
2882 /* Try to find a hard register of mode MODE, matching the register class in
2883 CLASS_STR, which is available from the start of the peephole window
2884 insn at offset FROM through the end of the insn at offset TO. If TO
2885 equals FROM, the only condition is that the register must be available
2886 at the beginning of that insn.
2887 Registers that already have bits set in REG_SET will not be considered.
2888
2889 If an appropriate register is available, it will be returned and the
2890 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2891 returned. */
2892
2893 rtx
2894 peep2_find_free_register (from, to, class_str, mode, reg_set)
2895 int from, to;
2896 const char *class_str;
2897 enum machine_mode mode;
2898 HARD_REG_SET *reg_set;
2899 {
2900 static int search_ofs;
2901 enum reg_class class;
2902 HARD_REG_SET live;
2903 int i;
2904
2905 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2906 abort ();
2907
2908 from += peep2_current;
2909 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2910 from -= MAX_INSNS_PER_PEEP2 + 1;
2911 to += peep2_current;
2912 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2913 to -= MAX_INSNS_PER_PEEP2 + 1;
2914
2915 if (peep2_insn_data[from].insn == NULL_RTX)
2916 abort ();
2917 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2918
2919 while (from != to)
2920 {
2921 HARD_REG_SET this_live;
2922
2923 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2924 from = 0;
2925 if (peep2_insn_data[from].insn == NULL_RTX)
2926 abort ();
2927 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2928 IOR_HARD_REG_SET (live, this_live);
2929 }
2930
2931 class = (class_str[0] == 'r' ? GENERAL_REGS
2932 : REG_CLASS_FROM_LETTER (class_str[0]));
2933
2934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2935 {
2936 int raw_regno, regno, success, j;
2937
2938 /* Distribute the free registers as much as possible. */
2939 raw_regno = search_ofs + i;
2940 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2941 raw_regno -= FIRST_PSEUDO_REGISTER;
2942 #ifdef REG_ALLOC_ORDER
2943 regno = reg_alloc_order[raw_regno];
2944 #else
2945 regno = raw_regno;
2946 #endif
2947
2948 /* Don't allocate fixed registers. */
2949 if (fixed_regs[regno])
2950 continue;
2951 /* Make sure the register is of the right class. */
2952 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2953 continue;
2954 /* And can support the mode we need. */
2955 if (! HARD_REGNO_MODE_OK (regno, mode))
2956 continue;
2957 /* And that we don't create an extra save/restore. */
2958 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2959 continue;
2960 /* And we don't clobber traceback for noreturn functions. */
2961 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2962 && (! reload_completed || frame_pointer_needed))
2963 continue;
2964
2965 success = 1;
2966 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2967 {
2968 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2969 || TEST_HARD_REG_BIT (live, regno + j))
2970 {
2971 success = 0;
2972 break;
2973 }
2974 }
2975 if (success)
2976 {
2977 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2978 SET_HARD_REG_BIT (*reg_set, regno + j);
2979
2980 /* Start the next search with the next register. */
2981 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2982 raw_regno = 0;
2983 search_ofs = raw_regno;
2984
2985 return gen_rtx_REG (mode, regno);
2986 }
2987 }
2988
2989 search_ofs = 0;
2990 return NULL_RTX;
2991 }
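
/* A hedged usage sketch: a (match_scratch:SI N "r") in a
   define_peephole2 is satisfied by generated code through a call
   roughly of the form

	peep2_find_free_register (0, 1, "r", SImode, &regs_allocated)

   where regs_allocated (a hypothetical name) accumulates registers
   already handed out for this match; NULL_RTX means none was free.  */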
2992
2993 /* Perform the peephole2 optimization pass. */
2994
2995 void
2996 peephole2_optimize (dump_file)
2997 FILE *dump_file ATTRIBUTE_UNUSED;
2998 {
2999 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3000 rtx insn, prev;
3001 regset live;
3002 int i, b;
3003 #ifdef HAVE_conditional_execution
3004 sbitmap blocks;
3005 bool changed;
3006 #endif
3007 bool do_cleanup_cfg = false;
3008 bool do_rebuild_jump_labels = false;
3009
3010 /* Initialize the regsets we're going to use. */
3011 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3012 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3013 live = INITIALIZE_REG_SET (rs_heads[i]);
3014
3015 #ifdef HAVE_conditional_execution
3016 blocks = sbitmap_alloc (n_basic_blocks);
3017 sbitmap_zero (blocks);
3018 changed = false;
3019 #else
3020 count_or_remove_death_notes (NULL, 1);
3021 #endif
3022
3023 for (b = n_basic_blocks - 1; b >= 0; --b)
3024 {
3025 basic_block bb = BASIC_BLOCK (b);
3026 struct propagate_block_info *pbi;
3027
3028 /* Indicate that all slots except the last hold invalid data. */
3029 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3030 peep2_insn_data[i].insn = NULL_RTX;
3031
3032 /* Indicate that the last slot contains live_after data. */
3033 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3034 peep2_current = MAX_INSNS_PER_PEEP2;
3035
3036 /* Start up propagation. */
3037 COPY_REG_SET (live, bb->global_live_at_end);
3038 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3039
3040 #ifdef HAVE_conditional_execution
3041 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3042 #else
3043 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3044 #endif
3045
3046 for (insn = bb->end; ; insn = prev)
3047 {
3048 prev = PREV_INSN (insn);
3049 if (INSN_P (insn))
3050 {
3051 rtx try, before_try, x;
3052 int match_len;
3053 rtx note;
3054
3055 /* Record this insn. */
3056 if (--peep2_current < 0)
3057 peep2_current = MAX_INSNS_PER_PEEP2;
3058 peep2_insn_data[peep2_current].insn = insn;
3059 propagate_one_insn (pbi, insn);
3060 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3061
3062 /* Match the peephole. */
3063 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3064 if (try != NULL)
3065 {
3066 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3067 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3068 cfg-related call notes. */
3069 for (i = 0; i <= match_len; ++i)
3070 {
3071 int j, k;
3072 rtx old_insn, new_insn, note;
3073
3074 j = i + peep2_current;
3075 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3076 j -= MAX_INSNS_PER_PEEP2 + 1;
3077 old_insn = peep2_insn_data[j].insn;
3078 if (GET_CODE (old_insn) != CALL_INSN)
3079 continue;
3080
3081 new_insn = NULL_RTX;
3082 if (GET_CODE (try) == SEQUENCE)
3083 for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
3084 {
3085 rtx x = XVECEXP (try, 0, k);
3086 if (GET_CODE (x) == CALL_INSN)
3087 {
3088 new_insn = x;
3089 break;
3090 }
3091 }
3092 else if (GET_CODE (try) == CALL_INSN)
3093 new_insn = try;
3094 if (! new_insn)
3095 abort ();
3096
3097 CALL_INSN_FUNCTION_USAGE (new_insn)
3098 = CALL_INSN_FUNCTION_USAGE (old_insn);
3099
3100 for (note = REG_NOTES (old_insn);
3101 note;
3102 note = XEXP (note, 1))
3103 switch (REG_NOTE_KIND (note))
3104 {
3105 case REG_NORETURN:
3106 case REG_SETJMP:
3107 case REG_ALWAYS_RETURN:
3108 REG_NOTES (new_insn)
3109 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3110 XEXP (note, 0),
3111 REG_NOTES (new_insn));
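	      /* Fall through.  */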
3112 default:
3113 /* Discard all other reg notes. */
3114 break;
3115 }
3116
3117 /* Croak if there is another call in the sequence. */
3118 while (++i <= match_len)
3119 {
3120 j = i + peep2_current;
3121 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3122 j -= MAX_INSNS_PER_PEEP2 + 1;
3123 old_insn = peep2_insn_data[j].insn;
3124 if (GET_CODE (old_insn) == CALL_INSN)
3125 abort ();
3126 }
3127 break;
3128 }
3129
3130 i = match_len + peep2_current;
3131 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3132 i -= MAX_INSNS_PER_PEEP2 + 1;
3133
3134 note = find_reg_note (peep2_insn_data[i].insn,
3135 REG_EH_REGION, NULL_RTX);
3136
3137 /* Replace the old sequence with the new. */
3138 try = emit_insn_after (try, peep2_insn_data[i].insn);
3139 before_try = PREV_INSN (insn);
3140 delete_insn_chain (insn, peep2_insn_data[i].insn);
3141
3142 /* Re-insert the EH_REGION notes. */
3143 if (note)
3144 {
3145 edge eh_edge;
3146
3147 for (eh_edge = bb->succ; eh_edge
3148 ; eh_edge = eh_edge->succ_next)
3149 if (eh_edge->flags & EDGE_EH)
3150 break;
3151
3152 for (x = try ; x != before_try ; x = PREV_INSN (x))
3153 if (GET_CODE (x) == CALL_INSN
3154 || (flag_non_call_exceptions
3155 && may_trap_p (PATTERN (x))
3156 && !find_reg_note (x, REG_EH_REGION, NULL)))
3157 {
3158 REG_NOTES (x)
3159 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3160 XEXP (note, 0),
3161 REG_NOTES (x));
3162
3163 if (x != bb->end && eh_edge)
3164 {
3165 edge nfte, nehe;
3166 int flags;
3167
3168 nfte = split_block (bb, x);
3169 flags = EDGE_EH | EDGE_ABNORMAL;
3170 if (GET_CODE (x) == CALL_INSN)
3171 flags |= EDGE_ABNORMAL_CALL;
3172 nehe = make_edge (nfte->src, eh_edge->dest,
3173 flags);
3174
3175 nehe->probability = eh_edge->probability;
3176 nfte->probability
3177 = REG_BR_PROB_BASE - nehe->probability;
3178
3179 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3180 #ifdef HAVE_conditional_execution
3181 SET_BIT (blocks, nfte->dest->index);
3182 changed = true;
3183 #endif
3184 bb = nfte->src;
3185 eh_edge = nehe;
3186 }
3187 }
3188
3189 /* A possibly trapping insn may have been turned into a
3190 non-trapping one; zap any now-dummy outgoing edges. */
3191 do_cleanup_cfg |= purge_dead_edges (bb);
3192 }
3193
3194 #ifdef HAVE_conditional_execution
3195 /* With conditional execution, we cannot back up the
3196 live information so easily, since the conditional
3197 death data structures are not so self-contained.
3198 So record that we've made a modification to this
3199 block and update life information at the end. */
3200 SET_BIT (blocks, b);
3201 changed = true;
3202
3203 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3204 peep2_insn_data[i].insn = NULL_RTX;
3205 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3206 #else
3207 /* Back up lifetime information past the end of the
3208 newly created sequence. */
3209 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3210 i = 0;
3211 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3212
3213 /* Update life information for the new sequence. */
3214 x = try;
3215 do
3216 {
3217 if (INSN_P (x))
3218 {
3219 if (--i < 0)
3220 i = MAX_INSNS_PER_PEEP2;
3221 peep2_insn_data[i].insn = x;
3222 propagate_one_insn (pbi, x);
3223 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3224 }
3225 x = PREV_INSN (x);
3226 }
3227 while (x != prev);
3228
3229 /* ??? Should verify that LIVE now matches what we
3230 had before the new sequence. */
3231
3232 peep2_current = i;
3233 #endif
3234
3235 /* If we generated a jump instruction, it won't have
3236 JUMP_LABEL set. Recompute after we're done. */
3237 for (x = try; x != before_try; x = PREV_INSN (x))
3238 if (GET_CODE (x) == JUMP_INSN)
3239 {
3240 do_rebuild_jump_labels = true;
3241 break;
3242 }
3243 }
3244 }
3245
3246 if (insn == bb->head)
3247 break;
3248 }
3249
3250 free_propagate_block_info (pbi);
3251 }
3252
3253 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3254 FREE_REG_SET (peep2_insn_data[i].live_before);
3255 FREE_REG_SET (live);
3256
3257 if (do_rebuild_jump_labels)
3258 rebuild_jump_labels (get_insns ());
3259
3260 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3261 we've changed global life since exception handlers are no longer
3262 reachable. */
3263 if (do_cleanup_cfg)
3264 {
3265 cleanup_cfg (0);
3266 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3267 }
3268 #ifdef HAVE_conditional_execution
3269 else
3270 {
3271 count_or_remove_death_notes (blocks, 1);
3272 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3273 }
3274 sbitmap_free (blocks);
3275 #endif
3276 }
3277 #endif /* HAVE_peephole2 */
3278
3279 /* Common predicates for use with define_bypass. */
3280
3281 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3282 data, not the address operand(s), of the store. IN_INSN must be
3283 single_set. OUT_INSN must be either a single_set or a PARALLEL whose
3284 elements are SETs (CLOBBERs are ignored). */
3285
3286 int
3287 store_data_bypass_p (out_insn, in_insn)
3288 rtx out_insn, in_insn;
3289 {
3290 rtx out_set, in_set;
3291
3292 in_set = single_set (in_insn);
3293 if (! in_set)
3294 abort ();
3295
3296 if (GET_CODE (SET_DEST (in_set)) != MEM)
3297 return false;
3298
3299 out_set = single_set (out_insn);
3300 if (out_set)
3301 {
3302 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3303 return false;
3304 }
3305 else
3306 {
3307 rtx out_pat;
3308 int i;
3309
3310 out_pat = PATTERN (out_insn);
3311 if (GET_CODE (out_pat) != PARALLEL)
3312 abort ();
3313
3314 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3315 {
3316 rtx exp = XVECEXP (out_pat, 0, i);
3317
3318 if (GET_CODE (exp) == CLOBBER)
3319 continue;
3320
3321 if (GET_CODE (exp) != SET)
3322 abort ();
3323
3324 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3325 return false;
3326 }
3327 }
3328
3329 return true;
3330 }
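
/* A hedged illustration:

	OUT_INSN: (set (reg 1) (plus (reg 2) (reg 3)))
	IN_INSN:  (set (mem (reg 4)) (reg 1))

   Here store_data_bypass_p returns true: (reg 1) feeds only the
   stored data. Had the destination been (mem (reg 1)) instead,
   reg_mentioned_p would find (reg 1) in SET_DEST (in_set) and the
   result would be false.  */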
3331
3332 /* True if the dependency between OUT_INSN and IN_INSN is in the
3333 IF_THEN_ELSE condition, and not the THEN or ELSE branch.
3334 Both OUT_INSN and IN_INSN must be single_set. */
3335
3336 int
3337 if_test_bypass_p (out_insn, in_insn)
3338 rtx out_insn, in_insn;
3339 {
3340 rtx out_set, in_set;
3341
3342 out_set = single_set (out_insn);
3343 if (! out_set)
3344 abort ();
3345
3346 in_set = single_set (in_insn);
3347 if (! in_set)
3348 abort ();
3349
3350 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3351 return false;
3352
3353 if (reg_mentioned_p (SET_DEST (out_set), XEXP (SET_SRC (in_set), 1))
3354 || reg_mentioned_p (SET_DEST (out_set), XEXP (SET_SRC (in_set), 2)))
3355 return false;
3356
3357 return true;
3358 }
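
/* A hedged illustration:

	OUT_INSN: (set (reg 1) (plus (reg 2) (reg 3)))
	IN_INSN:  (set (reg 4) (if_then_else (ne (reg 1) (const_int 0))
					     (reg 5) (reg 6)))

   Here if_test_bypass_p returns true: (reg 1) appears only in the
   IF_THEN_ELSE condition, not in the THEN arm (reg 5) or the ELSE
   arm (reg 6).  */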