/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this. */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints. */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied. */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)). */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function. */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one. (The only exception is in combine.c.) */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
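
#if 0
/* Usage sketch, not part of the build: callers normally go through the
   `recog_memoized' macro from recog.h, which wraps this function.  A
   negative result means the insn matches no pattern in the md file.
   The function name here is illustrative only.  */
static void
example_check_recognizable (insn)
     rtx insn;
{
  if (recog_memoized (insn) < 0)
    fatal_insn_not_found (insn);
}
#endif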
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate. */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things. */
  if (reload_completed)
    {
      /* ??? Doh! We've not got the wrapping insn. Cook one up. */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines. */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT. LOC is the location in the rtl
   at which NEW will be placed. If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported: If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters. If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group. In that case, the changes will be stored. The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change. Try to recognize the insn
   or validate the memory reference with the change applied. If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1. */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change. */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns. */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn. Save old code in
         case invalid. */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1. Otherwise, validate the
     change group we made. */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized. */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers. */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail. Likewise if
     this is not an asm and the insn wasn't recognized. */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them. */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied. */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return the number of changes made and not yet validated. */
int
num_changes_pending ()
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise. */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized. In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn. */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it. */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end. If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn). But don't do this if we
             have an ASM_OPERAND. */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern. Then consider this change
                 as having succeeded. The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur. */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized. */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group. */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up. */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes. Do this in the opposite order in which
     they were made. */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
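
#if 0
/* Usage sketch, not part of the build: queue two substitutions as a
   single group so that either both survive or neither does.  If the
   rewritten insn no longer recognizes, apply_change_group backs both
   changes out via cancel_changes.  The function name and the use of
   single_set here are illustrative only.  */
static int
example_swap_plus_operands (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx src, op0, op1;

  if (set == 0 || GET_CODE (SET_SRC (set)) != PLUS)
    return 0;

  src = SET_SRC (set);
  /* Save both operands first; validate_change installs each new value
     immediately, so reading XEXP (src, 0) after the first call would
     see the replacement, not the original.  */
  op0 = XEXP (src, 0);
  op1 = XEXP (src, 1);

  validate_change (insn, &XEXP (src, 0), op1, 1);
  validate_change (insn, &XEXP (src, 1), op0, 1);

  /* Validate the whole group; nonzero on success.  */
  return apply_change_group ();
}
#endif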

/* Replace every occurrence of FROM in X with TO. Mark each change with
   validate_change passing OBJECT. */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode. Avoid calling rtx_equal_p unless the
     operands look similar. */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements. */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do. */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode. This is
     used by regmove to change the mode of a pseudo register. */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent. Don't do any other
     simplifications, as it is not our job. */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may later want to remove this, once simplification is
         separated from this function. */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized. */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified. */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect. */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases). */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something. */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset. */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO. After all changes have been made, validate by seeing
   if INSN is still valid. */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO. After all
   changes have been made, validate by seeing if INSN is still valid. */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO. */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions. */
struct validate_replace_src_data
{
  rtx from;   /* Old RTX */
  rtx to;     /* New RTX */
  rtx insn;   /* Insn in which substitution is occurring. */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs. */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid. */
int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
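
#if 0
/* Usage sketch, not part of the build: forward-propagate a copy.
   Given `dst = src' in DEF and a later USER insn, try rewriting every
   use of DST in USER's sources back to SRC; SET_DESTs are left alone,
   so the substitution cannot clobber a definition.  Illustrative
   names throughout.  */
static int
example_propagate_copy (def, user)
     rtx def, user;
{
  rtx set = single_set (def);

  if (set == 0 || GET_CODE (SET_DEST (set)) != REG)
    return 0;

  /* Nonzero iff USER still recognizes after the rewrite.  */
  return validate_replace_src (SET_DEST (set), SET_SRC (set), user);
}
#endif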
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count. */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice. */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0 /* This is useless since the insn that sets the cc's
         must be followed immediately by the use of them. */
/* Return 1 if the CC value set up by INSN is not used. */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0. It returns a
   pointer to the innermost rtx expression containing DEST. Appearances of
   DEST that are being used to totally replace it are not counted. */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source. Otherwise, we
         need just check the source. */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code. Look for a unique usage of DEST. */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage. */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel. If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST. */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
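
#if 0
/* Usage sketch, not part of the build: the combine-style pattern.
   If the value set by INSN is consumed exactly once, that single use
   can be rewritten in place and the using insn revalidated.
   Illustrative names; assumes flow has run so LOG_LINKS exist.  */
static int
example_forward_into_single_use (insn, replacement)
     rtx insn, replacement;
{
  rtx set = single_set (insn);
  rtx user;
  rtx *usep;

  if (set == 0 || GET_CODE (SET_DEST (set)) != REG)
    return 0;

  usep = find_single_use (SET_DEST (set), insn, &user);
  if (usep == 0)
    return 0;

  /* Rewrite the one use and revalidate the using insn.  */
  return validate_change (user, usep, replacement, 0);
}
#endif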
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant. In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'. */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode. */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs. */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference. We should simplify all valid
         subregs of MEM anyway. But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge. */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
         create such rtl, and we must reject it. */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand. */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus. */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1. */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'. The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions. Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them. */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.) */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && (TEST_HARD_REG_BIT
              (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
               REGNO (sub)))
          && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (sub))
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
         create such rtl, and we must reject it. */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM. */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
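
#if 0
/* Usage sketch, not part of the build: besides serving as md-file
   predicates, these routines are handy inside a hypothetical expander
   for forcing an operand into a register when a pattern requires one.
   The operand index and SImode here are illustrative only.  */
  if (! register_operand (operands[1], SImode))
    operands[1] = force_reg (SImode, operands[1]);
#endif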

/* Return 1 for a register in Pmode; ignore the tested mode. */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register. */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1. It seems a safe assumption that this is
     in range for everyone. */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT. */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number. */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating. */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand. */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE. */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating. */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.) */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
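
/* Illustration: on a machine where the stack grows downward and no push
   rounding applies, a SImode push operand has the shape

        (mem:SI (pre_dec:SI (reg:SI sp)))

   while a push that must occupy more than GET_MODE_SIZE bytes (because
   of PUSH_ROUNDING) uses PRE_MODIFY, e.g. a 1-byte value rounded up to
   a 4-byte stack slot:

        (mem:QI (pre_modify:SI (reg:SI sp)
                               (plus:SI (reg:SI sp) (const_int -4))))

   Both shapes are hypothetical sketches of what the checks above
   accept, not output from any particular target.  */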

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE. */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description. */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register. */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand. */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand. */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator. This allows the use of
   MATCH_OPERATOR to recognize all the branch insns. */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1. */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands. */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs. */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands. */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked). */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together. */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...]. */
          int i;

          /* Make sure all the other parallel things really are clobbers. */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info. */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself. */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....). */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector. */
      /* Constraints for inputs are in the 2nd element vector. */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs. */

      /* At least one output, plus some CLOBBERs. */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself. */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs. */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
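
#if 0
/* Usage sketch, not part of the build: the caller allocates the output
   vectors, sized by asm_noperands, exactly as check_asm_operands does
   above.  Any NULL vector argument is simply skipped.  Assumes
   asm_noperands returned a positive count.  */
{
  int noperands = asm_noperands (body);
  rtx *operands = (rtx *) alloca (noperands * sizeof (rtx));
  const char **constraints
    = (const char **) alloca (noperands * sizeof (char *));
  const char *template
    = decode_asm_operands (body, operands, NULL, constraints, NULL);
}
#endif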

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload. */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
        {
        case '=':
        case '+':
        case '*':
        case '%':
        case '?':
        case '!':
        case '#':
        case '&':
        case ',':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't. Indicate that results are
             inconclusive. */
          while (ISDIGIT (*constraint))
            constraint++;
          result = -1;
          break;

        case 'p':
          if (address_operand (op, VOIDmode))
            return 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            return 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            return 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created. Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload. */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            return 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            return 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE)
            return 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
            return 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
            return 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            return 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            return 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
            return 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
            return 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
            return 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
            return 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
            return 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
            return 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
            return 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
            return 1;
          break;

        case 'X':
          return 1;

        case 'g':
          if (general_operand (op, VOIDmode))
            return 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT. */
          if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                return 1;
            }
#ifdef EXTRA_CONSTRAINT
          if (EXTRA_CONSTRAINT (op, c))
            return 1;
#endif
          break;
        }
    }

  return result;
}
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer. */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location. */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term. */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location. */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term. */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
\f
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload. */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs. */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c. */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK. */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal. Caveat user. */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added. */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode. But it should be valid for a single byte. */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory. */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode. We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c. adjust_address will
     go inside a LO_SUM here, so we do so as well. */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode. But it should be valid for a single byte. */
  return (*addressp) (QImode, z);
}
2012
2013 /* Return 1 if ADDR is an address-expression whose effect depends
2014 on the mode of the memory reference it is used in.
2015
2016 Autoincrement addressing is a typical example of mode-dependence
2017 because the amount of the increment depends on the mode. */
2018
2019 int
2020 mode_dependent_address_p (addr)
2021 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2022 {
2023 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2024 return 0;
2025 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2026 win: ATTRIBUTE_UNUSED_LABEL
2027 return 1;
2028 }
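/* Illustrative sketch, not compiled into GCC: an autoincrement address
   advances by the GET_MODE_SIZE of the enclosing MEM, so on targets
   whose GO_IF_MODE_DEPENDENT_ADDRESS jumps for autoincrement, the
   predicate above reports it as mode-dependent.  */
#if 0
  rtx addr = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);

  if (mode_dependent_address_p (addr))
    /* Reading this MEM in a narrower mode would change its effect.  */
    return 0;
#endif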
2029
2030 /* Return 1 if OP is a general operand
2031 other than a memory ref with a mode dependent address. */
2032
2033 int
2034 mode_independent_operand (op, mode)
2035 enum machine_mode mode;
2036 rtx op;
2037 {
2038 rtx addr;
2039
2040 if (! general_operand (op, mode))
2041 return 0;
2042
2043 if (GET_CODE (op) != MEM)
2044 return 1;
2045
2046 addr = XEXP (op, 0);
2047 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2048 return 1;
2049 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2050 lose: ATTRIBUTE_UNUSED_LABEL
2051 return 0;
2052 }
2053 \f
2054 /* Like extract_insn, but save the insn extracted and don't extract again
2055 when called again for the same insn, expecting that recog_data still
2056 contains valid information. This is used primarily by the gen_attr
2057 infrastructure, which often extracts the same insn again and again. */
2058 void
2059 extract_insn_cached (insn)
2060 rtx insn;
2061 {
2062 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2063 return;
2064 extract_insn (insn);
2065 recog_data.insn = insn;
2066 }
2067 /* Do cached extract_insn, constrain_operands and complain about failures.
2068 Used by insn_attrtab. */
2069 void
2070 extract_constrain_insn_cached (insn)
2071 rtx insn;
2072 {
2073 extract_insn_cached (insn);
2074 if (which_alternative == -1
2075 && !constrain_operands (reload_completed))
2076 fatal_insn_not_found (insn);
2077 }
2078 /* Do cached constrain_operands; reuse the alternative found by an earlier call. */
2079 int
2080 constrain_operands_cached (strict)
2081 int strict;
2082 {
2083 if (which_alternative == -1)
2084 return constrain_operands (strict);
2085 else
2086 return 1;
2087 }
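/* Illustrative sketch, not compiled into GCC, of the cached protocol as
   the generated attribute code uses it: extraction happens once per
   insn, and the constrain step is skipped once an alternative is known.  */
#if 0
  extract_insn_cached (insn);
  if (! constrain_operands_cached (reload_completed))
    fatal_insn_not_found (insn);
  /* which_alternative now indexes the matched constraint alternative.  */
#endif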
2088 \f
2089 /* Analyze INSN and fill in recog_data. */
2090
2091 void
2092 extract_insn (insn)
2093 rtx insn;
2094 {
2095 int i;
2096 int icode;
2097 int noperands;
2098 rtx body = PATTERN (insn);
2099
2100 recog_data.insn = NULL;
2101 recog_data.n_operands = 0;
2102 recog_data.n_alternatives = 0;
2103 recog_data.n_dups = 0;
2104 which_alternative = -1;
2105
2106 switch (GET_CODE (body))
2107 {
2108 case USE:
2109 case CLOBBER:
2110 case ASM_INPUT:
2111 case ADDR_VEC:
2112 case ADDR_DIFF_VEC:
2113 return;
2114
2115 case SET:
2116 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2117 goto asm_insn;
2118 else
2119 goto normal_insn;
2120 case PARALLEL:
2121 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2122 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2123 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2124 goto asm_insn;
2125 else
2126 goto normal_insn;
2127 case ASM_OPERANDS:
2128 asm_insn:
2129 recog_data.n_operands = noperands = asm_noperands (body);
2130 if (noperands >= 0)
2131 {
2132 /* This insn is an `asm' with operands. */
2133
2134 /* expand_asm_operands makes sure there aren't too many operands. */
2135 if (noperands > MAX_RECOG_OPERANDS)
2136 abort ();
2137
2138 /* Now get the operand values and constraints out of the insn. */
2139 decode_asm_operands (body, recog_data.operand,
2140 recog_data.operand_loc,
2141 recog_data.constraints,
2142 recog_data.operand_mode);
2143 if (noperands > 0)
2144 {
2145 const char *p = recog_data.constraints[0];
2146 recog_data.n_alternatives = 1;
2147 while (*p)
2148 recog_data.n_alternatives += (*p++ == ',');
2149 }
2150 break;
2151 }
2152 fatal_insn_not_found (insn);
2153
2154 default:
2155 normal_insn:
2156 /* Ordinary insn: recognize it, get the operands via insn_extract
2157 and get the constraints. */
2158
2159 icode = recog_memoized (insn);
2160 if (icode < 0)
2161 fatal_insn_not_found (insn);
2162
2163 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2164 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2165 recog_data.n_dups = insn_data[icode].n_dups;
2166
2167 insn_extract (insn);
2168
2169 for (i = 0; i < noperands; i++)
2170 {
2171 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2172 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2173 /* A VOIDmode match_operand gets its mode from the real operand. */
2174 if (recog_data.operand_mode[i] == VOIDmode)
2175 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2176 }
2177 }
2178 for (i = 0; i < noperands; i++)
2179 recog_data.operand_type[i]
2180 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2181 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2182 : OP_IN);
2183
2184 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2185 abort ();
2186 }
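/* Illustrative sketch, not compiled into GCC (NOTE_OUTPUT_REG is a
   hypothetical helper): after extract_insn, a pass can walk the
   operands generically.  */
#if 0
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] != OP_IN
        && GET_CODE (recog_data.operand[i]) == REG)
      /* Operand I is written: an output or in-out register.  */
      NOTE_OUTPUT_REG (recog_data.operand[i]);
#endif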
2187
2188 /* After calling extract_insn, you can use this function to extract some
2189 information from the constraint strings into a more usable form.
2190 The collected data is stored in recog_op_alt. */
2191 void
2192 preprocess_constraints ()
2193 {
2194 int i;
2195
2196 memset (recog_op_alt, 0, sizeof recog_op_alt);
2197 for (i = 0; i < recog_data.n_operands; i++)
2198 {
2199 int j;
2200 struct operand_alternative *op_alt;
2201 const char *p = recog_data.constraints[i];
2202
2203 op_alt = recog_op_alt[i];
2204
2205 for (j = 0; j < recog_data.n_alternatives; j++)
2206 {
2207 op_alt[j].class = NO_REGS;
2208 op_alt[j].constraint = p;
2209 op_alt[j].matches = -1;
2210 op_alt[j].matched = -1;
2211
2212 if (*p == '\0' || *p == ',')
2213 {
2214 op_alt[j].anything_ok = 1;
2215 continue;
2216 }
2217
2218 for (;;)
2219 {
2220 char c = *p++;
2221 if (c == '#')
2222 do
2223 c = *p++;
2224 while (c != ',' && c != '\0');
2225 if (c == ',' || c == '\0')
2226 break;
2227
2228 switch (c)
2229 {
2230 case '=': case '+': case '*': case '%':
2231 case 'E': case 'F': case 'G': case 'H':
2232 case 's': case 'i': case 'n':
2233 case 'I': case 'J': case 'K': case 'L':
2234 case 'M': case 'N': case 'O': case 'P':
2235 /* These don't say anything we care about. */
2236 break;
2237
2238 case '?':
2239 op_alt[j].reject += 6;
2240 break;
2241 case '!':
2242 op_alt[j].reject += 600;
2243 break;
2244 case '&':
2245 op_alt[j].earlyclobber = 1;
2246 break;
2247
2248 case '0': case '1': case '2': case '3': case '4':
2249 case '5': case '6': case '7': case '8': case '9':
2250 {
2251 char *end;
2252 op_alt[j].matches = strtoul (p - 1, &end, 10);
2253 recog_op_alt[op_alt[j].matches][j].matched = i;
2254 p = end;
2255 }
2256 break;
2257
2258 case 'm':
2259 op_alt[j].memory_ok = 1;
2260 break;
2261 case '<':
2262 op_alt[j].decmem_ok = 1;
2263 break;
2264 case '>':
2265 op_alt[j].incmem_ok = 1;
2266 break;
2267 case 'V':
2268 op_alt[j].nonoffmem_ok = 1;
2269 break;
2270 case 'o':
2271 op_alt[j].offmem_ok = 1;
2272 break;
2273 case 'X':
2274 op_alt[j].anything_ok = 1;
2275 break;
2276
2277 case 'p':
2278 op_alt[j].is_address = 1;
2279 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2280 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2281 break;
2282
2283 case 'g': case 'r':
2284 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2285 break;
2286
2287 default:
2288 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2289 break;
2290 }
2291 }
2292 }
2293 }
2294 }
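/* Illustrative sketch, not compiled into GCC: consumers such as
   regclass read the preprocessed table instead of reparsing the
   constraint strings.  */
#if 0
  extract_insn (insn);
  preprocess_constraints ();
  for (j = 0; j < recog_data.n_alternatives; j++)
    for (i = 0; i < recog_data.n_operands; i++)
      {
        struct operand_alternative *a = &recog_op_alt[i][j];

        if (a->matches >= 0)
          /* In alternative J, operand I must match operand A->MATCHES.  */
          continue;
        if (a->class != NO_REGS || a->memory_ok)
          /* Operand I may live in a register of class A->CLASS,
             or in memory.  */
          ;
      }
#endif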
2295
2296 /* Check the operands of an insn against the insn's operand constraints
2297 and return 1 if they are valid.
2298 The information about the insn's operands, constraints, operand modes
2299 etc. is obtained from the global variables set up by extract_insn.
2300
2301 WHICH_ALTERNATIVE is set to a number which indicates which
2302 alternative of constraints was matched: 0 for the first alternative,
2303 1 for the next, etc.
2304
2305 In addition, when two operands are required to match
2306 and it happens that the output operand is (reg) while the
2307 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2308 make the output operand look like the input.
2309 This is because the output operand is the one the template will print.
2310
2311 This is used in final, just before printing the assembler code and by
2312 the routines that determine an insn's attribute.
2313
2314 If STRICT is positive, it means that we have been
2315 called after reload has been completed. In that case, we must
2316 do all checks strictly. If it is zero, it means that we have been called
2317 before reload has completed. In that case, we first try to see if we can
2318 find an alternative that matches strictly. If not, we try again, this
2319 time assuming that reload will fix up the insn. This provides a "best
2320 guess" for the alternative and is used to compute attributes of insns prior
2321 to reload. A negative value of STRICT is used for this internal call. */
2322
2323 struct funny_match
2324 {
2325 int this, other;
2326 };
2327
2328 int
2329 constrain_operands (strict)
2330 int strict;
2331 {
2332 const char *constraints[MAX_RECOG_OPERANDS];
2333 int matching_operands[MAX_RECOG_OPERANDS];
2334 int earlyclobber[MAX_RECOG_OPERANDS];
2335 int c;
2336
2337 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2338 int funny_match_index;
2339
2340 which_alternative = 0;
2341 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2342 return 1;
2343
2344 for (c = 0; c < recog_data.n_operands; c++)
2345 {
2346 constraints[c] = recog_data.constraints[c];
2347 matching_operands[c] = -1;
2348 }
2349
2350 do
2351 {
2352 int opno;
2353 int lose = 0;
2354 funny_match_index = 0;
2355
2356 for (opno = 0; opno < recog_data.n_operands; opno++)
2357 {
2358 rtx op = recog_data.operand[opno];
2359 enum machine_mode mode = GET_MODE (op);
2360 const char *p = constraints[opno];
2361 int offset = 0;
2362 int win = 0;
2363 int val;
2364
2365 earlyclobber[opno] = 0;
2366
2367 /* A unary operator may be accepted by the predicate, but it
2368 is irrelevant for matching constraints. */
2369 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2370 op = XEXP (op, 0);
2371
2372 if (GET_CODE (op) == SUBREG)
2373 {
2374 if (GET_CODE (SUBREG_REG (op)) == REG
2375 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2376 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2377 GET_MODE (SUBREG_REG (op)),
2378 SUBREG_BYTE (op),
2379 GET_MODE (op));
2380 op = SUBREG_REG (op);
2381 }
2382
2383 /* An empty constraint or empty alternative
2384 allows anything which matched the pattern. */
2385 if (*p == 0 || *p == ',')
2386 win = 1;
2387
2388 while (*p && (c = *p++) != ',')
2389 switch (c)
2390 {
2391 case '?': case '!': case '*': case '%':
2392 case '=': case '+':
2393 break;
2394
2395 case '#':
2396 /* Ignore rest of this alternative as far as
2397 constraint checking is concerned. */
2398 while (*p && *p != ',')
2399 p++;
2400 break;
2401
2402 case '&':
2403 earlyclobber[opno] = 1;
2404 break;
2405
2406 case '0': case '1': case '2': case '3': case '4':
2407 case '5': case '6': case '7': case '8': case '9':
2408 {
2409 /* This operand must be the same as a previous one.
2410 This kind of constraint is used for instructions such
2411 as add when they take only two operands.
2412
2413 Note that the lower-numbered operand is passed first.
2414
2415 If we are not testing strictly, assume that this
2416 constraint will be satisfied. */
2417
2418 char *end;
2419 int match;
2420
2421 match = strtoul (p - 1, &end, 10);
2422 p = end;
2423
2424 if (strict < 0)
2425 val = 1;
2426 else
2427 {
2428 rtx op1 = recog_data.operand[match];
2429 rtx op2 = recog_data.operand[opno];
2430
2431 /* A unary operator may be accepted by the predicate,
2432 but it is irrelevant for matching constraints. */
2433 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2434 op1 = XEXP (op1, 0);
2435 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2436 op2 = XEXP (op2, 0);
2437
2438 val = operands_match_p (op1, op2);
2439 }
2440
2441 matching_operands[opno] = match;
2442 matching_operands[match] = opno;
2443
2444 if (val != 0)
2445 win = 1;
2446
2447 /* If output is *x and input is *--x, arrange later
2448 to change the output to *--x as well, since the
2449 output op is the one that will be printed. */
2450 if (val == 2 && strict > 0)
2451 {
2452 funny_match[funny_match_index].this = opno;
2453 funny_match[funny_match_index++].other = match;
2454 }
2455 }
2456 break;
2457
2458 case 'p':
2459 /* p is used for address_operands. When we are called by
2460 gen_reload, no one will have checked that the address is
2461 strictly valid, i.e., that all pseudos requiring hard regs
2462 have gotten them. */
2463 if (strict <= 0
2464 || (strict_memory_address_p (recog_data.operand_mode[opno],
2465 op)))
2466 win = 1;
2467 break;
2468
2469 /* No need to check general_operand again;
2470 it was done in insn-recog.c. */
2471 case 'g':
2472 /* Anything goes unless it is a REG and really has a hard reg
2473 but the hard reg is not in the class GENERAL_REGS. */
2474 if (strict < 0
2475 || GENERAL_REGS == ALL_REGS
2476 || GET_CODE (op) != REG
2477 || (reload_in_progress
2478 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2479 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2480 win = 1;
2481 break;
2482
2483 case 'X':
2484 /* This is used for a MATCH_SCRATCH in the cases when
2485 we don't actually need anything. So anything goes
2486 any time. */
2487 win = 1;
2488 break;
2489
2490 case 'm':
2491 if (GET_CODE (op) == MEM
2492 /* Before reload, accept what reload can turn into mem. */
2493 || (strict < 0 && CONSTANT_P (op))
2494 /* During reload, accept a pseudo. */
2495 || (reload_in_progress && GET_CODE (op) == REG
2496 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2497 win = 1;
2498 break;
2499
2500 case '<':
2501 if (GET_CODE (op) == MEM
2502 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2503 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2504 win = 1;
2505 break;
2506
2507 case '>':
2508 if (GET_CODE (op) == MEM
2509 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2510 || GET_CODE (XEXP (op, 0)) == POST_INC))
2511 win = 1;
2512 break;
2513
2514 case 'E':
2515 case 'F':
2516 if (GET_CODE (op) == CONST_DOUBLE)
2517 win = 1;
2518 break;
2519
2520 case 'G':
2521 case 'H':
2522 if (GET_CODE (op) == CONST_DOUBLE
2523 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2524 win = 1;
2525 break;
2526
2527 case 's':
2528 if (GET_CODE (op) == CONST_INT
2529 || (GET_CODE (op) == CONST_DOUBLE
2530 && GET_MODE (op) == VOIDmode))
2531 break;
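/* Fall through: non-integer constants are checked as for 'i'.  */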
2532 case 'i':
2533 if (CONSTANT_P (op))
2534 win = 1;
2535 break;
2536
2537 case 'n':
2538 if (GET_CODE (op) == CONST_INT
2539 || (GET_CODE (op) == CONST_DOUBLE
2540 && GET_MODE (op) == VOIDmode))
2541 win = 1;
2542 break;
2543
2544 case 'I':
2545 case 'J':
2546 case 'K':
2547 case 'L':
2548 case 'M':
2549 case 'N':
2550 case 'O':
2551 case 'P':
2552 if (GET_CODE (op) == CONST_INT
2553 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2554 win = 1;
2555 break;
2556
2557 case 'V':
2558 if (GET_CODE (op) == MEM
2559 && ((strict > 0 && ! offsettable_memref_p (op))
2560 || (strict < 0
2561 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2562 || (reload_in_progress
2563 && !(GET_CODE (op) == REG
2564 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2565 win = 1;
2566 break;
2567
2568 case 'o':
2569 if ((strict > 0 && offsettable_memref_p (op))
2570 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2571 /* Before reload, accept what reload can handle. */
2572 || (strict < 0
2573 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2574 /* During reload, accept a pseudo. */
2575 || (reload_in_progress && GET_CODE (op) == REG
2576 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2577 win = 1;
2578 break;
2579
2580 default:
2581 {
2582 enum reg_class class;
2583
2584 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2585 if (class != NO_REGS)
2586 {
2587 if (strict < 0
2588 || (strict == 0
2589 && GET_CODE (op) == REG
2590 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2591 || (strict == 0 && GET_CODE (op) == SCRATCH)
2592 || (GET_CODE (op) == REG
2593 && reg_fits_class_p (op, class, offset, mode)))
2594 win = 1;
2595 }
2596 #ifdef EXTRA_CONSTRAINT
2597 else if (EXTRA_CONSTRAINT (op, c))
2598 win = 1;
2599 #endif
2600 break;
2601 }
2602 }
2603
2604 constraints[opno] = p;
2605 /* If this operand did not win somehow,
2606 this alternative loses. */
2607 if (! win)
2608 lose = 1;
2609 }
2610 /* This alternative won; the operands are ok.
2611 Change whichever operands this alternative says to change. */
2612 if (! lose)
2613 {
2614 int opno, eopno;
2615
2616 /* See if any earlyclobber operand conflicts with some other
2617 operand. */
2618
2619 if (strict > 0)
2620 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2621 /* Ignore earlyclobber operands now in memory,
2622 because we would often report failure when we have
2623 two memory operands, one of which was formerly a REG. */
2624 if (earlyclobber[eopno]
2625 && GET_CODE (recog_data.operand[eopno]) == REG)
2626 for (opno = 0; opno < recog_data.n_operands; opno++)
2627 if ((GET_CODE (recog_data.operand[opno]) == MEM
2628 || recog_data.operand_type[opno] != OP_OUT)
2629 && opno != eopno
2630 /* Ignore things like match_operator operands. */
2631 && *recog_data.constraints[opno] != 0
2632 && ! (matching_operands[opno] == eopno
2633 && operands_match_p (recog_data.operand[opno],
2634 recog_data.operand[eopno]))
2635 && ! safe_from_earlyclobber (recog_data.operand[opno],
2636 recog_data.operand[eopno]))
2637 lose = 1;
2638
2639 if (! lose)
2640 {
2641 while (--funny_match_index >= 0)
2642 {
2643 recog_data.operand[funny_match[funny_match_index].other]
2644 = recog_data.operand[funny_match[funny_match_index].this];
2645 }
2646
2647 return 1;
2648 }
2649 }
2650
2651 which_alternative++;
2652 }
2653 while (which_alternative < recog_data.n_alternatives);
2654
2655 which_alternative = -1;
2656 /* If we are about to reject this, but we are not to test strictly,
2657 try a very loose test. Only return failure if it fails also. */
2658 if (strict == 0)
2659 return constrain_operands (-1);
2660 else
2661 return 0;
2662 }
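/* Illustrative sketch, not compiled into GCC, of the STRICT convention:
   post-reload callers such as final pass a positive value; pre-reload
   callers pass zero and get the loose retry with -1 automatically.  */
#if 0
  extract_insn (insn);
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
#endif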
2663
2664 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2665 is a hard reg in class CLASS when its regno is offset by OFFSET
2666 and changed to mode MODE.
2667 If REG occupies multiple hard regs, all of them must be in CLASS. */
2668
2669 int
2670 reg_fits_class_p (operand, class, offset, mode)
2671 rtx operand;
2672 enum reg_class class;
2673 int offset;
2674 enum machine_mode mode;
2675 {
2676 int regno = REGNO (operand);
2677 if (regno < FIRST_PSEUDO_REGISTER
2678 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2679 regno + offset))
2680 {
2681 int sr;
2682 regno += offset;
2683 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2684 sr > 0; sr--)
2685 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2686 regno + sr))
2687 break;
2688 return sr == 0;
2689 }
2690
2691 return 0;
2692 }
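/* Illustrative sketch, not compiled into GCC (OP and WIN as in
   constrain_operands): a DImode value in hard register N occupies N and
   N+1 on a 32-bit target, and the predicate above requires both to be
   in the class.  */
#if 0
  if (GET_CODE (op) == REG
      && reg_fits_class_p (op, GENERAL_REGS, 0, DImode))
    win = 1;
#endif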
2693 \f
2694 /* Split single instruction. Helper function for split_all_insns.
2695 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2696 static rtx
2697 split_insn (insn)
2698 rtx insn;
2699 {
2700 rtx set;
2701 if (!INSN_P (insn))
2702 ;
2703 /* Don't split no-op move insns. These should silently
2704 disappear later in final. Splitting such insns would
2705 break the code that handles REG_NO_CONFLICT blocks. */
2706
2707 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2708 {
2709 /* Nops get in the way while scheduling, so delete them
2710 now if register allocation has already been done. It
2711 is too risky to try to do this before register
2712 allocation, and there are unlikely to be very many
2713 nops then anyways. */
2714 if (reload_completed)
2715 delete_insn_and_edges (insn);
2716 }
2717 else
2718 {
2719 /* Split insns here to get max fine-grain parallelism. */
2720 rtx first = PREV_INSN (insn);
2721 rtx last = try_split (PATTERN (insn), insn, 1);
2722
2723 if (last != insn)
2724 {
2725 /* try_split returns the NOTE that INSN became. */
2726 PUT_CODE (insn, NOTE);
2727 NOTE_SOURCE_FILE (insn) = 0;
2728 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2729
2730 /* ??? Coddle to md files that generate subregs in post-
2731 reload splitters instead of computing the proper
2732 hard register. */
2733 if (reload_completed && first != last)
2734 {
2735 first = NEXT_INSN (first);
2736 while (1)
2737 {
2738 if (INSN_P (first))
2739 cleanup_subreg_operands (first);
2740 if (first == last)
2741 break;
2742 first = NEXT_INSN (first);
2743 }
2744 }
2745 return last;
2746 }
2747 }
2748 return NULL_RTX;
2749 }
2750 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2751
2752 void
2753 split_all_insns (upd_life)
2754 int upd_life;
2755 {
2756 sbitmap blocks;
2757 int changed;
2758 basic_block bb;
2759
2760 blocks = sbitmap_alloc (last_basic_block);
2761 sbitmap_zero (blocks);
2762 changed = 0;
2763
2764 FOR_EACH_BB_REVERSE (bb)
2765 {
2766 rtx insn, next;
2767 bool finish = false;
2768
2769 for (insn = bb->head; !finish ; insn = next)
2770 {
2771 rtx last;
2772
2773 /* Can't use `next_real_insn' because that might go across
2774 CODE_LABELS and skip whole basic blocks. */
2775 next = NEXT_INSN (insn);
2776 finish = (insn == bb->end);
2777 last = split_insn (insn);
2778 if (last)
2779 {
2780 /* The split sequence may end with a barrier; back up past
2781 it to the last real insn, which marks the BB boundary we
2782 are interested in. */
2783
2784 while (GET_CODE (last) == BARRIER)
2785 last = PREV_INSN (last);
2786 SET_BIT (blocks, bb->index);
2787 changed = 1;
2788 insn = last;
2789 }
2790 }
2791 }
2792
2793 if (changed)
2794 {
2795 find_many_sub_basic_blocks (blocks);
2796 }
2797
2798 if (changed && upd_life)
2799 {
2800 count_or_remove_death_notes (blocks, 1);
2801 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2802 }
2803 #ifdef ENABLE_CHECKING
2804 verify_flow_info ();
2805 #endif
2806
2807 sbitmap_free (blocks);
2808 }
2809
2810 /* Same as split_all_insns, but do not expect CFG to be available.
2811 Used by machine dependent reorg passes. */
2812
2813 void
2814 split_all_insns_noflow ()
2815 {
2816 rtx next, insn;
2817
2818 for (insn = get_insns (); insn; insn = next)
2819 {
2820 next = NEXT_INSN (insn);
2821 split_insn (insn);
2822 }
2823 return;
2824 }
2825 \f
2826 #ifdef HAVE_peephole2
2827 struct peep2_insn_data
2828 {
2829 rtx insn;
2830 regset live_before;
2831 };
2832
2833 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2834 static int peep2_current;
2835
2836 /* A non-insn marker indicating the last insn of the block.
2837 The live_before regset for this element is correct, indicating
2838 global_live_at_end for the block. */
2839 #define PEEP2_EOB pc_rtx
2840
2841 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2842 does not exist. Used by the recognizer to find the next insn to match
2843 in a multi-insn pattern. */
2844
2845 rtx
2846 peep2_next_insn (n)
2847 int n;
2848 {
2849 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2850 abort ();
2851
2852 n += peep2_current;
2853 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2854 n -= MAX_INSNS_PER_PEEP2 + 1;
2855
2856 if (peep2_insn_data[n].insn == PEEP2_EOB)
2857 return NULL_RTX;
2858 return peep2_insn_data[n].insn;
2859 }
2860
2861 /* Return true if REGNO is dead before the Nth non-note insn
2862 after `current'. */
2863
2864 int
2865 peep2_regno_dead_p (ofs, regno)
2866 int ofs;
2867 int regno;
2868 {
2869 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2870 abort ();
2871
2872 ofs += peep2_current;
2873 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2874 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2875
2876 if (peep2_insn_data[ofs].insn == NULL_RTX)
2877 abort ();
2878
2879 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2880 }
2881
2882 /* Similarly for a REG. */
2883
2884 int
2885 peep2_reg_dead_p (ofs, reg)
2886 int ofs;
2887 rtx reg;
2888 {
2889 int regno, n;
2890
2891 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2892 abort ();
2893
2894 ofs += peep2_current;
2895 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2896 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2897
2898 if (peep2_insn_data[ofs].insn == NULL_RTX)
2899 abort ();
2900
2901 regno = REGNO (reg);
2902 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2903 while (--n >= 0)
2904 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2905 return 0;
2906 return 1;
2907 }
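/* Illustrative sketch: a define_peephole2 condition typically uses
   these predicates to verify that clobbering a register is safe,
   e.g. (in a machine description; FLAGS_REGNUM is hypothetical):

	"peep2_regno_dead_p (0, FLAGS_REGNUM)
	 && peep2_reg_dead_p (1, operands[0])"  */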
2908
2909 /* Try to find a hard register of mode MODE, matching the register class in
2910 CLASS_STR, which is available from the beginning of the peephole insn at
2911 offset FROM and remains available through the end of the insn at offset
2912 TO. Offsets are counted from the current peephole position, in the same
2913 way as for peep2_next_insn.
2914 Registers that already have bits set in REG_SET will not be considered.
2915
2916 If an appropriate register is available, it will be returned and the
2917 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2918 returned. */
2919
2920 rtx
2921 peep2_find_free_register (from, to, class_str, mode, reg_set)
2922 int from, to;
2923 const char *class_str;
2924 enum machine_mode mode;
2925 HARD_REG_SET *reg_set;
2926 {
2927 static int search_ofs;
2928 enum reg_class class;
2929 HARD_REG_SET live;
2930 int i;
2931
2932 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2933 abort ();
2934
2935 from += peep2_current;
2936 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2937 from -= MAX_INSNS_PER_PEEP2 + 1;
2938 to += peep2_current;
2939 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2940 to -= MAX_INSNS_PER_PEEP2 + 1;
2941
2942 if (peep2_insn_data[from].insn == NULL_RTX)
2943 abort ();
2944 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2945
2946 while (from != to)
2947 {
2948 HARD_REG_SET this_live;
2949
2950 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2951 from = 0;
2952 if (peep2_insn_data[from].insn == NULL_RTX)
2953 abort ();
2954 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2955 IOR_HARD_REG_SET (live, this_live);
2956 }
2957
2958 class = (class_str[0] == 'r' ? GENERAL_REGS
2959 : REG_CLASS_FROM_LETTER (class_str[0]));
2960
2961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2962 {
2963 int raw_regno, regno, success, j;
2964
2965 /* Distribute the free registers as much as possible. */
2966 raw_regno = search_ofs + i;
2967 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2968 raw_regno -= FIRST_PSEUDO_REGISTER;
2969 #ifdef REG_ALLOC_ORDER
2970 regno = reg_alloc_order[raw_regno];
2971 #else
2972 regno = raw_regno;
2973 #endif
2974
2975 /* Don't allocate fixed registers. */
2976 if (fixed_regs[regno])
2977 continue;
2978 /* Make sure the register is of the right class. */
2979 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2980 continue;
2981 /* And can support the mode we need. */
2982 if (! HARD_REGNO_MODE_OK (regno, mode))
2983 continue;
2984 /* And that we don't create an extra save/restore. */
2985 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2986 continue;
2987 /* And we don't clobber traceback for noreturn functions. */
2988 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2989 && (! reload_completed || frame_pointer_needed))
2990 continue;
2991
2992 success = 1;
2993 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2994 {
2995 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2996 || TEST_HARD_REG_BIT (live, regno + j))
2997 {
2998 success = 0;
2999 break;
3000 }
3001 }
3002 if (success)
3003 {
3004 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3005 SET_HARD_REG_BIT (*reg_set, regno + j);
3006
3007 /* Start the next search with the next register. */
3008 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3009 raw_regno = 0;
3010 search_ofs = raw_regno;
3011
3012 return gen_rtx_REG (mode, regno);
3013 }
3014 }
3015
3016 search_ofs = 0;
3017 return NULL_RTX;
3018 }
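/* Illustrative sketch: the preparation statements of a define_peephole2
   typically allocate a scratch register like this (hypothetical use;
   FAIL is only meaningful inside a machine description):

	HARD_REG_SET live;
	CLEAR_HARD_REG_SET (live);
	operands[2] = peep2_find_free_register (0, 1, "r", SImode, &live);
	if (operands[2] == NULL_RTX)
	  FAIL;
*/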
3019
3020 /* Perform the peephole2 optimization pass. */
3021
3022 void
3023 peephole2_optimize (dump_file)
3024 FILE *dump_file ATTRIBUTE_UNUSED;
3025 {
3026 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3027 rtx insn, prev;
3028 regset live;
3029 int i;
3030 basic_block bb;
3031 #ifdef HAVE_conditional_execution
3032 sbitmap blocks;
3033 bool changed;
3034 #endif
3035 bool do_cleanup_cfg = false;
3036 bool do_rebuild_jump_labels = false;
3037
3038 /* Initialize the regsets we're going to use. */
3039 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3040 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3041 live = INITIALIZE_REG_SET (rs_heads[i]);
3042
3043 #ifdef HAVE_conditional_execution
3044 blocks = sbitmap_alloc (last_basic_block);
3045 sbitmap_zero (blocks);
3046 changed = false;
3047 #else
3048 count_or_remove_death_notes (NULL, 1);
3049 #endif
3050
3051 FOR_EACH_BB_REVERSE (bb)
3052 {
3053 struct propagate_block_info *pbi;
3054
3055 /* Indicate that all slots except the last hold invalid data. */
3056 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3057 peep2_insn_data[i].insn = NULL_RTX;
3058
3059 /* Indicate that the last slot contains live_after data. */
3060 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3061 peep2_current = MAX_INSNS_PER_PEEP2;
3062
3063 /* Start up propagation. */
3064 COPY_REG_SET (live, bb->global_live_at_end);
3065 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3066
3067 #ifdef HAVE_conditional_execution
3068 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3069 #else
3070 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3071 #endif
3072
3073 for (insn = bb->end; ; insn = prev)
3074 {
3075 prev = PREV_INSN (insn);
3076 if (INSN_P (insn))
3077 {
3078 rtx try, before_try, x;
3079 int match_len;
3080 rtx note;
3081 bool was_call = false;
3082
3083 /* Record this insn. */
3084 if (--peep2_current < 0)
3085 peep2_current = MAX_INSNS_PER_PEEP2;
3086 peep2_insn_data[peep2_current].insn = insn;
3087 propagate_one_insn (pbi, insn);
3088 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3089
3090 /* Match the peephole. */
3091 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3092 if (try != NULL)
3093 {
3094 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3095 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3096 cfg-related call notes. */
3097 for (i = 0; i <= match_len; ++i)
3098 {
3099 int j;
3100 rtx old_insn, new_insn, note;
3101
3102 j = i + peep2_current;
3103 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3104 j -= MAX_INSNS_PER_PEEP2 + 1;
3105 old_insn = peep2_insn_data[j].insn;
3106 if (GET_CODE (old_insn) != CALL_INSN)
3107 continue;
3108 was_call = true;
3109
3110 new_insn = try;
3111 while (new_insn != NULL_RTX)
3112 {
3113 if (GET_CODE (new_insn) == CALL_INSN)
3114 break;
3115 new_insn = NEXT_INSN (new_insn);
3116 }
3117
3118 if (new_insn == NULL_RTX)
3119 abort ();
3120
3121 CALL_INSN_FUNCTION_USAGE (new_insn)
3122 = CALL_INSN_FUNCTION_USAGE (old_insn);
3123
3124 for (note = REG_NOTES (old_insn);
3125 note;
3126 note = XEXP (note, 1))
3127 switch (REG_NOTE_KIND (note))
3128 {
3129 case REG_NORETURN:
3130 case REG_SETJMP:
3131 case REG_ALWAYS_RETURN:
3132 REG_NOTES (new_insn)
3133 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3134 XEXP (note, 0),
3135 REG_NOTES (new_insn));
3136 default:
3137 /* Discard all other reg notes. */
3138 break;
3139 }
3140
3141 /* Croak if there is another call in the sequence. */
3142 while (++i <= match_len)
3143 {
3144 j = i + peep2_current;
3145 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3146 j -= MAX_INSNS_PER_PEEP2 + 1;
3147 old_insn = peep2_insn_data[j].insn;
3148 if (GET_CODE (old_insn) == CALL_INSN)
3149 abort ();
3150 }
3151 break;
3152 }
3153
3154 i = match_len + peep2_current;
3155 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3156 i -= MAX_INSNS_PER_PEEP2 + 1;
3157
3158 note = find_reg_note (peep2_insn_data[i].insn,
3159 REG_EH_REGION, NULL_RTX);
3160
3161 /* Replace the old sequence with the new. */
3162 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3163 INSN_SCOPE (peep2_insn_data[i].insn));
3164 before_try = PREV_INSN (insn);
3165 delete_insn_chain (insn, peep2_insn_data[i].insn);
3166
3167 /* Re-insert the EH_REGION notes. */
3168 if (note || (was_call && nonlocal_goto_handler_labels))
3169 {
3170 edge eh_edge;
3171
3172 for (eh_edge = bb->succ; eh_edge
3173 ; eh_edge = eh_edge->succ_next)
3174 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3175 break;
3176
3177 for (x = try ; x != before_try ; x = PREV_INSN (x))
3178 if (GET_CODE (x) == CALL_INSN
3179 || (flag_non_call_exceptions
3180 && may_trap_p (PATTERN (x))
3181 && !find_reg_note (x, REG_EH_REGION, NULL)))
3182 {
3183 if (note)
3184 REG_NOTES (x)
3185 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3186 XEXP (note, 0),
3187 REG_NOTES (x));
3188
3189 if (x != bb->end && eh_edge)
3190 {
3191 edge nfte, nehe;
3192 int flags;
3193
3194 nfte = split_block (bb, x);
3195 flags = (eh_edge->flags
3196 & (EDGE_EH | EDGE_ABNORMAL));
3197 if (GET_CODE (x) == CALL_INSN)
3198 flags |= EDGE_ABNORMAL_CALL;
3199 nehe = make_edge (nfte->src, eh_edge->dest,
3200 flags);
3201
3202 nehe->probability = eh_edge->probability;
3203 nfte->probability
3204 = REG_BR_PROB_BASE - nehe->probability;
3205
3206 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3207 #ifdef HAVE_conditional_execution
3208 SET_BIT (blocks, nfte->dest->index);
3209 changed = true;
3210 #endif
3211 bb = nfte->src;
3212 eh_edge = nehe;
3213 }
3214 }
3215
3216 /* A possibly trapping insn may have been turned into a
3217 non-trapping one; zap any dummy outgoing edges. */
3218 do_cleanup_cfg |= purge_dead_edges (bb);
3219 }
3220
3221 #ifdef HAVE_conditional_execution
3222 /* With conditional execution, we cannot back up the
3223 live information so easily, since the conditional
3224 death data structures are not so self-contained.
3225 So record that we've made a modification to this
3226 block and update life information at the end. */
3227 SET_BIT (blocks, bb->index);
3228 changed = true;
3229
3230 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3231 peep2_insn_data[i].insn = NULL_RTX;
3232 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3233 #else
3234 /* Back up lifetime information past the end of the
3235 newly created sequence. */
3236 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3237 i = 0;
3238 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3239
3240 /* Update life information for the new sequence. */
3241 x = try;
3242 do
3243 {
3244 if (INSN_P (x))
3245 {
3246 if (--i < 0)
3247 i = MAX_INSNS_PER_PEEP2;
3248 peep2_insn_data[i].insn = x;
3249 propagate_one_insn (pbi, x);
3250 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3251 }
3252 x = PREV_INSN (x);
3253 }
3254 while (x != prev);
3255
3256 /* ??? Should verify that LIVE now matches what we
3257 had before the new sequence. */
3258
3259 peep2_current = i;
3260 #endif
3261
3262 /* If we generated a jump instruction, it won't have
3263 JUMP_LABEL set. Recompute after we're done. */
3264 for (x = try; x != before_try; x = PREV_INSN (x))
3265 if (GET_CODE (x) == JUMP_INSN)
3266 {
3267 do_rebuild_jump_labels = true;
3268 break;
3269 }
3270 }
3271 }
3272
3273 if (insn == bb->head)
3274 break;
3275 }
3276
3277 free_propagate_block_info (pbi);
3278 }
3279
3280 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3281 FREE_REG_SET (peep2_insn_data[i].live_before);
3282 FREE_REG_SET (live);
3283
3284 if (do_rebuild_jump_labels)
3285 rebuild_jump_labels (get_insns ());
3286
3287 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3288 we've changed global life since exception handlers are no longer
3289 reachable. */
3290 if (do_cleanup_cfg)
3291 {
3292 cleanup_cfg (0);
3293 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3294 }
3295 #ifdef HAVE_conditional_execution
3296 else
3297 {
3298 count_or_remove_death_notes (blocks, 1);
3299 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3300 }
3301 sbitmap_free (blocks);
3302 #endif
3303 }
3304 #endif /* HAVE_peephole2 */
3305
3306 /* Common predicates for use with define_bypass. */
3307
3308 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3309 data, not the address operand(s) of the store. IN_INSN must be
3310 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3311 SETs inside. */
3312
3313 int
3314 store_data_bypass_p (out_insn, in_insn)
3315 rtx out_insn, in_insn;
3316 {
3317 rtx out_set, in_set;
3318
3319 in_set = single_set (in_insn);
3320 if (! in_set)
3321 abort ();
3322
3323 if (GET_CODE (SET_DEST (in_set)) != MEM)
3324 return false;
3325
3326 out_set = single_set (out_insn);
3327 if (out_set)
3328 {
3329 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3330 return false;
3331 }
3332 else
3333 {
3334 rtx out_pat;
3335 int i;
3336
3337 out_pat = PATTERN (out_insn);
3338 if (GET_CODE (out_pat) != PARALLEL)
3339 abort ();
3340
3341 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3342 {
3343 rtx exp = XVECEXP (out_pat, 0, i);
3344
3345 if (GET_CODE (exp) == CLOBBER)
3346 continue;
3347
3348 if (GET_CODE (exp) != SET)
3349 abort ();
3350
3351 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3352 return false;
3353 }
3354 }
3355
3356 return true;
3357 }
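/* Illustrative sketch (hypothetical machine description): a scheduler
   description can use this predicate to forward an ALU result into the
   DATA of a following store at reduced latency, while the full latency
   still applies when the result feeds the store's ADDRESS:

	(define_bypass 1 "cpu_alu" "cpu_store" "store_data_bypass_p")  */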
3358
3359 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3360 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3361 or a multiple set; IN_INSN must be a single_set for a meaningful result,
3362 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3363
3364 int
3365 if_test_bypass_p (out_insn, in_insn)
3366 rtx out_insn, in_insn;
3367 {
3368 rtx out_set, in_set;
3369
3370 in_set = single_set (in_insn);
3371 if (! in_set)
3372 {
3373 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3374 return false;
3375 abort ();
3376 }
3377
3378 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3379 return false;
3380 in_set = SET_SRC (in_set);
3381
3382 out_set = single_set (out_insn);
3383 if (out_set)
3384 {
3385 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3386 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3387 return false;
3388 }
3389 else
3390 {
3391 rtx out_pat;
3392 int i;
3393
3394 out_pat = PATTERN (out_insn);
3395 if (GET_CODE (out_pat) != PARALLEL)
3396 abort ();
3397
3398 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3399 {
3400 rtx exp = XVECEXP (out_pat, 0, i);
3401
3402 if (GET_CODE (exp) == CLOBBER)
3403 continue;
3404
3405 if (GET_CODE (exp) != SET)
3406 abort ();
3407
3408 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3409 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3410 return false;
3411 }
3412 }
3413
3414 return true;
3415 }
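/* Illustrative sketch (hypothetical machine description): forwarding
   into the tested condition of a conditional move faster than into its
   THEN/ELSE arms:

	(define_bypass 1 "cpu_compare" "cpu_cmov" "if_test_bypass_p")  */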