/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
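
/* For orientation (not part of the original file): most callers reach
   this through the recog_memoized macro in recog.h, along the lines of

       if (recog_memoized (insn) >= 0)
	 ... INSN matches some define_insn pattern ...

   which consults INSN_CODE first, exactly as above.  */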
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
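
/* A minimal usage sketch (illustrative, not part of the original file):
   replace the source of a single-SET insn, letting the machinery back
   the change out if the modified insn no longer matches a pattern.
   NEW_SRC is a hypothetical replacement built by the caller.  */

static int ATTRIBUTE_UNUSED
example_replace_set_src (rtx insn, rtx new_src)
{
  /* IN_GROUP == 0: the change is validated (re-recognized) at once and
     suppressed on failure, so no explicit cleanup is needed.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0);
}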

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
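
/* Sketch of the grouped-change protocol (illustrative only; the insn
   and replacement locations are hypothetical).  num_validated_changes
   gives a rollback point and cancel_changes retracts to it, which is
   how passes such as combine tentatively edit several insns.  */

static int ATTRIBUTE_UNUSED
example_grouped_changes (rtx insn, rtx *loc1, rtx new1, rtx *loc2, rtx new2)
{
  int mark = num_validated_changes ();

  validate_change (insn, loc1, new1, 1);	/* Queued, not yet checked.  */

  /* A caller can abandon the attempt before applying it...  */
  if (new2 == 0)
    {
      cancel_changes (mark);
      return 0;
    }

  validate_change (insn, loc2, new2, 1);

  /* ...or validate everything at once; on failure the whole group is
     canceled automatically.  */
  return apply_change_group ();
}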

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
                  ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
                abort ();
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
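
/* Illustrative sketch (not from the original file): propagate a known
   constant value VAL for REG into INSN.  Every occurrence of REG is
   rewritten and the insn is kept only if it still recognizes.  */

static int ATTRIBUTE_UNUSED
example_propagate_constant (rtx reg, HOST_WIDE_INT val, rtx insn)
{
  return validate_replace_rtx (reg, GEN_INT (val), insn);
}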

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         just need to check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
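
/* Sketch (hypothetical caller): fold the value SRC computed into DEST
   by INSN forward into DEST's only use, then delete INSN.  Pass logic
   around this is assumed; delete_insn comes from elsewhere in GCC.  */

static void ATTRIBUTE_UNUSED
example_forward_into_single_use (rtx dest, rtx src, rtx insn)
{
  rtx use_insn;

  if (find_single_use (dest, insn, &use_insn) != 0
      && validate_replace_rtx (dest, src, use_insn))
    delete_insn (insn);
}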
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
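
/* Quick illustration (not in the original): typical rtxes accepted by
   general_operand for SImode.  The register number and address shape
   are made up; MEM validity still depends on the target's
   GO_IF_LEGITIMATE_ADDRESS.  */

static void ATTRIBUTE_UNUSED
example_general_operand_usage (void)
{
  rtx reg = gen_rtx_REG (SImode, FIRST_PSEUDO_REGISTER);  /* a pseudo */
  rtx cst = GEN_INT (42);                                 /* a constant */
  rtx mem = gen_rtx_MEM (SImode, reg);                    /* (mem (reg)) */

  if (general_operand (reg, SImode)
      && general_operand (cst, SImode)
      && general_operand (mem, SImode))
    ;  /* All three shapes are candidates for a "general_operand" match.  */
}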
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
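
/* Shape illustration (sketch, not from the original file): with no
   PUSH_ROUNDING padding on a downward-growing stack, push_operand
   accepts exactly

       (mem:SI (pre_dec:P (reg sp)))

   as built below; the SImode choice is illustrative.  */

static rtx ATTRIBUTE_UNUSED
example_si_push_mem (void)
{
  return gen_rtx_MEM (SImode,
                      gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
}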

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
1850 \f
1851 /* Return 1 if OP is a memory reference
1852 whose address contains no side effects
1853 and remains valid after the addition
1854 of a positive integer less than the
1855 size of the object being referenced.
1856
1857 We assume that the original address is valid and do not check it.
1858
1859 This uses strict_memory_address_p as a subroutine, so
1860 don't use it before reload. */
1861
1862 int
1863 offsettable_memref_p (rtx op)
1864 {
1865 return ((GET_CODE (op) == MEM)
1866 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1867 }
1868
1869 /* Similar, but don't require a strictly valid mem ref:
1870 consider pseudo-regs valid as index or base regs. */
1871
1872 int
1873 offsettable_nonstrict_memref_p (rtx op)
1874 {
1875 return ((GET_CODE (op) == MEM)
1876 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1877 }
1878
1879 /* Return 1 if Y is a memory address which contains no side effects
1880 and would remain valid after the addition of a positive integer
1881 less than the size of that mode.
1882
1883 We assume that the original address is valid and do not check it.
1884 We do check that it is valid for narrower modes.
1885
1886 If STRICTP is nonzero, we require a strictly valid address,
1887 for the sake of use in reload.c. */
1888
1889 int
1890 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1891 {
1892 enum rtx_code ycode = GET_CODE (y);
1893 rtx z;
1894 rtx y1 = y;
1895 rtx *y2;
1896 int (*addressp) (enum machine_mode, rtx) =
1897 (strictp ? strict_memory_address_p : memory_address_p);
1898 unsigned int mode_sz = GET_MODE_SIZE (mode);
1899
1900 if (CONSTANT_ADDRESS_P (y))
1901 return 1;
1902
1903 /* Adjusting an offsettable address involves changing to a narrower mode.
1904 Make sure that's OK. */
1905
1906 if (mode_dependent_address_p (y))
1907 return 0;
1908
1909 /* ??? How much offset does an offsettable BLKmode reference need?
1910 Clearly that depends on the situation in which it's being used.
1911 However, the current situation in which we test 0xffffffff is
1912 less than ideal. Caveat user. */
1913 if (mode_sz == 0)
1914 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1915
1916 /* If the expression contains a constant term,
1917 see if it remains valid when max possible offset is added. */
1918
1919 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1920 {
1921 int good;
1922
1923 y1 = *y2;
1924 *y2 = plus_constant (*y2, mode_sz - 1);
1925 /* Use QImode because an odd displacement may be automatically invalid
1926 for any wider mode. But it should be valid for a single byte. */
1927 good = (*addressp) (QImode, y);
1928
1929 /* In any case, restore old contents of memory. */
1930 *y2 = y1;
1931 return good;
1932 }
1933
1934 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1935 return 0;
1936
1937 /* The offset added here is chosen as the maximum offset that
1938 any instruction could need to add when operating on something
1939 of the specified mode. We assume that if Y and Y+c are
1940 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1941 go inside a LO_SUM here, so we do so as well. */
1942 if (GET_CODE (y) == LO_SUM
1943 && mode != BLKmode
1944 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1945 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1946 plus_constant (XEXP (y, 1), mode_sz - 1));
1947 else
1948 z = plus_constant (y, mode_sz - 1);
1949
1950 /* Use QImode because an odd displacement may be automatically invalid
1951 for any wider mode. But it should be valid for a single byte. */
1952 return (*addressp) (QImode, z);
1953 }
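
/* A worked example: with mode_sz == 4 and Y == (plus (reg)
   (const_int 100)), the constant-term path above temporarily rewrites
   Y to (plus (reg) (const_int 103)) -- the original constant plus
   mode_sz - 1 -- asks the target whether that is a valid QImode
   address, and then restores the original constant.  If the displaced
   address is valid, offsets 0 through 3 are assumed valid as well.  */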
1954
1955 /* Return 1 if ADDR is an address-expression whose effect depends
1956 on the mode of the memory reference it is used in.
1957
1958 Autoincrement addressing is a typical example of mode-dependence
1959 because the amount of the increment depends on the mode. */
1960
1961 int
1962 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1963 {
1964 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1965 return 0;
1966 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1967 win: ATTRIBUTE_UNUSED_LABEL
1968 return 1;
1969 }
1970 \f
1971 /* Like extract_insn, but remember which insn was extracted and don't
1972 extract it again when called repeatedly for the same insn, assuming that
1973 recog_data still contains valid information. This is used primarily by
1974 the gen_attr infrastructure, which often re-extracts the same insn. */
1975 void
1976 extract_insn_cached (rtx insn)
1977 {
1978 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1979 return;
1980 extract_insn (insn);
1981 recog_data.insn = insn;
1982 }
1983 /* Do a cached extract_insn plus constrain_operands, and complain about
1984 failures. Used by insn_attrtab. */
1985 void
1986 extract_constrain_insn_cached (rtx insn)
1987 {
1988 extract_insn_cached (insn);
1989 if (which_alternative == -1
1990 && !constrain_operands (reload_completed))
1991 fatal_insn_not_found (insn);
1992 }
1993 /* Do a cached constrain_operands, reusing a previous successful match. */
1994 int
1995 constrain_operands_cached (int strict)
1996 {
1997 if (which_alternative == -1)
1998 return constrain_operands (strict);
1999 else
2000 return 1;
2001 }
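
/* A usage sketch, mirroring what insn_attrtab-generated code does:
   attribute lookups go through the cached entry points so that
   repeated queries about one insn do the extraction work only once:

	extract_constrain_insn_cached (insn);
	switch (which_alternative)
	  {
	  case 0:
	    ...
	  }

   with the switch standing in for whatever a generated get_attr_*
   function actually tests.  */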
2002 \f
2003 /* Analyze INSN and fill in recog_data. */
2004
2005 void
2006 extract_insn (rtx insn)
2007 {
2008 int i;
2009 int icode;
2010 int noperands;
2011 rtx body = PATTERN (insn);
2012
2013 recog_data.insn = NULL;
2014 recog_data.n_operands = 0;
2015 recog_data.n_alternatives = 0;
2016 recog_data.n_dups = 0;
2017 which_alternative = -1;
2018
2019 switch (GET_CODE (body))
2020 {
2021 case USE:
2022 case CLOBBER:
2023 case ASM_INPUT:
2024 case ADDR_VEC:
2025 case ADDR_DIFF_VEC:
2026 return;
2027
2028 case SET:
2029 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2030 goto asm_insn;
2031 else
2032 goto normal_insn;
2033 case PARALLEL:
2034 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2035 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2036 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2037 goto asm_insn;
2038 else
2039 goto normal_insn;
2040 case ASM_OPERANDS:
2041 asm_insn:
2042 recog_data.n_operands = noperands = asm_noperands (body);
2043 if (noperands >= 0)
2044 {
2045 /* This insn is an `asm' with operands. */
2046
2047 /* expand_asm_operands makes sure there aren't too many operands. */
2048 if (noperands > MAX_RECOG_OPERANDS)
2049 abort ();
2050
2051 /* Now get the operand values and constraints out of the insn. */
2052 decode_asm_operands (body, recog_data.operand,
2053 recog_data.operand_loc,
2054 recog_data.constraints,
2055 recog_data.operand_mode);
2056 if (noperands > 0)
2057 {
2058 const char *p = recog_data.constraints[0];
2059 recog_data.n_alternatives = 1;
2060 while (*p)
2061 recog_data.n_alternatives += (*p++ == ',');
2062 }
2063 break;
2064 }
2065 fatal_insn_not_found (insn);
2066
2067 default:
2068 normal_insn:
2069 /* Ordinary insn: recognize it, get the operands via insn_extract
2070 and get the constraints. */
2071
2072 icode = recog_memoized (insn);
2073 if (icode < 0)
2074 fatal_insn_not_found (insn);
2075
2076 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2077 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2078 recog_data.n_dups = insn_data[icode].n_dups;
2079
2080 insn_extract (insn);
2081
2082 for (i = 0; i < noperands; i++)
2083 {
2084 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2085 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2086 /* A VOIDmode match_operand gets its mode from its real operand. */
2087 if (recog_data.operand_mode[i] == VOIDmode)
2088 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2089 }
2090 }
2091 for (i = 0; i < noperands; i++)
2092 recog_data.operand_type[i]
2093 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2094 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2095 : OP_IN);
2096
2097 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2098 abort ();
2099 }
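
/* A sketch of typical use: after extract_insn, a pass can walk the
   operands and constraints directly.  DUMP is a hypothetical FILE *:

	int i;
	extract_insn (insn);
	for (i = 0; i < recog_data.n_operands; i++)
	  fprintf (dump, "op %d: mode %s, constraint \"%s\"\n",
		   i, GET_MODE_NAME (recog_data.operand_mode[i]),
		   recog_data.constraints[i]);
*/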
2100
2101 /* After calling extract_insn, you can use this function to extract some
2102 information from the constraint strings into a more usable form.
2103 The collected data is stored in recog_op_alt. */
2104 void
2105 preprocess_constraints (void)
2106 {
2107 int i;
2108
2109 for (i = 0; i < recog_data.n_operands; i++)
2110 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2111 * sizeof (struct operand_alternative)));
2112
2113 for (i = 0; i < recog_data.n_operands; i++)
2114 {
2115 int j;
2116 struct operand_alternative *op_alt;
2117 const char *p = recog_data.constraints[i];
2118
2119 op_alt = recog_op_alt[i];
2120
2121 for (j = 0; j < recog_data.n_alternatives; j++)
2122 {
2123 op_alt[j].class = NO_REGS;
2124 op_alt[j].constraint = p;
2125 op_alt[j].matches = -1;
2126 op_alt[j].matched = -1;
2127
2128 if (*p == '\0' || *p == ',')
2129 {
2130 op_alt[j].anything_ok = 1;
2131 continue;
2132 }
2133
2134 for (;;)
2135 {
2136 char c = *p;
2137 if (c == '#')
2138 do
2139 c = *++p;
2140 while (c != ',' && c != '\0');
2141 if (c == ',' || c == '\0')
2142 {
2143 p++;
2144 break;
2145 }
2146
2147 switch (c)
2148 {
2149 case '=': case '+': case '*': case '%':
2150 case 'E': case 'F': case 'G': case 'H':
2151 case 's': case 'i': case 'n':
2152 case 'I': case 'J': case 'K': case 'L':
2153 case 'M': case 'N': case 'O': case 'P':
2154 /* These don't say anything we care about. */
2155 break;
2156
2157 case '?':
2158 op_alt[j].reject += 6;
2159 break;
2160 case '!':
2161 op_alt[j].reject += 600;
2162 break;
2163 case '&':
2164 op_alt[j].earlyclobber = 1;
2165 break;
2166
2167 case '0': case '1': case '2': case '3': case '4':
2168 case '5': case '6': case '7': case '8': case '9':
2169 {
2170 char *end;
2171 op_alt[j].matches = strtoul (p, &end, 10);
2172 recog_op_alt[op_alt[j].matches][j].matched = i;
2173 p = end;
2174 }
2175 continue;
2176
2177 case 'm':
2178 op_alt[j].memory_ok = 1;
2179 break;
2180 case '<':
2181 op_alt[j].decmem_ok = 1;
2182 break;
2183 case '>':
2184 op_alt[j].incmem_ok = 1;
2185 break;
2186 case 'V':
2187 op_alt[j].nonoffmem_ok = 1;
2188 break;
2189 case 'o':
2190 op_alt[j].offmem_ok = 1;
2191 break;
2192 case 'X':
2193 op_alt[j].anything_ok = 1;
2194 break;
2195
2196 case 'p':
2197 op_alt[j].is_address = 1;
2198 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2199 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2200 break;
2201
2202 case 'g': case 'r':
2203 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2204 break;
2205
2206 default:
2207 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2208 {
2209 op_alt[j].memory_ok = 1;
2210 break;
2211 }
2212 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2213 {
2214 op_alt[j].is_address = 1;
2215 op_alt[j].class
2216 = (reg_class_subunion
2217 [(int) op_alt[j].class]
2218 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2219 break;
2220 }
2221
2222 op_alt[j].class
2223 = (reg_class_subunion
2224 [(int) op_alt[j].class]
2225 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2226 break;
2227 }
2228 p += CONSTRAINT_LEN (c, p);
2229 }
2230 }
2231 }
2232 }
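
/* A sketch of typical use: after extract_insn and
   preprocess_constraints, a pass can query an alternative through
   recog_op_alt instead of re-parsing constraint strings.  OPNO and
   ALT are hypothetical operand and alternative indices:

	preprocess_constraints ();
	if (recog_op_alt[opno][alt].memory_ok
	    || recog_op_alt[opno][alt].class != NO_REGS)
	  ...
*/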
2233
2234 /* Check the operands of an insn against the insn's operand constraints
2235 and return 1 if they are valid.
2236 The information about the insn's operands, constraints, operand modes
2237 etc. is obtained from the global variables set up by extract_insn.
2238
2239 WHICH_ALTERNATIVE is set to a number which indicates which
2240 alternative of constraints was matched: 0 for the first alternative,
2241 1 for the next, etc.
2242
2243 In addition, when two operands are required to match
2244 and it happens that the output operand is (reg) while the
2245 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2246 make the output operand look like the input.
2247 This is because the output operand is the one the template will print.
2248
2249 This is used in final, just before printing the assembler code, and by
2250 the routines that determine an insn's attributes.
2251
2252 If STRICT is positive, it means that we have been
2253 called after reload has been completed. In that case, we must
2254 do all checks strictly. If it is zero, it means that we have been called
2255 before reload has completed. In that case, we first try to see if we can
2256 find an alternative that matches strictly. If not, we try again, this
2257 time assuming that reload will fix up the insn. This provides a "best
2258 guess" for the alternative and is used to compute attributes of insns prior
2259 to reload. A negative value of STRICT is used for this internal call. */
2260
2261 struct funny_match
2262 {
2263 int this, other;
2264 };
2265
2266 int
2267 constrain_operands (int strict)
2268 {
2269 const char *constraints[MAX_RECOG_OPERANDS];
2270 int matching_operands[MAX_RECOG_OPERANDS];
2271 int earlyclobber[MAX_RECOG_OPERANDS];
2272 int c;
2273
2274 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2275 int funny_match_index;
2276
2277 which_alternative = 0;
2278 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2279 return 1;
2280
2281 for (c = 0; c < recog_data.n_operands; c++)
2282 {
2283 constraints[c] = recog_data.constraints[c];
2284 matching_operands[c] = -1;
2285 }
2286
2287 do
2288 {
2289 int opno;
2290 int lose = 0;
2291 funny_match_index = 0;
2292
2293 for (opno = 0; opno < recog_data.n_operands; opno++)
2294 {
2295 rtx op = recog_data.operand[opno];
2296 enum machine_mode mode = GET_MODE (op);
2297 const char *p = constraints[opno];
2298 int offset = 0;
2299 int win = 0;
2300 int val;
2301 int len;
2302
2303 earlyclobber[opno] = 0;
2304
2305 /* A unary operator may be accepted by the predicate, but it
2306 is irrelevant for matching constraints. */
2307 if (UNARY_P (op))
2308 op = XEXP (op, 0);
2309
2310 if (GET_CODE (op) == SUBREG)
2311 {
2312 if (GET_CODE (SUBREG_REG (op)) == REG
2313 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2314 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2315 GET_MODE (SUBREG_REG (op)),
2316 SUBREG_BYTE (op),
2317 GET_MODE (op));
2318 op = SUBREG_REG (op);
2319 }
2320
2321 /* An empty constraint or empty alternative
2322 allows anything which matched the pattern. */
2323 if (*p == 0 || *p == ',')
2324 win = 1;
2325
2326 do
2327 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2328 {
2329 case '\0':
2330 len = 0;
2331 break;
2332 case ',':
2333 c = '\0';
2334 break;
2335
2336 case '?': case '!': case '*': case '%':
2337 case '=': case '+':
2338 break;
2339
2340 case '#':
2341 /* Ignore rest of this alternative as far as
2342 constraint checking is concerned. */
2343 do
2344 p++;
2345 while (*p && *p != ',');
2346 len = 0;
2347 break;
2348
2349 case '&':
2350 earlyclobber[opno] = 1;
2351 break;
2352
2353 case '0': case '1': case '2': case '3': case '4':
2354 case '5': case '6': case '7': case '8': case '9':
2355 {
2356 /* This operand must be the same as a previous one.
2357 This kind of constraint is used for instructions such
2358 as add when they take only two operands.
2359
2360 Note that the lower-numbered operand is passed first.
2361
2362 If we are not testing strictly, assume that this
2363 constraint will be satisfied. */
2364
2365 char *end;
2366 int match;
2367
2368 match = strtoul (p, &end, 10);
2369 p = end;
2370
2371 if (strict < 0)
2372 val = 1;
2373 else
2374 {
2375 rtx op1 = recog_data.operand[match];
2376 rtx op2 = recog_data.operand[opno];
2377
2378 /* A unary operator may be accepted by the predicate,
2379 but it is irrelevant for matching constraints. */
2380 if (UNARY_P (op1))
2381 op1 = XEXP (op1, 0);
2382 if (UNARY_P (op2))
2383 op2 = XEXP (op2, 0);
2384
2385 val = operands_match_p (op1, op2);
2386 }
2387
2388 matching_operands[opno] = match;
2389 matching_operands[match] = opno;
2390
2391 if (val != 0)
2392 win = 1;
2393
2394 /* If output is *x and input is *--x, arrange later
2395 to change the output to *--x as well, since the
2396 output op is the one that will be printed. */
2397 if (val == 2 && strict > 0)
2398 {
2399 funny_match[funny_match_index].this = opno;
2400 funny_match[funny_match_index++].other = match;
2401 }
2402 }
2403 len = 0;
2404 break;
2405
2406 case 'p':
2407 /* `p' is used for address operands. When we are called by
2408 gen_reload, no one will have checked that the address is
2409 strictly valid, i.e., that all pseudos requiring hard regs
2410 have gotten them. */
2411 if (strict <= 0
2412 || (strict_memory_address_p (recog_data.operand_mode[opno],
2413 op)))
2414 win = 1;
2415 break;
2416
2417 /* No need to check general_operand again;
2418 it was done in insn-recog.c. */
2419 case 'g':
2420 /* Anything goes unless it is a REG and really has a hard reg
2421 but the hard reg is not in the class GENERAL_REGS. */
2422 if (strict < 0
2423 || GENERAL_REGS == ALL_REGS
2424 || GET_CODE (op) != REG
2425 || (reload_in_progress
2426 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2427 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2428 win = 1;
2429 break;
2430
2431 case 'X':
2432 /* This is used for a MATCH_SCRATCH in the cases when
2433 we don't actually need anything. So anything goes
2434 any time. */
2435 win = 1;
2436 break;
2437
2438 case 'm':
2439 /* Memory operands must be valid, to the extent
2440 required by STRICT. */
2441 if (GET_CODE (op) == MEM)
2442 {
2443 if (strict > 0
2444 && !strict_memory_address_p (GET_MODE (op),
2445 XEXP (op, 0)))
2446 break;
2447 if (strict == 0
2448 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2449 break;
2450 win = 1;
2451 }
2452 /* Before reload, accept what reload can turn into mem. */
2453 else if (strict < 0 && CONSTANT_P (op))
2454 win = 1;
2455 /* During reload, accept a pseudo. */
2456 else if (reload_in_progress && GET_CODE (op) == REG
2457 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2458 win = 1;
2459 break;
2460
2461 case '<':
2462 if (GET_CODE (op) == MEM
2463 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2464 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2465 win = 1;
2466 break;
2467
2468 case '>':
2469 if (GET_CODE (op) == MEM
2470 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2471 || GET_CODE (XEXP (op, 0)) == POST_INC))
2472 win = 1;
2473 break;
2474
2475 case 'E':
2476 case 'F':
2477 if (GET_CODE (op) == CONST_DOUBLE
2478 || (GET_CODE (op) == CONST_VECTOR
2479 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2480 win = 1;
2481 break;
2482
2483 case 'G':
2484 case 'H':
2485 if (GET_CODE (op) == CONST_DOUBLE
2486 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2487 win = 1;
2488 break;
2489
2490 case 's':
2491 if (GET_CODE (op) == CONST_INT
2492 || (GET_CODE (op) == CONST_DOUBLE
2493 && GET_MODE (op) == VOIDmode))
2494 break;
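	    /* Fall through: once explicit integers are excluded, the `s'
	       test is the same CONSTANT_P test as `i'.  */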
2495 case 'i':
2496 if (CONSTANT_P (op))
2497 win = 1;
2498 break;
2499
2500 case 'n':
2501 if (GET_CODE (op) == CONST_INT
2502 || (GET_CODE (op) == CONST_DOUBLE
2503 && GET_MODE (op) == VOIDmode))
2504 win = 1;
2505 break;
2506
2507 case 'I':
2508 case 'J':
2509 case 'K':
2510 case 'L':
2511 case 'M':
2512 case 'N':
2513 case 'O':
2514 case 'P':
2515 if (GET_CODE (op) == CONST_INT
2516 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2517 win = 1;
2518 break;
2519
2520 case 'V':
2521 if (GET_CODE (op) == MEM
2522 && ((strict > 0 && ! offsettable_memref_p (op))
2523 || (strict < 0
2524 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2525 || (reload_in_progress
2526 && !(GET_CODE (op) == REG
2527 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2528 win = 1;
2529 break;
2530
2531 case 'o':
2532 if ((strict > 0 && offsettable_memref_p (op))
2533 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2534 /* Before reload, accept what reload can handle. */
2535 || (strict < 0
2536 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2537 /* During reload, accept a pseudo. */
2538 || (reload_in_progress && GET_CODE (op) == REG
2539 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2540 win = 1;
2541 break;
2542
2543 default:
2544 {
2545 enum reg_class class;
2546
2547 class = (c == 'r'
2548 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2549 if (class != NO_REGS)
2550 {
2551 if (strict < 0
2552 || (strict == 0
2553 && GET_CODE (op) == REG
2554 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2555 || (strict == 0 && GET_CODE (op) == SCRATCH)
2556 || (GET_CODE (op) == REG
2557 && reg_fits_class_p (op, class, offset, mode)))
2558 win = 1;
2559 }
2560 #ifdef EXTRA_CONSTRAINT_STR
2561 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2562 win = 1;
2563
2564 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2565 /* Every memory operand can be reloaded to fit. */
2566 && ((strict < 0 && GET_CODE (op) == MEM)
2567 /* Before reload, accept what reload can turn
2568 into mem. */
2569 || (strict < 0 && CONSTANT_P (op))
2570 /* During reload, accept a pseudo. */
2571 || (reload_in_progress && GET_CODE (op) == REG
2572 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2573 win = 1;
2574 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2575 /* Every address operand can be reloaded to fit. */
2576 && strict < 0)
2577 win = 1;
2578 #endif
2579 break;
2580 }
2581 }
2582 while (p += len, c);
2583
2584 constraints[opno] = p;
2585 /* If this operand did not win somehow,
2586 this alternative loses. */
2587 if (! win)
2588 lose = 1;
2589 }
2590 /* This alternative won; the operands are ok.
2591 Change whichever operands this alternative says to change. */
2592 if (! lose)
2593 {
2594 int opno, eopno;
2595
2596 /* See if any earlyclobber operand conflicts with some other
2597 operand. */
2598
2599 if (strict > 0)
2600 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2601 /* Ignore earlyclobber operands now in memory,
2602 because we would often report failure when we have
2603 two memory operands, one of which was formerly a REG. */
2604 if (earlyclobber[eopno]
2605 && GET_CODE (recog_data.operand[eopno]) == REG)
2606 for (opno = 0; opno < recog_data.n_operands; opno++)
2607 if ((GET_CODE (recog_data.operand[opno]) == MEM
2608 || recog_data.operand_type[opno] != OP_OUT)
2609 && opno != eopno
2610 /* Ignore things like match_operator operands. */
2611 && *recog_data.constraints[opno] != 0
2612 && ! (matching_operands[opno] == eopno
2613 && operands_match_p (recog_data.operand[opno],
2614 recog_data.operand[eopno]))
2615 && ! safe_from_earlyclobber (recog_data.operand[opno],
2616 recog_data.operand[eopno]))
2617 lose = 1;
2618
2619 if (! lose)
2620 {
2621 while (--funny_match_index >= 0)
2622 {
2623 recog_data.operand[funny_match[funny_match_index].other]
2624 = recog_data.operand[funny_match[funny_match_index].this];
2625 }
2626
2627 return 1;
2628 }
2629 }
2630
2631 which_alternative++;
2632 }
2633 while (which_alternative < recog_data.n_alternatives);
2634
2635 which_alternative = -1;
2636 /* If we are about to reject this, but we are not to test strictly,
2637 try a very loose test. Only return failure if it fails also. */
2638 if (strict == 0)
2639 return constrain_operands (-1);
2640 else
2641 return 0;
2642 }
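
/* A sketch of the typical calling sequence: pair this with
   extract_insn, passing reload_completed so that checking is strict
   exactly when reload has finished:

	extract_insn (insn);
	if (! constrain_operands (reload_completed))
	  fatal_insn_not_found (insn);

   which is the same pattern extract_constrain_insn_cached uses.  */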
2643
2644 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2645 is a hard reg in class CLASS when its regno is offset by OFFSET
2646 and changed to mode MODE.
2647 If OPERAND occupies multiple hard regs, all of them must be in CLASS. */
2648
2649 int
2650 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2651 enum machine_mode mode)
2652 {
2653 int regno = REGNO (operand);
2654 if (regno < FIRST_PSEUDO_REGISTER
2655 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2656 regno + offset))
2657 {
2658 int sr;
2659 regno += offset;
2660 for (sr = hard_regno_nregs[regno][mode] - 1;
2661 sr > 0; sr--)
2662 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2663 regno + sr))
2664 break;
2665 return sr == 0;
2666 }
2667
2668 return 0;
2669 }
2670 \f
2671 /* Split a single instruction. Helper function for split_all_insns and
2672 split_all_insns_noflow. Return the last insn in the sequence if
2673 successful, or NULL if unsuccessful. */
2674
2675 static rtx
2676 split_insn (rtx insn)
2677 {
2678 /* Split insns here to get max fine-grain parallelism. */
2679 rtx first = PREV_INSN (insn);
2680 rtx last = try_split (PATTERN (insn), insn, 1);
2681
2682 if (last == insn)
2683 return NULL_RTX;
2684
2685 /* try_split returns the NOTE that INSN became. */
2686 PUT_CODE (insn, NOTE);
2687 NOTE_SOURCE_FILE (insn) = 0;
2688 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2689
2690 /* ??? Coddle to md files that generate subregs in post-reload
2691 splitters instead of computing the proper hard register. */
2692 if (reload_completed && first != last)
2693 {
2694 first = NEXT_INSN (first);
2695 for (;;)
2696 {
2697 if (INSN_P (first))
2698 cleanup_subreg_operands (first);
2699 if (first == last)
2700 break;
2701 first = NEXT_INSN (first);
2702 }
2703 }
2704 return last;
2705 }
2706
2707 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2708
2709 void
2710 split_all_insns (int upd_life)
2711 {
2712 sbitmap blocks;
2713 bool changed;
2714 basic_block bb;
2715
2716 blocks = sbitmap_alloc (last_basic_block);
2717 sbitmap_zero (blocks);
2718 changed = false;
2719
2720 FOR_EACH_BB_REVERSE (bb)
2721 {
2722 rtx insn, next;
2723 bool finish = false;
2724
2725 for (insn = BB_HEAD (bb); !finish ; insn = next)
2726 {
2727 /* Can't use `next_real_insn', because that might go across
2728 CODE_LABELs and short-circuit basic blocks. */
2729 next = NEXT_INSN (insn);
2730 finish = (insn == BB_END (bb));
2731 if (INSN_P (insn))
2732 {
2733 rtx set = single_set (insn);
2734
2735 /* Don't split no-op move insns. These should silently
2736 disappear later in final. Splitting such insns would
2737 break the code that handles REG_NO_CONFLICT blocks. */
2738 if (set && set_noop_p (set))
2739 {
2740 /* Nops get in the way while scheduling, so delete them
2741 now if register allocation has already been done. It
2742 is too risky to try to do this before register
2743 allocation, and there are unlikely to be very many
2744 nops then anyways. */
2745 if (reload_completed)
2746 {
2747 /* If the no-op set has a REG_UNUSED note, we need
2748 to update liveness information. */
2749 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2750 {
2751 SET_BIT (blocks, bb->index);
2752 changed = true;
2753 }
2754 /* ??? Is life info affected by deleting edges? */
2755 delete_insn_and_edges (insn);
2756 }
2757 }
2758 else
2759 {
2760 rtx last = split_insn (insn);
2761 if (last)
2762 {
2763 /* The split sequence may include a barrier, but the
2764 BB boundary we are interested in will be set to
2765 the previous one. */
2766
2767 while (GET_CODE (last) == BARRIER)
2768 last = PREV_INSN (last);
2769 SET_BIT (blocks, bb->index);
2770 changed = true;
2771 }
2772 }
2773 }
2774 }
2775 }
2776
2777 if (changed)
2778 {
2779 int old_last_basic_block = last_basic_block;
2780
2781 find_many_sub_basic_blocks (blocks);
2782
2783 if (old_last_basic_block != last_basic_block && upd_life)
2784 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2785 }
2786
2787 if (changed && upd_life)
2788 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2789 PROP_DEATH_NOTES);
2790
2791 #ifdef ENABLE_CHECKING
2792 verify_flow_info ();
2793 #endif
2794
2795 sbitmap_free (blocks);
2796 }
2797
2798 /* Same as split_all_insns, but do not expect CFG to be available.
2799 Used by machine dependent reorg passes. */
2800
2801 void
2802 split_all_insns_noflow (void)
2803 {
2804 rtx next, insn;
2805
2806 for (insn = get_insns (); insn; insn = next)
2807 {
2808 next = NEXT_INSN (insn);
2809 if (INSN_P (insn))
2810 {
2811 /* Don't split no-op move insns. These should silently
2812 disappear later in final. Splitting such insns would
2813 break the code that handles REG_NO_CONFLICT blocks. */
2814 rtx set = single_set (insn);
2815 if (set && set_noop_p (set))
2816 {
2817 /* Nops get in the way while scheduling, so delete them
2818 now if register allocation has already been done. It
2819 is too risky to try to do this before register
2820 allocation, and there are unlikely to be very many
2821 nops then anyways.
2822
2823 ??? Should we use delete_insn when the CFG isn't valid? */
2824 if (reload_completed)
2825 delete_insn_and_edges (insn);
2826 }
2827 else
2828 split_insn (insn);
2829 }
2830 }
2831 }
2832 \f
2833 #ifdef HAVE_peephole2
2834 struct peep2_insn_data
2835 {
2836 rtx insn;
2837 regset live_before;
2838 };
2839
2840 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2841 static int peep2_current;
2842
2843 /* A non-insn marker indicating the last insn of the block.
2844 The live_before regset for this element is correct, indicating
2845 global_live_at_end for the block. */
2846 #define PEEP2_EOB pc_rtx
2847
2848 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2849 does not exist. Used by the recognizer to find the next insn to match
2850 in a multi-insn pattern. */
2851
2852 rtx
2853 peep2_next_insn (int n)
2854 {
2855 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2856 abort ();
2857
2858 n += peep2_current;
2859 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2860 n -= MAX_INSNS_PER_PEEP2 + 1;
2861
2862 if (peep2_insn_data[n].insn == PEEP2_EOB)
2863 return NULL_RTX;
2864 return peep2_insn_data[n].insn;
2865 }
2866
2867 /* Return true if REGNO is dead before the Nth non-note insn
2868 after `current'. */
2869
2870 int
2871 peep2_regno_dead_p (int ofs, int regno)
2872 {
2873 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2874 abort ();
2875
2876 ofs += peep2_current;
2877 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2878 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2879
2880 if (peep2_insn_data[ofs].insn == NULL_RTX)
2881 abort ();
2882
2883 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2884 }
2885
2886 /* Similarly for a REG. */
2887
2888 int
2889 peep2_reg_dead_p (int ofs, rtx reg)
2890 {
2891 int regno, n;
2892
2893 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2894 abort ();
2895
2896 ofs += peep2_current;
2897 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2898 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2899
2900 if (peep2_insn_data[ofs].insn == NULL_RTX)
2901 abort ();
2902
2903 regno = REGNO (reg);
2904 n = hard_regno_nregs[regno][GET_MODE (reg)];
2905 while (--n >= 0)
2906 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2907 return 0;
2908 return 1;
2909 }
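
/* A sketch of typical use: these predicates are meant for the C
   condition of a define_peephole2.  A two-insn pattern that may fire
   only when a register dies afterwards would test

	peep2_reg_dead_p (2, operands[0])

   i.e. operands[0] is dead before the insn that follows the two
   matched insns.  */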
2910
2911 /* Try to find a hard register of mode MODE, matching the register class in
2912 CLASS_STR, which is available from the beginning of the FROM'th insn to
2913 the end of the TO'th insn in the current peephole window; FROM and TO
2914 are offsets from the current insn, as for peep2_next_insn.
2915 Registers that already have bits set in REG_SET will not be considered.
2916
2917 If an appropriate register is available, it will be returned and the
2918 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2919 returned. */
2921
2922 rtx
2923 peep2_find_free_register (int from, int to, const char *class_str,
2924 enum machine_mode mode, HARD_REG_SET *reg_set)
2925 {
2926 static int search_ofs;
2927 enum reg_class class;
2928 HARD_REG_SET live;
2929 int i;
2930
2931 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2932 abort ();
2933
2934 from += peep2_current;
2935 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2936 from -= MAX_INSNS_PER_PEEP2 + 1;
2937 to += peep2_current;
2938 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2939 to -= MAX_INSNS_PER_PEEP2 + 1;
2940
2941 if (peep2_insn_data[from].insn == NULL_RTX)
2942 abort ();
2943 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2944
2945 while (from != to)
2946 {
2947 HARD_REG_SET this_live;
2948
2949 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2950 from = 0;
2951 if (peep2_insn_data[from].insn == NULL_RTX)
2952 abort ();
2953 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2954 IOR_HARD_REG_SET (live, this_live);
2955 }
2956
2957 class = (class_str[0] == 'r' ? GENERAL_REGS
2958 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2959
2960 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2961 {
2962 int raw_regno, regno, success, j;
2963
2964 /* Distribute the free registers as much as possible. */
2965 raw_regno = search_ofs + i;
2966 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2967 raw_regno -= FIRST_PSEUDO_REGISTER;
2968 #ifdef REG_ALLOC_ORDER
2969 regno = reg_alloc_order[raw_regno];
2970 #else
2971 regno = raw_regno;
2972 #endif
2973
2974 /* Don't allocate fixed registers. */
2975 if (fixed_regs[regno])
2976 continue;
2977 /* Make sure the register is of the right class. */
2978 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2979 continue;
2980 /* And can support the mode we need. */
2981 if (! HARD_REGNO_MODE_OK (regno, mode))
2982 continue;
2983 /* And that we don't create an extra save/restore. */
2984 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2985 continue;
2986 /* And we don't clobber traceback for noreturn functions. */
2987 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2988 && (! reload_completed || frame_pointer_needed))
2989 continue;
2990
2991 success = 1;
2992 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2993 {
2994 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2995 || TEST_HARD_REG_BIT (live, regno + j))
2996 {
2997 success = 0;
2998 break;
2999 }
3000 }
3001 if (success)
3002 {
3003 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3004 SET_HARD_REG_BIT (*reg_set, regno + j);
3005
3006 /* Start the next search with the next register. */
3007 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3008 raw_regno = 0;
3009 search_ofs = raw_regno;
3010
3011 return gen_rtx_REG (mode, regno);
3012 }
3013 }
3014
3015 search_ofs = 0;
3016 return NULL_RTX;
3017 }
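
/* A sketch of how this is used: (match_scratch ...) operands in
   define_peephole2 patterns are satisfied by calls of roughly this
   shape in the generated recognizer:

	HARD_REG_SET used;
	CLEAR_HARD_REG_SET (used);
	operands[2] = peep2_find_free_register (0, 1, "r", SImode, &used);

   with a failed match when NULL_RTX comes back; the operand number,
   window bounds and class "r" above are all illustrative.  */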
3018
3019 /* Perform the peephole2 optimization pass. */
3020
3021 void
3022 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
3023 {
3024 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3025 rtx insn, prev;
3026 regset live;
3027 int i;
3028 basic_block bb;
3029 #ifdef HAVE_conditional_execution
3030 sbitmap blocks;
3031 bool changed;
3032 #endif
3033 bool do_cleanup_cfg = false;
3034 bool do_rebuild_jump_labels = false;
3035
3036 /* Initialize the regsets we're going to use. */
3037 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3038 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3039 live = INITIALIZE_REG_SET (rs_heads[i]);
3040
3041 #ifdef HAVE_conditional_execution
3042 blocks = sbitmap_alloc (last_basic_block);
3043 sbitmap_zero (blocks);
3044 changed = false;
3045 #else
3046 count_or_remove_death_notes (NULL, 1);
3047 #endif
3048
3049 FOR_EACH_BB_REVERSE (bb)
3050 {
3051 struct propagate_block_info *pbi;
3052
3053 /* Indicate that all slots except the last hold invalid data. */
3054 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3055 peep2_insn_data[i].insn = NULL_RTX;
3056
3057 /* Indicate that the last slot contains live_after data. */
3058 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3059 peep2_current = MAX_INSNS_PER_PEEP2;
3060
3061 /* Start up propagation. */
3062 COPY_REG_SET (live, bb->global_live_at_end);
3063 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3064
3065 #ifdef HAVE_conditional_execution
3066 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3067 #else
3068 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3069 #endif
3070
3071 for (insn = BB_END (bb); ; insn = prev)
3072 {
3073 prev = PREV_INSN (insn);
3074 if (INSN_P (insn))
3075 {
3076 rtx try, before_try, x;
3077 int match_len;
3078 rtx note;
3079 bool was_call = false;
3080
3081 /* Record this insn. */
3082 if (--peep2_current < 0)
3083 peep2_current = MAX_INSNS_PER_PEEP2;
3084 peep2_insn_data[peep2_current].insn = insn;
3085 propagate_one_insn (pbi, insn);
3086 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3087
3088 /* Match the peephole. */
3089 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3090 if (try != NULL)
3091 {
3092 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3093 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3094 cfg-related call notes. */
3095 for (i = 0; i <= match_len; ++i)
3096 {
3097 int j;
3098 rtx old_insn, new_insn, note;
3099
3100 j = i + peep2_current;
3101 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3102 j -= MAX_INSNS_PER_PEEP2 + 1;
3103 old_insn = peep2_insn_data[j].insn;
3104 if (GET_CODE (old_insn) != CALL_INSN)
3105 continue;
3106 was_call = true;
3107
3108 new_insn = try;
3109 while (new_insn != NULL_RTX)
3110 {
3111 if (GET_CODE (new_insn) == CALL_INSN)
3112 break;
3113 new_insn = NEXT_INSN (new_insn);
3114 }
3115
3116 if (new_insn == NULL_RTX)
3117 abort ();
3118
3119 CALL_INSN_FUNCTION_USAGE (new_insn)
3120 = CALL_INSN_FUNCTION_USAGE (old_insn);
3121
3122 for (note = REG_NOTES (old_insn);
3123 note;
3124 note = XEXP (note, 1))
3125 switch (REG_NOTE_KIND (note))
3126 {
3127 case REG_NORETURN:
3128 case REG_SETJMP:
3129 case REG_ALWAYS_RETURN:
3130 REG_NOTES (new_insn)
3131 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3132 XEXP (note, 0),
3133 REG_NOTES (new_insn));
3134 default:
3135 /* Discard all other reg notes. */
3136 break;
3137 }
3138
3139 /* Croak if there is another call in the sequence. */
3140 while (++i <= match_len)
3141 {
3142 j = i + peep2_current;
3143 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3144 j -= MAX_INSNS_PER_PEEP2 + 1;
3145 old_insn = peep2_insn_data[j].insn;
3146 if (GET_CODE (old_insn) == CALL_INSN)
3147 abort ();
3148 }
3149 break;
3150 }
3151
3152 i = match_len + peep2_current;
3153 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3154 i -= MAX_INSNS_PER_PEEP2 + 1;
3155
3156 note = find_reg_note (peep2_insn_data[i].insn,
3157 REG_EH_REGION, NULL_RTX);
3158
3159 /* Replace the old sequence with the new. */
3160 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3161 INSN_LOCATOR (peep2_insn_data[i].insn));
3162 before_try = PREV_INSN (insn);
3163 delete_insn_chain (insn, peep2_insn_data[i].insn);
3164
3165 /* Re-insert the EH_REGION notes. */
3166 if (note || (was_call && nonlocal_goto_handler_labels))
3167 {
3168 edge eh_edge;
3169
3170 for (eh_edge = bb->succ; eh_edge
3171 ; eh_edge = eh_edge->succ_next)
3172 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3173 break;
3174
3175 for (x = try ; x != before_try ; x = PREV_INSN (x))
3176 if (GET_CODE (x) == CALL_INSN
3177 || (flag_non_call_exceptions
3178 && may_trap_p (PATTERN (x))
3179 && !find_reg_note (x, REG_EH_REGION, NULL)))
3180 {
3181 if (note)
3182 REG_NOTES (x)
3183 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3184 XEXP (note, 0),
3185 REG_NOTES (x));
3186
3187 if (x != BB_END (bb) && eh_edge)
3188 {
3189 edge nfte, nehe;
3190 int flags;
3191
3192 nfte = split_block (bb, x);
3193 flags = (eh_edge->flags
3194 & (EDGE_EH | EDGE_ABNORMAL));
3195 if (GET_CODE (x) == CALL_INSN)
3196 flags |= EDGE_ABNORMAL_CALL;
3197 nehe = make_edge (nfte->src, eh_edge->dest,
3198 flags);
3199
3200 nehe->probability = eh_edge->probability;
3201 nfte->probability
3202 = REG_BR_PROB_BASE - nehe->probability;
3203
3204 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3205 #ifdef HAVE_conditional_execution
3206 SET_BIT (blocks, nfte->dest->index);
3207 changed = true;
3208 #endif
3209 bb = nfte->src;
3210 eh_edge = nehe;
3211 }
3212 }
3213
3214 /* The new sequence may have turned a possibly trapping insn
3215 into a non-trapping one. Zap any dummy outgoing edges. */
3216 do_cleanup_cfg |= purge_dead_edges (bb);
3217 }
3218
3219 #ifdef HAVE_conditional_execution
3220 /* With conditional execution, we cannot back up the
3221 live information so easily, since the conditional
3222 death data structures are not so self-contained.
3223 So record that we've made a modification to this
3224 block and update life information at the end. */
3225 SET_BIT (blocks, bb->index);
3226 changed = true;
3227
3228 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3229 peep2_insn_data[i].insn = NULL_RTX;
3230 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3231 #else
3232 /* Back up lifetime information past the end of the
3233 newly created sequence. */
3234 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3235 i = 0;
3236 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3237
3238 /* Update life information for the new sequence. */
3239 x = try;
3240 do
3241 {
3242 if (INSN_P (x))
3243 {
3244 if (--i < 0)
3245 i = MAX_INSNS_PER_PEEP2;
3246 peep2_insn_data[i].insn = x;
3247 propagate_one_insn (pbi, x);
3248 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3249 }
3250 x = PREV_INSN (x);
3251 }
3252 while (x != prev);
3253
3254 /* ??? Should verify that LIVE now matches what we
3255 had before the new sequence. */
3256
3257 peep2_current = i;
3258 #endif
3259
3260 /* If we generated a jump instruction, it won't have
3261 JUMP_LABEL set. Recompute after we're done. */
3262 for (x = try; x != before_try; x = PREV_INSN (x))
3263 if (GET_CODE (x) == JUMP_INSN)
3264 {
3265 do_rebuild_jump_labels = true;
3266 break;
3267 }
3268 }
3269 }
3270
3271 if (insn == BB_HEAD (bb))
3272 break;
3273 }
3274
3275 free_propagate_block_info (pbi);
3276 }
3277
3278 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3279 FREE_REG_SET (peep2_insn_data[i].live_before);
3280 FREE_REG_SET (live);
3281
3282 if (do_rebuild_jump_labels)
3283 rebuild_jump_labels (get_insns ());
3284
3285 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3286 we've changed global life since exception handlers are no longer
3287 reachable. */
3288 if (do_cleanup_cfg)
3289 {
3290 cleanup_cfg (0);
3291 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3292 }
3293 #ifdef HAVE_conditional_execution
3294 else
3295 {
3296 count_or_remove_death_notes (blocks, 1);
3297 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3298 }
3299 sbitmap_free (blocks);
3300 #endif
3301 }
3302 #endif /* HAVE_peephole2 */
3303
3304 /* Common predicates for use with define_bypass. */
3305
3306 /* True if the dependency between OUT_INSN and IN_INSN is on the data
3307 stored, not on the address operand(s) of the store. IN_INSN must be
3308 a single_set. OUT_INSN must be either a single_set or a PARALLEL with
3309 SETs inside. */
3310
3311 int
3312 store_data_bypass_p (rtx out_insn, rtx in_insn)
3313 {
3314 rtx out_set, in_set;
3315
3316 in_set = single_set (in_insn);
3317 if (! in_set)
3318 abort ();
3319
3320 if (GET_CODE (SET_DEST (in_set)) != MEM)
3321 return false;
3322
3323 out_set = single_set (out_insn);
3324 if (out_set)
3325 {
3326 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3327 return false;
3328 }
3329 else
3330 {
3331 rtx out_pat;
3332 int i;
3333
3334 out_pat = PATTERN (out_insn);
3335 if (GET_CODE (out_pat) != PARALLEL)
3336 abort ();
3337
3338 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3339 {
3340 rtx exp = XVECEXP (out_pat, 0, i);
3341
3342 if (GET_CODE (exp) == CLOBBER)
3343 continue;
3344
3345 if (GET_CODE (exp) != SET)
3346 abort ();
3347
3348 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3349 return false;
3350 }
3351 }
3352
3353 return true;
3354 }
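
/* A sketch of typical use: a scheduler description names this
   predicate as the guard of a define_bypass, e.g.

	(define_bypass 1 "hypo_load" "hypo_store" "store_data_bypass_p")

   so that the reduced latency applies only when the store uses the
   result as data rather than as part of its address.  The reservation
   names "hypo_load" and "hypo_store" are invented.  */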
3355
3356 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3357 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a
3358 single set or a multiple set; IN_INSN should be a single_set for correctness,
3359 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3360
3361 int
3362 if_test_bypass_p (rtx out_insn, rtx in_insn)
3363 {
3364 rtx out_set, in_set;
3365
3366 in_set = single_set (in_insn);
3367 if (! in_set)
3368 {
3369 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3370 return false;
3371 abort ();
3372 }
3373
3374 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3375 return false;
3376 in_set = SET_SRC (in_set);
3377
3378 out_set = single_set (out_insn);
3379 if (out_set)
3380 {
3381 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3382 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3383 return false;
3384 }
3385 else
3386 {
3387 rtx out_pat;
3388 int i;
3389
3390 out_pat = PATTERN (out_insn);
3391 if (GET_CODE (out_pat) != PARALLEL)
3392 abort ();
3393
3394 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3395 {
3396 rtx exp = XVECEXP (out_pat, 0, i);
3397
3398 if (GET_CODE (exp) == CLOBBER)
3399 continue;
3400
3401 if (GET_CODE (exp) != SET)
3402 abort ();
3403
3404 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3405 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3406 return false;
3407 }
3408 }
3409
3410 return true;
3411 }