re PR pch/13689 (GGC PCH breaks aliasing)
[gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2    Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "toplev.h"
39 #include "basic-block.h"
40 #include "output.h"
41 #include "reload.h"
42
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
46 #else
47 #define STACK_PUSH_CODE PRE_INC
48 #endif
49 #endif
50
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
54 #else
55 #define STACK_POP_CODE POST_DEC
56 #endif
57 #endif
58
59 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
60 static rtx *find_single_use_1 (rtx, rtx *);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
63
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
69
70 init_recog and init_recog_no_volatile are responsible for setting this. */
71
72 int volatile_ok;
73
74 struct recog_data recog_data;
75
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
82
83 int which_alternative;
84
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
88
89 int reload_completed;
90
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
93
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
97
98 void
99 init_recog_no_volatile (void)
100 {
101 volatile_ok = 0;
102 }
103
104 void
105 init_recog (void)
106 {
107 volatile_ok = 1;
108 }
109
110 /* Try recognizing the instruction INSN,
111 and return the code number that results.
112 Remember the code so that repeated calls do not
113 need to spend the time for actual rerecognition.
114
115 This function is the normal interface to instruction recognition.
116 The automatically-generated function `recog' is normally called
117 through this one. (The only exception is in combine.c.) */
118
119 int
120 recog_memoized_1 (rtx insn)
121 {
122 if (INSN_CODE (insn) < 0)
123 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
124 return INSN_CODE (insn);
125 }
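
/* Usage sketch (illustrative, not part of the original file): most
   callers reach this through the recog_memoized wrapper declared in
   recog.h.  The hypothetical helper below simply asks whether INSN
   matches some pattern in the machine description.  */

static int
example_insn_is_recognizable (rtx insn)
{
  /* A negative code means no pattern matched (e.g. an asm).  */
  return recog_memoized_1 (insn) >= 0;
}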
126 \f
127 /* Check that X is an insn-body for an `asm' with operands
128 and that the operands mentioned in it are legitimate. */
129
130 int
131 check_asm_operands (rtx x)
132 {
133 int noperands;
134 rtx *operands;
135 const char **constraints;
136 int i;
137
138 /* Post-reload, be more strict with things. */
139 if (reload_completed)
140 {
141 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
142 extract_insn (make_insn_raw (x));
143 constrain_operands (1);
144 return which_alternative >= 0;
145 }
146
147 noperands = asm_noperands (x);
148 if (noperands < 0)
149 return 0;
150 if (noperands == 0)
151 return 1;
152
153 operands = alloca (noperands * sizeof (rtx));
154 constraints = alloca (noperands * sizeof (char *));
155
156 decode_asm_operands (x, operands, NULL, constraints, NULL);
157
158 for (i = 0; i < noperands; i++)
159 {
160 const char *c = constraints[i];
161 if (c[0] == '%')
162 c++;
163 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
164 c = constraints[c[0] - '0'];
165
166 if (! asm_operand_ok (operands[i], c))
167 return 0;
168 }
169
170 return 1;
171 }
172 \f
173 /* Static data for the next two routines. */
174
175 typedef struct change_t
176 {
177 rtx object;
178 int old_code;
179 rtx *loc;
180 rtx old;
181 } change_t;
182
183 static change_t *changes;
184 static int changes_allocated;
185
186 static int num_changes = 0;
187
188 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
189 at which NEW will be placed. If OBJECT is zero, no validation is done,
190 the change is simply made.
191
192 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
193 will be called with the address and mode as parameters. If OBJECT is
194 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 the change in place.
196
197 IN_GROUP is nonzero if this is part of a group of changes that must be
198 performed as a group. In that case, the changes will be stored. The
199 function `apply_change_group' will validate and apply the changes.
200
201 If IN_GROUP is zero, this is a single change. Try to recognize the insn
202 or validate the memory reference with the change applied. If the result
203 is not valid for the machine, suppress the change and return zero.
204 Otherwise, perform the change and return 1. */
205
206 int
207 validate_change (rtx object, rtx *loc, rtx new, int in_group)
208 {
209 rtx old = *loc;
210
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
213
214 if (in_group == 0 && num_changes != 0)
215 abort ();
216
217 *loc = new;
218
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
221 {
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
228
229 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
230 }
231
232 changes[num_changes].object = object;
233 changes[num_changes].loc = loc;
234 changes[num_changes].old = old;
235
236 if (object && GET_CODE (object) != MEM)
237 {
238 /* Set INSN_CODE to force rerecognition of insn. Save old code in
239 case invalid. */
240 changes[num_changes].old_code = INSN_CODE (object);
241 INSN_CODE (object) = -1;
242 }
243
244 num_changes++;
245
246 /* If we are making a group of changes, return 1. Otherwise, validate the
247 change group we made. */
248
249 if (in_group)
250 return 1;
251 else
252 return apply_change_group ();
253 }
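
/* Usage sketch (hypothetical helper, not in the original file): the
   non-grouped mode.  With IN_GROUP == 0, validate_change applies the
   replacement, re-recognizes INSN, and backs the change out again by
   itself if the result is not a valid insn.  */

static int
example_try_replace_operand (rtx insn, rtx *loc, rtx new_rtx)
{
  /* Returns 1 and keeps the change if INSN still matches a pattern;
     returns 0 and restores *LOC otherwise.  */
  return validate_change (insn, loc, new_rtx, 0);
}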
254
255 /* This subroutine of apply_change_group verifies whether the changes to INSN
256 were valid; i.e. whether INSN can still be recognized. */
257
258 int
259 insn_invalid_p (rtx insn)
260 {
261 rtx pat = PATTERN (insn);
262 int num_clobbers = 0;
263 /* If we are before reload and the pattern is a SET, see if we can add
264 clobbers. */
265 int icode = recog (pat, insn,
266 (GET_CODE (pat) == SET
267 && ! reload_completed && ! reload_in_progress)
268 ? &num_clobbers : 0);
269 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
270
271
272   /* If this is an asm and the operands aren't legal, then fail. Likewise if
273      this is not an asm and the insn wasn't recognized. */
274 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
275 || (!is_asm && icode < 0))
276 return 1;
277
278 /* If we have to add CLOBBERs, fail if we have to add ones that reference
279 hard registers since our callers can't know if they are live or not.
280 Otherwise, add them. */
281 if (num_clobbers > 0)
282 {
283 rtx newpat;
284
285 if (added_clobbers_hard_reg_p (icode))
286 return 1;
287
288 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
289 XVECEXP (newpat, 0, 0) = pat;
290 add_clobbers (newpat, icode);
291 PATTERN (insn) = pat = newpat;
292 }
293
294 /* After reload, verify that all constraints are satisfied. */
295 if (reload_completed)
296 {
297 extract_insn (insn);
298
299 if (! constrain_operands (1))
300 return 1;
301 }
302
303 INSN_CODE (insn) = icode;
304 return 0;
305 }
306
307 /* Return number of changes made and not validated yet. */
308 int
309 num_changes_pending (void)
310 {
311 return num_changes;
312 }
313
314 /* Apply a group of changes previously issued with `validate_change'.
315 Return 1 if all changes are valid, zero otherwise. */
316
317 int
318 apply_change_group (void)
319 {
320 int i;
321 rtx last_validated = NULL_RTX;
322
323 /* The changes have been applied and all INSN_CODEs have been reset to force
324 rerecognition.
325
326 The changes are valid if we aren't given an object, or if we are
327      given a MEM and it still is a valid address, or if this is an insn
328 and it is recognized. In the latter case, if reload has completed,
329 we also require that the operands meet the constraints for
330 the insn. */
331
332 for (i = 0; i < num_changes; i++)
333 {
334 rtx object = changes[i].object;
335
336 /* If there is no object to test or if it is the same as the one we
337 already tested, ignore it. */
338 if (object == 0 || object == last_validated)
339 continue;
340
341 if (GET_CODE (object) == MEM)
342 {
343 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
344 break;
345 }
346 else if (insn_invalid_p (object))
347 {
348 rtx pat = PATTERN (object);
349
350 /* Perhaps we couldn't recognize the insn because there were
351 extra CLOBBERs at the end. If so, try to re-recognize
352 without the last CLOBBER (later iterations will cause each of
353 them to be eliminated, in turn). But don't do this if we
354	     have an ASM_OPERANDS. */
355 if (GET_CODE (pat) == PARALLEL
356 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
357 && asm_noperands (PATTERN (object)) < 0)
358 {
359 rtx newpat;
360
361 if (XVECLEN (pat, 0) == 2)
362 newpat = XVECEXP (pat, 0, 0);
363 else
364 {
365 int j;
366
367 newpat
368 = gen_rtx_PARALLEL (VOIDmode,
369 rtvec_alloc (XVECLEN (pat, 0) - 1));
370 for (j = 0; j < XVECLEN (newpat, 0); j++)
371 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
372 }
373
374 /* Add a new change to this group to replace the pattern
375 with this new pattern. Then consider this change
376 as having succeeded. The change we added will
377 cause the entire call to fail if things remain invalid.
378
379 Note that this can lose if a later change than the one
380 we are processing specified &XVECEXP (PATTERN (object), 0, X)
381 but this shouldn't occur. */
382
383 validate_change (object, &PATTERN (object), newpat, 1);
384 continue;
385 }
386 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
387 /* If this insn is a CLOBBER or USE, it is always valid, but is
388 never recognized. */
389 continue;
390 else
391 break;
392 }
393 last_validated = object;
394 }
395
396 if (i == num_changes)
397 {
398 basic_block bb;
399
400 for (i = 0; i < num_changes; i++)
401 if (changes[i].object
402 && INSN_P (changes[i].object)
403 && (bb = BLOCK_FOR_INSN (changes[i].object)))
404 bb->flags |= BB_DIRTY;
405
406 num_changes = 0;
407 return 1;
408 }
409 else
410 {
411 cancel_changes (0);
412 return 0;
413 }
414 }
415
416 /* Return the number of changes so far in the current group. */
417
418 int
419 num_validated_changes (void)
420 {
421 return num_changes;
422 }
423
424 /* Retract the changes numbered NUM and up. */
425
426 void
427 cancel_changes (int num)
428 {
429 int i;
430
431   /* Back out all the changes. Do this in the reverse of the order in which
432 they were made. */
433 for (i = num_changes - 1; i >= num; i--)
434 {
435 *changes[i].loc = changes[i].old;
436 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
437 INSN_CODE (changes[i].object) = changes[i].old_code;
438 }
439 num_changes = num;
440 }
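
/* Usage sketch (hypothetical helper): the grouped protocol tying the
   routines above together.  Changes are queued with IN_GROUP == 1, a
   checkpoint is taken with num_validated_changes, and either
   apply_change_group validates the whole group (undoing it itself on
   failure) or cancel_changes rolls back by hand.  */

static int
example_swap_operands (rtx insn, rtx *loc0, rtx *loc1)
{
  int checkpoint = num_validated_changes ();
  rtx op0 = *loc0, op1 = *loc1;

  validate_change (insn, loc0, op1, 1);

  /* A purely illustrative bail-out, showing how queued changes are
     unwound before the group has been applied.  */
  if (side_effects_p (op0) || side_effects_p (op1))
    {
      cancel_changes (checkpoint);
      return 0;
    }

  validate_change (insn, loc1, op0, 1);

  /* On failure apply_change_group cancels everything itself, so no
     explicit cancel_changes call is needed here.  */
  return apply_change_group ();
}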
441
442 /* Replace every occurrence of FROM in X with TO. Mark each change with
443 validate_change passing OBJECT. */
444
445 static void
446 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
447 {
448 int i, j;
449 const char *fmt;
450 rtx x = *loc;
451 enum rtx_code code;
452 enum machine_mode op0_mode = VOIDmode;
453 int prev_changes = num_changes;
454 rtx new;
455
456 if (!x)
457 return;
458
459 code = GET_CODE (x);
460 fmt = GET_RTX_FORMAT (code);
461 if (fmt[0] == 'e')
462 op0_mode = GET_MODE (XEXP (x, 0));
463
464 /* X matches FROM if it is the same rtx or they are both referring to the
465 same register in the same mode. Avoid calling rtx_equal_p unless the
466 operands look similar. */
467
468 if (x == from
469 || (GET_CODE (x) == REG && GET_CODE (from) == REG
470 && GET_MODE (x) == GET_MODE (from)
471 && REGNO (x) == REGNO (from))
472 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
473 && rtx_equal_p (x, from)))
474 {
475 validate_change (object, loc, to, 1);
476 return;
477 }
478
479   /* Call ourselves recursively to perform the replacements.
480      We must not replace inside an already replaced expression, otherwise we
481      get infinite recursion for replacements like (reg X)->(subreg (reg X))
482      done by regmove, so we must special-case shared ASM_OPERANDS. */
483
484 if (GET_CODE (x) == PARALLEL)
485 {
486 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
487 {
488 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
489 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
490 {
491 /* Verify that operands are really shared. */
492 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
493 ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
494 abort ();
495 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
496 from, to, object);
497 }
498 else
499 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
500 }
501 }
502 else
503 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
504 {
505 if (fmt[i] == 'e')
506 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
507 else if (fmt[i] == 'E')
508 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
509 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
510 }
511
512 /* If we didn't substitute, there is nothing more to do. */
513 if (num_changes == prev_changes)
514 return;
515
516   /* Allow the substituted expression to have a different mode. This is
517      used by regmove to change the mode of a pseudo register. */
518 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
519 op0_mode = GET_MODE (XEXP (x, 0));
520
521 /* Do changes needed to keep rtx consistent. Don't do any other
522 simplifications, as it is not our job. */
523
524 if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
525 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
526 {
527 validate_change (object, loc,
528 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
529 : swap_condition (code),
530 GET_MODE (x), XEXP (x, 1),
531 XEXP (x, 0)), 1);
532 x = *loc;
533 code = GET_CODE (x);
534 }
535
536 switch (code)
537 {
538 case PLUS:
539 /* If we have a PLUS whose second operand is now a CONST_INT, use
540 simplify_gen_binary to try to simplify it.
541 ??? We may want later to remove this, once simplification is
542 separated from this function. */
543 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
544 validate_change (object, loc,
545 simplify_gen_binary
546 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
547 break;
548 case MINUS:
549 if (GET_CODE (XEXP (x, 1)) == CONST_INT
550 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
551 validate_change (object, loc,
552 simplify_gen_binary
553 (PLUS, GET_MODE (x), XEXP (x, 0),
554 simplify_gen_unary (NEG,
555 GET_MODE (x), XEXP (x, 1),
556 GET_MODE (x))), 1);
557 break;
558 case ZERO_EXTEND:
559 case SIGN_EXTEND:
560 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
561 {
562 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
563 op0_mode);
564 /* If any of the above failed, substitute in something that
565 we know won't be recognized. */
566 if (!new)
567 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
569 }
570 break;
571 case SUBREG:
572       /* All subregs that can be simplified should be simplified. */
573 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
574 SUBREG_BYTE (x));
575
576 /* Subregs of VOIDmode operands are incorrect. */
577 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
578 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
579 if (new)
580 validate_change (object, loc, new, 1);
581 break;
582 case ZERO_EXTRACT:
583 case SIGN_EXTRACT:
584 /* If we are replacing a register with memory, try to change the memory
585 to be the mode required for memory in extract operations (this isn't
586	 likely to be an insertion operation; if it were, nothing bad would
587	 happen; we might just fail in some cases). */
588
589 if (GET_CODE (XEXP (x, 0)) == MEM
590 && GET_CODE (XEXP (x, 1)) == CONST_INT
591 && GET_CODE (XEXP (x, 2)) == CONST_INT
592 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
593 && !MEM_VOLATILE_P (XEXP (x, 0)))
594 {
595 enum machine_mode wanted_mode = VOIDmode;
596 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
597 int pos = INTVAL (XEXP (x, 2));
598
599 if (GET_CODE (x) == ZERO_EXTRACT)
600 {
601 enum machine_mode new_mode
602 = mode_for_extraction (EP_extzv, 1);
603 if (new_mode != MAX_MACHINE_MODE)
604 wanted_mode = new_mode;
605 }
606 else if (GET_CODE (x) == SIGN_EXTRACT)
607 {
608 enum machine_mode new_mode
609 = mode_for_extraction (EP_extv, 1);
610 if (new_mode != MAX_MACHINE_MODE)
611 wanted_mode = new_mode;
612 }
613
614 /* If we have a narrower mode, we can do something. */
615 if (wanted_mode != VOIDmode
616 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
617 {
618 int offset = pos / BITS_PER_UNIT;
619 rtx newmem;
620
621 /* If the bytes and bits are counted differently, we
622 must adjust the offset. */
623 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
624 offset =
625 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
626 offset);
627
628 pos %= GET_MODE_BITSIZE (wanted_mode);
629
630 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
631
632 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
633 validate_change (object, &XEXP (x, 0), newmem, 1);
634 }
635 }
636
637 break;
638
639 default:
640 break;
641 }
642 }
643
644 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
645 with TO. After all changes have been made, validate by seeing
646 if INSN is still valid. */
647
648 int
649 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
650 {
651 validate_replace_rtx_1 (loc, from, to, insn);
652 return apply_change_group ();
653 }
654
655 /* Try replacing every occurrence of FROM in INSN with TO. After all
656 changes have been made, validate by seeing if INSN is still valid. */
657
658 int
659 validate_replace_rtx (rtx from, rtx to, rtx insn)
660 {
661 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
662 return apply_change_group ();
663 }
664
665 /* Try replacing every occurrence of FROM in INSN with TO. */
666
667 void
668 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
669 {
670 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
671 }
672
673 /* Function called by note_uses to replace used subexpressions. */
674 struct validate_replace_src_data
675 {
676 rtx from; /* Old RTX */
677 rtx to; /* New RTX */
678 rtx insn; /* Insn in which substitution is occurring. */
679 };
680
681 static void
682 validate_replace_src_1 (rtx *x, void *data)
683 {
684 struct validate_replace_src_data *d
685 = (struct validate_replace_src_data *) data;
686
687 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
688 }
689
690 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
691 SET_DESTs. */
692
693 void
694 validate_replace_src_group (rtx from, rtx to, rtx insn)
695 {
696 struct validate_replace_src_data d;
697
698 d.from = from;
699 d.to = to;
700 d.insn = insn;
701 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
702 }
703
704 /* Same as validate_replace_src_group, but validate by seeing if
705 INSN is still valid. */
706 int
707 validate_replace_src (rtx from, rtx to, rtx insn)
708 {
709 validate_replace_src_group (from, to, insn);
710 return apply_change_group ();
711 }
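
/* Usage sketch (hypothetical caller): a copy-propagation style
   transformation.  validate_replace_rtx substitutes everywhere in
   INSN and validates the result, while validate_replace_src leaves
   SET_DESTs alone so REG is replaced only where its value is used.
   Both undo the substitution if the rewritten INSN is not valid.  */

static int
example_propagate_copy (rtx reg, rtx src, rtx insn, int uses_only)
{
  if (uses_only)
    return validate_replace_src (reg, src, insn);
  return validate_replace_rtx (reg, src, insn);
}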
712 \f
713 #ifdef HAVE_cc0
714 /* Return 1 if the insn using CC0 set by INSN does not contain
715 any ordered tests applied to the condition codes.
716 EQ and NE tests do not count. */
717
718 int
719 next_insn_tests_no_inequality (rtx insn)
720 {
721 rtx next = next_cc0_user (insn);
722
723 /* If there is no next insn, we have to take the conservative choice. */
724 if (next == 0)
725 return 0;
726
727 return ((GET_CODE (next) == JUMP_INSN
728 || GET_CODE (next) == INSN
729 || GET_CODE (next) == CALL_INSN)
730 && ! inequality_comparisons_p (PATTERN (next)));
731 }
732 #endif
733 \f
734 /* This is used by find_single_use to locate an rtx that contains exactly one
735 use of DEST, which is typically either a REG or CC0. It returns a
736 pointer to the innermost rtx expression containing DEST. Appearances of
737 DEST that are being used to totally replace it are not counted. */
738
739 static rtx *
740 find_single_use_1 (rtx dest, rtx *loc)
741 {
742 rtx x = *loc;
743 enum rtx_code code = GET_CODE (x);
744 rtx *result = 0;
745 rtx *this_result;
746 int i;
747 const char *fmt;
748
749 switch (code)
750 {
751 case CONST_INT:
752 case CONST:
753 case LABEL_REF:
754 case SYMBOL_REF:
755 case CONST_DOUBLE:
756 case CONST_VECTOR:
757 case CLOBBER:
758 return 0;
759
760 case SET:
761 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
762 of a REG that occupies all of the REG, the insn uses DEST if
763 it is mentioned in the destination or the source. Otherwise, we
764	 need only check the source. */
765 if (GET_CODE (SET_DEST (x)) != CC0
766 && GET_CODE (SET_DEST (x)) != PC
767 && GET_CODE (SET_DEST (x)) != REG
768 && ! (GET_CODE (SET_DEST (x)) == SUBREG
769 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
770 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
771 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
772 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
773 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
774 break;
775
776 return find_single_use_1 (dest, &SET_SRC (x));
777
778 case MEM:
779 case SUBREG:
780 return find_single_use_1 (dest, &XEXP (x, 0));
781
782 default:
783 break;
784 }
785
786 /* If it wasn't one of the common cases above, check each expression and
787 vector of this code. Look for a unique usage of DEST. */
788
789 fmt = GET_RTX_FORMAT (code);
790 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
791 {
792 if (fmt[i] == 'e')
793 {
794 if (dest == XEXP (x, i)
795 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
796 && REGNO (dest) == REGNO (XEXP (x, i))))
797 this_result = loc;
798 else
799 this_result = find_single_use_1 (dest, &XEXP (x, i));
800
801 if (result == 0)
802 result = this_result;
803 else if (this_result)
804 /* Duplicate usage. */
805 return 0;
806 }
807 else if (fmt[i] == 'E')
808 {
809 int j;
810
811 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
812 {
813 if (XVECEXP (x, i, j) == dest
814 || (GET_CODE (dest) == REG
815 && GET_CODE (XVECEXP (x, i, j)) == REG
816 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
817 this_result = loc;
818 else
819 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
820
821 if (result == 0)
822 result = this_result;
823 else if (this_result)
824 return 0;
825 }
826 }
827 }
828
829 return result;
830 }
831 \f
832 /* See if DEST, produced in INSN, is used only a single time in the
833 sequel. If so, return a pointer to the innermost rtx expression in which
834 it is used.
835
836 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
837
838    This routine will usually return zero either before flow is called (because
839 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
840 note can't be trusted).
841
842 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
843 care about REG_DEAD notes or LOG_LINKS.
844
845 Otherwise, we find the single use by finding an insn that has a
846 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
847    only referenced once in that insn, we know that insn must be the first
848    and last one referencing DEST. */
849
850 rtx *
851 find_single_use (rtx dest, rtx insn, rtx *ploc)
852 {
853 rtx next;
854 rtx *result;
855 rtx link;
856
857 #ifdef HAVE_cc0
858 if (dest == cc0_rtx)
859 {
860 next = NEXT_INSN (insn);
861 if (next == 0
862 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
863 return 0;
864
865 result = find_single_use_1 (dest, &PATTERN (next));
866 if (result && ploc)
867 *ploc = next;
868 return result;
869 }
870 #endif
871
872 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
873 return 0;
874
875 for (next = next_nonnote_insn (insn);
876 next != 0 && GET_CODE (next) != CODE_LABEL;
877 next = next_nonnote_insn (next))
878 if (INSN_P (next) && dead_or_set_p (next, dest))
879 {
880 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
881 if (XEXP (link, 0) == insn)
882 break;
883
884 if (link)
885 {
886 result = find_single_use_1 (dest, &PATTERN (next));
887 if (ploc)
888 *ploc = next;
889 return result;
890 }
891 }
892
893 return 0;
894 }
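
/* Usage sketch (hypothetical, in the style of combine): if the value
   set by INSN is consumed exactly once, rewrite that single use in
   place and let validate_change verify the consumer.  */

static int
example_forward_into_single_use (rtx insn, rtx dest, rtx new_src)
{
  rtx use_insn;
  rtx *use = find_single_use (dest, insn, &use_insn);

  if (use == 0)
    return 0;

  /* The change is undone automatically if USE_INSN no longer
     matches a pattern with NEW_SRC substituted.  */
  return validate_change (use_insn, use, new_src, 0);
}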
895 \f
896 /* Return 1 if OP is a valid general operand for machine mode MODE.
897 This is either a register reference, a memory reference,
898 or a constant. In the case of a memory reference, the address
899 is checked for general validity for the target machine.
900
901 Register and memory references must have mode MODE in order to be valid,
902 but some constants have no machine mode and are valid for any mode.
903
904 If MODE is VOIDmode, OP is checked for validity for whatever mode
905 it has.
906
907 The main use of this function is as a predicate in match_operand
908 expressions in the machine description.
909
910 For an explanation of this function's behavior for registers of
911 class NO_REGS, see the comment for `register_operand'. */
912
913 int
914 general_operand (rtx op, enum machine_mode mode)
915 {
916 enum rtx_code code = GET_CODE (op);
917
918 if (mode == VOIDmode)
919 mode = GET_MODE (op);
920
921 /* Don't accept CONST_INT or anything similar
922 if the caller wants something floating. */
923 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
924 && GET_MODE_CLASS (mode) != MODE_INT
925 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
926 return 0;
927
928 if (GET_CODE (op) == CONST_INT
929 && mode != VOIDmode
930 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
931 return 0;
932
933 if (CONSTANT_P (op))
934 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
935 || mode == VOIDmode)
936 #ifdef LEGITIMATE_PIC_OPERAND_P
937 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
938 #endif
939 && LEGITIMATE_CONSTANT_P (op));
940
941 /* Except for certain constants with VOIDmode, already checked for,
942 OP's mode must match MODE if MODE specifies a mode. */
943
944 if (GET_MODE (op) != mode)
945 return 0;
946
947 if (code == SUBREG)
948 {
949 rtx sub = SUBREG_REG (op);
950
951 #ifdef INSN_SCHEDULING
952 /* On machines that have insn scheduling, we want all memory
953	 references to be explicit, so outlaw paradoxical SUBREGs. */
954 if (GET_CODE (sub) == MEM
955 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
956 return 0;
957 #endif
958 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
959	 may result in an incorrect reference. We should simplify all valid
960 subregs of MEM anyway. But allow this after reload because we
961 might be called from cleanup_subreg_operands.
962
963 ??? This is a kludge. */
964 if (!reload_completed && SUBREG_BYTE (op) != 0
965 && GET_CODE (sub) == MEM)
966 return 0;
967
968 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
969 create such rtl, and we must reject it. */
970 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
971 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
972 return 0;
973
974 op = sub;
975 code = GET_CODE (op);
976 }
977
978 if (code == REG)
979 /* A register whose class is NO_REGS is not a general operand. */
980 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
981 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
982
983 if (code == MEM)
984 {
985 rtx y = XEXP (op, 0);
986
987 if (! volatile_ok && MEM_VOLATILE_P (op))
988 return 0;
989
990 if (GET_CODE (y) == ADDRESSOF)
991 return 1;
992
993 /* Use the mem's mode, since it will be reloaded thus. */
994 mode = GET_MODE (op);
995 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
996 }
997
998 /* Pretend this is an operand for now; we'll run force_operand
999 on its replacement in fixup_var_refs_1. */
1000 if (code == ADDRESSOF)
1001 return 1;
1002
1003 return 0;
1004
1005 win:
1006 return 1;
1007 }
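
/* Usage sketch (hypothetical expander fragment): the usual idiom when
   generating rtl.  If an operand fails general_operand, copy it into
   a fresh pseudo so the insn we are about to emit is recognizable.
   force_reg is the standard routine from explow.c.  */

static rtx
example_make_general_operand (enum machine_mode mode, rtx op)
{
  if (! general_operand (op, mode))
    op = force_reg (mode, op);
  return op;
}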
1008 \f
1009 /* Return 1 if OP is a valid memory address for a memory reference
1010 of mode MODE.
1011
1012 The main use of this function is as a predicate in match_operand
1013 expressions in the machine description. */
1014
1015 int
1016 address_operand (rtx op, enum machine_mode mode)
1017 {
1018 return memory_address_p (mode, op);
1019 }
1020
1021 /* Return 1 if OP is a register reference of mode MODE.
1022 If MODE is VOIDmode, accept a register in any mode.
1023
1024 The main use of this function is as a predicate in match_operand
1025 expressions in the machine description.
1026
1027 As a special exception, registers whose class is NO_REGS are
1028 not accepted by `register_operand'. The reason for this change
1029 is to allow the representation of special architecture artifacts
1030 (such as a condition code register) without extending the rtl
1031 definitions. Since registers of class NO_REGS cannot be used
1032 as registers in any case where register classes are examined,
1033 it is most consistent to keep this function from accepting them. */
1034
1035 int
1036 register_operand (rtx op, enum machine_mode mode)
1037 {
1038 if (GET_MODE (op) != mode && mode != VOIDmode)
1039 return 0;
1040
1041 if (GET_CODE (op) == SUBREG)
1042 {
1043 rtx sub = SUBREG_REG (op);
1044
1045 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1046 because it is guaranteed to be reloaded into one.
1047 Just make sure the MEM is valid in itself.
1048 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1049 but currently it does result from (SUBREG (REG)...) where the
1050 reg went on the stack.) */
1051 if (! reload_completed && GET_CODE (sub) == MEM)
1052 return general_operand (op, mode);
1053
1054 #ifdef CANNOT_CHANGE_MODE_CLASS
1055 if (GET_CODE (sub) == REG
1056 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1057 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1058 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1059 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1060 return 0;
1061 #endif
1062
1063 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1064 create such rtl, and we must reject it. */
1065 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1066 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1067 return 0;
1068
1069 op = sub;
1070 }
1071
1072 /* If we have an ADDRESSOF, consider it valid since it will be
1073 converted into something that will not be a MEM. */
1074 if (GET_CODE (op) == ADDRESSOF)
1075 return 1;
1076
1077 /* We don't consider registers whose class is NO_REGS
1078 to be a register operand. */
1079 return (GET_CODE (op) == REG
1080 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1081 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1082 }
1083
1084 /* Return 1 for a register in Pmode; ignore the tested mode. */
1085
1086 int
1087 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1088 {
1089 return register_operand (op, Pmode);
1090 }
1091
1092 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1093 or a hard register. */
1094
1095 int
1096 scratch_operand (rtx op, enum machine_mode mode)
1097 {
1098 if (GET_MODE (op) != mode && mode != VOIDmode)
1099 return 0;
1100
1101 return (GET_CODE (op) == SCRATCH
1102 || (GET_CODE (op) == REG
1103 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1104 }
1105
1106 /* Return 1 if OP is a valid immediate operand for mode MODE.
1107
1108 The main use of this function is as a predicate in match_operand
1109 expressions in the machine description. */
1110
1111 int
1112 immediate_operand (rtx op, enum machine_mode mode)
1113 {
1114 /* Don't accept CONST_INT or anything similar
1115 if the caller wants something floating. */
1116 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1117 && GET_MODE_CLASS (mode) != MODE_INT
1118 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1119 return 0;
1120
1121 if (GET_CODE (op) == CONST_INT
1122 && mode != VOIDmode
1123 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1124 return 0;
1125
1126 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1127 result in 0/1. It seems a safe assumption that this is
1128 in range for everyone. */
1129 if (GET_CODE (op) == CONSTANT_P_RTX)
1130 return 1;
1131
1132 return (CONSTANT_P (op)
1133 && (GET_MODE (op) == mode || mode == VOIDmode
1134 || GET_MODE (op) == VOIDmode)
1135 #ifdef LEGITIMATE_PIC_OPERAND_P
1136 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1137 #endif
1138 && LEGITIMATE_CONSTANT_P (op));
1139 }
1140
1141 /* Returns 1 if OP is an operand that is a CONST_INT. */
1142
1143 int
1144 const_int_operand (rtx op, enum machine_mode mode)
1145 {
1146 if (GET_CODE (op) != CONST_INT)
1147 return 0;
1148
1149 if (mode != VOIDmode
1150 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1151 return 0;
1152
1153 return 1;
1154 }
1155
1156 /* Returns 1 if OP is an operand that is a constant integer or constant
1157 floating-point number. */
1158
1159 int
1160 const_double_operand (rtx op, enum machine_mode mode)
1161 {
1162 /* Don't accept CONST_INT or anything similar
1163 if the caller wants something floating. */
1164 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1165 && GET_MODE_CLASS (mode) != MODE_INT
1166 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1167 return 0;
1168
1169 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1170 && (mode == VOIDmode || GET_MODE (op) == mode
1171 || GET_MODE (op) == VOIDmode));
1172 }
1173
1174 /* Return 1 if OP is a general operand that is not an immediate operand. */
1175
1176 int
1177 nonimmediate_operand (rtx op, enum machine_mode mode)
1178 {
1179 return (general_operand (op, mode) && ! CONSTANT_P (op));
1180 }
1181
1182 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1183
1184 int
1185 nonmemory_operand (rtx op, enum machine_mode mode)
1186 {
1187 if (CONSTANT_P (op))
1188 {
1189 /* Don't accept CONST_INT or anything similar
1190 if the caller wants something floating. */
1191 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1192 && GET_MODE_CLASS (mode) != MODE_INT
1193 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1194 return 0;
1195
1196 if (GET_CODE (op) == CONST_INT
1197 && mode != VOIDmode
1198 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1199 return 0;
1200
1201 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1202 || mode == VOIDmode)
1203 #ifdef LEGITIMATE_PIC_OPERAND_P
1204 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1205 #endif
1206 && LEGITIMATE_CONSTANT_P (op));
1207 }
1208
1209 if (GET_MODE (op) != mode && mode != VOIDmode)
1210 return 0;
1211
1212 if (GET_CODE (op) == SUBREG)
1213 {
1214 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1215 because it is guaranteed to be reloaded into one.
1216 Just make sure the MEM is valid in itself.
1217 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1218 but currently it does result from (SUBREG (REG)...) where the
1219 reg went on the stack.) */
1220 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1221 return general_operand (op, mode);
1222 op = SUBREG_REG (op);
1223 }
1224
1225 /* We don't consider registers whose class is NO_REGS
1226 to be a register operand. */
1227 return (GET_CODE (op) == REG
1228 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1229 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1230 }
1231
1232 /* Return 1 if OP is a valid operand that stands for pushing a
1233 value of mode MODE onto the stack.
1234
1235 The main use of this function is as a predicate in match_operand
1236 expressions in the machine description. */
1237
1238 int
1239 push_operand (rtx op, enum machine_mode mode)
1240 {
1241 unsigned int rounded_size = GET_MODE_SIZE (mode);
1242
1243 #ifdef PUSH_ROUNDING
1244 rounded_size = PUSH_ROUNDING (rounded_size);
1245 #endif
1246
1247 if (GET_CODE (op) != MEM)
1248 return 0;
1249
1250 if (mode != VOIDmode && GET_MODE (op) != mode)
1251 return 0;
1252
1253 op = XEXP (op, 0);
1254
1255 if (rounded_size == GET_MODE_SIZE (mode))
1256 {
1257 if (GET_CODE (op) != STACK_PUSH_CODE)
1258 return 0;
1259 }
1260 else
1261 {
1262 if (GET_CODE (op) != PRE_MODIFY
1263 || GET_CODE (XEXP (op, 1)) != PLUS
1264 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1265 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1266 #ifdef STACK_GROWS_DOWNWARD
1267 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1268 #else
1269 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1270 #endif
1271 )
1272 return 0;
1273 }
1274
1275 return XEXP (op, 0) == stack_pointer_rtx;
1276 }
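
/* Illustrative sketch (hypothetical helper): the simple address shape
   accepted above when no rounding is involved, e.g.
   (mem:SI (pre_dec:SI (reg sp))) on a machine whose stack grows
   downward.  */

static rtx
example_gen_simple_push_mem (enum machine_mode mode)
{
  rtx addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  return gen_rtx_MEM (mode, addr);
}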
1277
1278 /* Return 1 if OP is a valid operand that stands for popping a
1279 value of mode MODE off the stack.
1280
1281 The main use of this function is as a predicate in match_operand
1282 expressions in the machine description. */
1283
1284 int
1285 pop_operand (rtx op, enum machine_mode mode)
1286 {
1287 if (GET_CODE (op) != MEM)
1288 return 0;
1289
1290 if (mode != VOIDmode && GET_MODE (op) != mode)
1291 return 0;
1292
1293 op = XEXP (op, 0);
1294
1295 if (GET_CODE (op) != STACK_POP_CODE)
1296 return 0;
1297
1298 return XEXP (op, 0) == stack_pointer_rtx;
1299 }
1300
1301 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1302
1303 int
1304 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1305 {
1306 if (GET_CODE (addr) == ADDRESSOF)
1307 return 1;
1308
1309 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1310 return 0;
1311
1312 win:
1313 return 1;
1314 }
1315
1316 /* Return 1 if OP is a valid memory reference with mode MODE,
1317 including a valid address.
1318
1319 The main use of this function is as a predicate in match_operand
1320 expressions in the machine description. */
1321
1322 int
1323 memory_operand (rtx op, enum machine_mode mode)
1324 {
1325 rtx inner;
1326
1327 if (! reload_completed)
1328     /* Note that no SUBREG is a memory operand before the end of the reload
1329	pass, because (SUBREG (MEM...)) forces reloading into a register. */
1330 return GET_CODE (op) == MEM && general_operand (op, mode);
1331
1332 if (mode != VOIDmode && GET_MODE (op) != mode)
1333 return 0;
1334
1335 inner = op;
1336 if (GET_CODE (inner) == SUBREG)
1337 inner = SUBREG_REG (inner);
1338
1339 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1340 }
1341
1342 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1343 that is, a memory reference whose address is a general_operand. */
1344
1345 int
1346 indirect_operand (rtx op, enum machine_mode mode)
1347 {
1348 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1349 if (! reload_completed
1350 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1351 {
1352 int offset = SUBREG_BYTE (op);
1353 rtx inner = SUBREG_REG (op);
1354
1355 if (mode != VOIDmode && GET_MODE (op) != mode)
1356 return 0;
1357
1358 /* The only way that we can have a general_operand as the resulting
1359 address is if OFFSET is zero and the address already is an operand
1360 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1361 operand. */
1362
1363 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1364 || (GET_CODE (XEXP (inner, 0)) == PLUS
1365 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1366 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1367 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1368 }
1369
1370 return (GET_CODE (op) == MEM
1371 && memory_operand (op, mode)
1372 && general_operand (XEXP (op, 0), Pmode));
1373 }
1374
1375 /* Return 1 if this is a comparison operator. This allows the use of
1376 MATCH_OPERATOR to recognize all the branch insns. */
1377
1378 int
1379 comparison_operator (rtx op, enum machine_mode mode)
1380 {
1381 return ((mode == VOIDmode || GET_MODE (op) == mode)
1382 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1383 }
1384 \f
1385 /* If BODY is an insn body that uses ASM_OPERANDS,
1386 return the number of operands (both input and output) in the insn.
1387 Otherwise return -1. */
1388
1389 int
1390 asm_noperands (rtx body)
1391 {
1392 switch (GET_CODE (body))
1393 {
1394 case ASM_OPERANDS:
1395 /* No output operands: return number of input operands. */
1396 return ASM_OPERANDS_INPUT_LENGTH (body);
1397 case SET:
1398 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1399 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1400 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1401 else
1402 return -1;
1403 case PARALLEL:
1404 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1405 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1406 {
1407 /* Multiple output operands, or 1 output plus some clobbers:
1408 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1409 int i;
1410 int n_sets;
1411
1412 /* Count backwards through CLOBBERs to determine number of SETs. */
1413 for (i = XVECLEN (body, 0); i > 0; i--)
1414 {
1415 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1416 break;
1417 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1418 return -1;
1419 }
1420
1421 /* N_SETS is now number of output operands. */
1422 n_sets = i;
1423
1424 /* Verify that all the SETs we have
1425 came from a single original asm_operands insn
1426 (so that invalid combinations are blocked). */
1427 for (i = 0; i < n_sets; i++)
1428 {
1429 rtx elt = XVECEXP (body, 0, i);
1430 if (GET_CODE (elt) != SET)
1431 return -1;
1432 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1433 return -1;
1434 /* If these ASM_OPERANDS rtx's came from different original insns
1435 then they aren't allowed together. */
1436 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1437 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1438 return -1;
1439 }
1440 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1441 + n_sets);
1442 }
1443 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1444 {
1445 /* 0 outputs, but some clobbers:
1446 body is [(asm_operands ...) (clobber (reg ...))...]. */
1447 int i;
1448
1449 /* Make sure all the other parallel things really are clobbers. */
1450 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1451 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1452 return -1;
1453
1454 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1455 }
1456 else
1457 return -1;
1458 default:
1459 return -1;
1460 }
1461 }
1462
1463 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1464 copy its operands (both input and output) into the vector OPERANDS,
1465 the locations of the operands within the insn into the vector OPERAND_LOCS,
1466 and the constraints for the operands into CONSTRAINTS.
1467 Write the modes of the operands into MODES.
1468 Return the assembler-template.
1469
1470 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1471 we don't store that info. */
1472
1473 const char *
1474 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1475 const char **constraints, enum machine_mode *modes)
1476 {
1477 int i;
1478 int noperands;
1479 const char *template = 0;
1480
1481 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1482 {
1483 rtx asmop = SET_SRC (body);
1484 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1485
1486 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1487
1488 for (i = 1; i < noperands; i++)
1489 {
1490 if (operand_locs)
1491 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1492 if (operands)
1493 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1494 if (constraints)
1495 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1496 if (modes)
1497 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1498 }
1499
1500 /* The output is in the SET.
1501 Its constraint is in the ASM_OPERANDS itself. */
1502 if (operands)
1503 operands[0] = SET_DEST (body);
1504 if (operand_locs)
1505 operand_locs[0] = &SET_DEST (body);
1506 if (constraints)
1507 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1508 if (modes)
1509 modes[0] = GET_MODE (SET_DEST (body));
1510 template = ASM_OPERANDS_TEMPLATE (asmop);
1511 }
1512 else if (GET_CODE (body) == ASM_OPERANDS)
1513 {
1514 rtx asmop = body;
1515 /* No output operands: BODY is (asm_operands ....). */
1516
1517 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1518
1519 /* The input operands are found in the 1st element vector. */
1520 /* Constraints for inputs are in the 2nd element vector. */
1521 for (i = 0; i < noperands; i++)
1522 {
1523 if (operand_locs)
1524 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1525 if (operands)
1526 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1527 if (constraints)
1528 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1529 if (modes)
1530 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1531 }
1532 template = ASM_OPERANDS_TEMPLATE (asmop);
1533 }
1534 else if (GET_CODE (body) == PARALLEL
1535 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1536 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1537 {
1538 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1539 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1540 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1541 int nout = 0; /* Does not include CLOBBERs. */
1542
1543 /* At least one output, plus some CLOBBERs. */
1544
1545 /* The outputs are in the SETs.
1546 Their constraints are in the ASM_OPERANDS itself. */
1547 for (i = 0; i < nparallel; i++)
1548 {
1549 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1550 break; /* Past last SET */
1551
1552 if (operands)
1553 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1554 if (operand_locs)
1555 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1556 if (constraints)
1557 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1558 if (modes)
1559 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1560 nout++;
1561 }
1562
1563 for (i = 0; i < nin; i++)
1564 {
1565 if (operand_locs)
1566 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1567 if (operands)
1568 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1569 if (constraints)
1570 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1571 if (modes)
1572 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1573 }
1574
1575 template = ASM_OPERANDS_TEMPLATE (asmop);
1576 }
1577 else if (GET_CODE (body) == PARALLEL
1578 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1579 {
1580 /* No outputs, but some CLOBBERs. */
1581
1582 rtx asmop = XVECEXP (body, 0, 0);
1583 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1584
1585 for (i = 0; i < nin; i++)
1586 {
1587 if (operand_locs)
1588 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1589 if (operands)
1590 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1591 if (constraints)
1592 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1593 if (modes)
1594 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1595 }
1596
1597 template = ASM_OPERANDS_TEMPLATE (asmop);
1598 }
1599
1600 return template;
1601 }
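
/* Usage sketch (hypothetical caller), mirroring check_asm_operands
   above: size the arrays with asm_noperands, then let
   decode_asm_operands fill in only the views that are needed,
   passing 0 for the rest.  */

static const char *
example_get_asm_operands (rtx body, rtx **operands_out)
{
  int noperands = asm_noperands (body);

  if (noperands <= 0)
    return 0;

  *operands_out = xmalloc (noperands * sizeof (rtx));
  return decode_asm_operands (body, *operands_out, NULL, NULL, NULL);
}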
1602
1603 /* Check if an asm_operand matches its constraints.
1604 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1605
1606 int
1607 asm_operand_ok (rtx op, const char *constraint)
1608 {
1609 int result = 0;
1610
1611 /* Use constrain_operands after reload. */
1612 if (reload_completed)
1613 abort ();
1614
1615 while (*constraint)
1616 {
1617 char c = *constraint;
1618 int len;
1619 switch (c)
1620 {
1621 case ',':
1622 constraint++;
1623 continue;
1624 case '=':
1625 case '+':
1626 case '*':
1627 case '%':
1628 case '!':
1629 case '#':
1630 case '&':
1631 case '?':
1632 break;
1633
1634 case '0': case '1': case '2': case '3': case '4':
1635 case '5': case '6': case '7': case '8': case '9':
1636 /* For best results, our caller should have given us the
1637 proper matching constraint, but we can't actually fail
1638 the check if they didn't. Indicate that results are
1639 inconclusive. */
1640 do
1641 constraint++;
1642 while (ISDIGIT (*constraint));
1643 if (! result)
1644 result = -1;
1645 continue;
1646
1647 case 'p':
1648 if (address_operand (op, VOIDmode))
1649 result = 1;
1650 break;
1651
1652 case 'm':
1653 case 'V': /* non-offsettable */
1654 if (memory_operand (op, VOIDmode))
1655 result = 1;
1656 break;
1657
1658 case 'o': /* offsettable */
1659 if (offsettable_nonstrict_memref_p (op))
1660 result = 1;
1661 break;
1662
1663 case '<':
1664 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1665 excepting those that expand_call created. Further, on some
1666 machines which do not have generalized auto inc/dec, an inc/dec
1667 is not a memory_operand.
1668
1669 Match any memory and hope things are resolved after reload. */
1670
1671 if (GET_CODE (op) == MEM
1672 && (1
1673 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1674 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1675 result = 1;
1676 break;
1677
1678 case '>':
1679 if (GET_CODE (op) == MEM
1680 && (1
1681 || GET_CODE (XEXP (op, 0)) == PRE_INC
1682 || GET_CODE (XEXP (op, 0)) == POST_INC))
1683 result = 1;
1684 break;
1685
1686 case 'E':
1687 case 'F':
1688 if (GET_CODE (op) == CONST_DOUBLE
1689 || (GET_CODE (op) == CONST_VECTOR
1690 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1691 result = 1;
1692 break;
1693
1694 case 'G':
1695 if (GET_CODE (op) == CONST_DOUBLE
1696 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1697 result = 1;
1698 break;
1699 case 'H':
1700 if (GET_CODE (op) == CONST_DOUBLE
1701 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1702 result = 1;
1703 break;
1704
1705 case 's':
1706 if (GET_CODE (op) == CONST_INT
1707 || (GET_CODE (op) == CONST_DOUBLE
1708 && GET_MODE (op) == VOIDmode))
1709 break;
1710 /* Fall through. */
1711
1712 case 'i':
1713 if (CONSTANT_P (op)
1714 #ifdef LEGITIMATE_PIC_OPERAND_P
1715 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1716 #endif
1717 )
1718 result = 1;
1719 break;
1720
1721 case 'n':
1722 if (GET_CODE (op) == CONST_INT
1723 || (GET_CODE (op) == CONST_DOUBLE
1724 && GET_MODE (op) == VOIDmode))
1725 result = 1;
1726 break;
1727
1728 case 'I':
1729 if (GET_CODE (op) == CONST_INT
1730 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1731 result = 1;
1732 break;
1733 case 'J':
1734 if (GET_CODE (op) == CONST_INT
1735 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1736 result = 1;
1737 break;
1738 case 'K':
1739 if (GET_CODE (op) == CONST_INT
1740 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1741 result = 1;
1742 break;
1743 case 'L':
1744 if (GET_CODE (op) == CONST_INT
1745 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1746 result = 1;
1747 break;
1748 case 'M':
1749 if (GET_CODE (op) == CONST_INT
1750 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1751 result = 1;
1752 break;
1753 case 'N':
1754 if (GET_CODE (op) == CONST_INT
1755 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1756 result = 1;
1757 break;
1758 case 'O':
1759 if (GET_CODE (op) == CONST_INT
1760 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1761 result = 1;
1762 break;
1763 case 'P':
1764 if (GET_CODE (op) == CONST_INT
1765 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1766 result = 1;
1767 break;
1768
1769 case 'X':
1770 result = 1;
1771 break;
1772
1773 case 'g':
1774 if (general_operand (op, VOIDmode))
1775 result = 1;
1776 break;
1777
1778 default:
1779 /* For all other letters, we first check for a register class,
1780 otherwise it is an EXTRA_CONSTRAINT. */
1781 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1782 {
1783 case 'r':
1784 if (GET_MODE (op) == BLKmode)
1785 break;
1786 if (register_operand (op, VOIDmode))
1787 result = 1;
1788 }
1789 #ifdef EXTRA_CONSTRAINT_STR
1790 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1791 result = 1;
1792 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1793 /* Every memory operand can be reloaded to fit. */
1794 && memory_operand (op, VOIDmode))
1795 result = 1;
1796 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1797 /* Every address operand can be reloaded to fit. */
1798 && address_operand (op, VOIDmode))
1799 result = 1;
1800 #endif
1801 break;
1802 }
1803 len = CONSTRAINT_LEN (c, constraint);
1804 do
1805 constraint++;
1806 while (--len && *constraint);
1807 if (len)
1808 return 0;
1809 }
1810
1811 return result;
1812 }
1813 \f
1814 /* Given an rtx *P, if it is a sum containing an integer constant term,
1815 return the location (type rtx *) of the pointer to that constant term.
1816 Otherwise, return a null pointer. */
1817
1818 rtx *
1819 find_constant_term_loc (rtx *p)
1820 {
1821 rtx *tem;
1822 enum rtx_code code = GET_CODE (*p);
1823
1824 /* If *P IS such a constant term, P is its location. */
1825
1826 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1827 || code == CONST)
1828 return p;
1829
1830 /* Otherwise, if not a sum, it has no constant term. */
1831
1832 if (GET_CODE (*p) != PLUS)
1833 return 0;
1834
1835   /* If both summands are constant, the sum is itself a constant term; P is its location. */
1836
1837 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1838 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1839 return p;
1840
1841 /* Otherwise, check each summand for containing a constant term. */
1842
1843 if (XEXP (*p, 0) != 0)
1844 {
1845 tem = find_constant_term_loc (&XEXP (*p, 0));
1846 if (tem != 0)
1847 return tem;
1848 }
1849
1850 if (XEXP (*p, 1) != 0)
1851 {
1852 tem = find_constant_term_loc (&XEXP (*p, 1));
1853 if (tem != 0)
1854 return tem;
1855 }
1856
1857 return 0;
1858 }
1859 \f
1860 /* Return 1 if OP is a memory reference
1861 whose address contains no side effects
1862 and remains valid after the addition
1863 of a positive integer less than the
1864 size of the object being referenced.
1865
1866 We assume that the original address is valid and do not check it.
1867
1868 This uses strict_memory_address_p as a subroutine, so
1869 don't use it before reload. */
1870
1871 int
1872 offsettable_memref_p (rtx op)
1873 {
1874 return ((GET_CODE (op) == MEM)
1875 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1876 }
1877
1878 /* Similar, but don't require a strictly valid mem ref:
1879 consider pseudo-regs valid as index or base regs. */
1880
1881 int
1882 offsettable_nonstrict_memref_p (rtx op)
1883 {
1884 return ((GET_CODE (op) == MEM)
1885 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1886 }
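
/* Usage sketch (hypothetical move-by-pieces fragment): only when a
   MEM is offsettable is it safe to address its pieces by adding
   small constant offsets, which is what adjust_address does.  */

static rtx
example_get_first_byte (rtx mem)
{
  if (! offsettable_memref_p (mem))
    return NULL_RTX;

  /* Legitimate because offsettability guarantees the address stays
     valid for offsets smaller than the referenced object.  */
  return adjust_address (mem, QImode, 0);
}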
1887
1888 /* Return 1 if Y is a memory address which contains no side effects
1889 and would remain valid after the addition of a positive integer
1890 less than the size of that mode.
1891
1892 We assume that the original address is valid and do not check it.
1893 We do check that it is valid for narrower modes.
1894
1895 If STRICTP is nonzero, we require a strictly valid address,
1896 for the sake of use in reload.c. */
1897
1898 int
1899 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1900 {
1901 enum rtx_code ycode = GET_CODE (y);
1902 rtx z;
1903 rtx y1 = y;
1904 rtx *y2;
1905 int (*addressp) (enum machine_mode, rtx) =
1906 (strictp ? strict_memory_address_p : memory_address_p);
1907 unsigned int mode_sz = GET_MODE_SIZE (mode);
1908
1909 if (CONSTANT_ADDRESS_P (y))
1910 return 1;
1911
1912 /* Adjusting an offsettable address involves changing to a narrower mode.
1913 Make sure that's OK. */
1914
1915 if (mode_dependent_address_p (y))
1916 return 0;
1917
1918 /* ??? How much offset does an offsettable BLKmode reference need?
1919 Clearly that depends on the situation in which it's being used.
1920 However, the current situation in which we test 0xffffffff is
1921 less than ideal. Caveat user. */
1922 if (mode_sz == 0)
1923 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1924
1925 /* If the expression contains a constant term,
1926 see if it remains valid when max possible offset is added. */
1927
1928 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1929 {
1930 int good;
1931
1932 y1 = *y2;
1933 *y2 = plus_constant (*y2, mode_sz - 1);
1934 /* Use QImode because an odd displacement may be automatically invalid
1935 for any wider mode. But it should be valid for a single byte. */
1936 good = (*addressp) (QImode, y);
1937
1938 /* In any case, restore old contents of memory. */
1939 *y2 = y1;
1940 return good;
1941 }
1942
1943 if (GET_RTX_CLASS (ycode) == 'a')
1944 return 0;
1945
1946 /* The offset added here is chosen as the maximum offset that
1947 any instruction could need to add when operating on something
1948 of the specified mode. We assume that if Y and Y+c are
1949 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1950 go inside a LO_SUM here, so we do so as well. */
1951 if (GET_CODE (y) == LO_SUM
1952 && mode != BLKmode
1953 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1954 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1955 plus_constant (XEXP (y, 1), mode_sz - 1));
1956 else
1957 z = plus_constant (y, mode_sz - 1);
1958
1959 /* Use QImode because an odd displacement may be automatically invalid
1960 for any wider mode. But it should be valid for a single byte. */
1961 return (*addressp) (QImode, z);
1962 }
1963
1964 /* Return 1 if ADDR is an address-expression whose effect depends
1965 on the mode of the memory reference it is used in.
1966
1967 Autoincrement addressing is a typical example of mode-dependence
1968 because the amount of the increment depends on the mode. */
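/* For example, (post_inc:SI (reg:SI 3)) is mode-dependent: as the address
   of a QImode reference it advances the register by 1, but as the address
   of an SImode reference it advances it by 4 (on a byte-addressed target).  */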
1969
1970 int
1971 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1972 {
1973 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1974 return 0;
1975 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1976 win: ATTRIBUTE_UNUSED_LABEL
1977 return 1;
1978 }
1979 \f
1980 /* Like extract_insn, but save the insn extracted and don't extract again
1981 when called again for the same insn, expecting that recog_data still
1982 contains the valid information. This is used primarily by the gen_attr
1983 infrastructure, which often extracts the same insn again and again. */
1984 void
1985 extract_insn_cached (rtx insn)
1986 {
1987 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1988 return;
1989 extract_insn (insn);
1990 recog_data.insn = insn;
1991 }
1992 /* Do cached extract_insn and constrain_operands, and complain about
1993 failures. Used by insn_attrtab. */
1994 void
1995 extract_constrain_insn_cached (rtx insn)
1996 {
1997 extract_insn_cached (insn);
1998 if (which_alternative == -1
1999 && !constrain_operands (reload_completed))
2000 fatal_insn_not_found (insn);
2001 }
2002 /* Do cached constrain_operands; failures are not diagnosed here. */
2003 int
2004 constrain_operands_cached (int strict)
2005 {
2006 if (which_alternative == -1)
2007 return constrain_operands (strict);
2008 else
2009 return 1;
2010 }
2011 \f
2012 /* Analyze INSN and fill in recog_data. */
2013
2014 void
2015 extract_insn (rtx insn)
2016 {
2017 int i;
2018 int icode;
2019 int noperands;
2020 rtx body = PATTERN (insn);
2021
2022 recog_data.insn = NULL;
2023 recog_data.n_operands = 0;
2024 recog_data.n_alternatives = 0;
2025 recog_data.n_dups = 0;
2026 which_alternative = -1;
2027
2028 switch (GET_CODE (body))
2029 {
2030 case USE:
2031 case CLOBBER:
2032 case ASM_INPUT:
2033 case ADDR_VEC:
2034 case ADDR_DIFF_VEC:
2035 return;
2036
2037 case SET:
2038 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2039 goto asm_insn;
2040 else
2041 goto normal_insn;
2042 case PARALLEL:
2043 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2044 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2045 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2046 goto asm_insn;
2047 else
2048 goto normal_insn;
2049 case ASM_OPERANDS:
2050 asm_insn:
2051 recog_data.n_operands = noperands = asm_noperands (body);
2052 if (noperands >= 0)
2053 {
2054 /* This insn is an `asm' with operands. */
2055
2056 /* expand_asm_operands makes sure there aren't too many operands. */
2057 if (noperands > MAX_RECOG_OPERANDS)
2058 abort ();
2059
2060 /* Now get the operand values and constraints out of the insn. */
2061 decode_asm_operands (body, recog_data.operand,
2062 recog_data.operand_loc,
2063 recog_data.constraints,
2064 recog_data.operand_mode);
2065 if (noperands > 0)
2066 {
2067 const char *p = recog_data.constraints[0];
2068 recog_data.n_alternatives = 1;
2069 while (*p)
2070 recog_data.n_alternatives += (*p++ == ',');
2071 }
2072 break;
2073 }
2074 fatal_insn_not_found (insn);
2075
2076 default:
2077 normal_insn:
2078 /* Ordinary insn: recognize it, get the operands via insn_extract
2079 and get the constraints. */
2080
2081 icode = recog_memoized (insn);
2082 if (icode < 0)
2083 fatal_insn_not_found (insn);
2084
2085 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2086 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2087 recog_data.n_dups = insn_data[icode].n_dups;
2088
2089 insn_extract (insn);
2090
2091 for (i = 0; i < noperands; i++)
2092 {
2093 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2094 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2095 /* VOIDmode match_operands get their mode from the real operand. */
2096 if (recog_data.operand_mode[i] == VOIDmode)
2097 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2098 }
2099 }
2100 for (i = 0; i < noperands; i++)
2101 recog_data.operand_type[i]
2102 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2103 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2104 : OP_IN);
2105
2106 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2107 abort ();
2108 }
2109
2110 /* After calling extract_insn, you can use this function to extract some
2111 information from the constraint strings into a more usable form.
2112 The collected data is stored in recog_op_alt. */
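/* For example (illustrative), an operand with the constraint string "=r,m"
   yields two operand_alternative entries: alternative 0 with class
   GENERAL_REGS, and alternative 1 with memory_ok set (class NO_REGS).
   The '=' modifier is skipped; it says nothing about alternatives.  */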
2113 void
2114 preprocess_constraints (void)
2115 {
2116 int i;
2117
2118 for (i = 0; i < recog_data.n_operands; i++)
2119 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2120 * sizeof (struct operand_alternative)));
2121
2122 for (i = 0; i < recog_data.n_operands; i++)
2123 {
2124 int j;
2125 struct operand_alternative *op_alt;
2126 const char *p = recog_data.constraints[i];
2127
2128 op_alt = recog_op_alt[i];
2129
2130 for (j = 0; j < recog_data.n_alternatives; j++)
2131 {
2132 op_alt[j].class = NO_REGS;
2133 op_alt[j].constraint = p;
2134 op_alt[j].matches = -1;
2135 op_alt[j].matched = -1;
2136
2137 if (*p == '\0' || *p == ',')
2138 {
2139 op_alt[j].anything_ok = 1;
2140 continue;
2141 }
2142
2143 for (;;)
2144 {
2145 char c = *p;
2146 if (c == '#')
2147 do
2148 c = *++p;
2149 while (c != ',' && c != '\0');
2150 if (c == ',' || c == '\0')
2151 {
2152 p++;
2153 break;
2154 }
2155
2156 switch (c)
2157 {
2158 case '=': case '+': case '*': case '%':
2159 case 'E': case 'F': case 'G': case 'H':
2160 case 's': case 'i': case 'n':
2161 case 'I': case 'J': case 'K': case 'L':
2162 case 'M': case 'N': case 'O': case 'P':
2163 /* These don't say anything we care about. */
2164 break;
2165
2166 case '?':
2167 op_alt[j].reject += 6;
2168 break;
2169 case '!':
2170 op_alt[j].reject += 600;
2171 break;
2172 case '&':
2173 op_alt[j].earlyclobber = 1;
2174 break;
2175
2176 case '0': case '1': case '2': case '3': case '4':
2177 case '5': case '6': case '7': case '8': case '9':
2178 {
2179 char *end;
2180 op_alt[j].matches = strtoul (p, &end, 10);
2181 recog_op_alt[op_alt[j].matches][j].matched = i;
2182 p = end;
2183 }
2184 continue;
2185
2186 case 'm':
2187 op_alt[j].memory_ok = 1;
2188 break;
2189 case '<':
2190 op_alt[j].decmem_ok = 1;
2191 break;
2192 case '>':
2193 op_alt[j].incmem_ok = 1;
2194 break;
2195 case 'V':
2196 op_alt[j].nonoffmem_ok = 1;
2197 break;
2198 case 'o':
2199 op_alt[j].offmem_ok = 1;
2200 break;
2201 case 'X':
2202 op_alt[j].anything_ok = 1;
2203 break;
2204
2205 case 'p':
2206 op_alt[j].is_address = 1;
2207 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2208 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2209 break;
2210
2211 case 'g': case 'r':
2212 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2213 break;
2214
2215 default:
2216 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2217 {
2218 op_alt[j].memory_ok = 1;
2219 break;
2220 }
2221 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2222 {
2223 op_alt[j].is_address = 1;
2224 op_alt[j].class
2225 = (reg_class_subunion
2226 [(int) op_alt[j].class]
2227 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2228 break;
2229 }
2230
2231 op_alt[j].class
2232 = (reg_class_subunion
2233 [(int) op_alt[j].class]
2234 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2235 break;
2236 }
2237 p += CONSTRAINT_LEN (c, p);
2238 }
2239 }
2240 }
2241 }
2242
2243 /* Check the operands of an insn against the insn's operand constraints
2244 and return 1 if they are valid.
2245 The information about the insn's operands, constraints, operand modes
2246 etc. is obtained from the global variables set up by extract_insn.
2247
2248 WHICH_ALTERNATIVE is set to a number which indicates which
2249 alternative of constraints was matched: 0 for the first alternative,
2250 1 for the next, etc.
2251
2252 In addition, when two operands are required to match
2253 and it happens that the output operand is (reg) while the
2254 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2255 make the output operand look like the input.
2256 This is because the output operand is the one the template will print.
2257
2258 This is used in final, just before printing the assembler code and by
2259 the routines that determine an insn's attribute.
2260
2261 If STRICT is a positive nonzero value, it means that we have been
2262 called after reload has been completed. In that case, we must
2263 do all checks strictly. If it is zero, it means that we have been called
2264 before reload has completed. In that case, we first try to see if we can
2265 find an alternative that matches strictly. If not, we try again, this
2266 time assuming that reload will fix up the insn. This provides a "best
2267 guess" for the alternative and is used to compute attributes of insns prior
2268 to reload. A negative value of STRICT is used for this internal call. */
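/* A typical caller (an illustrative sketch) looks like:

     extract_insn (insn);
     if (! constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   which is essentially what extract_constrain_insn_cached does above,
   minus the caching.  */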
2269
2270 struct funny_match
2271 {
2272 int this, other;
2273 };
2274
2275 int
2276 constrain_operands (int strict)
2277 {
2278 const char *constraints[MAX_RECOG_OPERANDS];
2279 int matching_operands[MAX_RECOG_OPERANDS];
2280 int earlyclobber[MAX_RECOG_OPERANDS];
2281 int c;
2282
2283 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2284 int funny_match_index;
2285
2286 which_alternative = 0;
2287 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2288 return 1;
2289
2290 for (c = 0; c < recog_data.n_operands; c++)
2291 {
2292 constraints[c] = recog_data.constraints[c];
2293 matching_operands[c] = -1;
2294 }
2295
2296 do
2297 {
2298 int opno;
2299 int lose = 0;
2300 funny_match_index = 0;
2301
2302 for (opno = 0; opno < recog_data.n_operands; opno++)
2303 {
2304 rtx op = recog_data.operand[opno];
2305 enum machine_mode mode = GET_MODE (op);
2306 const char *p = constraints[opno];
2307 int offset = 0;
2308 int win = 0;
2309 int val;
2310 int len;
2311
2312 earlyclobber[opno] = 0;
2313
2314 /* A unary operator may be accepted by the predicate, but it
2315 is irrelevant for matching constraints. */
2316 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2317 op = XEXP (op, 0);
2318
2319 if (GET_CODE (op) == SUBREG)
2320 {
2321 if (GET_CODE (SUBREG_REG (op)) == REG
2322 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2323 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2324 GET_MODE (SUBREG_REG (op)),
2325 SUBREG_BYTE (op),
2326 GET_MODE (op));
2327 op = SUBREG_REG (op);
2328 }
2329
2330 /* An empty constraint or empty alternative
2331 allows anything which matched the pattern. */
2332 if (*p == 0 || *p == ',')
2333 win = 1;
2334
2335 do
2336 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2337 {
2338 case '\0':
2339 len = 0;
2340 break;
2341 case ',':
2342 c = '\0';
2343 break;
2344
2345 case '?': case '!': case '*': case '%':
2346 case '=': case '+':
2347 break;
2348
2349 case '#':
2350 /* Ignore rest of this alternative as far as
2351 constraint checking is concerned. */
2352 do
2353 p++;
2354 while (*p && *p != ',');
2355 len = 0;
2356 break;
2357
2358 case '&':
2359 earlyclobber[opno] = 1;
2360 break;
2361
2362 case '0': case '1': case '2': case '3': case '4':
2363 case '5': case '6': case '7': case '8': case '9':
2364 {
2365 /* This operand must be the same as a previous one.
2366 This kind of constraint is used for instructions such
2367 as add when they take only two operands.
2368
2369 Note that the lower-numbered operand is passed first.
2370
2371 If we are not testing strictly, assume that this
2372 constraint will be satisfied. */
2373
2374 char *end;
2375 int match;
2376
2377 match = strtoul (p, &end, 10);
2378 p = end;
2379
2380 if (strict < 0)
2381 val = 1;
2382 else
2383 {
2384 rtx op1 = recog_data.operand[match];
2385 rtx op2 = recog_data.operand[opno];
2386
2387 /* A unary operator may be accepted by the predicate,
2388 but it is irrelevant for matching constraints. */
2389 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2390 op1 = XEXP (op1, 0);
2391 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2392 op2 = XEXP (op2, 0);
2393
2394 val = operands_match_p (op1, op2);
2395 }
2396
2397 matching_operands[opno] = match;
2398 matching_operands[match] = opno;
2399
2400 if (val != 0)
2401 win = 1;
2402
2403 /* If output is *x and input is *--x, arrange later
2404 to change the output to *--x as well, since the
2405 output op is the one that will be printed. */
2406 if (val == 2 && strict > 0)
2407 {
2408 funny_match[funny_match_index].this = opno;
2409 funny_match[funny_match_index++].other = match;
2410 }
2411 }
2412 len = 0;
2413 break;
2414
2415 case 'p':
2416 /* p is used for address_operands. When we are called by
2417 gen_reload, no one will have checked that the address is
2418 strictly valid, i.e., that all pseudos requiring hard regs
2419 have gotten them. */
2420 if (strict <= 0
2421 || (strict_memory_address_p (recog_data.operand_mode[opno],
2422 op)))
2423 win = 1;
2424 break;
2425
2426 /* No need to check general_operand again;
2427 it was done in insn-recog.c. */
2428 case 'g':
2429 /* Anything goes unless it is a REG and really has a hard reg
2430 but the hard reg is not in the class GENERAL_REGS. */
2431 if (strict < 0
2432 || GENERAL_REGS == ALL_REGS
2433 || GET_CODE (op) != REG
2434 || (reload_in_progress
2435 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2436 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2437 win = 1;
2438 break;
2439
2440 case 'X':
2441 /* This is used for a MATCH_SCRATCH in the cases when
2442 we don't actually need anything. So anything goes
2443 any time. */
2444 win = 1;
2445 break;
2446
2447 case 'm':
2448 /* Memory operands must be valid, to the extent
2449 required by STRICT. */
2450 if (GET_CODE (op) == MEM)
2451 {
2452 if (strict > 0
2453 && !strict_memory_address_p (GET_MODE (op),
2454 XEXP (op, 0)))
2455 break;
2456 if (strict == 0
2457 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2458 break;
2459 win = 1;
2460 }
2461 /* Before reload, accept what reload can turn into mem. */
2462 else if (strict < 0 && CONSTANT_P (op))
2463 win = 1;
2464 /* During reload, accept a pseudo. */
2465 else if (reload_in_progress && GET_CODE (op) == REG
2466 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2467 win = 1;
2468 break;
2469
2470 case '<':
2471 if (GET_CODE (op) == MEM
2472 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2473 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2474 win = 1;
2475 break;
2476
2477 case '>':
2478 if (GET_CODE (op) == MEM
2479 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2480 || GET_CODE (XEXP (op, 0)) == POST_INC))
2481 win = 1;
2482 break;
2483
2484 case 'E':
2485 case 'F':
2486 if (GET_CODE (op) == CONST_DOUBLE
2487 || (GET_CODE (op) == CONST_VECTOR
2488 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2489 win = 1;
2490 break;
2491
2492 case 'G':
2493 case 'H':
2494 if (GET_CODE (op) == CONST_DOUBLE
2495 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2496 win = 1;
2497 break;
2498
2499 case 's':
2500 if (GET_CODE (op) == CONST_INT
2501 || (GET_CODE (op) == CONST_DOUBLE
2502 && GET_MODE (op) == VOIDmode))
2503 break;
2504 case 'i':
2505 if (CONSTANT_P (op))
2506 win = 1;
2507 break;
2508
2509 case 'n':
2510 if (GET_CODE (op) == CONST_INT
2511 || (GET_CODE (op) == CONST_DOUBLE
2512 && GET_MODE (op) == VOIDmode))
2513 win = 1;
2514 break;
2515
2516 case 'I':
2517 case 'J':
2518 case 'K':
2519 case 'L':
2520 case 'M':
2521 case 'N':
2522 case 'O':
2523 case 'P':
2524 if (GET_CODE (op) == CONST_INT
2525 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2526 win = 1;
2527 break;
2528
2529 case 'V':
2530 if (GET_CODE (op) == MEM
2531 && ((strict > 0 && ! offsettable_memref_p (op))
2532 || (strict < 0
2533 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2534 || (reload_in_progress
2535 && !(GET_CODE (op) == REG
2536 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2537 win = 1;
2538 break;
2539
2540 case 'o':
2541 if ((strict > 0 && offsettable_memref_p (op))
2542 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2543 /* Before reload, accept what reload can handle. */
2544 || (strict < 0
2545 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2546 /* During reload, accept a pseudo. */
2547 || (reload_in_progress && GET_CODE (op) == REG
2548 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2549 win = 1;
2550 break;
2551
2552 default:
2553 {
2554 enum reg_class class;
2555
2556 class = (c == 'r'
2557 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2558 if (class != NO_REGS)
2559 {
2560 if (strict < 0
2561 || (strict == 0
2562 && GET_CODE (op) == REG
2563 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2564 || (strict == 0 && GET_CODE (op) == SCRATCH)
2565 || (GET_CODE (op) == REG
2566 && reg_fits_class_p (op, class, offset, mode)))
2567 win = 1;
2568 }
2569 #ifdef EXTRA_CONSTRAINT_STR
2570 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2571 win = 1;
2572
2573 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2574 /* Every memory operand can be reloaded to fit. */
2575 && ((strict < 0 && GET_CODE (op) == MEM)
2576 /* Before reload, accept what reload can turn
2577 into mem. */
2578 || (strict < 0 && CONSTANT_P (op))
2579 /* During reload, accept a pseudo. */
2580 || (reload_in_progress && GET_CODE (op) == REG
2581 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2582 win = 1;
2583 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2584 /* Every address operand can be reloaded to fit. */
2585 && strict < 0)
2586 win = 1;
2587 #endif
2588 break;
2589 }
2590 }
2591 while (p += len, c);
2592
2593 constraints[opno] = p;
2594 /* If this operand did not win somehow,
2595 this alternative loses. */
2596 if (! win)
2597 lose = 1;
2598 }
2599 /* This alternative won; the operands are ok.
2600 Change whichever operands this alternative says to change. */
2601 if (! lose)
2602 {
2603 int opno, eopno;
2604
2605 /* See if any earlyclobber operand conflicts with some other
2606 operand. */
2607
2608 if (strict > 0)
2609 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2610 /* Ignore earlyclobber operands now in memory,
2611 because we would often report failure when we have
2612 two memory operands, one of which was formerly a REG. */
2613 if (earlyclobber[eopno]
2614 && GET_CODE (recog_data.operand[eopno]) == REG)
2615 for (opno = 0; opno < recog_data.n_operands; opno++)
2616 if ((GET_CODE (recog_data.operand[opno]) == MEM
2617 || recog_data.operand_type[opno] != OP_OUT)
2618 && opno != eopno
2619 /* Ignore things like match_operator operands. */
2620 && *recog_data.constraints[opno] != 0
2621 && ! (matching_operands[opno] == eopno
2622 && operands_match_p (recog_data.operand[opno],
2623 recog_data.operand[eopno]))
2624 && ! safe_from_earlyclobber (recog_data.operand[opno],
2625 recog_data.operand[eopno]))
2626 lose = 1;
2627
2628 if (! lose)
2629 {
2630 while (--funny_match_index >= 0)
2631 {
2632 recog_data.operand[funny_match[funny_match_index].other]
2633 = recog_data.operand[funny_match[funny_match_index].this];
2634 }
2635
2636 return 1;
2637 }
2638 }
2639
2640 which_alternative++;
2641 }
2642 while (which_alternative < recog_data.n_alternatives);
2643
2644 which_alternative = -1;
2645 /* If we are about to reject this, but we are not to test strictly,
2646 try a very loose test. Only return failure if it fails also. */
2647 if (strict == 0)
2648 return constrain_operands (-1);
2649 else
2650 return 0;
2651 }
2652
2653 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2654 is a hard reg in class CLASS when its regno is offset by OFFSET
2655 and changed to mode MODE.
2656 If REG occupies multiple hard regs, all of them must be in CLASS. */
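/* For example (illustrative), on a target where HARD_REGNO_NREGS says a
   DImode value needs two hard registers, a DImode OPERAND fits CLASS only
   if both REGNO + OFFSET and REGNO + OFFSET + 1 are in CLASS.  */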
2657
2658 int
2659 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2660 enum machine_mode mode)
2661 {
2662 int regno = REGNO (operand);
2663 if (regno < FIRST_PSEUDO_REGISTER
2664 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2665 regno + offset))
2666 {
2667 int sr;
2668 regno += offset;
2669 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2670 sr > 0; sr--)
2671 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2672 regno + sr))
2673 break;
2674 return sr == 0;
2675 }
2676
2677 return 0;
2678 }
2679 \f
2680 /* Split a single instruction. Helper function for split_all_insns and
2681 split_all_insns_noflow. Return the last insn in the sequence if
2682 successful, or NULL if unsuccessful. */
2683
2684 static rtx
2685 split_insn (rtx insn)
2686 {
2687 /* Split insns here to get max fine-grain parallelism. */
2688 rtx first = PREV_INSN (insn);
2689 rtx last = try_split (PATTERN (insn), insn, 1);
2690
2691 if (last == insn)
2692 return NULL_RTX;
2693
2694 /* try_split returns the NOTE that INSN became. */
2695 PUT_CODE (insn, NOTE);
2696 NOTE_SOURCE_FILE (insn) = 0;
2697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2698
2699 /* ??? Coddle to md files that generate subregs in post-reload
2700 splitters instead of computing the proper hard register. */
2701 if (reload_completed && first != last)
2702 {
2703 first = NEXT_INSN (first);
2704 for (;;)
2705 {
2706 if (INSN_P (first))
2707 cleanup_subreg_operands (first);
2708 if (first == last)
2709 break;
2710 first = NEXT_INSN (first);
2711 }
2712 }
2713 return last;
2714 }
2715
2716 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2717
2718 void
2719 split_all_insns (int upd_life)
2720 {
2721 sbitmap blocks;
2722 bool changed;
2723 basic_block bb;
2724
2725 blocks = sbitmap_alloc (last_basic_block);
2726 sbitmap_zero (blocks);
2727 changed = false;
2728
2729 FOR_EACH_BB_REVERSE (bb)
2730 {
2731 rtx insn, next;
2732 bool finish = false;
2733
2734 for (insn = BB_HEAD (bb); !finish ; insn = next)
2735 {
2736 /* Can't use `next_real_insn' because that might go across
2737 CODE_LABELS and short-out basic blocks. */
2738 next = NEXT_INSN (insn);
2739 finish = (insn == BB_END (bb));
2740 if (INSN_P (insn))
2741 {
2742 rtx set = single_set (insn);
2743
2744 /* Don't split no-op move insns. These should silently
2745 disappear later in final. Splitting such insns would
2746 break the code that handles REG_NO_CONFLICT blocks. */
2747 if (set && set_noop_p (set))
2748 {
2749 /* Nops get in the way while scheduling, so delete them
2750 now if register allocation has already been done. It
2751 is too risky to try to do this before register
2752 allocation, and there are unlikely to be very many
2753 nops then anyway. */
2754 if (reload_completed)
2755 {
2756 /* If the no-op set has a REG_UNUSED note, we need
2757 to update liveness information. */
2758 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2759 {
2760 SET_BIT (blocks, bb->index);
2761 changed = true;
2762 }
2763 /* ??? Is life info affected by deleting edges? */
2764 delete_insn_and_edges (insn);
2765 }
2766 }
2767 else
2768 {
2769 rtx last = split_insn (insn);
2770 if (last)
2771 {
2772 /* The split sequence may include a barrier, but the
2773 BB boundary we are interested in will be set to the
2774 previous one. */
2775
2776 while (GET_CODE (last) == BARRIER)
2777 last = PREV_INSN (last);
2778 SET_BIT (blocks, bb->index);
2779 changed = true;
2780 }
2781 }
2782 }
2783 }
2784 }
2785
2786 if (changed)
2787 {
2788 int old_last_basic_block = last_basic_block;
2789
2790 find_many_sub_basic_blocks (blocks);
2791
2792 if (old_last_basic_block != last_basic_block && upd_life)
2793 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2794 }
2795
2796 if (changed && upd_life)
2797 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2798 PROP_DEATH_NOTES | PROP_REG_INFO);
2799
2800 #ifdef ENABLE_CHECKING
2801 verify_flow_info ();
2802 #endif
2803
2804 sbitmap_free (blocks);
2805 }
2806
2807 /* Same as split_all_insns, but do not expect CFG to be available.
2808 Used by machine dependent reorg passes. */
2809
2810 void
2811 split_all_insns_noflow (void)
2812 {
2813 rtx next, insn;
2814
2815 for (insn = get_insns (); insn; insn = next)
2816 {
2817 next = NEXT_INSN (insn);
2818 if (INSN_P (insn))
2819 {
2820 /* Don't split no-op move insns. These should silently
2821 disappear later in final. Splitting such insns would
2822 break the code that handles REG_NO_CONFLICT blocks. */
2823 rtx set = single_set (insn);
2824 if (set && set_noop_p (set))
2825 {
2826 /* Nops get in the way while scheduling, so delete them
2827 now if register allocation has already been done. It
2828 is too risky to try to do this before register
2829 allocation, and there are unlikely to be very many
2830 nops then anyway.
2831
2832 ??? Should we use delete_insn when the CFG isn't valid? */
2833 if (reload_completed)
2834 delete_insn_and_edges (insn);
2835 }
2836 else
2837 split_insn (insn);
2838 }
2839 }
2840 }
2841 \f
2842 #ifdef HAVE_peephole2
2843 struct peep2_insn_data
2844 {
2845 rtx insn;
2846 regset live_before;
2847 };
2848
2849 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2850 static int peep2_current;
2851
2852 /* A non-insn marker indicating the last insn of the block.
2853 The live_before regset for this element is correct, indicating
2854 global_live_at_end for the block. */
2855 #define PEEP2_EOB pc_rtx
2856
2857 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2858 does not exist. Used by the recognizer to find the next insn to match
2859 in a multi-insn pattern. */
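/* For instance (illustrative), code generated for a define_peephole2
   fetches the insns of the candidate window with

     insn2 = peep2_next_insn (1);

   Offsets wrap around the circular peep2_insn_data buffer, which holds
   MAX_INSNS_PER_PEEP2 + 1 entries.  */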
2860
2861 rtx
2862 peep2_next_insn (int n)
2863 {
2864 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2865 abort ();
2866
2867 n += peep2_current;
2868 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2869 n -= MAX_INSNS_PER_PEEP2 + 1;
2870
2871 if (peep2_insn_data[n].insn == PEEP2_EOB)
2872 return NULL_RTX;
2873 return peep2_insn_data[n].insn;
2874 }
2875
2876 /* Return true if REGNO is dead before the Nth non-note insn
2877 after `current'. */
2878
2879 int
2880 peep2_regno_dead_p (int ofs, int regno)
2881 {
2882 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2883 abort ();
2884
2885 ofs += peep2_current;
2886 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2887 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2888
2889 if (peep2_insn_data[ofs].insn == NULL_RTX)
2890 abort ();
2891
2892 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2893 }
2894
2895 /* Similarly for a REG. */
2896
2897 int
2898 peep2_reg_dead_p (int ofs, rtx reg)
2899 {
2900 int regno, n;
2901
2902 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2903 abort ();
2904
2905 ofs += peep2_current;
2906 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2907 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2908
2909 if (peep2_insn_data[ofs].insn == NULL_RTX)
2910 abort ();
2911
2912 regno = REGNO (reg);
2913 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2914 while (--n >= 0)
2915 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2916 return 0;
2917 return 1;
2918 }
2919
2920 /* Try to find a hard register of mode MODE, matching the register class in
2921 CLASS_STR, which is available at the beginning of the FROM'th insn of
2922 the current peephole window and remains available until the end of the
2923 TO'th insn; FROM and TO are offsets from `current', interpreted as in
2924 peep2_next_insn.
2925 Registers that already have bits set in REG_SET will not be considered.
2926
2927 If an appropriate register is available, it will be returned and the
2928 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2929 returned. */
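/* An illustrative call, in the style of the code generated for a
   match_scratch operand in a define_peephole2:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
     if (scratch == NULL_RTX)
       ...reject this peephole...

   Here the register must be free from the first through the third insn
   of the window.  */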
2930
2931 rtx
2932 peep2_find_free_register (int from, int to, const char *class_str,
2933 enum machine_mode mode, HARD_REG_SET *reg_set)
2934 {
2935 static int search_ofs;
2936 enum reg_class class;
2937 HARD_REG_SET live;
2938 int i;
2939
2940 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2941 abort ();
2942
2943 from += peep2_current;
2944 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2945 from -= MAX_INSNS_PER_PEEP2 + 1;
2946 to += peep2_current;
2947 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2948 to -= MAX_INSNS_PER_PEEP2 + 1;
2949
2950 if (peep2_insn_data[from].insn == NULL_RTX)
2951 abort ();
2952 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2953
2954 while (from != to)
2955 {
2956 HARD_REG_SET this_live;
2957
2958 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2959 from = 0;
2960 if (peep2_insn_data[from].insn == NULL_RTX)
2961 abort ();
2962 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2963 IOR_HARD_REG_SET (live, this_live);
2964 }
2965
2966 class = (class_str[0] == 'r' ? GENERAL_REGS
2967 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2968
2969 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2970 {
2971 int raw_regno, regno, success, j;
2972
2973 /* Distribute the free registers as much as possible. */
2974 raw_regno = search_ofs + i;
2975 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2976 raw_regno -= FIRST_PSEUDO_REGISTER;
2977 #ifdef REG_ALLOC_ORDER
2978 regno = reg_alloc_order[raw_regno];
2979 #else
2980 regno = raw_regno;
2981 #endif
2982
2983 /* Don't allocate fixed registers. */
2984 if (fixed_regs[regno])
2985 continue;
2986 /* Make sure the register is of the right class. */
2987 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2988 continue;
2989 /* And can support the mode we need. */
2990 if (! HARD_REGNO_MODE_OK (regno, mode))
2991 continue;
2992 /* And that we don't create an extra save/restore. */
2993 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2994 continue;
2995 /* And we don't clobber traceback for noreturn functions. */
2996 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2997 && (! reload_completed || frame_pointer_needed))
2998 continue;
2999
3000 success = 1;
3001 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3002 {
3003 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3004 || TEST_HARD_REG_BIT (live, regno + j))
3005 {
3006 success = 0;
3007 break;
3008 }
3009 }
3010 if (success)
3011 {
3012 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3013 SET_HARD_REG_BIT (*reg_set, regno + j);
3014
3015 /* Start the next search with the next register. */
3016 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3017 raw_regno = 0;
3018 search_ofs = raw_regno;
3019
3020 return gen_rtx_REG (mode, regno);
3021 }
3022 }
3023
3024 search_ofs = 0;
3025 return NULL_RTX;
3026 }
3027
3028 /* Perform the peephole2 optimization pass. */
3029
3030 void
3031 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
3032 {
3033 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3034 rtx insn, prev;
3035 regset live;
3036 int i;
3037 basic_block bb;
3038 #ifdef HAVE_conditional_execution
3039 sbitmap blocks;
3040 bool changed;
3041 #endif
3042 bool do_cleanup_cfg = false;
3043 bool do_rebuild_jump_labels = false;
3044
3045 /* Initialize the regsets we're going to use. */
3046 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3047 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3048 live = INITIALIZE_REG_SET (rs_heads[i]);
3049
3050 #ifdef HAVE_conditional_execution
3051 blocks = sbitmap_alloc (last_basic_block);
3052 sbitmap_zero (blocks);
3053 changed = false;
3054 #else
3055 count_or_remove_death_notes (NULL, 1);
3056 #endif
3057
3058 FOR_EACH_BB_REVERSE (bb)
3059 {
3060 struct propagate_block_info *pbi;
3061
3062 /* Indicate that all slots except the last hold invalid data. */
3063 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3064 peep2_insn_data[i].insn = NULL_RTX;
3065
3066 /* Indicate that the last slot contains live_after data. */
3067 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3068 peep2_current = MAX_INSNS_PER_PEEP2;
3069
3070 /* Start up propagation. */
3071 COPY_REG_SET (live, bb->global_live_at_end);
3072 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3073
3074 #ifdef HAVE_conditional_execution
3075 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3076 #else
3077 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3078 #endif
3079
3080 for (insn = BB_END (bb); ; insn = prev)
3081 {
3082 prev = PREV_INSN (insn);
3083 if (INSN_P (insn))
3084 {
3085 rtx try, before_try, x;
3086 int match_len;
3087 rtx note;
3088 bool was_call = false;
3089
3090 /* Record this insn. */
3091 if (--peep2_current < 0)
3092 peep2_current = MAX_INSNS_PER_PEEP2;
3093 peep2_insn_data[peep2_current].insn = insn;
3094 propagate_one_insn (pbi, insn);
3095 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3096
3097 /* Match the peephole. */
3098 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3099 if (try != NULL)
3100 {
3101 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3102 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3103 cfg-related call notes. */
3104 for (i = 0; i <= match_len; ++i)
3105 {
3106 int j;
3107 rtx old_insn, new_insn, note;
3108
3109 j = i + peep2_current;
3110 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3111 j -= MAX_INSNS_PER_PEEP2 + 1;
3112 old_insn = peep2_insn_data[j].insn;
3113 if (GET_CODE (old_insn) != CALL_INSN)
3114 continue;
3115 was_call = true;
3116
3117 new_insn = try;
3118 while (new_insn != NULL_RTX)
3119 {
3120 if (GET_CODE (new_insn) == CALL_INSN)
3121 break;
3122 new_insn = NEXT_INSN (new_insn);
3123 }
3124
3125 if (new_insn == NULL_RTX)
3126 abort ();
3127
3128 CALL_INSN_FUNCTION_USAGE (new_insn)
3129 = CALL_INSN_FUNCTION_USAGE (old_insn);
3130
3131 for (note = REG_NOTES (old_insn);
3132 note;
3133 note = XEXP (note, 1))
3134 switch (REG_NOTE_KIND (note))
3135 {
3136 case REG_NORETURN:
3137 case REG_SETJMP:
3138 case REG_ALWAYS_RETURN:
3139 REG_NOTES (new_insn)
3140 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3141 XEXP (note, 0),
3142 REG_NOTES (new_insn));
3143 default:
3144 /* Discard all other reg notes. */
3145 break;
3146 }
3147
3148 /* Croak if there is another call in the sequence. */
3149 while (++i <= match_len)
3150 {
3151 j = i + peep2_current;
3152 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3153 j -= MAX_INSNS_PER_PEEP2 + 1;
3154 old_insn = peep2_insn_data[j].insn;
3155 if (GET_CODE (old_insn) == CALL_INSN)
3156 abort ();
3157 }
3158 break;
3159 }
3160
3161 i = match_len + peep2_current;
3162 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3163 i -= MAX_INSNS_PER_PEEP2 + 1;
3164
3165 note = find_reg_note (peep2_insn_data[i].insn,
3166 REG_EH_REGION, NULL_RTX);
3167
3168 /* Replace the old sequence with the new. */
3169 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3170 INSN_LOCATOR (peep2_insn_data[i].insn));
3171 before_try = PREV_INSN (insn);
3172 delete_insn_chain (insn, peep2_insn_data[i].insn);
3173
3174 /* Re-insert the EH_REGION notes. */
3175 if (note || (was_call && nonlocal_goto_handler_labels))
3176 {
3177 edge eh_edge;
3178
3179 for (eh_edge = bb->succ; eh_edge
3180 ; eh_edge = eh_edge->succ_next)
3181 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3182 break;
3183
3184 for (x = try ; x != before_try ; x = PREV_INSN (x))
3185 if (GET_CODE (x) == CALL_INSN
3186 || (flag_non_call_exceptions
3187 && may_trap_p (PATTERN (x))
3188 && !find_reg_note (x, REG_EH_REGION, NULL)))
3189 {
3190 if (note)
3191 REG_NOTES (x)
3192 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3193 XEXP (note, 0),
3194 REG_NOTES (x));
3195
3196 if (x != BB_END (bb) && eh_edge)
3197 {
3198 edge nfte, nehe;
3199 int flags;
3200
3201 nfte = split_block (bb, x);
3202 flags = (eh_edge->flags
3203 & (EDGE_EH | EDGE_ABNORMAL));
3204 if (GET_CODE (x) == CALL_INSN)
3205 flags |= EDGE_ABNORMAL_CALL;
3206 nehe = make_edge (nfte->src, eh_edge->dest,
3207 flags);
3208
3209 nehe->probability = eh_edge->probability;
3210 nfte->probability
3211 = REG_BR_PROB_BASE - nehe->probability;
3212
3213 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3214 #ifdef HAVE_conditional_execution
3215 SET_BIT (blocks, nfte->dest->index);
3216 changed = true;
3217 #endif
3218 bb = nfte->src;
3219 eh_edge = nehe;
3220 }
3221 }
3222
3223 /* A possibly trapping insn may have been turned into a
3224 non-trapping one. Zap any dummy outgoing edges. */
3225 do_cleanup_cfg |= purge_dead_edges (bb);
3226 }
3227
3228 #ifdef HAVE_conditional_execution
3229 /* With conditional execution, we cannot back up the
3230 live information so easily, since the conditional
3231 death data structures are not so self-contained.
3232 So record that we've made a modification to this
3233 block and update life information at the end. */
3234 SET_BIT (blocks, bb->index);
3235 changed = true;
3236
3237 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3238 peep2_insn_data[i].insn = NULL_RTX;
3239 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3240 #else
3241 /* Back up lifetime information past the end of the
3242 newly created sequence. */
3243 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3244 i = 0;
3245 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3246
3247 /* Update life information for the new sequence. */
3248 x = try;
3249 do
3250 {
3251 if (INSN_P (x))
3252 {
3253 if (--i < 0)
3254 i = MAX_INSNS_PER_PEEP2;
3255 peep2_insn_data[i].insn = x;
3256 propagate_one_insn (pbi, x);
3257 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3258 }
3259 x = PREV_INSN (x);
3260 }
3261 while (x != prev);
3262
3263 /* ??? Should verify that LIVE now matches what we
3264 had before the new sequence. */
3265
3266 peep2_current = i;
3267 #endif
3268
3269 /* If we generated a jump instruction, it won't have
3270 JUMP_LABEL set. Recompute after we're done. */
3271 for (x = try; x != before_try; x = PREV_INSN (x))
3272 if (GET_CODE (x) == JUMP_INSN)
3273 {
3274 do_rebuild_jump_labels = true;
3275 break;
3276 }
3277 }
3278 }
3279
3280 if (insn == BB_HEAD (bb))
3281 break;
3282 }
3283
3284 free_propagate_block_info (pbi);
3285 }
3286
3287 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3288 FREE_REG_SET (peep2_insn_data[i].live_before);
3289 FREE_REG_SET (live);
3290
3291 if (do_rebuild_jump_labels)
3292 rebuild_jump_labels (get_insns ());
3293
3294 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3295 we've changed global life since exception handlers are no longer
3296 reachable. */
3297 if (do_cleanup_cfg)
3298 {
3299 cleanup_cfg (0);
3300 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3301 }
3302 #ifdef HAVE_conditional_execution
3303 else
3304 {
3305 count_or_remove_death_notes (blocks, 1);
3306 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3307 }
3308 sbitmap_free (blocks);
3309 #endif
3310 }
3311 #endif /* HAVE_peephole2 */
3312
3313 /* Common predicates for use with define_bypass. */
3314
3315 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3316 data, not the address operand(s) of the store. IN_INSN must be
3317 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3318 SETs inside. */
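/* Typical use in a machine description (illustrative; the insn reservation
   names are made up):

     (define_bypass 1 "my_load" "my_store" "store_data_bypass_p")

   i.e. the shorter latency applies only when the consumer uses the result
   as store data, not as part of the store address.  */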
3319
3320 int
3321 store_data_bypass_p (rtx out_insn, rtx in_insn)
3322 {
3323 rtx out_set, in_set;
3324
3325 in_set = single_set (in_insn);
3326 if (! in_set)
3327 abort ();
3328
3329 if (GET_CODE (SET_DEST (in_set)) != MEM)
3330 return false;
3331
3332 out_set = single_set (out_insn);
3333 if (out_set)
3334 {
3335 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3336 return false;
3337 }
3338 else
3339 {
3340 rtx out_pat;
3341 int i;
3342
3343 out_pat = PATTERN (out_insn);
3344 if (GET_CODE (out_pat) != PARALLEL)
3345 abort ();
3346
3347 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3348 {
3349 rtx exp = XVECEXP (out_pat, 0, i);
3350
3351 if (GET_CODE (exp) == CLOBBER)
3352 continue;
3353
3354 if (GET_CODE (exp) != SET)
3355 abort ();
3356
3357 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3358 return false;
3359 }
3360 }
3361
3362 return true;
3363 }
3364
3365 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3366 condition, and not the THEN or ELSE branch. OUT_INSN may be a single or
3367 multiple set; IN_INSN should be single_set for the result to be meaningful,
3368 but for convenience of insn categorization it may be any JUMP or CALL insn. */
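/* Typical use (illustrative; reservation names are made up):

     (define_bypass 1 "my_setcc" "my_cmove" "if_test_bypass_p")

   so the reduced latency applies only when the dependence feeds the
   IF_THEN_ELSE condition of the consumer.  */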
3369
3370 int
3371 if_test_bypass_p (rtx out_insn, rtx in_insn)
3372 {
3373 rtx out_set, in_set;
3374
3375 in_set = single_set (in_insn);
3376 if (! in_set)
3377 {
3378 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3379 return false;
3380 abort ();
3381 }
3382
3383 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3384 return false;
3385 in_set = SET_SRC (in_set);
3386
3387 out_set = single_set (out_insn);
3388 if (out_set)
3389 {
3390 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3391 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3392 return false;
3393 }
3394 else
3395 {
3396 rtx out_pat;
3397 int i;
3398
3399 out_pat = PATTERN (out_insn);
3400 if (GET_CODE (out_pat) != PARALLEL)
3401 abort ();
3402
3403 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3404 {
3405 rtx exp = XVECEXP (out_pat, 0, i);
3406
3407 if (GET_CODE (exp) == CLOBBER)
3408 continue;
3409
3410 if (GET_CODE (exp) != SET)
3411 abort ();
3412
3413 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3414 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3415 return false;
3416 }
3417 }
3418
3419 return true;
3420 }