/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
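
/* Illustrative sketch (not part of this file; not compiled): how a
   caller typically uses validate_change.  INSN and NEW_SRC are
   hypothetical; single_set and SET_SRC are declared in rtl.h.  */
#if 0
static int
example_replace_set_src (rtx insn, rtx new_src)
{
  rtx set = single_set (insn);
  if (set == 0)
    return 0;
  /* IN_GROUP == 0: the change is applied immediately, INSN is
     re-recognized, and the change is backed out on failure.  */
  return validate_change (insn, &SET_SRC (set), new_src, 0);
}
#endif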

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
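
/* Illustrative sketch (not part of this file; not compiled): combining
   the group primitives above.  INSN, LOC, NEW_RTX and PROFITABLE are
   hypothetical.  */
#if 0
static int
example_tentative_change (rtx insn, rtx *loc, rtx new_rtx, int profitable)
{
  int n = num_validated_changes ();
  /* Queue the change without validating it yet (IN_GROUP == 1).  */
  validate_change (insn, loc, new_rtx, 1);
  if (!profitable)
    {
      /* Retract just the change queued above; changes queued before
         position N survive.  */
      cancel_changes (n);
      return 0;
    }
  /* Validate everything queued; on failure all changes are undone.  */
  return apply_change_group ();
}
#endif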

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression; otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
                  ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
                abort ();
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow a substituted expression to have a different mode.  This is used by
     regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
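
/* Illustrative sketch (not part of this file; not compiled): FROM, TO
   and INSN are hypothetical.  validate_replace_rtx substitutes
   everywhere in the pattern and keeps the result only if INSN still
   matches a pattern.  */
#if 0
static void
example_propagate (rtx from, rtx to, rtx insn)
{
  if (! validate_replace_rtx (from, to, insn))
    /* All substitutions were backed out; INSN is unchanged.  */
    ;
}
#endif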
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
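
/* Illustrative sketch (not part of this file; not compiled): the
   combine-style idiom described above, usable when LOG_LINKS are
   available.  DEST, INSN and REPLACEMENT are hypothetical.  */
#if 0
static void
example_substitute_single_use (rtx dest, rtx insn, rtx replacement)
{
  rtx use_insn;
  rtx *loc = find_single_use (dest, insn, &use_insn);
  /* LOC points at the one use of DEST inside USE_INSN, if any.  */
  if (loc)
    validate_change (use_insn, loc, replacement, 0);
}
#endif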
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
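
/* Illustrative sketch (not part of this file; not compiled): values
   that satisfy general_operand on a typical target.  */
#if 0
static void
example_general_operands (void)
{
  rtx reg = gen_reg_rtx (SImode);	/* pseudo register of mode SImode */
  rtx cst = GEN_INT (42);		/* VOIDmode integer constant */
  /* Registers must match the requested mode exactly, while CONST_INTs
     are VOIDmode and acceptable for any integer mode they fit in.  */
  if (general_operand (reg, SImode) && general_operand (cst, SImode))
    ;
}
#endif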
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
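
/* Illustrative sketch (not part of this file; not compiled): the kind
   of operand push_operand accepts on a STACK_GROWS_DOWNWARD target
   whose PUSH_ROUNDING does not pad SImode.  */
#if 0
static void
example_push_operand (void)
{
  rtx op = gen_rtx_MEM (SImode,
                        gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  /* PRE_DEC is STACK_PUSH_CODE on such a target, so this returns 1.  */
  if (push_operand (op, SImode))
    ;
}
#endif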

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before the end of the reload
       pass, because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
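
/* For example (illustrative; register numbers hypothetical), for a body
   of the form

     (parallel [(set (reg:SI 100) (asm_operands ...<2 inputs>...))
                (set (reg:SI 101) (asm_operands ...))
                (clobber (reg:CC 17))])

   asm_noperands returns 4: two outputs (the SETs) plus two inputs,
   with the trailing CLOBBER ignored.  */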

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
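
/* Illustrative sketch (not part of this file; not compiled): a
   hypothetical caller, with BODY an asm pattern whose operand count
   came from asm_noperands.  */
#if 0
static void
example_decode (rtx body)
{
  rtx operands[MAX_RECOG_OPERANDS];
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode modes[MAX_RECOG_OPERANDS];
  const char *templ
    = decode_asm_operands (body, operands, NULL, constraints, modes);
  /* TEMPL is the assembler template; the arrays now hold the output
     operands first, then the inputs.  */
}
#endif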

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
\f
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
1841
1842 /* Return 1 if Y is a memory address which contains no side effects
1843 and would remain valid after the addition of a positive integer
1844 less than the size of that mode.
1845
1846 We assume that the original address is valid and do not check it.
1847 We do check that it is valid for narrower modes.
1848
1849 If STRICTP is nonzero, we require a strictly valid address,
1850 for the sake of use in reload.c. */
1851
1852 int
1853 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1854 {
1855 enum rtx_code ycode = GET_CODE (y);
1856 rtx z;
1857 rtx y1 = y;
1858 rtx *y2;
1859 int (*addressp) (enum machine_mode, rtx) =
1860 (strictp ? strict_memory_address_p : memory_address_p);
1861 unsigned int mode_sz = GET_MODE_SIZE (mode);
1862
1863 if (CONSTANT_ADDRESS_P (y))
1864 return 1;
1865
1866 /* Adjusting an offsettable address involves changing to a narrower mode.
1867 Make sure that's OK. */
1868
1869 if (mode_dependent_address_p (y))
1870 return 0;
1871
1872 /* ??? How much offset does an offsettable BLKmode reference need?
1873 Clearly that depends on the situation in which it's being used.
1874 However, the current situation in which we test 0xffffffff is
1875 less than ideal. Caveat user. */
1876 if (mode_sz == 0)
1877 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1878
1879 /* If the expression contains a constant term,
1880 see if it remains valid when max possible offset is added. */
1881
1882 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1883 {
1884 int good;
1885
1886 y1 = *y2;
1887 *y2 = plus_constant (*y2, mode_sz - 1);
1888 /* Use QImode because an odd displacement may be automatically invalid
1889 for any wider mode. But it should be valid for a single byte. */
1890 good = (*addressp) (QImode, y);
1891
1892 /* In any case, restore old contents of memory. */
1893 *y2 = y1;
1894 return good;
1895 }
1896
1897 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1898 return 0;
1899
1900 /* The offset added here is chosen as the maximum offset that
1901 any instruction could need to add when operating on something
1902 of the specified mode. We assume that if Y and Y+c are
1903 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1904 go inside a LO_SUM here, so we do so as well. */
1905 if (GET_CODE (y) == LO_SUM
1906 && mode != BLKmode
1907 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1908 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1909 plus_constant (XEXP (y, 1), mode_sz - 1));
1910 else
1911 z = plus_constant (y, mode_sz - 1);
1912
1913 /* Use QImode because an odd displacement may be automatically invalid
1914 for any wider mode. But it should be valid for a single byte. */
1915 return (*addressp) (QImode, z);
1916 }
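/* A worked example (illustrative, not part of the original sources):
   for an SImode reference, mode_sz is 4, so an address such as

	(plus (reg:SI 100) (const_int 8))

   is checked by temporarily rewriting its constant term to
   8 + 4 - 1 = 11 and asking the chosen address predicate whether

	(plus (reg:SI 100) (const_int 11))

   is still valid in QImode.  If so, every intermediate offset is
   presumed valid as well.  */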
1917
1918 /* Return 1 if ADDR is an address-expression whose effect depends
1919 on the mode of the memory reference it is used in.
1920
1921 Autoincrement addressing is a typical example of mode-dependence
1922 because the amount of the increment depends on the mode. */
1923
1924 int
1925 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1926 {
1927 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1928 return 0;
1929 /* The label `win' may be unused, depending on GO_IF_MODE_DEPENDENT_ADDRESS. */
1930 win: ATTRIBUTE_UNUSED_LABEL
1931 return 1;
1932 }
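/* For example, on a target whose GO_IF_MODE_DEPENDENT_ADDRESS jumps to
   WIN for autoincrement addresses, (post_inc (reg:SI 1)) is
   mode-dependent: the increment equals the GET_MODE_SIZE of the
   reference, so the same expression means different things in QImode
   and SImode.  A plain (plus (reg) (const_int)) is not mode-dependent.  */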
1933 \f
1934 /* Like extract_insn, but remember the insn extracted and don't extract
1935 again when called repeatedly for the same insn, assuming that recog_data
1936 still contains valid information. This is used primarily by the gen_attr
1937 infrastructure, which often extracts the same insn again and again. */
1938 void
1939 extract_insn_cached (rtx insn)
1940 {
1941 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1942 return;
1943 extract_insn (insn);
1944 recog_data.insn = insn;
1945 }
1946 /* Do cached extract_insn, constrain_operands and complain about failures.
1947 Used by insn_attrtab. */
1948 void
1949 extract_constrain_insn_cached (rtx insn)
1950 {
1951 extract_insn_cached (insn);
1952 if (which_alternative == -1
1953 && !constrain_operands (reload_completed))
1954 fatal_insn_not_found (insn);
1955 }
1956 /* Do cached constrain_operands and complain about failures. */
1957 int
1958 constrain_operands_cached (int strict)
1959 {
1960 if (which_alternative == -1)
1961 return constrain_operands (strict);
1962 else
1963 return 1;
1964 }
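/* Sketch of how the cached entry points are typically used by the
   generated attribute code (hypothetical caller, shown for
   illustration):

	extract_constrain_insn_cached (insn);
	if (which_alternative == 2)
	  ...			(the third constraint alternative matched)

   extract_insn_cached skips re-extraction while recog_data.insn is
   still INSN, and constrain_operands_cached skips re-matching once
   which_alternative has been determined.  */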
1965 \f
1966 /* Analyze INSN and fill in recog_data. */
1967
1968 void
1969 extract_insn (rtx insn)
1970 {
1971 int i;
1972 int icode;
1973 int noperands;
1974 rtx body = PATTERN (insn);
1975
1976 recog_data.insn = NULL;
1977 recog_data.n_operands = 0;
1978 recog_data.n_alternatives = 0;
1979 recog_data.n_dups = 0;
1980 which_alternative = -1;
1981
1982 switch (GET_CODE (body))
1983 {
1984 case USE:
1985 case CLOBBER:
1986 case ASM_INPUT:
1987 case ADDR_VEC:
1988 case ADDR_DIFF_VEC:
1989 return;
1990
1991 case SET:
1992 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1993 goto asm_insn;
1994 else
1995 goto normal_insn;
1996 case PARALLEL:
1997 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1998 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1999 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2000 goto asm_insn;
2001 else
2002 goto normal_insn;
2003 case ASM_OPERANDS:
2004 asm_insn:
2005 recog_data.n_operands = noperands = asm_noperands (body);
2006 if (noperands >= 0)
2007 {
2008 /* This insn is an `asm' with operands. */
2009
2010 /* expand_asm_operands makes sure there aren't too many operands. */
2011 if (noperands > MAX_RECOG_OPERANDS)
2012 abort ();
2013
2014 /* Now get the operand values and constraints out of the insn. */
2015 decode_asm_operands (body, recog_data.operand,
2016 recog_data.operand_loc,
2017 recog_data.constraints,
2018 recog_data.operand_mode);
2019 if (noperands > 0)
2020 {
2021 const char *p = recog_data.constraints[0];
2022 recog_data.n_alternatives = 1;
2023 while (*p)
2024 recog_data.n_alternatives += (*p++ == ',');
2025 }
2026 break;
2027 }
2028 fatal_insn_not_found (insn);
2029
2030 default:
2031 normal_insn:
2032 /* Ordinary insn: recognize it, get the operands via insn_extract
2033 and get the constraints. */
2034
2035 icode = recog_memoized (insn);
2036 if (icode < 0)
2037 fatal_insn_not_found (insn);
2038
2039 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2040 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2041 recog_data.n_dups = insn_data[icode].n_dups;
2042
2043 insn_extract (insn);
2044
2045 for (i = 0; i < noperands; i++)
2046 {
2047 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2048 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2049 /* VOIDmode match_operands get their mode from the real operand. */
2050 if (recog_data.operand_mode[i] == VOIDmode)
2051 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2052 }
2053 }
2054 for (i = 0; i < noperands; i++)
2055 recog_data.operand_type[i]
2056 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2057 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2058 : OP_IN);
2059
2060 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2061 abort ();
2062 }
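/* For illustration, assuming a hypothetical two-alternative move
   pattern whose operand constraints are "=r,m" and "rm,r": after
   extract_insn on a matching insn

	(set (reg:SI 0) (mem:SI (reg:SI 1)))

   we would have recog_data.n_operands == 2,
   recog_data.n_alternatives == 2,
   recog_data.operand[0] == (reg:SI 0),
   recog_data.operand[1] == (mem:SI (reg:SI 1)),
   recog_data.operand_type[0] == OP_OUT (from the leading `='), and
   recog_data.operand_type[1] == OP_IN.  */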
2063
2064 /* After calling extract_insn, you can use this function to extract some
2065 information from the constraint strings into a more usable form.
2066 The collected data is stored in recog_op_alt. */
2067 void
2068 preprocess_constraints (void)
2069 {
2070 int i;
2071
2072 for (i = 0; i < recog_data.n_operands; i++)
2073 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2074 * sizeof (struct operand_alternative)));
2075
2076 for (i = 0; i < recog_data.n_operands; i++)
2077 {
2078 int j;
2079 struct operand_alternative *op_alt;
2080 const char *p = recog_data.constraints[i];
2081
2082 op_alt = recog_op_alt[i];
2083
2084 for (j = 0; j < recog_data.n_alternatives; j++)
2085 {
2086 op_alt[j].cl = NO_REGS;
2087 op_alt[j].constraint = p;
2088 op_alt[j].matches = -1;
2089 op_alt[j].matched = -1;
2090
2091 if (*p == '\0' || *p == ',')
2092 {
2093 op_alt[j].anything_ok = 1;
2094 continue;
2095 }
2096
2097 for (;;)
2098 {
2099 char c = *p;
2100 if (c == '#')
2101 do
2102 c = *++p;
2103 while (c != ',' && c != '\0');
2104 if (c == ',' || c == '\0')
2105 {
2106 p++;
2107 break;
2108 }
2109
2110 switch (c)
2111 {
2112 case '=': case '+': case '*': case '%':
2113 case 'E': case 'F': case 'G': case 'H':
2114 case 's': case 'i': case 'n':
2115 case 'I': case 'J': case 'K': case 'L':
2116 case 'M': case 'N': case 'O': case 'P':
2117 /* These don't say anything we care about. */
2118 break;
2119
2120 case '?':
2121 op_alt[j].reject += 6;
2122 break;
2123 case '!':
2124 op_alt[j].reject += 600;
2125 break;
2126 case '&':
2127 op_alt[j].earlyclobber = 1;
2128 break;
2129
2130 case '0': case '1': case '2': case '3': case '4':
2131 case '5': case '6': case '7': case '8': case '9':
2132 {
2133 char *end;
2134 op_alt[j].matches = strtoul (p, &end, 10);
2135 recog_op_alt[op_alt[j].matches][j].matched = i;
2136 p = end;
2137 }
2138 continue;
2139
2140 case 'm':
2141 op_alt[j].memory_ok = 1;
2142 break;
2143 case '<':
2144 op_alt[j].decmem_ok = 1;
2145 break;
2146 case '>':
2147 op_alt[j].incmem_ok = 1;
2148 break;
2149 case 'V':
2150 op_alt[j].nonoffmem_ok = 1;
2151 break;
2152 case 'o':
2153 op_alt[j].offmem_ok = 1;
2154 break;
2155 case 'X':
2156 op_alt[j].anything_ok = 1;
2157 break;
2158
2159 case 'p':
2160 op_alt[j].is_address = 1;
2161 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2162 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2163 break;
2164
2165 case 'g':
2166 case 'r':
2167 op_alt[j].cl =
2168 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2169 break;
2170
2171 default:
2172 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2173 {
2174 op_alt[j].memory_ok = 1;
2175 break;
2176 }
2177 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2178 {
2179 op_alt[j].is_address = 1;
2180 op_alt[j].cl
2181 = (reg_class_subunion
2182 [(int) op_alt[j].cl]
2183 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2184 break;
2185 }
2186
2187 op_alt[j].cl
2188 = (reg_class_subunion
2189 [(int) op_alt[j].cl]
2190 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2191 break;
2192 }
2193 p += CONSTRAINT_LEN (c, p);
2194 }
2195 }
2196 }
2197 }
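/* Example of the collected data, assuming an operand I with the
   constraint string "=r,m": after preprocess_constraints,

	recog_op_alt[I][0].cl == GENERAL_REGS   (from `r'; `=' is skipped)
	recog_op_alt[I][1].cl == NO_REGS
	recog_op_alt[I][1].memory_ok == 1       (from `m')

   so later passes can query each alternative without reparsing the
   constraint letters.  */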
2198
2199 /* Check the operands of an insn against the insn's operand constraints
2200 and return 1 if they are valid.
2201 The information about the insn's operands, constraints, operand modes
2202 etc. is obtained from the global variables set up by extract_insn.
2203
2204 WHICH_ALTERNATIVE is set to a number which indicates which
2205 alternative of constraints was matched: 0 for the first alternative,
2206 1 for the next, etc.
2207
2208 In addition, when two operands are required to match
2209 and it happens that the output operand is (reg) while the
2210 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2211 make the output operand look like the input.
2212 This is because the output operand is the one the template will print.
2213
2214 This is used in final, just before printing the assembler code and by
2215 the routines that determine an insn's attribute.
2216
2217 If STRICT is a positive nonzero value, it means that we have been
2218 called after reload has been completed. In that case, we must
2219 do all checks strictly. If it is zero, it means that we have been called
2220 before reload has completed. In that case, we first try to see if we can
2221 find an alternative that matches strictly. If not, we try again, this
2222 time assuming that reload will fix up the insn. This provides a "best
2223 guess" for the alternative and is used to compute attributes of insns prior
2224 to reload. A negative value of STRICT is used for this internal call. */
2225
2226 struct funny_match
2227 {
2228 int this, other;
2229 };
2230
2231 int
2232 constrain_operands (int strict)
2233 {
2234 const char *constraints[MAX_RECOG_OPERANDS];
2235 int matching_operands[MAX_RECOG_OPERANDS];
2236 int earlyclobber[MAX_RECOG_OPERANDS];
2237 int c;
2238
2239 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2240 int funny_match_index;
2241
2242 which_alternative = 0;
2243 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2244 return 1;
2245
2246 for (c = 0; c < recog_data.n_operands; c++)
2247 {
2248 constraints[c] = recog_data.constraints[c];
2249 matching_operands[c] = -1;
2250 }
2251
2252 do
2253 {
2254 int opno;
2255 int lose = 0;
2256 funny_match_index = 0;
2257
2258 for (opno = 0; opno < recog_data.n_operands; opno++)
2259 {
2260 rtx op = recog_data.operand[opno];
2261 enum machine_mode mode = GET_MODE (op);
2262 const char *p = constraints[opno];
2263 int offset = 0;
2264 int win = 0;
2265 int val;
2266 int len;
2267
2268 earlyclobber[opno] = 0;
2269
2270 /* A unary operator may be accepted by the predicate, but it
2271 is irrelevant for matching constraints. */
2272 if (UNARY_P (op))
2273 op = XEXP (op, 0);
2274
2275 if (GET_CODE (op) == SUBREG)
2276 {
2277 if (REG_P (SUBREG_REG (op))
2278 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2279 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2280 GET_MODE (SUBREG_REG (op)),
2281 SUBREG_BYTE (op),
2282 GET_MODE (op));
2283 op = SUBREG_REG (op);
2284 }
2285
2286 /* An empty constraint or empty alternative
2287 allows anything which matched the pattern. */
2288 if (*p == 0 || *p == ',')
2289 win = 1;
2290
2291 do
2292 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2293 {
2294 case '\0':
2295 len = 0;
2296 break;
2297 case ',':
2298 c = '\0';
2299 break;
2300
2301 case '?': case '!': case '*': case '%':
2302 case '=': case '+':
2303 break;
2304
2305 case '#':
2306 /* Ignore rest of this alternative as far as
2307 constraint checking is concerned. */
2308 do
2309 p++;
2310 while (*p && *p != ',');
2311 len = 0;
2312 break;
2313
2314 case '&':
2315 earlyclobber[opno] = 1;
2316 break;
2317
2318 case '0': case '1': case '2': case '3': case '4':
2319 case '5': case '6': case '7': case '8': case '9':
2320 {
2321 /* This operand must be the same as a previous one.
2322 This kind of constraint is used for instructions such
2323 as add when they take only two operands.
2324
2325 Note that the lower-numbered operand is passed first.
2326
2327 If we are not testing strictly, assume that this
2328 constraint will be satisfied. */
2329
2330 char *end;
2331 int match;
2332
2333 match = strtoul (p, &end, 10);
2334 p = end;
2335
2336 if (strict < 0)
2337 val = 1;
2338 else
2339 {
2340 rtx op1 = recog_data.operand[match];
2341 rtx op2 = recog_data.operand[opno];
2342
2343 /* A unary operator may be accepted by the predicate,
2344 but it is irrelevant for matching constraints. */
2345 if (UNARY_P (op1))
2346 op1 = XEXP (op1, 0);
2347 if (UNARY_P (op2))
2348 op2 = XEXP (op2, 0);
2349
2350 val = operands_match_p (op1, op2);
2351 }
2352
2353 matching_operands[opno] = match;
2354 matching_operands[match] = opno;
2355
2356 if (val != 0)
2357 win = 1;
2358
2359 /* If output is *x and input is *--x, arrange later
2360 to change the output to *--x as well, since the
2361 output op is the one that will be printed. */
2362 if (val == 2 && strict > 0)
2363 {
2364 funny_match[funny_match_index].this = opno;
2365 funny_match[funny_match_index++].other = match;
2366 }
2367 }
2368 len = 0;
2369 break;
2370
2371 case 'p':
2372 /* `p' is used for address_operands. When we are called by
2373 gen_reload, no one will have checked that the address is
2374 strictly valid, i.e., that all pseudos requiring hard regs
2375 have gotten them. */
2376 if (strict <= 0
2377 || (strict_memory_address_p (recog_data.operand_mode[opno],
2378 op)))
2379 win = 1;
2380 break;
2381
2382 /* No need to check general_operand again;
2383 it was done in insn-recog.c. */
2384 case 'g':
2385 /* Anything goes unless it is a REG and really has a hard reg
2386 but the hard reg is not in the class GENERAL_REGS. */
2387 if (strict < 0
2388 || GENERAL_REGS == ALL_REGS
2389 || !REG_P (op)
2390 || (reload_in_progress
2391 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2392 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2393 win = 1;
2394 break;
2395
2396 case 'X':
2397 /* This is used for a MATCH_SCRATCH in the cases when
2398 we don't actually need anything. So anything goes
2399 any time. */
2400 win = 1;
2401 break;
2402
2403 case 'm':
2404 /* Memory operands must be valid, to the extent
2405 required by STRICT. */
2406 if (MEM_P (op))
2407 {
2408 if (strict > 0
2409 && !strict_memory_address_p (GET_MODE (op),
2410 XEXP (op, 0)))
2411 break;
2412 if (strict == 0
2413 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2414 break;
2415 win = 1;
2416 }
2417 /* Before reload, accept what reload can turn into mem. */
2418 else if (strict < 0 && CONSTANT_P (op))
2419 win = 1;
2420 /* During reload, accept a pseudo. */
2421 else if (reload_in_progress && REG_P (op)
2422 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2423 win = 1;
2424 break;
2425
2426 case '<':
2427 if (MEM_P (op)
2428 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2429 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2430 win = 1;
2431 break;
2432
2433 case '>':
2434 if (MEM_P (op)
2435 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2436 || GET_CODE (XEXP (op, 0)) == POST_INC))
2437 win = 1;
2438 break;
2439
2440 case 'E':
2441 case 'F':
2442 if (GET_CODE (op) == CONST_DOUBLE
2443 || (GET_CODE (op) == CONST_VECTOR
2444 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2445 win = 1;
2446 break;
2447
2448 case 'G':
2449 case 'H':
2450 if (GET_CODE (op) == CONST_DOUBLE
2451 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2452 win = 1;
2453 break;
2454
2455 case 's':
2456 if (GET_CODE (op) == CONST_INT
2457 || (GET_CODE (op) == CONST_DOUBLE
2458 && GET_MODE (op) == VOIDmode))
2459 break;
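	      /* Fall through: `s' rejects explicit integer constants
		 and otherwise acts like `i'.  */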
2460 case 'i':
2461 if (CONSTANT_P (op))
2462 win = 1;
2463 break;
2464
2465 case 'n':
2466 if (GET_CODE (op) == CONST_INT
2467 || (GET_CODE (op) == CONST_DOUBLE
2468 && GET_MODE (op) == VOIDmode))
2469 win = 1;
2470 break;
2471
2472 case 'I':
2473 case 'J':
2474 case 'K':
2475 case 'L':
2476 case 'M':
2477 case 'N':
2478 case 'O':
2479 case 'P':
2480 if (GET_CODE (op) == CONST_INT
2481 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2482 win = 1;
2483 break;
2484
2485 case 'V':
2486 if (MEM_P (op)
2487 && ((strict > 0 && ! offsettable_memref_p (op))
2488 || (strict < 0
2489 && !(CONSTANT_P (op) || MEM_P (op)))
2490 || (reload_in_progress
2491 && !(REG_P (op)
2492 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2493 win = 1;
2494 break;
2495
2496 case 'o':
2497 if ((strict > 0 && offsettable_memref_p (op))
2498 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2499 /* Before reload, accept what reload can handle. */
2500 || (strict < 0
2501 && (CONSTANT_P (op) || MEM_P (op)))
2502 /* During reload, accept a pseudo. */
2503 || (reload_in_progress && REG_P (op)
2504 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2505 win = 1;
2506 break;
2507
2508 default:
2509 {
2510 enum reg_class cl;
2511
2512 cl = (c == 'r'
2513 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2514 if (cl != NO_REGS)
2515 {
2516 if (strict < 0
2517 || (strict == 0
2518 && REG_P (op)
2519 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2520 || (strict == 0 && GET_CODE (op) == SCRATCH)
2521 || (REG_P (op)
2522 && reg_fits_class_p (op, cl, offset, mode)))
2523 win = 1;
2524 }
2525 #ifdef EXTRA_CONSTRAINT_STR
2526 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2527 win = 1;
2528
2529 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2530 /* Every memory operand can be reloaded to fit. */
2531 && ((strict < 0 && MEM_P (op))
2532 /* Before reload, accept what reload can turn
2533 into mem. */
2534 || (strict < 0 && CONSTANT_P (op))
2535 /* During reload, accept a pseudo. */
2536 || (reload_in_progress && REG_P (op)
2537 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2538 win = 1;
2539 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2540 /* Every address operand can be reloaded to fit. */
2541 && strict < 0)
2542 win = 1;
2543 #endif
2544 break;
2545 }
2546 }
2547 while (p += len, c);
2548
2549 constraints[opno] = p;
2550 /* If this operand did not win somehow,
2551 this alternative loses. */
2552 if (! win)
2553 lose = 1;
2554 }
2555 /* This alternative won; the operands are ok.
2556 Change whichever operands this alternative says to change. */
2557 if (! lose)
2558 {
2559 int opno, eopno;
2560
2561 /* See if any earlyclobber operand conflicts with some other
2562 operand. */
2563
2564 if (strict > 0)
2565 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2566 /* Ignore earlyclobber operands now in memory,
2567 because we would often report failure when we have
2568 two memory operands, one of which was formerly a REG. */
2569 if (earlyclobber[eopno]
2570 && REG_P (recog_data.operand[eopno]))
2571 for (opno = 0; opno < recog_data.n_operands; opno++)
2572 if ((MEM_P (recog_data.operand[opno])
2573 || recog_data.operand_type[opno] != OP_OUT)
2574 && opno != eopno
2575 /* Ignore things like match_operator operands. */
2576 && *recog_data.constraints[opno] != 0
2577 && ! (matching_operands[opno] == eopno
2578 && operands_match_p (recog_data.operand[opno],
2579 recog_data.operand[eopno]))
2580 && ! safe_from_earlyclobber (recog_data.operand[opno],
2581 recog_data.operand[eopno]))
2582 lose = 1;
2583
2584 if (! lose)
2585 {
2586 while (--funny_match_index >= 0)
2587 {
2588 recog_data.operand[funny_match[funny_match_index].other]
2589 = recog_data.operand[funny_match[funny_match_index].this];
2590 }
2591
2592 return 1;
2593 }
2594 }
2595
2596 which_alternative++;
2597 }
2598 while (which_alternative < recog_data.n_alternatives);
2599
2600 which_alternative = -1;
2601 /* If we are about to reject this, but we are not to test strictly,
2602 try a very loose test. Only return failure if it fails also. */
2603 if (strict == 0)
2604 return constrain_operands (-1);
2605 else
2606 return 0;
2607 }
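/* Matching-constraint example (illustrative): in a two-address add
   whose second operand carries the constraint "0", that operand must
   be the same rtx as operand 0.  With STRICT > 0 this is verified via
   operands_match_p; with STRICT < 0 (the loose retry made at the end
   of constrain_operands) the match is simply assumed, on the theory
   that reload can satisfy it later by copying one operand into the
   other.  */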
2608
2609 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2610 is a hard reg in class CLASS when its regno is offset by OFFSET
2611 and changed to mode MODE.
2612 If REG occupies multiple hard regs, all of them must be in CLASS. */
2613
2614 int
2615 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2616 enum machine_mode mode)
2617 {
2618 int regno = REGNO (operand);
2619 if (regno < FIRST_PSEUDO_REGISTER
2620 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2621 regno + offset))
2622 {
2623 int sr;
2624 regno += offset;
2625 for (sr = hard_regno_nregs[regno][mode] - 1;
2626 sr > 0; sr--)
2627 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2628 regno + sr))
2629 break;
2630 return sr == 0;
2631 }
2632
2633 return 0;
2634 }
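/* Example: on a 32-bit target where a DImode value occupies two hard
   registers, reg_fits_class_p for a DImode operand based at hard reg N
   checks both N and N+1 (via hard_regno_nregs) against CL; if only N
   is in the class, the result is 0.  */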
2635 \f
2636 /* Split single instruction. Helper function for split_all_insns and
2637 split_all_insns_noflow. Return last insn in the sequence if successful,
2638 or NULL if unsuccessful. */
2639
2640 static rtx
2641 split_insn (rtx insn)
2642 {
2643 /* Split insns here to get max fine-grain parallelism. */
2644 rtx first = PREV_INSN (insn);
2645 rtx last = try_split (PATTERN (insn), insn, 1);
2646
2647 if (last == insn)
2648 return NULL_RTX;
2649
2650 /* try_split returns the NOTE that INSN became. */
2651 SET_INSN_DELETED (insn);
2652
2653 /* ??? Coddle to md files that generate subregs in post-reload
2654 splitters instead of computing the proper hard register. */
2655 if (reload_completed && first != last)
2656 {
2657 first = NEXT_INSN (first);
2658 for (;;)
2659 {
2660 if (INSN_P (first))
2661 cleanup_subreg_operands (first);
2662 if (first == last)
2663 break;
2664 first = NEXT_INSN (first);
2665 }
2666 }
2667 return last;
2668 }
2669
2670 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2671
2672 void
2673 split_all_insns (int upd_life)
2674 {
2675 sbitmap blocks;
2676 bool changed;
2677 basic_block bb;
2678
2679 blocks = sbitmap_alloc (last_basic_block);
2680 sbitmap_zero (blocks);
2681 changed = false;
2682
2683 FOR_EACH_BB_REVERSE (bb)
2684 {
2685 rtx insn, next;
2686 bool finish = false;
2687
2688 for (insn = BB_HEAD (bb); !finish ; insn = next)
2689 {
2690 /* Can't use `next_real_insn', because that might go across
2691 CODE_LABELs and thereby out of the basic block. */
2692 next = NEXT_INSN (insn);
2693 finish = (insn == BB_END (bb));
2694 if (INSN_P (insn))
2695 {
2696 rtx set = single_set (insn);
2697
2698 /* Don't split no-op move insns. These should silently
2699 disappear later in final. Splitting such insns would
2700 break the code that handles REG_NO_CONFLICT blocks. */
2701 if (set && set_noop_p (set))
2702 {
2703 /* Nops get in the way while scheduling, so delete them
2704 now if register allocation has already been done. It
2705 is too risky to try to do this before register
2706 allocation, and there are unlikely to be very many
2707 nops then anyway. */
2708 if (reload_completed)
2709 {
2710 /* If the no-op set has a REG_UNUSED note, we need
2711 to update liveness information. */
2712 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2713 {
2714 SET_BIT (blocks, bb->index);
2715 changed = true;
2716 }
2717 /* ??? Is life info affected by deleting edges? */
2718 delete_insn_and_edges (insn);
2719 }
2720 }
2721 else
2722 {
2723 rtx last = split_insn (insn);
2724 if (last)
2725 {
2726 /* The split sequence may include a barrier, but the
2727 BB boundary we are interested in is the insn
2728 before it. */
2729
2730 while (BARRIER_P (last))
2731 last = PREV_INSN (last);
2732 SET_BIT (blocks, bb->index);
2733 changed = true;
2734 }
2735 }
2736 }
2737 }
2738 }
2739
2740 if (changed)
2741 {
2742 int old_last_basic_block = last_basic_block;
2743
2744 find_many_sub_basic_blocks (blocks);
2745
2746 if (old_last_basic_block != last_basic_block && upd_life)
2747 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2748 }
2749
2750 if (changed && upd_life)
2751 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2752 PROP_DEATH_NOTES);
2753
2754 #ifdef ENABLE_CHECKING
2755 verify_flow_info ();
2756 #endif
2757
2758 sbitmap_free (blocks);
2759 }
2760
2761 /* Same as split_all_insns, but do not expect CFG to be available.
2762 Used by machine dependent reorg passes. */
2763
2764 void
2765 split_all_insns_noflow (void)
2766 {
2767 rtx next, insn;
2768
2769 for (insn = get_insns (); insn; insn = next)
2770 {
2771 next = NEXT_INSN (insn);
2772 if (INSN_P (insn))
2773 {
2774 /* Don't split no-op move insns. These should silently
2775 disappear later in final. Splitting such insns would
2776 break the code that handles REG_NO_CONFLICT blocks. */
2777 rtx set = single_set (insn);
2778 if (set && set_noop_p (set))
2779 {
2780 /* Nops get in the way while scheduling, so delete them
2781 now if register allocation has already been done. It
2782 is too risky to try to do this before register
2783 allocation, and there are unlikely to be very many
2784 nops then anyway.
2785
2786 ??? Should we use delete_insn when the CFG isn't valid? */
2787 if (reload_completed)
2788 delete_insn_and_edges (insn);
2789 }
2790 else
2791 split_insn (insn);
2792 }
2793 }
2794 }
2795 \f
2796 #ifdef HAVE_peephole2
2797 struct peep2_insn_data
2798 {
2799 rtx insn;
2800 regset live_before;
2801 };
2802
2803 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2804 static int peep2_current;
2805
2806 /* A non-insn marker indicating the last insn of the block.
2807 The live_before regset for this element is correct, indicating
2808 global_live_at_end for the block. */
2809 #define PEEP2_EOB pc_rtx
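/* peep2_insn_data is used as a circular buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots: peep2_current indexes the most
   recently recorded insn, and offsets wrap modulo the buffer size.
   For instance (sketch), if MAX_INSNS_PER_PEEP2 were 5 and
   peep2_current were 4, offset 3 would refer to slot
   (4 + 3) mod 6 == 1.  */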
2810
2811 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2812 does not exist. Used by the recognizer to find the next insn to match
2813 in a multi-insn pattern. */
2814
2815 rtx
2816 peep2_next_insn (int n)
2817 {
2818 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2819 abort ();
2820
2821 n += peep2_current;
2822 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2823 n -= MAX_INSNS_PER_PEEP2 + 1;
2824
2825 if (peep2_insn_data[n].insn == PEEP2_EOB)
2826 return NULL_RTX;
2827 return peep2_insn_data[n].insn;
2828 }
2829
2830 /* Return true if REGNO is dead before the Nth non-note insn
2831 after `current'. */
2832
2833 int
2834 peep2_regno_dead_p (int ofs, int regno)
2835 {
2836 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2837 abort ();
2838
2839 ofs += peep2_current;
2840 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2841 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2842
2843 if (peep2_insn_data[ofs].insn == NULL_RTX)
2844 abort ();
2845
2846 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2847 }
2848
2849 /* Similarly for a REG. */
2850
2851 int
2852 peep2_reg_dead_p (int ofs, rtx reg)
2853 {
2854 int regno, n;
2855
2856 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2857 abort ();
2858
2859 ofs += peep2_current;
2860 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2861 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2862
2863 if (peep2_insn_data[ofs].insn == NULL_RTX)
2864 abort ();
2865
2866 regno = REGNO (reg);
2867 n = hard_regno_nregs[regno][GET_MODE (reg)];
2868 while (--n >= 0)
2869 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2870 return 0;
2871 return 1;
2872 }
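/* These predicates are what define_peephole2 conditions typically
   call.  Schematic md fragment (names and pattern are illustrative
   only):

	(define_peephole2
	  [(set (match_operand:SI 0 "register_operand" "")
		(match_operand:SI 1 "register_operand" ""))]
	  "peep2_reg_dead_p (1, operands[1])"
	  [...])

   i.e. apply the replacement only if operand 1 is dead after the
   matched insn.  */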
2873
2874 /* Try to find a hard register of mode MODE, matching the register class in
2875 CLASS_STR, which is available at the beginning of the insn at peep2
2876 offset FROM and remains available until the end of the insn at offset
2877 TO. If TO equals FROM, the only condition is that the register be
2878 available at the beginning of that insn.
2879 Registers that already have bits set in REG_SET will not be considered.
2880
2881 If an appropriate register is available, it will be returned and the
2882 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2883 returned. */
2884
2885 rtx
2886 peep2_find_free_register (int from, int to, const char *class_str,
2887 enum machine_mode mode, HARD_REG_SET *reg_set)
2888 {
2889 static int search_ofs;
2890 enum reg_class cl;
2891 HARD_REG_SET live;
2892 int i;
2893
2894 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2895 abort ();
2896
2897 from += peep2_current;
2898 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2899 from -= MAX_INSNS_PER_PEEP2 + 1;
2900 to += peep2_current;
2901 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2902 to -= MAX_INSNS_PER_PEEP2 + 1;
2903
2904 if (peep2_insn_data[from].insn == NULL_RTX)
2905 abort ();
2906 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2907
2908 while (from != to)
2909 {
2910 HARD_REG_SET this_live;
2911
2912 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2913 from = 0;
2914 if (peep2_insn_data[from].insn == NULL_RTX)
2915 abort ();
2916 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2917 IOR_HARD_REG_SET (live, this_live);
2918 }
2919
2920 cl = (class_str[0] == 'r' ? GENERAL_REGS
2921 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2922
2923 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2924 {
2925 int raw_regno, regno, success, j;
2926
2927 /* Distribute the free registers as much as possible. */
2928 raw_regno = search_ofs + i;
2929 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2930 raw_regno -= FIRST_PSEUDO_REGISTER;
2931 #ifdef REG_ALLOC_ORDER
2932 regno = reg_alloc_order[raw_regno];
2933 #else
2934 regno = raw_regno;
2935 #endif
2936
2937 /* Don't allocate fixed registers. */
2938 if (fixed_regs[regno])
2939 continue;
2940 /* Make sure the register is of the right class. */
2941 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2942 continue;
2943 /* And can support the mode we need. */
2944 if (! HARD_REGNO_MODE_OK (regno, mode))
2945 continue;
2946 /* And that we don't create an extra save/restore. */
2947 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2948 continue;
2949 /* And we don't clobber traceback for noreturn functions. */
2950 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2951 && (! reload_completed || frame_pointer_needed))
2952 continue;
2953
2954 success = 1;
2955 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2956 {
2957 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2958 || TEST_HARD_REG_BIT (live, regno + j))
2959 {
2960 success = 0;
2961 break;
2962 }
2963 }
2964 if (success)
2965 {
2966 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2967 SET_HARD_REG_BIT (*reg_set, regno + j);
2968
2969 /* Start the next search with the next register. */
2970 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2971 raw_regno = 0;
2972 search_ofs = raw_regno;
2973
2974 return gen_rtx_REG (mode, regno);
2975 }
2976 }
2977
2978 search_ofs = 0;
2979 return NULL_RTX;
2980 }
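/* Typical use from a define_peephole2 generator (sketch; the scratch
   handling shown here is an assumption, not code from this file):

	HARD_REG_SET rs;
	rtx scratch;

	CLEAR_HARD_REG_SET (rs);
	scratch = peep2_find_free_register (0, 1, "r", SImode, &rs);
	if (scratch == NULL_RTX)
	  FAIL;

   This asks for a general register that is free from the start of the
   first matched insn through the end of the second.  The static
   search_ofs rotates the starting point so that successive calls
   spread scratch registers across the register file.  */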
2981
2982 /* Perform the peephole2 optimization pass. */
2983
2984 void
2985 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2986 {
2987 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2988 rtx insn, prev;
2989 regset live;
2990 int i;
2991 basic_block bb;
2992 #ifdef HAVE_conditional_execution
2993 sbitmap blocks;
2994 bool changed;
2995 #endif
2996 bool do_cleanup_cfg = false;
2997 bool do_rebuild_jump_labels = false;
2998
2999 /* Initialize the regsets we're going to use. */
3000 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3001 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3002 live = INITIALIZE_REG_SET (rs_heads[i]);
3003
3004 #ifdef HAVE_conditional_execution
3005 blocks = sbitmap_alloc (last_basic_block);
3006 sbitmap_zero (blocks);
3007 changed = false;
3008 #else
3009 count_or_remove_death_notes (NULL, 1);
3010 #endif
3011
3012 FOR_EACH_BB_REVERSE (bb)
3013 {
3014 struct propagate_block_info *pbi;
3015
3016 /* Indicate that all slots except the last hold invalid data. */
3017 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3018 peep2_insn_data[i].insn = NULL_RTX;
3019
3020 /* Indicate that the last slot contains live_after data. */
3021 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3022 peep2_current = MAX_INSNS_PER_PEEP2;
3023
3024 /* Start up propagation. */
3025 COPY_REG_SET (live, bb->global_live_at_end);
3026 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3027
3028 #ifdef HAVE_conditional_execution
3029 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3030 #else
3031 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3032 #endif
3033
3034 for (insn = BB_END (bb); ; insn = prev)
3035 {
3036 prev = PREV_INSN (insn);
3037 if (INSN_P (insn))
3038 {
3039 rtx try, before_try, x;
3040 int match_len;
3041 rtx note;
3042 bool was_call = false;
3043
3044 /* Record this insn. */
3045 if (--peep2_current < 0)
3046 peep2_current = MAX_INSNS_PER_PEEP2;
3047 peep2_insn_data[peep2_current].insn = insn;
3048 propagate_one_insn (pbi, insn);
3049 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3050
3051 /* Match the peephole. */
3052 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3053 if (try != NULL)
3054 {
3055 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3056 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3057 and other cfg-related call notes. */
3058 for (i = 0; i <= match_len; ++i)
3059 {
3060 int j;
3061 rtx old_insn, new_insn, note;
3062
3063 j = i + peep2_current;
3064 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3065 j -= MAX_INSNS_PER_PEEP2 + 1;
3066 old_insn = peep2_insn_data[j].insn;
3067 if (!CALL_P (old_insn))
3068 continue;
3069 was_call = true;
3070
3071 new_insn = try;
3072 while (new_insn != NULL_RTX)
3073 {
3074 if (CALL_P (new_insn))
3075 break;
3076 new_insn = NEXT_INSN (new_insn);
3077 }
3078
3079 if (new_insn == NULL_RTX)
3080 abort ();
3081
3082 CALL_INSN_FUNCTION_USAGE (new_insn)
3083 = CALL_INSN_FUNCTION_USAGE (old_insn);
3084
3085 for (note = REG_NOTES (old_insn);
3086 note;
3087 note = XEXP (note, 1))
3088 switch (REG_NOTE_KIND (note))
3089 {
3090 case REG_NORETURN:
3091 case REG_SETJMP:
3092 case REG_ALWAYS_RETURN:
3093 REG_NOTES (new_insn)
3094 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3095 XEXP (note, 0),
3096 REG_NOTES (new_insn));
3097 default:
3098 /* Discard all other reg notes. */
3099 break;
3100 }
3101
3102 /* Croak if there is another call in the sequence. */
3103 while (++i <= match_len)
3104 {
3105 j = i + peep2_current;
3106 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3107 j -= MAX_INSNS_PER_PEEP2 + 1;
3108 old_insn = peep2_insn_data[j].insn;
3109 if (CALL_P (old_insn))
3110 abort ();
3111 }
3112 break;
3113 }
3114
3115 i = match_len + peep2_current;
3116 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3117 i -= MAX_INSNS_PER_PEEP2 + 1;
3118
3119 note = find_reg_note (peep2_insn_data[i].insn,
3120 REG_EH_REGION, NULL_RTX);
3121
3122 /* Replace the old sequence with the new. */
3123 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3124 INSN_LOCATOR (peep2_insn_data[i].insn));
3125 before_try = PREV_INSN (insn);
3126 delete_insn_chain (insn, peep2_insn_data[i].insn);
3127
3128 /* Re-insert the EH_REGION notes. */
3129 if (note || (was_call && nonlocal_goto_handler_labels))
3130 {
3131 edge eh_edge;
3132
3133 for (eh_edge = bb->succ; eh_edge
3134 ; eh_edge = eh_edge->succ_next)
3135 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3136 break;
3137
3138 for (x = try ; x != before_try ; x = PREV_INSN (x))
3139 if (CALL_P (x)
3140 || (flag_non_call_exceptions
3141 && may_trap_p (PATTERN (x))
3142 && !find_reg_note (x, REG_EH_REGION, NULL)))
3143 {
3144 if (note)
3145 REG_NOTES (x)
3146 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3147 XEXP (note, 0),
3148 REG_NOTES (x));
3149
3150 if (x != BB_END (bb) && eh_edge)
3151 {
3152 edge nfte, nehe;
3153 int flags;
3154
3155 nfte = split_block (bb, x);
3156 flags = (eh_edge->flags
3157 & (EDGE_EH | EDGE_ABNORMAL));
3158 if (CALL_P (x))
3159 flags |= EDGE_ABNORMAL_CALL;
3160 nehe = make_edge (nfte->src, eh_edge->dest,
3161 flags);
3162
3163 nehe->probability = eh_edge->probability;
3164 nfte->probability
3165 = REG_BR_PROB_BASE - nehe->probability;
3166
3167 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3168 #ifdef HAVE_conditional_execution
3169 SET_BIT (blocks, nfte->dest->index);
3170 changed = true;
3171 #endif
3172 bb = nfte->src;
3173 eh_edge = nehe;
3174 }
3175 }
3176
3177 /* A possibly trapping insn may have become non-trapping;
3178 zap any now-dummy outgoing edges. */
3179 do_cleanup_cfg |= purge_dead_edges (bb);
3180 }
3181
3182 #ifdef HAVE_conditional_execution
3183 /* With conditional execution, we cannot back up the
3184 live information so easily, since the conditional
3185 death data structures are not so self-contained.
3186 So record that we've made a modification to this
3187 block and update life information at the end. */
3188 SET_BIT (blocks, bb->index);
3189 changed = true;
3190
3191 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3192 peep2_insn_data[i].insn = NULL_RTX;
3193 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3194 #else
3195 /* Back up lifetime information past the end of the
3196 newly created sequence. */
3197 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3198 i = 0;
3199 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3200
3201 /* Update life information for the new sequence. */
3202 x = try;
3203 do
3204 {
3205 if (INSN_P (x))
3206 {
3207 if (--i < 0)
3208 i = MAX_INSNS_PER_PEEP2;
3209 peep2_insn_data[i].insn = x;
3210 propagate_one_insn (pbi, x);
3211 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3212 }
3213 x = PREV_INSN (x);
3214 }
3215 while (x != prev);
3216
3217 /* ??? Should verify that LIVE now matches what we
3218 had before the new sequence. */
3219
3220 peep2_current = i;
3221 #endif
3222
3223 /* If we generated a jump instruction, it won't have
3224 JUMP_LABEL set. Recompute after we're done. */
3225 for (x = try; x != before_try; x = PREV_INSN (x))
3226 if (JUMP_P (x))
3227 {
3228 do_rebuild_jump_labels = true;
3229 break;
3230 }
3231 }
3232 }
3233
3234 if (insn == BB_HEAD (bb))
3235 break;
3236 }
3237
3238 free_propagate_block_info (pbi);
3239 }
3240
3241 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3242 FREE_REG_SET (peep2_insn_data[i].live_before);
3243 FREE_REG_SET (live);
3244
3245 if (do_rebuild_jump_labels)
3246 rebuild_jump_labels (get_insns ());
3247
3248 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3249 we've changed global life since exception handlers are no longer
3250 reachable. */
3251 if (do_cleanup_cfg)
3252 {
3253 cleanup_cfg (0);
3254 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3255 }
3256 #ifdef HAVE_conditional_execution
3257 else
3258 {
3259 count_or_remove_death_notes (blocks, 1);
3260 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3261 }
3262 sbitmap_free (blocks);
3263 #endif
3264 }
3265 #endif /* HAVE_peephole2 */
3266
3267 /* Common predicates for use with define_bypass. */
3268
3269 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3270 data not the address operand(s) of the store. IN_INSN must be
3271 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3272 SETs inside. */
3273
3274 int
3275 store_data_bypass_p (rtx out_insn, rtx in_insn)
3276 {
3277 rtx out_set, in_set;
3278
3279 in_set = single_set (in_insn);
3280 if (! in_set)
3281 abort ();
3282
3283 if (!MEM_P (SET_DEST (in_set)))
3284 return false;
3285
3286 out_set = single_set (out_insn);
3287 if (out_set)
3288 {
3289 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3290 return false;
3291 }
3292 else
3293 {
3294 rtx out_pat;
3295 int i;
3296
3297 out_pat = PATTERN (out_insn);
3298 if (GET_CODE (out_pat) != PARALLEL)
3299 abort ();
3300
3301 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3302 {
3303 rtx exp = XVECEXP (out_pat, 0, i);
3304
3305 if (GET_CODE (exp) == CLOBBER)
3306 continue;
3307
3308 if (GET_CODE (exp) != SET)
3309 abort ();
3310
3311 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3312 return false;
3313 }
3314 }
3315
3316 return true;
3317 }
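/* Both bypass predicates are intended to be named as the guard of a
   define_bypass.  Schematic md fragment (the reservation names are
   hypothetical):

	(define_bypass 1 "my_store" "my_load" "store_data_bypass_p")

   This declares that the latency from an insn reserved as "my_store"
   to one reserved as "my_load" is 1 cycle when the dependence is only
   on the stored data, not on the address.  */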
3318
3319 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3320 condition, and not the THEN or ELSE branch. OUT_INSN may be a single_set or
3321 a PARALLEL of SETs; IN_INSN should be single_set for correctness, but for
3322 convenience of insn categorization may be any JUMP or CALL insn. */
3323
3324 int
3325 if_test_bypass_p (rtx out_insn, rtx in_insn)
3326 {
3327 rtx out_set, in_set;
3328
3329 in_set = single_set (in_insn);
3330 if (! in_set)
3331 {
3332 if (JUMP_P (in_insn) || CALL_P (in_insn))
3333 return false;
3334 abort ();
3335 }
3336
3337 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3338 return false;
3339 in_set = SET_SRC (in_set);
3340
3341 out_set = single_set (out_insn);
3342 if (out_set)
3343 {
3344 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3345 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3346 return false;
3347 }
3348 else
3349 {
3350 rtx out_pat;
3351 int i;
3352
3353 out_pat = PATTERN (out_insn);
3354 if (GET_CODE (out_pat) != PARALLEL)
3355 abort ();
3356
3357 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3358 {
3359 rtx exp = XVECEXP (out_pat, 0, i);
3360
3361 if (GET_CODE (exp) == CLOBBER)
3362 continue;
3363
3364 if (GET_CODE (exp) != SET)
3365 abort ();
3366
3367 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3368 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3369 return false;
3370 }
3371 }
3372
3373 return true;
3374 }