/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
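
/* Illustrative sketch (not part of the original sources): a pass that
   wants an insn's code number normally goes through the memoizing
   interface rather than calling `recog' directly, e.g.

     int icode = recog_memoized (insn);
     if (icode >= 0)
       ...  consult insn_data[icode] ...

   where `recog_memoized' is assumed to be the usual recog.h wrapper
   around recog_memoized_1 and `insn_data' the generated operand-data
   table.  */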
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
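
/* Illustrative sketch (not from the original file): a typical grouped
   use queues several tentative edits with IN_GROUP nonzero and then
   lets apply_change_group accept or roll back all of them at once:

     rtx pat = PATTERN (insn);
     validate_change (insn, &SET_SRC (pat), new_src, 1);
     validate_change (insn, &SET_DEST (pat), new_dest, 1);
     if (! apply_change_group ())
       ...  both edits were undone, insn is unchanged ...

   `new_src' and `new_dest' are hypothetical; the point is only that no
   caller-visible state changes unless the whole group re-recognizes
   successfully.  */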

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
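
/* Illustrative sketch (an assumption about callers, not original text):
   speculative code can checkpoint the group and retract only its own
   edits:

     int checkpoint = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (! profitable_p (insn))
       cancel_changes (checkpoint);

   `profitable_p' and `new_rtx' are hypothetical names; the pattern is
   the checkpoint/rollback use of these entry points.  */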

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
                  ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
                abort ();
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;   /* Old RTX */
  rtx to;     /* New RTX */
  rtx insn;   /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
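
/* Illustrative sketch (not part of the original sources): a simple
   forward-propagation client might rewrite all uses of a register in
   the source positions of INSN and keep the result only if INSN still
   recognizes:

     validate_replace_src_group (src_reg, value_rtx, insn);
     if (! apply_change_group ())
       ...  propagation abandoned, insn left untouched ...

   `src_reg' and `value_rtx' are hypothetical; this mirrors how
   regmove-style passes are expected to drive these helpers.  */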
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         just need to check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
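
/* Illustrative sketch (an assumption about typical callers, not original
   text): combine-style code uses the returned location to substitute
   directly at the unique use:

     rtx *use;
     rtx use_insn;
     use = find_single_use (dest, insn, &use_insn);
     if (use && validate_change (use_insn, use, replacement, 0))
       ...  DEST's one use now reads `replacement' ...

   `replacement' is hypothetical; note the result is reliable only in
   the window after flow and before reload, as described above.  */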
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  return 0;

 win:
  return 1;
}
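
/* Illustrative sketch (generic, not quoted from any back end): in a
   machine description, this predicate is what a pattern such as

     (define_insn "..."
       [(set (match_operand:SI 0 "general_operand" "")
             (match_operand:SI 1 "general_operand" ""))]
       ...)

   ends up invoking, as general_operand (operands[N], SImode), when the
   pattern is matched against an insn.  */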
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
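
/* Illustrative sketch (generic, not quoted from a back end): on a
   machine where the stack grows downward and no push rounding applies,
   the operand accepted here has the shape

     (mem:SI (pre_dec:SI (reg:SI sp)))

   while a target whose PUSH_ROUNDING pads, say, QImode pushes to a
   full word would instead present the PRE_MODIFY form checked above.  */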

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
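
/* Illustrative sketch (an assumption, not original text): for a source
   statement such as

     asm ("add %0,%1" : "=r" (x) : "r" (y));

   the insn body is (set (reg x) (asm_operands ...)) with one input, so
   asm_noperands returns 2 (one output plus one input).  A plain
   asm ("nop") is represented as ASM_INPUT instead, for which this
   function returns -1.  */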

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
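
/* Illustrative sketch (patterned on check_asm_operands above): a caller
   sizes the output vectors from asm_noperands before decoding:

     int n = asm_noperands (body);
     if (n > 0)
       {
         rtx *ops = alloca (n * sizeof (rtx));
         const char **cons = alloca (n * sizeof (char *));
         const char *tmpl = decode_asm_operands (body, ops, NULL, cons, NULL);
         ...
       }

   Any of the vector arguments may be NULL when only the template or a
   subset of the information is wanted.  */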

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
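
/* Illustrative sketch (an assumption about a typical call): given the
   operand (const_int 42) and the multi-alternative constraint string
   "r,n", the 'r' alternative fails register_operand but the 'n'
   alternative accepts the CONST_INT, so

     asm_operand_ok (GEN_INT (42), "r,n")

   returns 1; a matching-digit constraint such as "0" alone would yield
   -1, the inconclusive result described above.  */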
1782 \f
1783 /* Given an rtx *P, if it is a sum containing an integer constant term,
1784 return the location (type rtx *) of the pointer to that constant term.
1785 Otherwise, return a null pointer. */
1786
1787 rtx *
1788 find_constant_term_loc (rtx *p)
1789 {
1790 rtx *tem;
1791 enum rtx_code code = GET_CODE (*p);
1792
1793 /* If *P IS such a constant term, P is its location. */
1794
1795 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1796 || code == CONST)
1797 return p;
1798
1799 /* Otherwise, if not a sum, it has no constant term. */
1800
1801 if (GET_CODE (*p) != PLUS)
1802 return 0;
1803
1804 /* If one of the summands is constant, return its location. */
1805
1806 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1807 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1808 return p;
1809
1810 /* Otherwise, check each summand for containing a constant term. */
1811
1812 if (XEXP (*p, 0) != 0)
1813 {
1814 tem = find_constant_term_loc (&XEXP (*p, 0));
1815 if (tem != 0)
1816 return tem;
1817 }
1818
1819 if (XEXP (*p, 1) != 0)
1820 {
1821 tem = find_constant_term_loc (&XEXP (*p, 1));
1822 if (tem != 0)
1823 return tem;
1824 }
1825
1826 return 0;
1827 }
1828 \f
1829 /* Return 1 if OP is a memory reference
1830 whose address contains no side effects
1831 and remains valid after the addition
1832 of a positive integer less than the
1833 size of the object being referenced.
1834
1835 We assume that the original address is valid and do not check it.
1836
1837 This uses strict_memory_address_p as a subroutine, so
1838 don't use it before reload. */
1839
1840 int
1841 offsettable_memref_p (rtx op)
1842 {
1843 return ((MEM_P (op))
1844 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1845 }
1846
1847 /* Similar, but don't require a strictly valid mem ref:
1848 consider pseudo-regs valid as index or base regs. */
1849
1850 int
1851 offsettable_nonstrict_memref_p (rtx op)
1852 {
1853 return ((MEM_P (op))
1854 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1855 }
1856
1857 /* Return 1 if Y is a memory address which contains no side effects
1858 and would remain valid after the addition of a positive integer
1859 less than the size of that mode.
1860
1861 We assume that the original address is valid and do not check it.
1862 We do check that it is valid for narrower modes.
1863
1864 If STRICTP is nonzero, we require a strictly valid address,
1865 for the sake of use in reload.c. */
1866
1867 int
1868 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1869 {
1870 enum rtx_code ycode = GET_CODE (y);
1871 rtx z;
1872 rtx y1 = y;
1873 rtx *y2;
1874 int (*addressp) (enum machine_mode, rtx) =
1875 (strictp ? strict_memory_address_p : memory_address_p);
1876 unsigned int mode_sz = GET_MODE_SIZE (mode);
1877
1878 if (CONSTANT_ADDRESS_P (y))
1879 return 1;
1880
1881 /* Adjusting an offsettable address involves changing to a narrower mode.
1882 Make sure that's OK. */
1883
1884 if (mode_dependent_address_p (y))
1885 return 0;
1886
1887 /* ??? How much offset does an offsettable BLKmode reference need?
1888 Clearly that depends on the situation in which it's being used.
1889 However, the current situation in which we test 0xffffffff is
1890 less than ideal. Caveat user. */
1891 if (mode_sz == 0)
1892 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1893
1894 /* If the expression contains a constant term,
1895 see if it remains valid when max possible offset is added. */
1896
1897 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1898 {
1899 int good;
1900
1901 y1 = *y2;
1902 *y2 = plus_constant (*y2, mode_sz - 1);
1903 /* Use QImode because an odd displacement may be automatically invalid
1904 for any wider mode. But it should be valid for a single byte. */
1905 good = (*addressp) (QImode, y);
1906
1907 /* In any case, restore old contents of memory. */
1908 *y2 = y1;
1909 return good;
1910 }
1911
1912 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1913 return 0;
1914
1915 /* The offset added here is chosen as the maximum offset that
1916 any instruction could need to add when operating on something
1917 of the specified mode. We assume that if Y and Y+c are
1918 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1919 go inside a LO_SUM here, so we do so as well. */
1920 if (GET_CODE (y) == LO_SUM
1921 && mode != BLKmode
1922 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1923 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1924 plus_constant (XEXP (y, 1), mode_sz - 1));
1925 else
1926 z = plus_constant (y, mode_sz - 1);
1927
1928 /* Use QImode because an odd displacement may be automatically invalid
1929 for any wider mode. But it should be valid for a single byte. */
1930 return (*addressp) (QImode, z);
1931 }
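/* Editorial example (assumed usage, not in the original source):
   after reload, asking whether ADDR can absorb any in-object offset
   for a 4-byte access:

     if (offsettable_address_p (1, SImode, addr))
       ... addr+1, addr+2 and addr+3 are also valid ...

   The implementation probes only ADDR plus GET_MODE_SIZE-1, in
   QImode, relying on the assumption stated above that validity of Y
   and Y+c implies validity of every Y+d with 0<d<c.  */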
1932
1933 /* Return 1 if ADDR is an address-expression whose effect depends
1934 on the mode of the memory reference it is used in.
1935
1936 Autoincrement addressing is a typical example of mode-dependence
1937 because the amount of the increment depends on the mode. */
1938
1939 int
1940 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1941 {
1942 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1943 return 0;
1944 /* The label `win' may or may not be used, depending on the target's GO_IF_MODE_DEPENDENT_ADDRESS. */
1945 win: ATTRIBUTE_UNUSED_LABEL
1946 return 1;
1947 }
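/* Editorial example (not in the original source): an auto-increment
   address is the canonical mode-dependent case, because the
   increment amount is GET_MODE_SIZE of the enclosing MEM:

     rtx addr = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);

   A QImode reference through ADDR bumps the pointer by 1, an SImode
   one by 4, so targets whose GO_IF_MODE_DEPENDENT_ADDRESS checks for
   autoincrement codes will jump to `win' here.  */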
1948 \f
1949 /* Like extract_insn, but save the extracted insn and don't extract it
1950    again when called again for the same insn, expecting that recog_data
1951    still contains the valid information.  This is used primarily by the
1952    gen_attr infrastructure, which often extracts the same insn repeatedly. */
1953 void
1954 extract_insn_cached (rtx insn)
1955 {
1956 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1957 return;
1958 extract_insn (insn);
1959 recog_data.insn = insn;
1960 }
1961 /* Do cached extract_insn, constrain_operands and complain about failures.
1962 Used by insn_attrtab. */
1963 void
1964 extract_constrain_insn_cached (rtx insn)
1965 {
1966 extract_insn_cached (insn);
1967 if (which_alternative == -1
1968 && !constrain_operands (reload_completed))
1969 fatal_insn_not_found (insn);
1970 }
1971 /* Do cached constrain_operands and complain about failures. */
1972 int
1973 constrain_operands_cached (int strict)
1974 {
1975 if (which_alternative == -1)
1976 return constrain_operands (strict);
1977 else
1978 return 1;
1979 }
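/* Editorial sketch (not part of the original source) of the calling
   pattern the generated attribute code relies on:

     extract_constrain_insn_cached (insn);
     alt = which_alternative;

   After the call, recog_data and which_alternative are valid for
   INSN, and repeated attribute queries on the same insn skip the
   re-extraction entirely.  */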
1980 \f
1981 /* Analyze INSN and fill in recog_data. */
1982
1983 void
1984 extract_insn (rtx insn)
1985 {
1986 int i;
1987 int icode;
1988 int noperands;
1989 rtx body = PATTERN (insn);
1990
1991 recog_data.insn = NULL;
1992 recog_data.n_operands = 0;
1993 recog_data.n_alternatives = 0;
1994 recog_data.n_dups = 0;
1995 which_alternative = -1;
1996
1997 switch (GET_CODE (body))
1998 {
1999 case USE:
2000 case CLOBBER:
2001 case ASM_INPUT:
2002 case ADDR_VEC:
2003 case ADDR_DIFF_VEC:
2004 return;
2005
2006 case SET:
2007 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2008 goto asm_insn;
2009 else
2010 goto normal_insn;
2011 case PARALLEL:
2012 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2013 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2014 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2015 goto asm_insn;
2016 else
2017 goto normal_insn;
2018 case ASM_OPERANDS:
2019 asm_insn:
2020 recog_data.n_operands = noperands = asm_noperands (body);
2021 if (noperands >= 0)
2022 {
2023 /* This insn is an `asm' with operands. */
2024
2025 /* expand_asm_operands makes sure there aren't too many operands. */
2026 if (noperands > MAX_RECOG_OPERANDS)
2027 abort ();
2028
2029 /* Now get the operand values and constraints out of the insn. */
2030 decode_asm_operands (body, recog_data.operand,
2031 recog_data.operand_loc,
2032 recog_data.constraints,
2033 recog_data.operand_mode);
2034 if (noperands > 0)
2035 {
2036 const char *p = recog_data.constraints[0];
2037 recog_data.n_alternatives = 1;
2038 while (*p)
2039 recog_data.n_alternatives += (*p++ == ',');
2040 }
2041 break;
2042 }
2043 fatal_insn_not_found (insn);
2044
2045 default:
2046 normal_insn:
2047 /* Ordinary insn: recognize it, get the operands via insn_extract
2048 and get the constraints. */
2049
2050 icode = recog_memoized (insn);
2051 if (icode < 0)
2052 fatal_insn_not_found (insn);
2053
2054 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2055 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2056 recog_data.n_dups = insn_data[icode].n_dups;
2057
2058 insn_extract (insn);
2059
2060 for (i = 0; i < noperands; i++)
2061 {
2062 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2063 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2064 /* A VOIDmode match_operand gets its mode from the real operand. */
2065 if (recog_data.operand_mode[i] == VOIDmode)
2066 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2067 }
2068 }
2069 for (i = 0; i < noperands; i++)
2070 recog_data.operand_type[i]
2071 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2072 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2073 : OP_IN);
2074
2075 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2076 abort ();
2077 }
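/* Editorial example (not in the original source): a pass inspecting
   operands after recognition might do the following, where
   note_output_reg is a hypothetical callback:

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] != OP_IN
           && REG_P (recog_data.operand[i]))
         note_output_reg (recog_data.operand[i]);  */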
2078
2079 /* After calling extract_insn, you can use this function to extract some
2080 information from the constraint strings into a more usable form.
2081 The collected data is stored in recog_op_alt. */
2082 void
2083 preprocess_constraints (void)
2084 {
2085 int i;
2086
2087 for (i = 0; i < recog_data.n_operands; i++)
2088 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2089 * sizeof (struct operand_alternative)));
2090
2091 for (i = 0; i < recog_data.n_operands; i++)
2092 {
2093 int j;
2094 struct operand_alternative *op_alt;
2095 const char *p = recog_data.constraints[i];
2096
2097 op_alt = recog_op_alt[i];
2098
2099 for (j = 0; j < recog_data.n_alternatives; j++)
2100 {
2101 op_alt[j].class = NO_REGS;
2102 op_alt[j].constraint = p;
2103 op_alt[j].matches = -1;
2104 op_alt[j].matched = -1;
2105
2106 if (*p == '\0' || *p == ',')
2107 {
2108 op_alt[j].anything_ok = 1;
2109 continue;
2110 }
2111
2112 for (;;)
2113 {
2114 char c = *p;
2115 if (c == '#')
2116 do
2117 c = *++p;
2118 while (c != ',' && c != '\0');
2119 if (c == ',' || c == '\0')
2120 {
2121 p++;
2122 break;
2123 }
2124
2125 switch (c)
2126 {
2127 case '=': case '+': case '*': case '%':
2128 case 'E': case 'F': case 'G': case 'H':
2129 case 's': case 'i': case 'n':
2130 case 'I': case 'J': case 'K': case 'L':
2131 case 'M': case 'N': case 'O': case 'P':
2132 /* These don't say anything we care about. */
2133 break;
2134
2135 case '?':
2136 op_alt[j].reject += 6;
2137 break;
2138 case '!':
2139 op_alt[j].reject += 600;
2140 break;
2141 case '&':
2142 op_alt[j].earlyclobber = 1;
2143 break;
2144
2145 case '0': case '1': case '2': case '3': case '4':
2146 case '5': case '6': case '7': case '8': case '9':
2147 {
2148 char *end;
2149 op_alt[j].matches = strtoul (p, &end, 10);
2150 recog_op_alt[op_alt[j].matches][j].matched = i;
2151 p = end;
2152 }
2153 continue;
2154
2155 case 'm':
2156 op_alt[j].memory_ok = 1;
2157 break;
2158 case '<':
2159 op_alt[j].decmem_ok = 1;
2160 break;
2161 case '>':
2162 op_alt[j].incmem_ok = 1;
2163 break;
2164 case 'V':
2165 op_alt[j].nonoffmem_ok = 1;
2166 break;
2167 case 'o':
2168 op_alt[j].offmem_ok = 1;
2169 break;
2170 case 'X':
2171 op_alt[j].anything_ok = 1;
2172 break;
2173
2174 case 'p':
2175 op_alt[j].is_address = 1;
2176 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2177 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2178 break;
2179
2180 case 'g': case 'r':
2181 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2182 break;
2183
2184 default:
2185 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2186 {
2187 op_alt[j].memory_ok = 1;
2188 break;
2189 }
2190 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2191 {
2192 op_alt[j].is_address = 1;
2193 op_alt[j].class
2194 = (reg_class_subunion
2195 [(int) op_alt[j].class]
2196 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2197 break;
2198 }
2199
2200 op_alt[j].class
2201 = (reg_class_subunion
2202 [(int) op_alt[j].class]
2203 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2204 break;
2205 }
2206 p += CONSTRAINT_LEN (c, p);
2207 }
2208 }
2209 }
2210 }
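/* Editorial example (not in the original source): once extract_insn
   and preprocess_constraints have run, the per-alternative data can
   be queried directly, e.g. to ask whether operand OPNO accepts a
   memory operand in alternative ALT:

     extract_insn (insn);
     preprocess_constraints ();
     if (recog_op_alt[opno][alt].memory_ok
         || recog_op_alt[opno][alt].offmem_ok)
       ...  */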
2211
2212 /* Check the operands of an insn against the insn's operand constraints
2213 and return 1 if they are valid.
2214 The information about the insn's operands, constraints, operand modes
2215 etc. is obtained from the global variables set up by extract_insn.
2216
2217 WHICH_ALTERNATIVE is set to a number which indicates which
2218 alternative of constraints was matched: 0 for the first alternative,
2219 1 for the next, etc.
2220
2221 In addition, when two operands are required to match
2222 and it happens that the output operand is (reg) while the
2223 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2224 make the output operand look like the input.
2225 This is because the output operand is the one the template will print.
2226
2227 This is used in final, just before printing the assembler code and by
2228 the routines that determine an insn's attribute.
2229
2230 If STRICT is positive, it means that we have been
2231 called after reload has been completed. In that case, we must
2232 do all checks strictly. If it is zero, it means that we have been called
2233 before reload has completed. In that case, we first try to see if we can
2234 find an alternative that matches strictly. If not, we try again, this
2235 time assuming that reload will fix up the insn. This provides a "best
2236 guess" for the alternative and is used to compute attributes of insns prior
2237 to reload. A negative value of STRICT is used for this internal call. */
2238
2239 struct funny_match
2240 {
2241 int this, other;
2242 };
2243
2244 int
2245 constrain_operands (int strict)
2246 {
2247 const char *constraints[MAX_RECOG_OPERANDS];
2248 int matching_operands[MAX_RECOG_OPERANDS];
2249 int earlyclobber[MAX_RECOG_OPERANDS];
2250 int c;
2251
2252 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2253 int funny_match_index;
2254
2255 which_alternative = 0;
2256 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2257 return 1;
2258
2259 for (c = 0; c < recog_data.n_operands; c++)
2260 {
2261 constraints[c] = recog_data.constraints[c];
2262 matching_operands[c] = -1;
2263 }
2264
2265 do
2266 {
2267 int opno;
2268 int lose = 0;
2269 funny_match_index = 0;
2270
2271 for (opno = 0; opno < recog_data.n_operands; opno++)
2272 {
2273 rtx op = recog_data.operand[opno];
2274 enum machine_mode mode = GET_MODE (op);
2275 const char *p = constraints[opno];
2276 int offset = 0;
2277 int win = 0;
2278 int val;
2279 int len;
2280
2281 earlyclobber[opno] = 0;
2282
2283 /* A unary operator may be accepted by the predicate, but it
2284 is irrelevant for matching constraints. */
2285 if (UNARY_P (op))
2286 op = XEXP (op, 0);
2287
2288 if (GET_CODE (op) == SUBREG)
2289 {
2290 if (REG_P (SUBREG_REG (op))
2291 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2292 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2293 GET_MODE (SUBREG_REG (op)),
2294 SUBREG_BYTE (op),
2295 GET_MODE (op));
2296 op = SUBREG_REG (op);
2297 }
2298
2299 /* An empty constraint or empty alternative
2300 allows anything which matched the pattern. */
2301 if (*p == 0 || *p == ',')
2302 win = 1;
2303
2304 do
2305 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2306 {
2307 case '\0':
2308 len = 0;
2309 break;
2310 case ',':
2311 c = '\0';
2312 break;
2313
2314 case '?': case '!': case '*': case '%':
2315 case '=': case '+':
2316 break;
2317
2318 case '#':
2319 /* Ignore rest of this alternative as far as
2320 constraint checking is concerned. */
2321 do
2322 p++;
2323 while (*p && *p != ',');
2324 len = 0;
2325 break;
2326
2327 case '&':
2328 earlyclobber[opno] = 1;
2329 break;
2330
2331 case '0': case '1': case '2': case '3': case '4':
2332 case '5': case '6': case '7': case '8': case '9':
2333 {
2334 /* This operand must be the same as a previous one.
2335 This kind of constraint is used for instructions such
2336 as add when they take only two operands.
2337
2338 Note that the lower-numbered operand is passed first.
2339
2340 If we are not testing strictly, assume that this
2341 constraint will be satisfied. */
2342
2343 char *end;
2344 int match;
2345
2346 match = strtoul (p, &end, 10);
2347 p = end;
2348
2349 if (strict < 0)
2350 val = 1;
2351 else
2352 {
2353 rtx op1 = recog_data.operand[match];
2354 rtx op2 = recog_data.operand[opno];
2355
2356 /* A unary operator may be accepted by the predicate,
2357 but it is irrelevant for matching constraints. */
2358 if (UNARY_P (op1))
2359 op1 = XEXP (op1, 0);
2360 if (UNARY_P (op2))
2361 op2 = XEXP (op2, 0);
2362
2363 val = operands_match_p (op1, op2);
2364 }
2365
2366 matching_operands[opno] = match;
2367 matching_operands[match] = opno;
2368
2369 if (val != 0)
2370 win = 1;
2371
2372 /* If output is *x and input is *--x, arrange later
2373 to change the output to *--x as well, since the
2374 output op is the one that will be printed. */
2375 if (val == 2 && strict > 0)
2376 {
2377 funny_match[funny_match_index].this = opno;
2378 funny_match[funny_match_index++].other = match;
2379 }
2380 }
2381 len = 0;
2382 break;
2383
2384 case 'p':
2385 /* p is used for address_operands. When we are called by
2386 gen_reload, no one will have checked that the address is
2387 strictly valid, i.e., that all pseudos requiring hard regs
2388 have gotten them. */
2389 if (strict <= 0
2390 || (strict_memory_address_p (recog_data.operand_mode[opno],
2391 op)))
2392 win = 1;
2393 break;
2394
2395 /* No need to check general_operand again;
2396 it was done in insn-recog.c. */
2397 case 'g':
2398 /* Anything goes unless OP is a REG whose assigned hard reg
2399 is not in the class GENERAL_REGS. */
2400 if (strict < 0
2401 || GENERAL_REGS == ALL_REGS
2402 || !REG_P (op)
2403 || (reload_in_progress
2404 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2405 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2406 win = 1;
2407 break;
2408
2409 case 'X':
2410 /* This is used for a MATCH_SCRATCH in the cases when
2411 we don't actually need anything. So anything goes
2412 any time. */
2413 win = 1;
2414 break;
2415
2416 case 'm':
2417 /* Memory operands must be valid, to the extent
2418 required by STRICT. */
2419 if (MEM_P (op))
2420 {
2421 if (strict > 0
2422 && !strict_memory_address_p (GET_MODE (op),
2423 XEXP (op, 0)))
2424 break;
2425 if (strict == 0
2426 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2427 break;
2428 win = 1;
2429 }
2430 /* Before reload, accept what reload can turn into mem. */
2431 else if (strict < 0 && CONSTANT_P (op))
2432 win = 1;
2433 /* During reload, accept a pseudo.  */
2434 else if (reload_in_progress && REG_P (op)
2435 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2436 win = 1;
2437 break;
2438
2439 case '<':
2440 if (MEM_P (op)
2441 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2442 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2443 win = 1;
2444 break;
2445
2446 case '>':
2447 if (MEM_P (op)
2448 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2449 || GET_CODE (XEXP (op, 0)) == POST_INC))
2450 win = 1;
2451 break;
2452
2453 case 'E':
2454 case 'F':
2455 if (GET_CODE (op) == CONST_DOUBLE
2456 || (GET_CODE (op) == CONST_VECTOR
2457 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2458 win = 1;
2459 break;
2460
2461 case 'G':
2462 case 'H':
2463 if (GET_CODE (op) == CONST_DOUBLE
2464 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2465 win = 1;
2466 break;
2467
2468 case 's':
2469 if (GET_CODE (op) == CONST_INT
2470 || (GET_CODE (op) == CONST_DOUBLE
2471 && GET_MODE (op) == VOIDmode))
2472 break;
2473 case 'i':   /* `s' above falls through to here when OP is not a numeric constant. */
2474 if (CONSTANT_P (op))
2475 win = 1;
2476 break;
2477
2478 case 'n':
2479 if (GET_CODE (op) == CONST_INT
2480 || (GET_CODE (op) == CONST_DOUBLE
2481 && GET_MODE (op) == VOIDmode))
2482 win = 1;
2483 break;
2484
2485 case 'I':
2486 case 'J':
2487 case 'K':
2488 case 'L':
2489 case 'M':
2490 case 'N':
2491 case 'O':
2492 case 'P':
2493 if (GET_CODE (op) == CONST_INT
2494 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2495 win = 1;
2496 break;
2497
2498 case 'V':
2499 if (MEM_P (op)
2500 && ((strict > 0 && ! offsettable_memref_p (op))
2501 || (strict < 0
2502 && !(CONSTANT_P (op) || MEM_P (op)))
2503 || (reload_in_progress
2504 && !(REG_P (op)
2505 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2506 win = 1;
2507 break;
2508
2509 case 'o':
2510 if ((strict > 0 && offsettable_memref_p (op))
2511 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2512 /* Before reload, accept what reload can handle. */
2513 || (strict < 0
2514 && (CONSTANT_P (op) || MEM_P (op)))
2515 /* During reload, accept a pseudo.  */
2516 || (reload_in_progress && REG_P (op)
2517 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2518 win = 1;
2519 break;
2520
2521 default:
2522 {
2523 enum reg_class class;
2524
2525 class = (c == 'r'
2526 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2527 if (class != NO_REGS)
2528 {
2529 if (strict < 0
2530 || (strict == 0
2531 && REG_P (op)
2532 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2533 || (strict == 0 && GET_CODE (op) == SCRATCH)
2534 || (REG_P (op)
2535 && reg_fits_class_p (op, class, offset, mode)))
2536 win = 1;
2537 }
2538 #ifdef EXTRA_CONSTRAINT_STR
2539 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2540 win = 1;
2541
2542 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2543 /* Every memory operand can be reloaded to fit. */
2544 && ((strict < 0 && MEM_P (op))
2545 /* Before reload, accept what reload can turn
2546 into mem. */
2547 || (strict < 0 && CONSTANT_P (op))
2548 /* During reload, accept a pseudo.  */
2549 || (reload_in_progress && REG_P (op)
2550 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2551 win = 1;
2552 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2553 /* Every address operand can be reloaded to fit. */
2554 && strict < 0)
2555 win = 1;
2556 #endif
2557 break;
2558 }
2559 }
2560 while (p += len, c);
2561
2562 constraints[opno] = p;
2563 /* If this operand did not win somehow,
2564 this alternative loses. */
2565 if (! win)
2566 lose = 1;
2567 }
2568 /* This alternative won; the operands are ok.
2569 Change whichever operands this alternative says to change. */
2570 if (! lose)
2571 {
2572 int opno, eopno;
2573
2574 /* See if any earlyclobber operand conflicts with some other
2575 operand. */
2576
2577 if (strict > 0)
2578 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2579 /* Ignore earlyclobber operands now in memory,
2580 because we would often report failure when we have
2581 two memory operands, one of which was formerly a REG. */
2582 if (earlyclobber[eopno]
2583 && REG_P (recog_data.operand[eopno]))
2584 for (opno = 0; opno < recog_data.n_operands; opno++)
2585 if ((MEM_P (recog_data.operand[opno])
2586 || recog_data.operand_type[opno] != OP_OUT)
2587 && opno != eopno
2588 /* Ignore things like match_operator operands. */
2589 && *recog_data.constraints[opno] != 0
2590 && ! (matching_operands[opno] == eopno
2591 && operands_match_p (recog_data.operand[opno],
2592 recog_data.operand[eopno]))
2593 && ! safe_from_earlyclobber (recog_data.operand[opno],
2594 recog_data.operand[eopno]))
2595 lose = 1;
2596
2597 if (! lose)
2598 {
2599 while (--funny_match_index >= 0)
2600 {
2601 recog_data.operand[funny_match[funny_match_index].other]
2602 = recog_data.operand[funny_match[funny_match_index].this];
2603 }
2604
2605 return 1;
2606 }
2607 }
2608
2609 which_alternative++;
2610 }
2611 while (which_alternative < recog_data.n_alternatives);
2612
2613 which_alternative = -1;
2614 /* If we are about to reject this, but we are not to test strictly,
2615 try a very loose test. Only return failure if it fails also. */
2616 if (strict == 0)
2617 return constrain_operands (-1);
2618 else
2619 return 0;
2620 }
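/* Editorial example (not in the original source): the strict
   post-reload check done just before output looks essentially like
   extract_constrain_insn_cached above:

     extract_insn (insn);
     if (! constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   On success, which_alternative names the matched alternative.  */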
2621
2622 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2623 is a hard reg in class CLASS when its regno is offset by OFFSET
2624 and changed to mode MODE.
2625 If REG occupies multiple hard regs, all of them must be in CLASS. */
2626
2627 int
2628 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2629 enum machine_mode mode)
2630 {
2631 int regno = REGNO (operand);
2632 if (regno < FIRST_PSEUDO_REGISTER
2633 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2634 regno + offset))
2635 {
2636 int sr;
2637 regno += offset;
2638 for (sr = hard_regno_nregs[regno][mode] - 1;
2639 sr > 0; sr--)
2640 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2641 regno + sr))
2642 break;
2643 return sr == 0;
2644 }
2645
2646 return 0;
2647 }
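/* Editorial example (not in the original source): testing whether a
   hard-register operand stays within a class when used in DImode,
   with CLASS standing for some target-specific register class:

     if (REG_P (op)
         && REGNO (op) < FIRST_PSEUDO_REGISTER
         && reg_fits_class_p (op, class, 0, DImode))
       ... every word of the DImode value lies in CLASS ...  */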
2648 \f
2649 /* Split single instruction. Helper function for split_all_insns and
2650 split_all_insns_noflow. Return last insn in the sequence if successful,
2651 or NULL if unsuccessful. */
2652
2653 static rtx
2654 split_insn (rtx insn)
2655 {
2656 /* Split insns here to get max fine-grain parallelism. */
2657 rtx first = PREV_INSN (insn);
2658 rtx last = try_split (PATTERN (insn), insn, 1);
2659
2660 if (last == insn)
2661 return NULL_RTX;
2662
2663 /* try_split returns the NOTE that INSN became. */
2664 SET_INSN_DELETED (insn);
2665
2666 /* ??? Coddle to md files that generate subregs in post-reload
2667 splitters instead of computing the proper hard register. */
2668 if (reload_completed && first != last)
2669 {
2670 first = NEXT_INSN (first);
2671 for (;;)
2672 {
2673 if (INSN_P (first))
2674 cleanup_subreg_operands (first);
2675 if (first == last)
2676 break;
2677 first = NEXT_INSN (first);
2678 }
2679 }
2680 return last;
2681 }
2682
2683 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2684
2685 void
2686 split_all_insns (int upd_life)
2687 {
2688 sbitmap blocks;
2689 bool changed;
2690 basic_block bb;
2691
2692 blocks = sbitmap_alloc (last_basic_block);
2693 sbitmap_zero (blocks);
2694 changed = false;
2695
2696 FOR_EACH_BB_REVERSE (bb)
2697 {
2698 rtx insn, next;
2699 bool finish = false;
2700
2701 for (insn = BB_HEAD (bb); !finish ; insn = next)
2702 {
2703 /* Can't use `next_real_insn', because that might cross
2704 CODE_LABELs and skip entire basic blocks. */
2705 next = NEXT_INSN (insn);
2706 finish = (insn == BB_END (bb));
2707 if (INSN_P (insn))
2708 {
2709 rtx set = single_set (insn);
2710
2711 /* Don't split no-op move insns. These should silently
2712 disappear later in final. Splitting such insns would
2713 break the code that handles REG_NO_CONFLICT blocks. */
2714 if (set && set_noop_p (set))
2715 {
2716 /* Nops get in the way while scheduling, so delete them
2717 now if register allocation has already been done. It
2718 is too risky to try to do this before register
2719 allocation, and there are unlikely to be very many
2720 nops then anyways. */
2721 if (reload_completed)
2722 {
2723 /* If the no-op set has a REG_UNUSED note, we need
2724 to update liveness information. */
2725 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2726 {
2727 SET_BIT (blocks, bb->index);
2728 changed = true;
2729 }
2730 /* ??? Is life info affected by deleting edges? */
2731 delete_insn_and_edges (insn);
2732 }
2733 }
2734 else
2735 {
2736 rtx last = split_insn (insn);
2737 if (last)
2738 {
2739 /* The split sequence may include a barrier, but the
2740 BB boundary we are interested in will be set to the
2741 insn before it. */
2742
2743 while (GET_CODE (last) == BARRIER)
2744 last = PREV_INSN (last);
2745 SET_BIT (blocks, bb->index);
2746 changed = true;
2747 }
2748 }
2749 }
2750 }
2751 }
2752
2753 if (changed)
2754 {
2755 int old_last_basic_block = last_basic_block;
2756
2757 find_many_sub_basic_blocks (blocks);
2758
2759 if (old_last_basic_block != last_basic_block && upd_life)
2760 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2761 }
2762
2763 if (changed && upd_life)
2764 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2765 PROP_DEATH_NOTES);
2766
2767 #ifdef ENABLE_CHECKING
2768 verify_flow_info ();
2769 #endif
2770
2771 sbitmap_free (blocks);
2772 }
2773
2774 /* Same as split_all_insns, but do not expect CFG to be available.
2775 Used by machine dependent reorg passes. */
2776
2777 void
2778 split_all_insns_noflow (void)
2779 {
2780 rtx next, insn;
2781
2782 for (insn = get_insns (); insn; insn = next)
2783 {
2784 next = NEXT_INSN (insn);
2785 if (INSN_P (insn))
2786 {
2787 /* Don't split no-op move insns. These should silently
2788 disappear later in final. Splitting such insns would
2789 break the code that handles REG_NO_CONFLICT blocks. */
2790 rtx set = single_set (insn);
2791 if (set && set_noop_p (set))
2792 {
2793 /* Nops get in the way while scheduling, so delete them
2794 now if register allocation has already been done. It
2795 is too risky to try to do this before register
2796 allocation, and there are unlikely to be very many
2797 nops then anyways.
2798
2799 ??? Should we use delete_insn when the CFG isn't valid? */
2800 if (reload_completed)
2801 delete_insn_and_edges (insn);
2802 }
2803 else
2804 split_insn (insn);
2805 }
2806 }
2807 }
2808 \f
2809 #ifdef HAVE_peephole2
2810 struct peep2_insn_data
2811 {
2812 rtx insn;
2813 regset live_before;
2814 };
2815
2816 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2817 static int peep2_current;
2818
2819 /* A non-insn marker indicating the last insn of the block.
2820 The live_before regset for this element is correct, indicating
2821 global_live_at_end for the block. */
2822 #define PEEP2_EOB pc_rtx
2823
2824 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2825 does not exist. Used by the recognizer to find the next insn to match
2826 in a multi-insn pattern. */
2827
2828 rtx
2829 peep2_next_insn (int n)
2830 {
2831 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2832 abort ();
2833
2834 n += peep2_current;
2835 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2836 n -= MAX_INSNS_PER_PEEP2 + 1;
2837
2838 if (peep2_insn_data[n].insn == PEEP2_EOB)
2839 return NULL_RTX;
2840 return peep2_insn_data[n].insn;
2841 }
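/* Editorial example (not in the original source): the generated
   peephole2 matcher walks the window with this function,
   conceptually:

     rtx second = peep2_next_insn (1);
     if (second == NULL_RTX)
       ... block ends before a second insn; no multi-insn match ...  */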
2842
2843 /* Return true if REGNO is dead before the Nth non-note insn
2844 after `current'. */
2845
2846 int
2847 peep2_regno_dead_p (int ofs, int regno)
2848 {
2849 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2850 abort ();
2851
2852 ofs += peep2_current;
2853 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2854 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2855
2856 if (peep2_insn_data[ofs].insn == NULL_RTX)
2857 abort ();
2858
2859 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2860 }
2861
2862 /* Similarly for a REG. */
2863
2864 int
2865 peep2_reg_dead_p (int ofs, rtx reg)
2866 {
2867 int regno, n;
2868
2869 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2870 abort ();
2871
2872 ofs += peep2_current;
2873 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2874 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2875
2876 if (peep2_insn_data[ofs].insn == NULL_RTX)
2877 abort ();
2878
2879 regno = REGNO (reg);
2880 n = hard_regno_nregs[regno][GET_MODE (reg)];
2881 while (--n >= 0)
2882 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2883 return 0;
2884 return 1;
2885 }
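/* Editorial example (machine-description syntax; a sketch, not part
   of the original source): a peephole that forwards a register copy
   only when the intermediate register dies after the matched pair:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_operand:SI 2 "register_operand" "")
             (match_dup 0))]
       "peep2_reg_dead_p (2, operands[0])"
       [(set (match_dup 2) (match_dup 1))])  */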
2886
2887 /* Try to find a hard register of mode MODE, matching the register class in
2888 CLASS_STR, which is available from the beginning of the FROM'th insn of the
2889 current peephole window until the end of the TO'th insn. FROM and TO are
2890 offsets from `current', interpreted like the argument of peep2_next_insn.
2892 Registers that already have bits set in REG_SET will not be considered.
2893
2894 If an appropriate register is available, it will be returned and the
2895 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2896 returned. */
2897
2898 rtx
2899 peep2_find_free_register (int from, int to, const char *class_str,
2900 enum machine_mode mode, HARD_REG_SET *reg_set)
2901 {
2902 static int search_ofs;
2903 enum reg_class class;
2904 HARD_REG_SET live;
2905 int i;
2906
2907 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2908 abort ();
2909
2910 from += peep2_current;
2911 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2912 from -= MAX_INSNS_PER_PEEP2 + 1;
2913 to += peep2_current;
2914 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2915 to -= MAX_INSNS_PER_PEEP2 + 1;
2916
2917 if (peep2_insn_data[from].insn == NULL_RTX)
2918 abort ();
2919 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2920
2921 while (from != to)
2922 {
2923 HARD_REG_SET this_live;
2924
2925 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2926 from = 0;
2927 if (peep2_insn_data[from].insn == NULL_RTX)
2928 abort ();
2929 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2930 IOR_HARD_REG_SET (live, this_live);
2931 }
2932
2933 class = (class_str[0] == 'r' ? GENERAL_REGS
2934 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2935
2936 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2937 {
2938 int raw_regno, regno, success, j;
2939
2940 /* Distribute the free registers as much as possible. */
2941 raw_regno = search_ofs + i;
2942 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2943 raw_regno -= FIRST_PSEUDO_REGISTER;
2944 #ifdef REG_ALLOC_ORDER
2945 regno = reg_alloc_order[raw_regno];
2946 #else
2947 regno = raw_regno;
2948 #endif
2949
2950 /* Don't allocate fixed registers. */
2951 if (fixed_regs[regno])
2952 continue;
2953 /* Make sure the register is of the right class. */
2954 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2955 continue;
2956 /* And can support the mode we need. */
2957 if (! HARD_REGNO_MODE_OK (regno, mode))
2958 continue;
2959 /* And that we don't create an extra save/restore. */
2960 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2961 continue;
2962 /* And we don't clobber traceback for noreturn functions. */
2963 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2964 && (! reload_completed || frame_pointer_needed))
2965 continue;
2966
2967 success = 1;
2968 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2969 {
2970 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2971 || TEST_HARD_REG_BIT (live, regno + j))
2972 {
2973 success = 0;
2974 break;
2975 }
2976 }
2977 if (success)
2978 {
2979 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2980 SET_HARD_REG_BIT (*reg_set, regno + j);
2981
2982 /* Start the next search with the next register. */
2983 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2984 raw_regno = 0;
2985 search_ofs = raw_regno;
2986
2987 return gen_rtx_REG (mode, regno);
2988 }
2989 }
2990
2991 search_ofs = 0;
2992 return NULL_RTX;
2993 }
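/* Editorial example (a sketch, not part of the original source): a
   define_peephole2 preparation statement asking for a scratch that
   is free across a two-insn window:

     HARD_REG_SET used;
     rtx scratch;

     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   The "r" string selects GENERAL_REGS, as in the class_str handling
   above; FAIL is the usual md-template escape.  */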
2994
2995 /* Perform the peephole2 optimization pass. */
2996
2997 void
2998 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2999 {
3000 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3001 rtx insn, prev;
3002 regset live;
3003 int i;
3004 basic_block bb;
3005 #ifdef HAVE_conditional_execution
3006 sbitmap blocks;
3007 bool changed;
3008 #endif
3009 bool do_cleanup_cfg = false;
3010 bool do_rebuild_jump_labels = false;
3011
3012 /* Initialize the regsets we're going to use. */
3013 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3014 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3015 live = INITIALIZE_REG_SET (rs_heads[i]);
3016
3017 #ifdef HAVE_conditional_execution
3018 blocks = sbitmap_alloc (last_basic_block);
3019 sbitmap_zero (blocks);
3020 changed = false;
3021 #else
3022 count_or_remove_death_notes (NULL, 1);
3023 #endif
3024
3025 FOR_EACH_BB_REVERSE (bb)
3026 {
3027 struct propagate_block_info *pbi;
3028
3029 /* Indicate that all slots except the last hold invalid data. */
3030 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3031 peep2_insn_data[i].insn = NULL_RTX;
3032
3033 /* Indicate that the last slot contains live_after data. */
3034 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3035 peep2_current = MAX_INSNS_PER_PEEP2;
3036
3037 /* Start up propagation. */
3038 COPY_REG_SET (live, bb->global_live_at_end);
3039 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3040
3041 #ifdef HAVE_conditional_execution
3042 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3043 #else
3044 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3045 #endif
3046
3047 for (insn = BB_END (bb); ; insn = prev)
3048 {
3049 prev = PREV_INSN (insn);
3050 if (INSN_P (insn))
3051 {
3052 rtx try, before_try, x;
3053 int match_len;
3054 rtx note;
3055 bool was_call = false;
3056
3057 /* Record this insn. */
3058 if (--peep2_current < 0)
3059 peep2_current = MAX_INSNS_PER_PEEP2;
3060 peep2_insn_data[peep2_current].insn = insn;
3061 propagate_one_insn (pbi, insn);
3062 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3063
3064 /* Match the peephole. */
3065 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3066 if (try != NULL)
3067 {
3068 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3069 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3070 and other cfg-related call notes to it. */
3071 for (i = 0; i <= match_len; ++i)
3072 {
3073 int j;
3074 rtx old_insn, new_insn, note;
3075
3076 j = i + peep2_current;
3077 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3078 j -= MAX_INSNS_PER_PEEP2 + 1;
3079 old_insn = peep2_insn_data[j].insn;
3080 if (GET_CODE (old_insn) != CALL_INSN)
3081 continue;
3082 was_call = true;
3083
3084 new_insn = try;
3085 while (new_insn != NULL_RTX)
3086 {
3087 if (GET_CODE (new_insn) == CALL_INSN)
3088 break;
3089 new_insn = NEXT_INSN (new_insn);
3090 }
3091
3092 if (new_insn == NULL_RTX)
3093 abort ();
3094
3095 CALL_INSN_FUNCTION_USAGE (new_insn)
3096 = CALL_INSN_FUNCTION_USAGE (old_insn);
3097
3098 for (note = REG_NOTES (old_insn);
3099 note;
3100 note = XEXP (note, 1))
3101 switch (REG_NOTE_KIND (note))
3102 {
3103 case REG_NORETURN:
3104 case REG_SETJMP:
3105 case REG_ALWAYS_RETURN:
3106 REG_NOTES (new_insn)
3107 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3108 XEXP (note, 0),
3109 REG_NOTES (new_insn));
3110 default:
3111 /* Discard all other reg notes. */
3112 break;
3113 }
3114
3115 /* Croak if there is another call in the sequence. */
3116 while (++i <= match_len)
3117 {
3118 j = i + peep2_current;
3119 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3120 j -= MAX_INSNS_PER_PEEP2 + 1;
3121 old_insn = peep2_insn_data[j].insn;
3122 if (GET_CODE (old_insn) == CALL_INSN)
3123 abort ();
3124 }
3125 break;
3126 }
3127
3128 i = match_len + peep2_current;
3129 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3130 i -= MAX_INSNS_PER_PEEP2 + 1;
3131
3132 note = find_reg_note (peep2_insn_data[i].insn,
3133 REG_EH_REGION, NULL_RTX);
3134
3135 /* Replace the old sequence with the new. */
3136 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3137 INSN_LOCATOR (peep2_insn_data[i].insn));
3138 before_try = PREV_INSN (insn);
3139 delete_insn_chain (insn, peep2_insn_data[i].insn);
3140
3141 /* Re-insert the EH_REGION notes. */
3142 if (note || (was_call && nonlocal_goto_handler_labels))
3143 {
3144 edge eh_edge;
3145
3146 for (eh_edge = bb->succ; eh_edge
3147 ; eh_edge = eh_edge->succ_next)
3148 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3149 break;
3150
3151 for (x = try ; x != before_try ; x = PREV_INSN (x))
3152 if (GET_CODE (x) == CALL_INSN
3153 || (flag_non_call_exceptions
3154 && may_trap_p (PATTERN (x))
3155 && !find_reg_note (x, REG_EH_REGION, NULL)))
3156 {
3157 if (note)
3158 REG_NOTES (x)
3159 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3160 XEXP (note, 0),
3161 REG_NOTES (x));
3162
3163 if (x != BB_END (bb) && eh_edge)
3164 {
3165 edge nfte, nehe;
3166 int flags;
3167
3168 nfte = split_block (bb, x);
3169 flags = (eh_edge->flags
3170 & (EDGE_EH | EDGE_ABNORMAL));
3171 if (GET_CODE (x) == CALL_INSN)
3172 flags |= EDGE_ABNORMAL_CALL;
3173 nehe = make_edge (nfte->src, eh_edge->dest,
3174 flags);
3175
3176 nehe->probability = eh_edge->probability;
3177 nfte->probability
3178 = REG_BR_PROB_BASE - nehe->probability;
3179
3180 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3181 #ifdef HAVE_conditional_execution
3182 SET_BIT (blocks, nfte->dest->index);
3183 changed = true;
3184 #endif
3185 bb = nfte->src;
3186 eh_edge = nehe;
3187 }
3188 }
3189
3190 /* A possibly trapping insn may have become non-trapping;
3191 zap any now-dead outgoing edges. */
3192 do_cleanup_cfg |= purge_dead_edges (bb);
3193 }
3194
3195 #ifdef HAVE_conditional_execution
3196 /* With conditional execution, we cannot back up the
3197 live information so easily, since the conditional
3198 death data structures are not so self-contained.
3199 So record that we've made a modification to this
3200 block and update life information at the end. */
3201 SET_BIT (blocks, bb->index);
3202 changed = true;
3203
3204 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3205 peep2_insn_data[i].insn = NULL_RTX;
3206 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3207 #else
3208 /* Back up lifetime information past the end of the
3209 newly created sequence. */
3210 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3211 i = 0;
3212 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3213
3214 /* Update life information for the new sequence. */
3215 x = try;
3216 do
3217 {
3218 if (INSN_P (x))
3219 {
3220 if (--i < 0)
3221 i = MAX_INSNS_PER_PEEP2;
3222 peep2_insn_data[i].insn = x;
3223 propagate_one_insn (pbi, x);
3224 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3225 }
3226 x = PREV_INSN (x);
3227 }
3228 while (x != prev);
3229
3230 /* ??? Should verify that LIVE now matches what we
3231 had before the new sequence. */
3232
3233 peep2_current = i;
3234 #endif
3235
3236 /* If we generated a jump instruction, it won't have
3237 JUMP_LABEL set. Recompute after we're done. */
3238 for (x = try; x != before_try; x = PREV_INSN (x))
3239 if (GET_CODE (x) == JUMP_INSN)
3240 {
3241 do_rebuild_jump_labels = true;
3242 break;
3243 }
3244 }
3245 }
3246
3247 if (insn == BB_HEAD (bb))
3248 break;
3249 }
3250
3251 free_propagate_block_info (pbi);
3252 }
3253
3254 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3255 FREE_REG_SET (peep2_insn_data[i].live_before);
3256 FREE_REG_SET (live);
3257
3258 if (do_rebuild_jump_labels)
3259 rebuild_jump_labels (get_insns ());
3260
3261 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3262 we've changed global life since exception handlers are no longer
3263 reachable. */
3264 if (do_cleanup_cfg)
3265 {
3266 cleanup_cfg (0);
3267 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3268 }
3269 #ifdef HAVE_conditional_execution
3270 else
3271 {
3272 count_or_remove_death_notes (blocks, 1);
3273 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3274 }
3275 sbitmap_free (blocks);
3276 #endif
3277 }
3278 #endif /* HAVE_peephole2 */
3279
3280 /* Common predicates for use with define_bypass. */
3281
3282 /* True if the dependency between OUT_INSN and IN_INSN is on the data
3283 stored, not the address operand(s), of the store. IN_INSN must be
3284 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3285 SETs inside. */
3286
3287 int
3288 store_data_bypass_p (rtx out_insn, rtx in_insn)
3289 {
3290 rtx out_set, in_set;
3291
3292 in_set = single_set (in_insn);
3293 if (! in_set)
3294 abort ();
3295
3296 if (!MEM_P (SET_DEST (in_set)))
3297 return false;
3298
3299 out_set = single_set (out_insn);
3300 if (out_set)
3301 {
3302 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3303 return false;
3304 }
3305 else
3306 {
3307 rtx out_pat;
3308 int i;
3309
3310 out_pat = PATTERN (out_insn);
3311 if (GET_CODE (out_pat) != PARALLEL)
3312 abort ();
3313
3314 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3315 {
3316 rtx exp = XVECEXP (out_pat, 0, i);
3317
3318 if (GET_CODE (exp) == CLOBBER)
3319 continue;
3320
3321 if (GET_CODE (exp) != SET)
3322 abort ();
3323
3324 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3325 return false;
3326 }
3327 }
3328
3329 return true;
3330 }
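/* Editorial example (machine-description syntax; the reservation
   names are hypothetical): shortening the latency seen by a store
   whose dependence on a load is only through the stored data:

     (define_bypass 1 "my_load" "my_store" "store_data_bypass_p")  */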
3331
3332 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3333 condition, and not the THEN or ELSE branch. OUT_INSN may be either a
3334 single_set or a PARALLEL with SETs inside; IN_INSN should be single_set for
3335 a meaningful answer, but for convenience of insn categorization it may be
3336 any JUMP or CALL insn. */
3336
3337 int
3338 if_test_bypass_p (rtx out_insn, rtx in_insn)
3339 {
3340 rtx out_set, in_set;
3341
3342 in_set = single_set (in_insn);
3343 if (! in_set)
3344 {
3345 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3346 return false;
3347 abort ();
3348 }
3349
3350 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3351 return false;
3352 in_set = SET_SRC (in_set);
3353
3354 out_set = single_set (out_insn);
3355 if (out_set)
3356 {
3357 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3358 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3359 return false;
3360 }
3361 else
3362 {
3363 rtx out_pat;
3364 int i;
3365
3366 out_pat = PATTERN (out_insn);
3367 if (GET_CODE (out_pat) != PARALLEL)
3368 abort ();
3369
3370 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3371 {
3372 rtx exp = XVECEXP (out_pat, 0, i);
3373
3374 if (GET_CODE (exp) == CLOBBER)
3375 continue;
3376
3377 if (GET_CODE (exp) != SET)
3378 abort ();
3379
3380 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3381 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3382 return false;
3383 }
3384 }
3385
3386 return true;
3387 }
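/* Editorial example (machine-description syntax; the reservation
   names are hypothetical): a bypass that applies only when the
   consumer uses the produced value in its IF_THEN_ELSE condition:

     (define_bypass 1 "my_compare" "my_cmov" "if_test_bypass_p")  */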