/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
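
/* Illustrative usage sketch (not part of the original file): a pass
   that wants to rewrite the source of a SET can hand the change to
   validate_change with IN_GROUP == 0, assuming no group is already in
   progress, and let it recognize the result, undoing the change on
   failure.  The function name and the NEW operand are hypothetical.  */
#if 0
static int
example_replace_src (insn, new)
     rtx insn, new;
{
  /* Returns 1 and keeps the change if the modified insn is still
     recognizable; otherwise restores the old source and returns 0.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new, 0);
}
#endif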

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if we are given an
     insn and it is recognized.  In the latter case, if reload has
     completed, we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
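
/* Illustrative sketch (hypothetical helper, not part of the original
   file): num_validated_changes and cancel_changes can bracket a
   speculative subgroup, so a caller can retract just its own queued
   changes while leaving earlier queued changes in place.  */
#if 0
static void
example_try_subgroup (insn, loc, new)
     rtx insn, *loc, new;
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc, new, 1);
  if (some_heuristic_says_no)   /* hypothetical condition */
    /* Undo only the changes queued since the checkpoint.  */
    cancel_changes (checkpoint);
}
#endif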

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
  return apply_change_group ();
}
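
/* Illustrative sketch (not part of the original file): forwarding a
   known copy.  If INSN was preceded by (set (reg 66) (reg 65)), a
   copy-propagation-like pass could try rewriting INSN's uses of
   reg 66 into reg 65 and keep the result only if it still matches
   some pattern.  The register numbers and mode are made up.  */
#if 0
  if (validate_replace_src (gen_rtx_REG (SImode, 66),
                            gen_rtx_REG (SImode, 65), insn))
    /* INSN now uses reg 65 and is known to be recognizable.  */;
#endif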
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
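
/* Illustrative sketch (not part of the original file): a combine-style
   transformation might use find_single_use to substitute the value set
   by DEF_INSN directly into its only consumer.  DEF_INSN, DEST and SRC
   below are hypothetical.  */
#if 0
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, def_insn, &use_insn);

  if (use_loc
      && validate_change (use_insn, use_loc, src, 0))
    delete_insn (def_insn);     /* the definition is now dead */
#endif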
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (SUBREG_REG (op)) == MEM)
        return 0;

      op = SUBREG_REG (op);
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
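
/* Illustrative note (not part of the original file): in a machine
   description, general_operand typically appears as the predicate of a
   move pattern, e.g. on a hypothetical target:

     (define_insn "movsi_example"
       [(set (match_operand:SI 0 "general_operand" "=g")
             (match_operand:SI 1 "general_operand" "g"))]
       ...)

   The pattern name and constraints above are made up.  */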
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (op)) == REG
          && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
          && (TEST_HARD_REG_BIT
              (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
               REGNO (SUBREG_REG (op))))
          && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
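
/* Illustrative note (not part of the original file): on a target where
   the stack grows downward (so STACK_PUSH_CODE is PRE_DEC) and SImode
   needs no push rounding, the operand accepted by push_operand looks
   like

     (mem:SI (pre_dec:SI (reg/f:SI sp)))

   which is the destination of a typical (set (mem ...) (reg ...))
   push insn.  */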

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
        {
        case '=':
        case '+':
        case '*':
        case '%':
        case '?':
        case '!':
        case '#':
        case '&':
        case ',':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          while (ISDIGIT (*constraint))
            constraint++;
          result = -1;
          break;

        case 'p':
          if (address_operand (op, VOIDmode))
            return 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            return 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            return 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            return 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            return 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE)
            return 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
            return 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
            return 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            return 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            return 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
            return 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
            return 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
            return 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
            return 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
            return 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
            return 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
            return 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
            return 1;
          break;

        case 'X':
          return 1;

        case 'g':
          if (general_operand (op, VOIDmode))
            return 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                return 1;
            }
#ifdef EXTRA_CONSTRAINT
          if (EXTRA_CONSTRAINT (op, c))
            return 1;
#endif
          break;
        }
    }

  return result;
}
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
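
/* Illustrative note (not part of the original file): given a pointer
   to (plus (reg 65) (const_int 4)), find_constant_term_loc returns the
   address of the slot holding (const_int 4), so a caller can
   temporarily overwrite the constant in place, as
   offsettable_address_p below does.  The register number is made
   up.  */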
1872 \f
1873 /* Return 1 if OP is a memory reference
1874 whose address contains no side effects
1875 and remains valid after the addition
1876 of a positive integer less than the
1877 size of the object being referenced.
1878
1879 We assume that the original address is valid and do not check it.
1880
1881 This uses strict_memory_address_p as a subroutine, so
1882 don't use it before reload. */
1883
1884 int
1885 offsettable_memref_p (op)
1886 rtx op;
1887 {
1888 return ((GET_CODE (op) == MEM)
1889 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1890 }
1891
1892 /* Similar, but don't require a strictly valid mem ref:
1893 consider pseudo-regs valid as index or base regs. */
1894
1895 int
1896 offsettable_nonstrict_memref_p (op)
1897 rtx op;
1898 {
1899 return ((GET_CODE (op) == MEM)
1900 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1901 }
1902
1903 /* Return 1 if Y is a memory address which contains no side effects
1904 and would remain valid after the addition of a positive integer
1905 less than the size of that mode.
1906
1907 We assume that the original address is valid and do not check it.
1908 We do check that it is valid for narrower modes.
1909
1910 If STRICTP is nonzero, we require a strictly valid address,
1911 for the sake of use in reload.c. */
1912
1913 int
1914 offsettable_address_p (strictp, mode, y)
1915 int strictp;
1916 enum machine_mode mode;
1917 rtx y;
1918 {
1919 enum rtx_code ycode = GET_CODE (y);
1920 rtx z;
1921 rtx y1 = y;
1922 rtx *y2;
1923 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1924 (strictp ? strict_memory_address_p : memory_address_p);
1925 unsigned int mode_sz = GET_MODE_SIZE (mode);
1926
1927 if (CONSTANT_ADDRESS_P (y))
1928 return 1;
1929
1930 /* Adjusting an offsettable address involves changing to a narrower mode.
1931 Make sure that's OK. */
1932
1933 if (mode_dependent_address_p (y))
1934 return 0;
1935
1936 /* ??? How much offset does an offsettable BLKmode reference need?
1937 Clearly that depends on the situation in which it's being used.
1938 However, the current situation in which we test 0xffffffff is
1939 less than ideal. Caveat user. */
1940 if (mode_sz == 0)
1941 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1942
1943 /* If the expression contains a constant term,
1944 see if it remains valid when max possible offset is added. */
1945
1946 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1947 {
1948 int good;
1949
1950 y1 = *y2;
1951 *y2 = plus_constant (*y2, mode_sz - 1);
1952 /* Use QImode because an odd displacement may be automatically invalid
1953 for any wider mode. But it should be valid for a single byte. */
1954 good = (*addressp) (QImode, y);
1955
1956 /* In any case, restore old contents of memory. */
1957 *y2 = y1;
1958 return good;
1959 }
1960
1961 if (GET_RTX_CLASS (ycode) == 'a')
1962 return 0;
1963
1964 /* The offset added here is chosen as the maximum offset that
1965 any instruction could need to add when operating on something
1966 of the specified mode. We assume that if Y and Y+c are
1967 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1968 go inside a LO_SUM here, so we do so as well. */
1969 if (GET_CODE (y) == LO_SUM
1970 && mode != BLKmode
1971 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1972 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1973 plus_constant (XEXP (y, 1), mode_sz - 1));
1974 else
1975 z = plus_constant (y, mode_sz - 1);
1976
1977 /* Use QImode because an odd displacement may be automatically invalid
1978 for any wider mode. But it should be valid for a single byte. */
1979 return (*addressp) (QImode, z);
1980 }
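
/* Worked example (a sketch, not from the original source): checking an
   SImode reference to (plus (reg) (const_int 100)) with mode_sz == 4
   temporarily rewrites the address to (plus (reg) (const_int 103)) and
   asks the QImode address predicate whether that still matches.  The
   address is therefore accepted only if every byte of the word can be
   reached by a displacement of at most mode_sz - 1.  */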
1981
1982 /* Return 1 if ADDR is an address-expression whose effect depends
1983 on the mode of the memory reference it is used in.
1984
1985 Autoincrement addressing is a typical example of mode-dependence
1986 because the amount of the increment depends on the mode. */
1987
1988 int
1989 mode_dependent_address_p (addr)
1990 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1991 {
1992 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1993 return 0;
1994 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1995 win: ATTRIBUTE_UNUSED_LABEL
1996 return 1;
1997 }
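
/* Example (for illustration): on a target whose
   GO_IF_MODE_DEPENDENT_ADDRESS jumps for autoincrement addresses,
   (post_inc (reg)) is mode dependent, since the increment applied equals
   GET_MODE_SIZE of the reference, while (plus (reg) (const_int 4)) is
   not.  */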
1998
1999 /* Return 1 if OP is a general operand
2000 other than a memory ref with a mode dependent address. */
2001
2002 int
2003 mode_independent_operand (op, mode)
2004 enum machine_mode mode;
2005 rtx op;
2006 {
2007 rtx addr;
2008
2009 if (! general_operand (op, mode))
2010 return 0;
2011
2012 if (GET_CODE (op) != MEM)
2013 return 1;
2014
2015 addr = XEXP (op, 0);
2016 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2017 return 1;
2018 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2019 lose: ATTRIBUTE_UNUSED_LABEL
2020 return 0;
2021 }
2022 \f
2023 /* Like extract_insn, but save the insn extracted and don't extract again
2024 when called again for the same insn, expecting that recog_data still
2025 contains valid information.  This is used primarily by the gen_attr
2026 infrastructure, which often extracts the same insn again and again. */
2027 void
2028 extract_insn_cached (insn)
2029 rtx insn;
2030 {
2031 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2032 return;
2033 extract_insn (insn);
2034 recog_data.insn = insn;
2035 }
2036 /* Do a cached extract_insn, then constrain_operands, and complain about
2037 failures.  Used by the generated insn-attrtab code. */
2038 void
2039 extract_constrain_insn_cached (insn)
2040 rtx insn;
2041 {
2042 extract_insn_cached (insn);
2043 if (which_alternative == -1
2044 && !constrain_operands (reload_completed))
2045 fatal_insn_not_found (insn);
2046 }
2047 /* Do a cached constrain_operands: if an alternative has already been found, return success without re-checking. */
2048 int
2049 constrain_operands_cached (strict)
2050 int strict;
2051 {
2052 if (which_alternative == -1)
2053 return constrain_operands (strict);
2054 else
2055 return 1;
2056 }
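
/* Typical use of the cached entry points (a sketch of the pattern in
   generated attribute code, not a quote from it):

     extract_insn_cached (insn);
     if (! constrain_operands_cached (reload_completed))
       fatal_insn_not_found (insn);
     switch (which_alternative)
       ...

   Repeated attribute queries about the same insn then avoid both
   re-extraction and re-constraining.  */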
2057 \f
2058 /* Analyze INSN and fill in recog_data. */
2059
2060 void
2061 extract_insn (insn)
2062 rtx insn;
2063 {
2064 int i;
2065 int icode;
2066 int noperands;
2067 rtx body = PATTERN (insn);
2068
2069 recog_data.insn = NULL;
2070 recog_data.n_operands = 0;
2071 recog_data.n_alternatives = 0;
2072 recog_data.n_dups = 0;
2073 which_alternative = -1;
2074
2075 switch (GET_CODE (body))
2076 {
2077 case USE:
2078 case CLOBBER:
2079 case ASM_INPUT:
2080 case ADDR_VEC:
2081 case ADDR_DIFF_VEC:
2082 return;
2083
2084 case SET:
2085 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2086 goto asm_insn;
2087 else
2088 goto normal_insn;
2089 case PARALLEL:
2090 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2091 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2092 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2093 goto asm_insn;
2094 else
2095 goto normal_insn;
2096 case ASM_OPERANDS:
2097 asm_insn:
2098 recog_data.n_operands = noperands = asm_noperands (body);
2099 if (noperands >= 0)
2100 {
2101 /* This insn is an `asm' with operands. */
2102
2103 /* expand_asm_operands makes sure there aren't too many operands. */
2104 if (noperands > MAX_RECOG_OPERANDS)
2105 abort ();
2106
2107 /* Now get the operand values and constraints out of the insn. */
2108 decode_asm_operands (body, recog_data.operand,
2109 recog_data.operand_loc,
2110 recog_data.constraints,
2111 recog_data.operand_mode);
2112 if (noperands > 0)
2113 {
2114 const char *p = recog_data.constraints[0];
2115 recog_data.n_alternatives = 1;
2116 while (*p)
2117 recog_data.n_alternatives += (*p++ == ',');
2118 }
2119 break;
2120 }
2121 fatal_insn_not_found (insn);
2122
2123 default:
2124 normal_insn:
2125 /* Ordinary insn: recognize it, get the operands via insn_extract
2126 and get the constraints. */
2127
2128 icode = recog_memoized (insn);
2129 if (icode < 0)
2130 fatal_insn_not_found (insn);
2131
2132 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2133 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2134 recog_data.n_dups = insn_data[icode].n_dups;
2135
2136 insn_extract (insn);
2137
2138 for (i = 0; i < noperands; i++)
2139 {
2140 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2141 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2142 /* A VOIDmode match_operand gets its mode from its real operand. */
2143 if (recog_data.operand_mode[i] == VOIDmode)
2144 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2145 }
2146 }
2147 for (i = 0; i < noperands; i++)
2148 recog_data.operand_type[i]
2149 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2150 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2151 : OP_IN);
2152
2153 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2154 abort ();
2155 }
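
/* Usage sketch (illustrative only): after extract_insn, a pass can walk
   the operands through recog_data without re-parsing the pattern:

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] != OP_IN)
         mark_written (recog_data.operand[i]);

   where mark_written is a stand-in for whatever the caller does with
   each output or in/out operand.  */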
2156
2157 /* After calling extract_insn, you can use this function to extract some
2158 information from the constraint strings into a more usable form.
2159 The collected data is stored in recog_op_alt. */
2160 void
2161 preprocess_constraints ()
2162 {
2163 int i;
2164
2165 memset (recog_op_alt, 0, sizeof recog_op_alt);
2166 for (i = 0; i < recog_data.n_operands; i++)
2167 {
2168 int j;
2169 struct operand_alternative *op_alt;
2170 const char *p = recog_data.constraints[i];
2171
2172 op_alt = recog_op_alt[i];
2173
2174 for (j = 0; j < recog_data.n_alternatives; j++)
2175 {
2176 op_alt[j].class = NO_REGS;
2177 op_alt[j].constraint = p;
2178 op_alt[j].matches = -1;
2179 op_alt[j].matched = -1;
2180
2181 if (*p == '\0' || *p == ',')
2182 {
2183 op_alt[j].anything_ok = 1;
2184 continue;
2185 }
2186
2187 for (;;)
2188 {
2189 char c = *p++;
2190 if (c == '#')
2191 do
2192 c = *p++;
2193 while (c != ',' && c != '\0');
2194 if (c == ',' || c == '\0')
2195 break;
2196
2197 switch (c)
2198 {
2199 case '=': case '+': case '*': case '%':
2200 case 'E': case 'F': case 'G': case 'H':
2201 case 's': case 'i': case 'n':
2202 case 'I': case 'J': case 'K': case 'L':
2203 case 'M': case 'N': case 'O': case 'P':
2204 /* These don't say anything we care about. */
2205 break;
2206
2207 case '?':
2208 op_alt[j].reject += 6;
2209 break;
2210 case '!':
2211 op_alt[j].reject += 600;
2212 break;
2213 case '&':
2214 op_alt[j].earlyclobber = 1;
2215 break;
2216
2217 case '0': case '1': case '2': case '3': case '4':
2218 case '5': case '6': case '7': case '8': case '9':
2219 {
2220 char *end;
2221 op_alt[j].matches = strtoul (p - 1, &end, 10);
2222 recog_op_alt[op_alt[j].matches][j].matched = i;
2223 p = end;
2224 }
2225 break;
2226
2227 case 'm':
2228 op_alt[j].memory_ok = 1;
2229 break;
2230 case '<':
2231 op_alt[j].decmem_ok = 1;
2232 break;
2233 case '>':
2234 op_alt[j].incmem_ok = 1;
2235 break;
2236 case 'V':
2237 op_alt[j].nonoffmem_ok = 1;
2238 break;
2239 case 'o':
2240 op_alt[j].offmem_ok = 1;
2241 break;
2242 case 'X':
2243 op_alt[j].anything_ok = 1;
2244 break;
2245
2246 case 'p':
2247 op_alt[j].is_address = 1;
2248 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2249 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2250 break;
2251
2252 case 'g': case 'r':
2253 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2254 break;
2255
2256 default:
2257 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2258 break;
2259 }
2260 }
2261 }
2262 }
2263 }
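
/* Usage sketch (hypothetical): once preprocess_constraints has run, the
   per-alternative data can be read directly instead of re-scanning the
   constraint strings, e.g.

     preprocess_constraints ();
     for (j = 0; j < recog_data.n_alternatives; j++)
       if (recog_op_alt[i][j].memory_ok || recog_op_alt[i][j].offmem_ok)
         ...

   which is the sort of query passes like regmove.c make of this data.  */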
2264
2265 /* Check the operands of an insn against the insn's operand constraints
2266 and return 1 if they are valid.
2267 The information about the insn's operands, constraints, operand modes
2268 etc. is obtained from the global variables set up by extract_insn.
2269
2270 WHICH_ALTERNATIVE is set to a number which indicates which
2271 alternative of constraints was matched: 0 for the first alternative,
2272 1 for the next, etc.
2273
2274 In addition, when two operands are required to match
2275 and it happens that the output operand is (reg) while the
2276 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2277 make the output operand look like the input.
2278 This is because the output operand is the one the template will print.
2279
2280 This is used in final, just before printing the assembler code and by
2281 the routines that determine an insn's attribute.
2282
2283 If STRICT is a positive value, it means that we have been
2284 called after reload has been completed. In that case, we must
2285 do all checks strictly. If it is zero, it means that we have been called
2286 before reload has completed. In that case, we first try to see if we can
2287 find an alternative that matches strictly. If not, we try again, this
2288 time assuming that reload will fix up the insn. This provides a "best
2289 guess" for the alternative and is used to compute attributes of insns prior
2290 to reload. A negative value of STRICT is used for this internal call. */
2291
2292 struct funny_match
2293 {
2294 int this, other;
2295 };
2296
2297 int
2298 constrain_operands (strict)
2299 int strict;
2300 {
2301 const char *constraints[MAX_RECOG_OPERANDS];
2302 int matching_operands[MAX_RECOG_OPERANDS];
2303 int earlyclobber[MAX_RECOG_OPERANDS];
2304 int c;
2305
2306 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2307 int funny_match_index;
2308
2309 which_alternative = 0;
2310 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2311 return 1;
2312
2313 for (c = 0; c < recog_data.n_operands; c++)
2314 {
2315 constraints[c] = recog_data.constraints[c];
2316 matching_operands[c] = -1;
2317 }
2318
2319 do
2320 {
2321 int opno;
2322 int lose = 0;
2323 funny_match_index = 0;
2324
2325 for (opno = 0; opno < recog_data.n_operands; opno++)
2326 {
2327 rtx op = recog_data.operand[opno];
2328 enum machine_mode mode = GET_MODE (op);
2329 const char *p = constraints[opno];
2330 int offset = 0;
2331 int win = 0;
2332 int val;
2333
2334 earlyclobber[opno] = 0;
2335
2336 /* A unary operator may be accepted by the predicate, but it
2337 is irrelevant for matching constraints. */
2338 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2339 op = XEXP (op, 0);
2340
2341 if (GET_CODE (op) == SUBREG)
2342 {
2343 if (GET_CODE (SUBREG_REG (op)) == REG
2344 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2345 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2346 GET_MODE (SUBREG_REG (op)),
2347 SUBREG_BYTE (op),
2348 GET_MODE (op));
2349 op = SUBREG_REG (op);
2350 }
2351
2352 /* An empty constraint or empty alternative
2353 allows anything which matched the pattern. */
2354 if (*p == 0 || *p == ',')
2355 win = 1;
2356
2357 while (*p && (c = *p++) != ',')
2358 switch (c)
2359 {
2360 case '?': case '!': case '*': case '%':
2361 case '=': case '+':
2362 break;
2363
2364 case '#':
2365 /* Ignore rest of this alternative as far as
2366 constraint checking is concerned. */
2367 while (*p && *p != ',')
2368 p++;
2369 break;
2370
2371 case '&':
2372 earlyclobber[opno] = 1;
2373 break;
2374
2375 case '0': case '1': case '2': case '3': case '4':
2376 case '5': case '6': case '7': case '8': case '9':
2377 {
2378 /* This operand must be the same as a previous one.
2379 This kind of constraint is used for instructions such
2380 as add when they take only two operands.
2381
2382 Note that the lower-numbered operand is passed first.
2383
2384 If we are not testing strictly, assume that this
2385 constraint will be satisfied. */
2386
2387 char *end;
2388 int match;
2389
2390 match = strtoul (p - 1, &end, 10);
2391 p = end;
2392
2393 if (strict < 0)
2394 val = 1;
2395 else
2396 {
2397 rtx op1 = recog_data.operand[match];
2398 rtx op2 = recog_data.operand[opno];
2399
2400 /* A unary operator may be accepted by the predicate,
2401 but it is irrelevant for matching constraints. */
2402 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2403 op1 = XEXP (op1, 0);
2404 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2405 op2 = XEXP (op2, 0);
2406
2407 val = operands_match_p (op1, op2);
2408 }
2409
2410 matching_operands[opno] = match;
2411 matching_operands[match] = opno;
2412
2413 if (val != 0)
2414 win = 1;
2415
2416 /* If output is *x and input is *--x, arrange later
2417 to change the output to *--x as well, since the
2418 output op is the one that will be printed. */
2419 if (val == 2 && strict > 0)
2420 {
2421 funny_match[funny_match_index].this = opno;
2422 funny_match[funny_match_index++].other = match;
2423 }
2424 }
2425 break;
2426
2427 case 'p':
2428 /* p is used for address_operands. When we are called by
2429 gen_reload, no one will have checked that the address is
2430 strictly valid, i.e., that all pseudos requiring hard regs
2431 have gotten them. */
2432 if (strict <= 0
2433 || (strict_memory_address_p (recog_data.operand_mode[opno],
2434 op)))
2435 win = 1;
2436 break;
2437
2438 /* No need to check general_operand again;
2439 it was done in insn-recog.c. */
2440 case 'g':
2441 /* Anything goes unless it is a REG and really has a hard reg
2442 but the hard reg is not in the class GENERAL_REGS. */
2443 if (strict < 0
2444 || GENERAL_REGS == ALL_REGS
2445 || GET_CODE (op) != REG
2446 || (reload_in_progress
2447 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2448 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2449 win = 1;
2450 break;
2451
2452 case 'X':
2453 /* This is used for a MATCH_SCRATCH in the cases when
2454 we don't actually need anything. So anything goes
2455 any time. */
2456 win = 1;
2457 break;
2458
2459 case 'm':
2460 if (GET_CODE (op) == MEM
2461 /* Before reload, accept what reload can turn into mem. */
2462 || (strict < 0 && CONSTANT_P (op))
2463 /* During reload, accept a pseudo. */
2464 || (reload_in_progress && GET_CODE (op) == REG
2465 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2466 win = 1;
2467 break;
2468
2469 case '<':
2470 if (GET_CODE (op) == MEM
2471 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2472 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2473 win = 1;
2474 break;
2475
2476 case '>':
2477 if (GET_CODE (op) == MEM
2478 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2479 || GET_CODE (XEXP (op, 0)) == POST_INC))
2480 win = 1;
2481 break;
2482
2483 case 'E':
2484 case 'F':
2485 if (GET_CODE (op) == CONST_DOUBLE)
2486 win = 1;
2487 break;
2488
2489 case 'G':
2490 case 'H':
2491 if (GET_CODE (op) == CONST_DOUBLE
2492 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2493 win = 1;
2494 break;
2495
2496 case 's':
2497 if (GET_CODE (op) == CONST_INT
2498 || (GET_CODE (op) == CONST_DOUBLE
2499 && GET_MODE (op) == VOIDmode))
2500 break;
2501 case 'i':
2502 if (CONSTANT_P (op))
2503 win = 1;
2504 break;
2505
2506 case 'n':
2507 if (GET_CODE (op) == CONST_INT
2508 || (GET_CODE (op) == CONST_DOUBLE
2509 && GET_MODE (op) == VOIDmode))
2510 win = 1;
2511 break;
2512
2513 case 'I':
2514 case 'J':
2515 case 'K':
2516 case 'L':
2517 case 'M':
2518 case 'N':
2519 case 'O':
2520 case 'P':
2521 if (GET_CODE (op) == CONST_INT
2522 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2523 win = 1;
2524 break;
2525
2526 case 'V':
2527 if (GET_CODE (op) == MEM
2528 && ((strict > 0 && ! offsettable_memref_p (op))
2529 || (strict < 0
2530 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2531 || (reload_in_progress
2532 && !(GET_CODE (op) == REG
2533 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2534 win = 1;
2535 break;
2536
2537 case 'o':
2538 if ((strict > 0 && offsettable_memref_p (op))
2539 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2540 /* Before reload, accept what reload can handle. */
2541 || (strict < 0
2542 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2543 /* During reload, accept a pseudo. */
2544 || (reload_in_progress && GET_CODE (op) == REG
2545 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2546 win = 1;
2547 break;
2548
2549 default:
2550 {
2551 enum reg_class class;
2552
2553 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2554 if (class != NO_REGS)
2555 {
2556 if (strict < 0
2557 || (strict == 0
2558 && GET_CODE (op) == REG
2559 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2560 || (strict == 0 && GET_CODE (op) == SCRATCH)
2561 || (GET_CODE (op) == REG
2562 && reg_fits_class_p (op, class, offset, mode)))
2563 win = 1;
2564 }
2565 #ifdef EXTRA_CONSTRAINT
2566 else if (EXTRA_CONSTRAINT (op, c))
2567 win = 1;
2568 #endif
2569 break;
2570 }
2571 }
2572
2573 constraints[opno] = p;
2574 /* If this operand did not win somehow,
2575 this alternative loses. */
2576 if (! win)
2577 lose = 1;
2578 }
2579 /* This alternative won; the operands are ok.
2580 Change whichever operands this alternative says to change. */
2581 if (! lose)
2582 {
2583 int opno, eopno;
2584
2585 /* See if any earlyclobber operand conflicts with some other
2586 operand. */
2587
2588 if (strict > 0)
2589 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2590 /* Ignore earlyclobber operands now in memory,
2591 because we would often report failure when we have
2592 two memory operands, one of which was formerly a REG. */
2593 if (earlyclobber[eopno]
2594 && GET_CODE (recog_data.operand[eopno]) == REG)
2595 for (opno = 0; opno < recog_data.n_operands; opno++)
2596 if ((GET_CODE (recog_data.operand[opno]) == MEM
2597 || recog_data.operand_type[opno] != OP_OUT)
2598 && opno != eopno
2599 /* Ignore things like match_operator operands. */
2600 && *recog_data.constraints[opno] != 0
2601 && ! (matching_operands[opno] == eopno
2602 && operands_match_p (recog_data.operand[opno],
2603 recog_data.operand[eopno]))
2604 && ! safe_from_earlyclobber (recog_data.operand[opno],
2605 recog_data.operand[eopno]))
2606 lose = 1;
2607
2608 if (! lose)
2609 {
2610 while (--funny_match_index >= 0)
2611 {
2612 recog_data.operand[funny_match[funny_match_index].other]
2613 = recog_data.operand[funny_match[funny_match_index].this];
2614 }
2615
2616 return 1;
2617 }
2618 }
2619
2620 which_alternative++;
2621 }
2622 while (which_alternative < recog_data.n_alternatives);
2623
2624 which_alternative = -1;
2625 /* If we are about to reject this, but we are not to test strictly,
2626 try a very loose test. Only return failure if it fails also. */
2627 if (strict == 0)
2628 return constrain_operands (-1);
2629 else
2630 return 0;
2631 }
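
/* Usage sketch (not a quote from final.c): after reload, callers check an
   insn strictly and rely on the match to fix up funny pre-inc/pre-dec
   operands:

     extract_insn (insn);
     if (! constrain_operands (1))
       fatal_insn_not_found (insn);

   Before reload, constrain_operands (0) instead yields the "best guess"
   alternative described above.  */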
2632
2633 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2634 is a hard reg in class CLASS when its regno is offset by OFFSET
2635 and changed to mode MODE.
2636 If OPERAND occupies multiple hard regs, all of them must be in CLASS. */
2637
2638 int
2639 reg_fits_class_p (operand, class, offset, mode)
2640 rtx operand;
2641 enum reg_class class;
2642 int offset;
2643 enum machine_mode mode;
2644 {
2645 int regno = REGNO (operand);
2646 if (regno < FIRST_PSEUDO_REGISTER
2647 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2648 regno + offset))
2649 {
2650 int sr;
2651 regno += offset;
2652 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2653 sr > 0; sr--)
2654 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2655 regno + sr))
2656 break;
2657 return sr == 0;
2658 }
2659
2660 return 0;
2661 }
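
/* Worked example (illustrative): for a DImode value in hard register 2 on
   a 32-bit target where HARD_REGNO_NREGS returns 2, the loop above also
   tests hard register 3, so

     reg_fits_class_p (operand, class, 0, DImode)

   succeeds only when both words of the register pair lie inside CLASS.  */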
2662 \f
2663 /* Split a single instruction.  Helper function for split_all_insns.
2664 Return the last insn in the sequence if successful, or NULL if unsuccessful. */
2665 static rtx
2666 split_insn (insn)
2667 rtx insn;
2668 {
2669 rtx set;
2670 if (!INSN_P (insn))
2671 ;
2672 /* Don't split no-op move insns. These should silently
2673 disappear later in final. Splitting such insns would
2674 break the code that handles REG_NO_CONFLICT blocks. */
2675
2676 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2677 {
2678 /* Nops get in the way while scheduling, so delete them
2679 now if register allocation has already been done. It
2680 is too risky to try to do this before register
2681 allocation, and there are unlikely to be very many
2682 nops then anyways. */
2683 if (reload_completed)
2684 delete_insn_and_edges (insn);
2685 }
2686 else
2687 {
2688 /* Split insns here to get max fine-grain parallelism. */
2689 rtx first = PREV_INSN (insn);
2690 rtx last = try_split (PATTERN (insn), insn, 1);
2691
2692 if (last != insn)
2693 {
2694 /* try_split returns the NOTE that INSN became. */
2695 PUT_CODE (insn, NOTE);
2696 NOTE_SOURCE_FILE (insn) = 0;
2697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2698
2699 /* ??? Coddle to md files that generate subregs in post-
2700 reload splitters instead of computing the proper
2701 hard register. */
2702 if (reload_completed && first != last)
2703 {
2704 first = NEXT_INSN (first);
2705 while (1)
2706 {
2707 if (INSN_P (first))
2708 cleanup_subreg_operands (first);
2709 if (first == last)
2710 break;
2711 first = NEXT_INSN (first);
2712 }
2713 }
2714 return last;
2715 }
2716 }
2717 return NULL_RTX;
2718 }
2719 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2720
2721 void
2722 split_all_insns (upd_life)
2723 int upd_life;
2724 {
2725 sbitmap blocks;
2726 int changed;
2727 basic_block bb;
2728
2729 blocks = sbitmap_alloc (last_basic_block);
2730 sbitmap_zero (blocks);
2731 changed = 0;
2732
2733 FOR_EACH_BB_REVERSE (bb)
2734 {
2735 rtx insn, next;
2736 bool finish = false;
2737
2738 for (insn = bb->head; !finish ; insn = next)
2739 {
2740 rtx last;
2741
2742 /* Can't use `next_real_insn' because that might go across
2743 CODE_LABELS and short-out basic blocks. */
2744 next = NEXT_INSN (insn);
2745 finish = (insn == bb->end);
2746 last = split_insn (insn);
2747 if (last)
2748 {
2749 /* The split sequence may include a barrier, but the
2750 BB boundary we are interested in will be set to the
2751 previous one. */
2752
2753 while (GET_CODE (last) == BARRIER)
2754 last = PREV_INSN (last);
2755 SET_BIT (blocks, bb->index);
2756 changed = 1;
2757 insn = last;
2758 }
2759 }
2760 }
2761
2762 if (changed)
2763 {
2764 find_many_sub_basic_blocks (blocks);
2765 }
2766
2767 if (changed && upd_life)
2768 {
2769 count_or_remove_death_notes (blocks, 1);
2770 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2771 }
2772 #ifdef ENABLE_CHECKING
2773 verify_flow_info ();
2774 #endif
2775
2776 sbitmap_free (blocks);
2777 }
2778
2779 /* Same as split_all_insns, but do not expect CFG to be available.
2780 Used by machine dependent reorg passes. */
2781
2782 void
2783 split_all_insns_noflow ()
2784 {
2785 rtx next, insn;
2786
2787 for (insn = get_insns (); insn; insn = next)
2788 {
2789 next = NEXT_INSN (insn);
2790 split_insn (insn);
2791 }
2792 return;
2793 }
2794 \f
2795 #ifdef HAVE_peephole2
2796 struct peep2_insn_data
2797 {
2798 rtx insn;
2799 regset live_before;
2800 };
2801
2802 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2803 static int peep2_current;
2804
2805 /* A non-insn marker indicating the last insn of the block.
2806 The live_before regset for this element is correct, indicating
2807 global_live_at_end for the block. */
2808 #define PEEP2_EOB pc_rtx
2809
2810 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2811 does not exist. Used by the recognizer to find the next insn to match
2812 in a multi-insn pattern. */
2813
2814 rtx
2815 peep2_next_insn (n)
2816 int n;
2817 {
2818 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2819 abort ();
2820
2821 n += peep2_current;
2822 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2823 n -= MAX_INSNS_PER_PEEP2 + 1;
2824
2825 if (peep2_insn_data[n].insn == PEEP2_EOB)
2826 return NULL_RTX;
2827 return peep2_insn_data[n].insn;
2828 }
2829
2830 /* Return true if REGNO is dead before the Nth non-note insn
2831 after `current'. */
2832
2833 int
2834 peep2_regno_dead_p (ofs, regno)
2835 int ofs;
2836 int regno;
2837 {
2838 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2839 abort ();
2840
2841 ofs += peep2_current;
2842 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2843 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2844
2845 if (peep2_insn_data[ofs].insn == NULL_RTX)
2846 abort ();
2847
2848 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2849 }
2850
2851 /* Similarly for a REG. */
2852
2853 int
2854 peep2_reg_dead_p (ofs, reg)
2855 int ofs;
2856 rtx reg;
2857 {
2858 int regno, n;
2859
2860 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2861 abort ();
2862
2863 ofs += peep2_current;
2864 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2865 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2866
2867 if (peep2_insn_data[ofs].insn == NULL_RTX)
2868 abort ();
2869
2870 regno = REGNO (reg);
2871 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2872 while (--n >= 0)
2873 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2874 return 0;
2875 return 1;
2876 }
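
/* Example (a sketch of md-file practice): a define_peephole2 condition
   such as

     "peep2_reg_dead_p (2, operands[0])"

   accepts a match only if operands[0] is dead before the second insn
   after the current one, so the replacement sequence may clobber it.  */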
2877
2878 /* Try to find a hard register of mode MODE, matching the register class in
2879 CLASS_STR, which is available from the beginning of the insn at offset
2880 FROM and remains available until the end of the insn at offset TO; both
2881 offsets count non-note insns from `current', in the same way as for
2882 peep2_next_insn.
2883 Registers that already have bits set in REG_SET will not be considered.
2884
2885 If an appropriate register is available, it will be returned and the
2886 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2887 returned. */
2888
2889 rtx
2890 peep2_find_free_register (from, to, class_str, mode, reg_set)
2891 int from, to;
2892 const char *class_str;
2893 enum machine_mode mode;
2894 HARD_REG_SET *reg_set;
2895 {
2896 static int search_ofs;
2897 enum reg_class class;
2898 HARD_REG_SET live;
2899 int i;
2900
2901 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2902 abort ();
2903
2904 from += peep2_current;
2905 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2906 from -= MAX_INSNS_PER_PEEP2 + 1;
2907 to += peep2_current;
2908 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2909 to -= MAX_INSNS_PER_PEEP2 + 1;
2910
2911 if (peep2_insn_data[from].insn == NULL_RTX)
2912 abort ();
2913 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2914
2915 while (from != to)
2916 {
2917 HARD_REG_SET this_live;
2918
2919 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2920 from = 0;
2921 if (peep2_insn_data[from].insn == NULL_RTX)
2922 abort ();
2923 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2924 IOR_HARD_REG_SET (live, this_live);
2925 }
2926
2927 class = (class_str[0] == 'r' ? GENERAL_REGS
2928 : REG_CLASS_FROM_LETTER (class_str[0]));
2929
2930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2931 {
2932 int raw_regno, regno, success, j;
2933
2934 /* Distribute the free registers as much as possible. */
2935 raw_regno = search_ofs + i;
2936 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2937 raw_regno -= FIRST_PSEUDO_REGISTER;
2938 #ifdef REG_ALLOC_ORDER
2939 regno = reg_alloc_order[raw_regno];
2940 #else
2941 regno = raw_regno;
2942 #endif
2943
2944 /* Don't allocate fixed registers. */
2945 if (fixed_regs[regno])
2946 continue;
2947 /* Make sure the register is of the right class. */
2948 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2949 continue;
2950 /* And can support the mode we need. */
2951 if (! HARD_REGNO_MODE_OK (regno, mode))
2952 continue;
2953 /* And that we don't create an extra save/restore. */
2954 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2955 continue;
2956 /* And we don't clobber traceback for noreturn functions. */
2957 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2958 && (! reload_completed || frame_pointer_needed))
2959 continue;
2960
2961 success = 1;
2962 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2963 {
2964 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2965 || TEST_HARD_REG_BIT (live, regno + j))
2966 {
2967 success = 0;
2968 break;
2969 }
2970 }
2971 if (success)
2972 {
2973 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2974 SET_HARD_REG_BIT (*reg_set, regno + j);
2975
2976 /* Start the next search with the next register. */
2977 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2978 raw_regno = 0;
2979 search_ofs = raw_regno;
2980
2981 return gen_rtx_REG (mode, regno);
2982 }
2983 }
2984
2985 search_ofs = 0;
2986 return NULL_RTX;
2987 }
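
/* Usage sketch (hypothetical, modeled on md-file practice): a peephole
   replacement that needs a scratch register might try

     HARD_REG_SET used;
     rtx scratch;

     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   i.e. ask for a general register that is free across insns 0 and 1 of
   the match; FAIL here is the generator's way of rejecting the match.  */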
2988
2989 /* Perform the peephole2 optimization pass. */
2990
2991 void
2992 peephole2_optimize (dump_file)
2993 FILE *dump_file ATTRIBUTE_UNUSED;
2994 {
2995 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2996 rtx insn, prev;
2997 regset live;
2998 int i;
2999 basic_block bb;
3000 #ifdef HAVE_conditional_execution
3001 sbitmap blocks;
3002 bool changed;
3003 #endif
3004 bool do_cleanup_cfg = false;
3005 bool do_rebuild_jump_labels = false;
3006
3007 /* Initialize the regsets we're going to use. */
3008 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3009 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3010 live = INITIALIZE_REG_SET (rs_heads[i]);
3011
3012 #ifdef HAVE_conditional_execution
3013 blocks = sbitmap_alloc (last_basic_block);
3014 sbitmap_zero (blocks);
3015 changed = false;
3016 #else
3017 count_or_remove_death_notes (NULL, 1);
3018 #endif
3019
3020 FOR_EACH_BB_REVERSE (bb)
3021 {
3022 struct propagate_block_info *pbi;
3023
3024 /* Indicate that all slots except the last hold invalid data. */
3025 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3026 peep2_insn_data[i].insn = NULL_RTX;
3027
3028 /* Indicate that the last slot contains live_after data. */
3029 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3030 peep2_current = MAX_INSNS_PER_PEEP2;
3031
3032 /* Start up propagation. */
3033 COPY_REG_SET (live, bb->global_live_at_end);
3034 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3035
3036 #ifdef HAVE_conditional_execution
3037 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3038 #else
3039 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3040 #endif
3041
3042 for (insn = bb->end; ; insn = prev)
3043 {
3044 prev = PREV_INSN (insn);
3045 if (INSN_P (insn))
3046 {
3047 rtx try, before_try, x;
3048 int match_len;
3049 rtx note;
3050 bool was_call = false;
3051
3052 /* Record this insn. */
3053 if (--peep2_current < 0)
3054 peep2_current = MAX_INSNS_PER_PEEP2;
3055 peep2_insn_data[peep2_current].insn = insn;
3056 propagate_one_insn (pbi, insn);
3057 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3058
3059 /* Match the peephole. */
3060 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3061 if (try != NULL)
3062 {
3063 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3064 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3065 cfg-related call notes. */
3066 for (i = 0; i <= match_len; ++i)
3067 {
3068 int j, k;
3069 rtx old_insn, new_insn, note;
3070
3071 j = i + peep2_current;
3072 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3073 j -= MAX_INSNS_PER_PEEP2 + 1;
3074 old_insn = peep2_insn_data[j].insn;
3075 if (GET_CODE (old_insn) != CALL_INSN)
3076 continue;
3077 was_call = true;
3078
3079 new_insn = NULL_RTX;
3080 if (GET_CODE (try) == SEQUENCE)
3081 for (k = XVECLEN (try, 0) - 1; k >= 0; k--)
3082 {
3083 rtx x = XVECEXP (try, 0, k);
3084 if (GET_CODE (x) == CALL_INSN)
3085 {
3086 new_insn = x;
3087 break;
3088 }
3089 }
3090 else if (GET_CODE (try) == CALL_INSN)
3091 new_insn = try;
3092 if (! new_insn)
3093 abort ();
3094
3095 CALL_INSN_FUNCTION_USAGE (new_insn)
3096 = CALL_INSN_FUNCTION_USAGE (old_insn);
3097
3098 for (note = REG_NOTES (old_insn);
3099 note;
3100 note = XEXP (note, 1))
3101 switch (REG_NOTE_KIND (note))
3102 {
3103 case REG_NORETURN:
3104 case REG_SETJMP:
3105 case REG_ALWAYS_RETURN:
3106 REG_NOTES (new_insn)
3107 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3108 XEXP (note, 0),
3109 REG_NOTES (new_insn));
3110 default:
3111 /* Discard all other reg notes. */
3112 break;
3113 }
3114
3115 /* Croak if there is another call in the sequence. */
3116 while (++i <= match_len)
3117 {
3118 j = i + peep2_current;
3119 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3120 j -= MAX_INSNS_PER_PEEP2 + 1;
3121 old_insn = peep2_insn_data[j].insn;
3122 if (GET_CODE (old_insn) == CALL_INSN)
3123 abort ();
3124 }
3125 break;
3126 }
3127
3128 i = match_len + peep2_current;
3129 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3130 i -= MAX_INSNS_PER_PEEP2 + 1;
3131
3132 note = find_reg_note (peep2_insn_data[i].insn,
3133 REG_EH_REGION, NULL_RTX);
3134
3135 /* Replace the old sequence with the new. */
3136 try = emit_insn_after (try, peep2_insn_data[i].insn);
3137 before_try = PREV_INSN (insn);
3138 delete_insn_chain (insn, peep2_insn_data[i].insn);
3139
3140 /* Re-insert the EH_REGION notes. */
3141 if (note || (was_call && nonlocal_goto_handler_labels))
3142 {
3143 edge eh_edge;
3144
3145 for (eh_edge = bb->succ; eh_edge
3146 ; eh_edge = eh_edge->succ_next)
3147 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3148 break;
3149
3150 for (x = try ; x != before_try ; x = PREV_INSN (x))
3151 if (GET_CODE (x) == CALL_INSN
3152 || (flag_non_call_exceptions
3153 && may_trap_p (PATTERN (x))
3154 && !find_reg_note (x, REG_EH_REGION, NULL)))
3155 {
3156 if (note)
3157 REG_NOTES (x)
3158 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3159 XEXP (note, 0),
3160 REG_NOTES (x));
3161
3162 if (x != bb->end && eh_edge)
3163 {
3164 edge nfte, nehe;
3165 int flags;
3166
3167 nfte = split_block (bb, x);
3168 flags = (eh_edge->flags
3169 & (EDGE_EH | EDGE_ABNORMAL));
3170 if (GET_CODE (x) == CALL_INSN)
3171 flags |= EDGE_ABNORMAL_CALL;
3172 nehe = make_edge (nfte->src, eh_edge->dest,
3173 flags);
3174
3175 nehe->probability = eh_edge->probability;
3176 nfte->probability
3177 = REG_BR_PROB_BASE - nehe->probability;
3178
3179 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3180 #ifdef HAVE_conditional_execution
3181 SET_BIT (blocks, nfte->dest->index);
3182 changed = true;
3183 #endif
3184 bb = nfte->src;
3185 eh_edge = nehe;
3186 }
3187 }
3188
3189 /* The peephole may have converted a possibly trapping insn
3190 into a non-trapping one; zap any dummy outgoing edges. */
3191 do_cleanup_cfg |= purge_dead_edges (bb);
3192 }
3193
3194 #ifdef HAVE_conditional_execution
3195 /* With conditional execution, we cannot back up the
3196 live information so easily, since the conditional
3197 death data structures are not so self-contained.
3198 So record that we've made a modification to this
3199 block and update life information at the end. */
3200 SET_BIT (blocks, bb->index);
3201 changed = true;
3202
3203 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3204 peep2_insn_data[i].insn = NULL_RTX;
3205 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3206 #else
3207 /* Back up lifetime information past the end of the
3208 newly created sequence. */
3209 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3210 i = 0;
3211 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3212
3213 /* Update life information for the new sequence. */
3214 x = try;
3215 do
3216 {
3217 if (INSN_P (x))
3218 {
3219 if (--i < 0)
3220 i = MAX_INSNS_PER_PEEP2;
3221 peep2_insn_data[i].insn = x;
3222 propagate_one_insn (pbi, x);
3223 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3224 }
3225 x = PREV_INSN (x);
3226 }
3227 while (x != prev);
3228
3229 /* ??? Should verify that LIVE now matches what we
3230 had before the new sequence. */
3231
3232 peep2_current = i;
3233 #endif
3234
3235 /* If we generated a jump instruction, it won't have
3236 JUMP_LABEL set. Recompute after we're done. */
3237 for (x = try; x != before_try; x = PREV_INSN (x))
3238 if (GET_CODE (x) == JUMP_INSN)
3239 {
3240 do_rebuild_jump_labels = true;
3241 break;
3242 }
3243 }
3244 }
3245
3246 if (insn == bb->head)
3247 break;
3248 }
3249
3250 free_propagate_block_info (pbi);
3251 }
3252
3253 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3254 FREE_REG_SET (peep2_insn_data[i].live_before);
3255 FREE_REG_SET (live);
3256
3257 if (do_rebuild_jump_labels)
3258 rebuild_jump_labels (get_insns ());
3259
3260 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3261 we've changed global life since exception handlers are no longer
3262 reachable. */
3263 if (do_cleanup_cfg)
3264 {
3265 cleanup_cfg (0);
3266 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3267 }
3268 #ifdef HAVE_conditional_execution
3269 else
3270 {
3271 count_or_remove_death_notes (blocks, 1);
3272 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3273 }
3274 sbitmap_free (blocks);
3275 #endif
3276 }
3277 #endif /* HAVE_peephole2 */
3278
3279 /* Common predicates for use with define_bypass. */
3280
3281 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3282 data, not the address operand(s), of the store.  IN_INSN must be a
3283 single_set.  OUT_INSN must be either a single_set or a PARALLEL with
3284 SETs inside. */
3285
3286 int
3287 store_data_bypass_p (out_insn, in_insn)
3288 rtx out_insn, in_insn;
3289 {
3290 rtx out_set, in_set;
3291
3292 in_set = single_set (in_insn);
3293 if (! in_set)
3294 abort ();
3295
3296 if (GET_CODE (SET_DEST (in_set)) != MEM)
3297 return false;
3298
3299 out_set = single_set (out_insn);
3300 if (out_set)
3301 {
3302 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3303 return false;
3304 }
3305 else
3306 {
3307 rtx out_pat;
3308 int i;
3309
3310 out_pat = PATTERN (out_insn);
3311 if (GET_CODE (out_pat) != PARALLEL)
3312 abort ();
3313
3314 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3315 {
3316 rtx exp = XVECEXP (out_pat, 0, i);
3317
3318 if (GET_CODE (exp) == CLOBBER)
3319 continue;
3320
3321 if (GET_CODE (exp) != SET)
3322 abort ();
3323
3324 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3325 return false;
3326 }
3327 }
3328
3329 return true;
3330 }
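
/* Example (illustrative md usage): a pipeline description can shorten the
   latency seen by the stored value, as opposed to the address operands,
   with something like

     (define_bypass 1 "fp_arith" "fp_store" "store_data_bypass_p")

   where the reservation names are placeholders for ones defined in the
   target's md file.  */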
3331
3332 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3333 condition, and not the THEN or ELSE branch.  OUT_INSN may be a single set
3334 or multiple sets; IN_INSN should be a single_set for this to hold, but for
3335 convenience of insn categorization it may be any JUMP or CALL insn. */
3336
3337 int
3338 if_test_bypass_p (out_insn, in_insn)
3339 rtx out_insn, in_insn;
3340 {
3341 rtx out_set, in_set;
3342
3343 in_set = single_set (in_insn);
3344 if (! in_set)
3345 {
3346 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3347 return false;
3348 abort ();
3349 }
3350
3351 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3352 return false;
3353 in_set = SET_SRC (in_set);
3354
3355 out_set = single_set (out_insn);
3356 if (out_set)
3357 {
3358 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3359 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3360 return false;
3361 }
3362 else
3363 {
3364 rtx out_pat;
3365 int i;
3366
3367 out_pat = PATTERN (out_insn);
3368 if (GET_CODE (out_pat) != PARALLEL)
3369 abort ();
3370
3371 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3372 {
3373 rtx exp = XVECEXP (out_pat, 0, i);
3374
3375 if (GET_CODE (exp) == CLOBBER)
3376 continue;
3377
3378 if (GET_CODE (exp) != SET)
3379 abort ();
3380
3381 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3382 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3383 return false;
3384 }
3385 }
3386
3387 return true;
3388 }
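
/* Example (illustrative md usage): similarly,

     (define_bypass 1 "int_compare" "branch" "if_test_bypass_p")

   applies the shortened latency only when the dependency feeds the
   IF_THEN_ELSE condition; again the reservation names are placeholders.  */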