1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "recog.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "toplev.h"
35
36 #ifndef STACK_PUSH_CODE
37 #ifdef STACK_GROWS_DOWNWARD
38 #define STACK_PUSH_CODE PRE_DEC
39 #else
40 #define STACK_PUSH_CODE PRE_INC
41 #endif
42 #endif
43
44 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
45 static rtx *find_single_use_1 PROTO((rtx, rtx *));
46 static rtx *find_constant_term_loc PROTO((rtx *));
47 static int insn_invalid_p PROTO((rtx));
48
49 /* Nonzero means allow operands to be volatile.
50 This should be 0 if you are generating rtl, such as if you are calling
51 the functions in optabs.c and expmed.c (most of the time).
52 This should be 1 if all valid insns need to be recognized,
53 such as in regclass.c and final.c and reload.c.
54
55 init_recog and init_recog_no_volatile are responsible for setting this. */
56
57 int volatile_ok;
58
59 /* The next variables are set up by extract_insn. The first four of them
60 are also set up during insn_extract. */
61
62 /* Indexed by N, gives value of operand N. */
63 rtx recog_operand[MAX_RECOG_OPERANDS];
64
65 /* Indexed by N, gives location where operand N was found. */
66 rtx *recog_operand_loc[MAX_RECOG_OPERANDS];
67
68 /* Indexed by N, gives location where the Nth duplicate-appearance of
69 an operand was found. This is something that matched MATCH_DUP. */
70 rtx *recog_dup_loc[MAX_RECOG_OPERANDS];
71
72 /* Indexed by N, gives the operand number that was duplicated in the
73 Nth duplicate-appearance of an operand. */
74 char recog_dup_num[MAX_RECOG_OPERANDS];
75
76 /* The number of operands of the insn. */
77 int recog_n_operands;
78
79 /* The number of MATCH_DUPs in the insn. */
80 int recog_n_dups;
81
82 /* The number of alternatives in the constraints for the insn. */
83 int recog_n_alternatives;
84
85 /* Indexed by N, gives the mode of operand N. */
86 enum machine_mode recog_operand_mode[MAX_RECOG_OPERANDS];
87
88 /* Indexed by N, gives the constraint string for operand N. */
89 char *recog_constraints[MAX_RECOG_OPERANDS];
90
91 /* Indexed by N, gives the type (in, out, inout) for operand N. */
92 enum op_type recog_op_type[MAX_RECOG_OPERANDS];
93
94 #ifndef REGISTER_CONSTRAINTS
95 /* Indexed by N, nonzero if operand N should be an address. */
96 char recog_operand_address_p[MAX_RECOG_OPERANDS];
97 #endif
98
99 /* Contains a vector of operand_alternative structures for every operand.
100 Set up by preprocess_constraints. */
101 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
102
103 /* On return from `constrain_operands', indicate which alternative
104 was satisfied. */
105
106 int which_alternative;
107
108 /* Nonzero after end of reload pass.
109 Set to 1 or 0 by toplev.c.
110 Controls the significance of (SUBREG (MEM)). */
111
112 int reload_completed;
113
114 /* Initialize data used by the function `recog'.
115 This must be called once in the compilation of a function
116 before any insn recognition may be done in the function. */
117
118 void
119 init_recog_no_volatile ()
120 {
121 volatile_ok = 0;
122 }
123
124 void
125 init_recog ()
126 {
127 volatile_ok = 1;
128 }
129
130 /* Try recognizing the instruction INSN,
131 and return the code number that results.
132 Remember the code so that repeated calls do not
133 need to spend time on actual rerecognition.
134
135 This function is the normal interface to instruction recognition.
136 The automatically-generated function `recog' is normally called
137 through this one. (The only exception is in combine.c.) */
138
139 int
140 recog_memoized (insn)
141 rtx insn;
142 {
143 if (INSN_CODE (insn) < 0)
144 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
145 return INSN_CODE (insn);
146 }
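/* The sketch below is illustrative only and not part of this file: it
   shows the usual way a pass asks whether the target recognizes an insn,
   relying on recog_memoized to cache the result in INSN_CODE.  The helper
   name insn_is_recognizable is hypothetical.  */
#if 0
static int
insn_is_recognizable (insn)
     rtx insn;
{
  /* A negative code means `recog' rejected the pattern.  */
  return recog_memoized (insn) >= 0;
}
#endif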
147 \f
148 /* Check that X is an insn-body for an `asm' with operands
149 and that the operands mentioned in it are legitimate. */
150
151 int
152 check_asm_operands (x)
153 rtx x;
154 {
155 int noperands;
156 rtx *operands;
157 char **constraints;
158 int i;
159
160 /* Post-reload, be more strict with things. */
161 if (reload_completed)
162 {
163 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
164 extract_insn (make_insn_raw (x));
165 constrain_operands (1);
166 return which_alternative >= 0;
167 }
168
169 noperands = asm_noperands (x);
170 if (noperands < 0)
171 return 0;
172 if (noperands == 0)
173 return 1;
174
175 operands = (rtx *) alloca (noperands * sizeof (rtx));
176 constraints = (char **) alloca (noperands * sizeof (char *));
177
178 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
179
180 for (i = 0; i < noperands; i++)
181 {
182 char *c = constraints[i];
183 if (ISDIGIT ((unsigned char)c[0]))
184 c = constraints[c[0] - '0'];
185
186 if (! asm_operand_ok (operands[i], c))
187 return 0;
188 }
189
190 return 1;
191 }
192 \f
193 /* Static data for the next two routines. */
194
195 typedef struct change_t
196 {
197 rtx object;
198 int old_code;
199 rtx *loc;
200 rtx old;
201 } change_t;
202
203 static change_t *changes;
204 static int changes_allocated;
205
206 static int num_changes = 0;
207
208 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
209 at which NEW will be placed. If OBJECT is zero, no validation is done;
210 the change is simply made.
211
212 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
213 will be called with the address and mode as parameters. If OBJECT is
214 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
215 the change in place.
216
217 IN_GROUP is non-zero if this is part of a group of changes that must be
218 performed as a group. In that case, the changes will be stored. The
219 function `apply_change_group' will validate and apply the changes.
220
221 If IN_GROUP is zero, this is a single change. Try to recognize the insn
222 or validate the memory reference with the change applied. If the result
223 is not valid for the machine, suppress the change and return zero.
224 Otherwise, perform the change and return 1. */
225
226 int
227 validate_change (object, loc, new, in_group)
228 rtx object;
229 rtx *loc;
230 rtx new;
231 int in_group;
232 {
233 rtx old = *loc;
234
235 if (old == new || rtx_equal_p (old, new))
236 return 1;
237
238 if (in_group == 0 && num_changes != 0)
239 abort ();
240
241 *loc = new;
242
243 /* Save the information describing this change. */
244 if (num_changes >= changes_allocated)
245 {
246 if (changes_allocated == 0)
247 /* This value allows for repeated substitutions inside complex
248 indexed addresses, or changes in up to 5 insns. */
249 changes_allocated = MAX_RECOG_OPERANDS * 5;
250 else
251 changes_allocated *= 2;
252
253 changes =
254 (change_t*) xrealloc (changes,
255 sizeof (change_t) * changes_allocated);
256 }
257
258 changes[num_changes].object = object;
259 changes[num_changes].loc = loc;
260 changes[num_changes].old = old;
261
262 if (object && GET_CODE (object) != MEM)
263 {
264 /* Set INSN_CODE to force rerecognition of insn. Save old code in
265 case invalid. */
266 changes[num_changes].old_code = INSN_CODE (object);
267 INSN_CODE (object) = -1;
268 }
269
270 num_changes++;
271
272 /* If we are making a group of changes, return 1. Otherwise, validate the
273 change group we made. */
274
275 if (in_group)
276 return 1;
277 else
278 return apply_change_group ();
279 }
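/* Illustrative sketch, not part of this file: make two related changes to
   one insn as a group so that either both stick or neither does.  INSN,
   NEW_DEST and NEW_SRC are hypothetical values supplied by the caller,
   and the pattern is assumed to be a bare SET.  */
#if 0
static int
change_src_and_dest (insn, new_dest, new_src)
     rtx insn, new_dest, new_src;
{
  rtx set = PATTERN (insn);

  /* Queue both changes with IN_GROUP nonzero; nothing is validated yet.  */
  validate_change (insn, &SET_DEST (set), new_dest, 1);
  validate_change (insn, &SET_SRC (set), new_src, 1);

  /* Now validate the whole group; on failure every change is undone.  */
  return apply_change_group ();
}
#endif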
280
281 /* This subroutine of apply_change_group verifies whether the changes to INSN
282 were valid; i.e. whether INSN can still be recognized. */
283
284 static int
285 insn_invalid_p (insn)
286 rtx insn;
287 {
288 int icode = recog_memoized (insn);
289 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
290
291 if (is_asm && ! check_asm_operands (PATTERN (insn)))
292 return 1;
293 if (! is_asm && icode < 0)
294 return 1;
295
296 /* After reload, verify that all constraints are satisfied. */
297 if (reload_completed)
298 {
299 extract_insn (insn);
300
301 if (! constrain_operands (1))
302 return 1;
303 }
304
305 return 0;
306 }
307
308 /* Apply a group of changes previously issued with `validate_change'.
309 Return 1 if all changes are valid, zero otherwise. */
310
311 int
312 apply_change_group ()
313 {
314 int i;
315
316 /* The changes have been applied and all INSN_CODEs have been reset to force
317 rerecognition.
318
319 The changes are valid if we aren't given an object, or if we are
320 given a MEM and it still is a valid address, or if this is an insn
321 and it is recognized. In the latter case, if reload has completed,
322 we also require that the operands meet the constraints for
323 the insn. */
324
325 for (i = 0; i < num_changes; i++)
326 {
327 rtx object = changes[i].object;
328
329 if (object == 0)
330 continue;
331
332 if (GET_CODE (object) == MEM)
333 {
334 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
335 break;
336 }
337 else if (insn_invalid_p (object))
338 {
339 rtx pat = PATTERN (object);
340
341 /* Perhaps we couldn't recognize the insn because there were
342 extra CLOBBERs at the end. If so, try to re-recognize
343 without the last CLOBBER (later iterations will cause each of
344 them to be eliminated, in turn). But don't do this if we
345 have an ASM_OPERAND. */
346 if (GET_CODE (pat) == PARALLEL
347 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
348 && asm_noperands (PATTERN (object)) < 0)
349 {
350 rtx newpat;
351
352 if (XVECLEN (pat, 0) == 2)
353 newpat = XVECEXP (pat, 0, 0);
354 else
355 {
356 int j;
357
358 newpat = gen_rtx_PARALLEL (VOIDmode,
359 gen_rtvec (XVECLEN (pat, 0) - 1));
360 for (j = 0; j < XVECLEN (newpat, 0); j++)
361 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
362 }
363
364 /* Add a new change to this group to replace the pattern
365 with this new pattern. Then consider this change
366 as having succeeded. The change we added will
367 cause the entire call to fail if things remain invalid.
368
369 Note that this can lose if a later change than the one
370 we are processing specified &XVECEXP (PATTERN (object), 0, X)
371 but this shouldn't occur. */
372
373 validate_change (object, &PATTERN (object), newpat, 1);
374 }
375 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
376 /* If this insn is a CLOBBER or USE, it is always valid, but is
377 never recognized. */
378 continue;
379 else
380 break;
381 }
382 }
383
384 if (i == num_changes)
385 {
386 num_changes = 0;
387 return 1;
388 }
389 else
390 {
391 cancel_changes (0);
392 return 0;
393 }
394 }
395
396 /* Return the number of changes so far in the current group. */
397
398 int
399 num_validated_changes ()
400 {
401 return num_changes;
402 }
403
404 /* Retract the changes numbered NUM and up. */
405
406 void
407 cancel_changes (num)
408 int num;
409 {
410 int i;
411
412 /* Back out all the changes. Do this in the opposite order in which
413 they were made. */
414 for (i = num_changes - 1; i >= num; i--)
415 {
416 *changes[i].loc = changes[i].old;
417 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
418 INSN_CODE (changes[i].object) = changes[i].old_code;
419 }
420 num_changes = num;
421 }
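/* Illustrative sketch, not part of this file: queue a speculative change
   on top of an existing group and retract just that change, leaving any
   earlier queued changes alone.  INSN, LOC and NEW are hypothetical
   caller values.  */
#if 0
static void
speculate_then_retract (insn, loc, new)
     rtx insn;
     rtx *loc;
     rtx new;
{
  int mark = num_validated_changes ();  /* Changes below MARK are kept.  */

  validate_change (insn, loc, new, 1);  /* Queue only; do not validate.  */
  /* ... decide the change is unwanted after all ...  */
  cancel_changes (mark);                /* Retract changes MARK and up.  */
}
#endif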
422
423 /* Replace every occurrence of FROM in X with TO. Mark each change with
424 validate_change passing OBJECT. */
425
426 static void
427 validate_replace_rtx_1 (loc, from, to, object)
428 rtx *loc;
429 rtx from, to, object;
430 {
431 register int i, j;
432 register char *fmt;
433 register rtx x = *loc;
434 enum rtx_code code = GET_CODE (x);
435
436 /* X matches FROM if it is the same rtx or they are both referring to the
437 same register in the same mode. Avoid calling rtx_equal_p unless the
438 operands look similar. */
439
440 if (x == from
441 || (GET_CODE (x) == REG && GET_CODE (from) == REG
442 && GET_MODE (x) == GET_MODE (from)
443 && REGNO (x) == REGNO (from))
444 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
445 && rtx_equal_p (x, from)))
446 {
447 validate_change (object, loc, to, 1);
448 return;
449 }
450
451 /* For commutative or comparison operations, try replacing each argument
452 separately and seeing if we made any changes. If so, put a constant
453 argument last. */
454 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
455 {
456 int prev_changes = num_changes;
457
458 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
459 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
460 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
461 {
462 validate_change (object, loc,
463 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
464 : swap_condition (code),
465 GET_MODE (x), XEXP (x, 1),
466 XEXP (x, 0)),
467 1);
468 x = *loc;
469 code = GET_CODE (x);
470 }
471 }
472
473 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
474 done the substitution, otherwise we won't. */
475
476 switch (code)
477 {
478 case PLUS:
479 /* If we have a PLUS whose second operand is now a CONST_INT, use
480 plus_constant to try to simplify it. */
481 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
482 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
483 1);
484 return;
485
486 case MINUS:
487 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
488 {
489 validate_change (object, loc,
490 plus_constant (XEXP (x, 0), - INTVAL (to)),
491 1);
492 return;
493 }
494 break;
495
496 case ZERO_EXTEND:
497 case SIGN_EXTEND:
498 /* In these cases, the operation to be performed depends on the mode
499 of the operand. If we are replacing the operand with a VOIDmode
500 constant, we lose the information. So try to simplify the operation
501 in that case. If it fails, substitute in something that we know
502 won't be recognized. */
503 if (GET_MODE (to) == VOIDmode
504 && (XEXP (x, 0) == from
505 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
506 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
507 && REGNO (XEXP (x, 0)) == REGNO (from))))
508 {
509 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
510 GET_MODE (from));
511 if (new == 0)
512 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
513
514 validate_change (object, loc, new, 1);
515 return;
516 }
517 break;
518
519 case SUBREG:
520 /* If we have a SUBREG of a register that we are replacing and we are
521 replacing it with a MEM, make a new MEM and try replacing the
522 SUBREG with it. Don't do this if the MEM has a mode-dependent address
523 or if we would be widening it. */
524
525 if (SUBREG_REG (x) == from
526 && GET_CODE (from) == REG
527 && GET_CODE (to) == MEM
528 && ! mode_dependent_address_p (XEXP (to, 0))
529 && ! MEM_VOLATILE_P (to)
530 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
531 {
532 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
533 enum machine_mode mode = GET_MODE (x);
534 rtx new;
535
536 if (BYTES_BIG_ENDIAN)
537 offset += (MIN (UNITS_PER_WORD,
538 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
539 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
540
541 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
542 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
543 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
544 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
545 validate_change (object, loc, new, 1);
546 return;
547 }
548 break;
549
550 case ZERO_EXTRACT:
551 case SIGN_EXTRACT:
552 /* If we are replacing a register with memory, try to change the memory
553 to be the mode required for memory in extract operations (this isn't
554 likely to be an insertion operation; if it was, nothing bad will
555 happen, we might just fail in some cases). */
556
557 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
558 && GET_CODE (XEXP (x, 1)) == CONST_INT
559 && GET_CODE (XEXP (x, 2)) == CONST_INT
560 && ! mode_dependent_address_p (XEXP (to, 0))
561 && ! MEM_VOLATILE_P (to))
562 {
563 enum machine_mode wanted_mode = VOIDmode;
564 enum machine_mode is_mode = GET_MODE (to);
565 int pos = INTVAL (XEXP (x, 2));
566
567 #ifdef HAVE_extzv
568 if (code == ZERO_EXTRACT)
569 {
570 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
571 if (wanted_mode == VOIDmode)
572 wanted_mode = word_mode;
573 }
574 #endif
575 #ifdef HAVE_extv
576 if (code == SIGN_EXTRACT)
577 {
578 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
579 if (wanted_mode == VOIDmode)
580 wanted_mode = word_mode;
581 }
582 #endif
583
584 /* If the insn wants a narrower mode, we can adjust the MEM to it. */
585 if (wanted_mode != VOIDmode
586 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
587 {
588 int offset = pos / BITS_PER_UNIT;
589 rtx newmem;
590
591 /* If the bytes and bits are counted differently, we
592 must adjust the offset. */
593 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
594 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
595 - offset);
596
597 pos %= GET_MODE_BITSIZE (wanted_mode);
598
599 newmem = gen_rtx_MEM (wanted_mode,
600 plus_constant (XEXP (to, 0), offset));
601 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
602 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
603 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
604
605 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
606 validate_change (object, &XEXP (x, 0), newmem, 1);
607 }
608 }
609
610 break;
611
612 default:
613 break;
614 }
615
616 /* For commutative or comparison operations we've already performed
617 replacements. Don't try to perform them again. */
618 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
619 {
620 fmt = GET_RTX_FORMAT (code);
621 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
622 {
623 if (fmt[i] == 'e')
624 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
625 else if (fmt[i] == 'E')
626 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
627 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
628 }
629 }
630 }
631
632 /* Try replacing every occurrence of FROM in INSN with TO. After all
633 changes have been made, validate by seeing if INSN is still valid. */
634
635 int
636 validate_replace_rtx (from, to, insn)
637 rtx from, to, insn;
638 {
639 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
640 return apply_change_group ();
641 }
642
643 /* Try replacing every occurrence of FROM in INSN with TO, queuing the
644 changes as a group; the caller must call apply_change_group to commit. */
645
646 void
647 validate_replace_rtx_group (from, to, insn)
648 rtx from, to, insn;
649 {
650 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
651 }
652
653 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
654 SET_DESTs. After all changes have been made, validate by seeing if
655 INSN is still valid. */
656
657 int
658 validate_replace_src (from, to, insn)
659 rtx from, to, insn;
660 {
661 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
662 || GET_CODE (PATTERN (insn)) != SET)
663 abort ();
664
665 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
666 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
667 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
668 from, to, insn);
669 return apply_change_group ();
670 }
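/* Illustrative sketch, not part of this file: substitute FROM with TO in
   two related insns atomically, using the _group variant so that a single
   apply_change_group accepts or rejects both.  INSN1 and INSN2 are
   hypothetical insns supplied by the caller.  */
#if 0
static int
replace_in_pair (from, to, insn1, insn2)
     rtx from, to, insn1, insn2;
{
  validate_replace_rtx_group (from, to, insn1);
  validate_replace_rtx_group (from, to, insn2);
  return apply_change_group ();  /* Both insns must remain recognizable.  */
}
#endif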
671 \f
672 #ifdef HAVE_cc0
673 /* Return 1 if the insn using CC0 set by INSN does not contain
674 any ordered tests applied to the condition codes.
675 EQ and NE tests do not count. */
676
677 int
678 next_insn_tests_no_inequality (insn)
679 rtx insn;
680 {
681 register rtx next = next_cc0_user (insn);
682
683 /* If there is no next insn, we have to take the conservative choice. */
684 if (next == 0)
685 return 0;
686
687 return ((GET_CODE (next) == JUMP_INSN
688 || GET_CODE (next) == INSN
689 || GET_CODE (next) == CALL_INSN)
690 && ! inequality_comparisons_p (PATTERN (next)));
691 }
692
693 #if 0 /* This is useless since the insn that sets the cc's
694 must be followed immediately by the use of them. */
695 /* Return 1 if the CC value set up by INSN is not used. */
696
697 int
698 next_insns_test_no_inequality (insn)
699 rtx insn;
700 {
701 register rtx next = NEXT_INSN (insn);
702
703 for (; next != 0; next = NEXT_INSN (next))
704 {
705 if (GET_CODE (next) == CODE_LABEL
706 || GET_CODE (next) == BARRIER)
707 return 1;
708 if (GET_CODE (next) == NOTE)
709 continue;
710 if (inequality_comparisons_p (PATTERN (next)))
711 return 0;
712 if (sets_cc0_p (PATTERN (next)) == 1)
713 return 1;
714 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
715 return 1;
716 }
717 return 1;
718 }
719 #endif
720 #endif
721 \f
722 /* This is used by find_single_use to locate an rtx that contains exactly one
723 use of DEST, which is typically either a REG or CC0. It returns a
724 pointer to the innermost rtx expression containing DEST. Appearances of
725 DEST that are being used to totally replace it are not counted. */
726
727 static rtx *
728 find_single_use_1 (dest, loc)
729 rtx dest;
730 rtx *loc;
731 {
732 rtx x = *loc;
733 enum rtx_code code = GET_CODE (x);
734 rtx *result = 0;
735 rtx *this_result;
736 int i;
737 char *fmt;
738
739 switch (code)
740 {
741 case CONST_INT:
742 case CONST:
743 case LABEL_REF:
744 case SYMBOL_REF:
745 case CONST_DOUBLE:
746 case CLOBBER:
747 return 0;
748
749 case SET:
750 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
751 of a REG that occupies all of the REG, the insn uses DEST if
752 it is mentioned in the destination or the source. Otherwise, we
753 need only check the source. */
754 if (GET_CODE (SET_DEST (x)) != CC0
755 && GET_CODE (SET_DEST (x)) != PC
756 && GET_CODE (SET_DEST (x)) != REG
757 && ! (GET_CODE (SET_DEST (x)) == SUBREG
758 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
759 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
760 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
761 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
762 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
763 break;
764
765 return find_single_use_1 (dest, &SET_SRC (x));
766
767 case MEM:
768 case SUBREG:
769 return find_single_use_1 (dest, &XEXP (x, 0));
770
771 default:
772 break;
773 }
774
775 /* If it wasn't one of the common cases above, check each expression and
776 vector of this code. Look for a unique usage of DEST. */
777
778 fmt = GET_RTX_FORMAT (code);
779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
780 {
781 if (fmt[i] == 'e')
782 {
783 if (dest == XEXP (x, i)
784 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
785 && REGNO (dest) == REGNO (XEXP (x, i))))
786 this_result = loc;
787 else
788 this_result = find_single_use_1 (dest, &XEXP (x, i));
789
790 if (result == 0)
791 result = this_result;
792 else if (this_result)
793 /* Duplicate usage. */
794 return 0;
795 }
796 else if (fmt[i] == 'E')
797 {
798 int j;
799
800 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
801 {
802 if (XVECEXP (x, i, j) == dest
803 || (GET_CODE (dest) == REG
804 && GET_CODE (XVECEXP (x, i, j)) == REG
805 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
806 this_result = loc;
807 else
808 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
809
810 if (result == 0)
811 result = this_result;
812 else if (this_result)
813 return 0;
814 }
815 }
816 }
817
818 return result;
819 }
820 \f
821 /* See if DEST, produced in INSN, is used only a single time in the
822 sequel. If so, return a pointer to the innermost rtx expression in which
823 it is used.
824
825 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
826
827 This routine will usually return zero either before flow is called (because
828 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
829 note can't be trusted).
830
831 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
832 care about REG_DEAD notes or LOG_LINKS.
833
834 Otherwise, we find the single use by finding an insn that has a
835 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
836 only referenced once in that insn, we know that it must be the first
837 and last insn referencing DEST. */
838
839 rtx *
840 find_single_use (dest, insn, ploc)
841 rtx dest;
842 rtx insn;
843 rtx *ploc;
844 {
845 rtx next;
846 rtx *result;
847 rtx link;
848
849 #ifdef HAVE_cc0
850 if (dest == cc0_rtx)
851 {
852 next = NEXT_INSN (insn);
853 if (next == 0
854 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
855 return 0;
856
857 result = find_single_use_1 (dest, &PATTERN (next));
858 if (result && ploc)
859 *ploc = next;
860 return result;
861 }
862 #endif
863
864 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
865 return 0;
866
867 for (next = next_nonnote_insn (insn);
868 next != 0 && GET_CODE (next) != CODE_LABEL;
869 next = next_nonnote_insn (next))
870 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
871 {
872 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
873 if (XEXP (link, 0) == insn)
874 break;
875
876 if (link)
877 {
878 result = find_single_use_1 (dest, &PATTERN (next));
879 if (ploc)
880 *ploc = next;
881 return result;
882 }
883 }
884
885 return 0;
886 }
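/* Illustrative sketch, not part of this file: fetch the insn that is the
   sole consumer of the value INSN computes into DEST, or NULL_RTX if no
   such single use can be proved.  The helper name is hypothetical.  */
#if 0
static rtx
single_user_of (dest, insn)
     rtx dest, insn;
{
  rtx user;
  rtx *usep = find_single_use (dest, insn, &user);

  return usep ? user : NULL_RTX;
}
#endif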
887 \f
888 /* Return 1 if OP is a valid general operand for machine mode MODE.
889 This is either a register reference, a memory reference,
890 or a constant. In the case of a memory reference, the address
891 is checked for general validity for the target machine.
892
893 Register and memory references must have mode MODE in order to be valid,
894 but some constants have no machine mode and are valid for any mode.
895
896 If MODE is VOIDmode, OP is checked for validity for whatever mode
897 it has.
898
899 The main use of this function is as a predicate in match_operand
900 expressions in the machine description.
901
902 For an explanation of this function's behavior for registers of
903 class NO_REGS, see the comment for `register_operand'. */
904
905 int
906 general_operand (op, mode)
907 register rtx op;
908 enum machine_mode mode;
909 {
910 register enum rtx_code code = GET_CODE (op);
911 int mode_altering_drug = 0;
912
913 if (mode == VOIDmode)
914 mode = GET_MODE (op);
915
916 /* Don't accept CONST_INT or anything similar
917 if the caller wants something floating. */
918 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
919 && GET_MODE_CLASS (mode) != MODE_INT
920 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
921 return 0;
922
923 if (CONSTANT_P (op))
924 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
925 #ifdef LEGITIMATE_PIC_OPERAND_P
926 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
927 #endif
928 && LEGITIMATE_CONSTANT_P (op));
929
930 /* Except for certain constants with VOIDmode, already checked for,
931 OP's mode must match MODE if MODE specifies a mode. */
932
933 if (GET_MODE (op) != mode)
934 return 0;
935
936 if (code == SUBREG)
937 {
938 #ifdef INSN_SCHEDULING
939 /* On machines that have insn scheduling, we want all memory
940 references to be explicit, so outlaw paradoxical SUBREGs. */
941 if (GET_CODE (SUBREG_REG (op)) == MEM
942 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
943 return 0;
944 #endif
945
946 op = SUBREG_REG (op);
947 code = GET_CODE (op);
948 #if 0
949 /* No longer needed, since (SUBREG (MEM...))
950 will load the MEM into a reload reg in the MEM's own mode. */
951 mode_altering_drug = 1;
952 #endif
953 }
954
955 if (code == REG)
956 /* A register whose class is NO_REGS is not a general operand. */
957 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
958 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
959
960 if (code == MEM)
961 {
962 register rtx y = XEXP (op, 0);
963 if (! volatile_ok && MEM_VOLATILE_P (op))
964 return 0;
965 if (GET_CODE (y) == ADDRESSOF)
966 return 1;
967 /* Use the mem's mode, since it will be reloaded thus. */
968 mode = GET_MODE (op);
969 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
970 }
971
972 /* Pretend this is an operand for now; we'll run force_operand
973 on its replacement in fixup_var_refs_1. */
974 if (code == ADDRESSOF)
975 return 1;
976
977 return 0;
978
979 win:
980 if (mode_altering_drug)
981 return ! mode_dependent_address_p (XEXP (op, 0));
982 return 1;
983 }
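/* Illustrative sketch, not part of this file: machine descriptions often
   layer their own match_operand predicates on top of general_operand and
   register_operand.  The predicate below (a hypothetical reg_or_0_operand)
   accepts a register of the right mode or the constant zero.  */
#if 0
static int
reg_or_0_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return op == const0_rtx || register_operand (op, mode);
}
#endif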
984 \f
985 /* Return 1 if OP is a valid memory address for a memory reference
986 of mode MODE.
987
988 The main use of this function is as a predicate in match_operand
989 expressions in the machine description. */
990
991 int
992 address_operand (op, mode)
993 register rtx op;
994 enum machine_mode mode;
995 {
996 return memory_address_p (mode, op);
997 }
998
999 /* Return 1 if OP is a register reference of mode MODE.
1000 If MODE is VOIDmode, accept a register in any mode.
1001
1002 The main use of this function is as a predicate in match_operand
1003 expressions in the machine description.
1004
1005 As a special exception, registers whose class is NO_REGS are
1006 not accepted by `register_operand'. The reason for this change
1007 is to allow the representation of special architecture artifacts
1008 (such as a condition code register) without extending the rtl
1009 definitions. Since registers of class NO_REGS cannot be used
1010 as registers in any case where register classes are examined,
1011 it is most consistent to keep this function from accepting them. */
1012
1013 int
1014 register_operand (op, mode)
1015 register rtx op;
1016 enum machine_mode mode;
1017 {
1018 if (GET_MODE (op) != mode && mode != VOIDmode)
1019 return 0;
1020
1021 if (GET_CODE (op) == SUBREG)
1022 {
1023 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1024 because it is guaranteed to be reloaded into one.
1025 Just make sure the MEM is valid in itself.
1026 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1027 but currently it does result from (SUBREG (REG)...) where the
1028 reg went on the stack.) */
1029 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1030 return general_operand (op, mode);
1031
1032 #ifdef CLASS_CANNOT_CHANGE_SIZE
1033 if (GET_CODE (SUBREG_REG (op)) == REG
1034 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1035 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1036 REGNO (SUBREG_REG (op)))
1037 && (GET_MODE_SIZE (mode)
1038 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1039 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1040 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1041 return 0;
1042 #endif
1043
1044 op = SUBREG_REG (op);
1045 }
1046
1047 /* We don't consider registers whose class is NO_REGS
1048 to be a register operand. */
1049 return (GET_CODE (op) == REG
1050 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1051 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1052 }
1053
1054 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1055 or a hard register. */
1056
1057 int
1058 scratch_operand (op, mode)
1059 register rtx op;
1060 enum machine_mode mode;
1061 {
1062 return (GET_MODE (op) == mode
1063 && (GET_CODE (op) == SCRATCH
1064 || (GET_CODE (op) == REG
1065 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1066 }
1067
1068 /* Return 1 if OP is a valid immediate operand for mode MODE.
1069
1070 The main use of this function is as a predicate in match_operand
1071 expressions in the machine description. */
1072
1073 int
1074 immediate_operand (op, mode)
1075 register rtx op;
1076 enum machine_mode mode;
1077 {
1078 /* Don't accept CONST_INT or anything similar
1079 if the caller wants something floating. */
1080 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1081 && GET_MODE_CLASS (mode) != MODE_INT
1082 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1083 return 0;
1084
1085 return (CONSTANT_P (op)
1086 && (GET_MODE (op) == mode || mode == VOIDmode
1087 || GET_MODE (op) == VOIDmode)
1088 #ifdef LEGITIMATE_PIC_OPERAND_P
1089 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1090 #endif
1091 && LEGITIMATE_CONSTANT_P (op));
1092 }
1093
1094 /* Returns 1 if OP is an operand that is a CONST_INT. */
1095
1096 int
1097 const_int_operand (op, mode)
1098 register rtx op;
1099 enum machine_mode mode ATTRIBUTE_UNUSED;
1100 {
1101 return GET_CODE (op) == CONST_INT;
1102 }
1103
1104 /* Returns 1 if OP is an operand that is a constant integer or constant
1105 floating-point number. */
1106
1107 int
1108 const_double_operand (op, mode)
1109 register rtx op;
1110 enum machine_mode mode;
1111 {
1112 /* Don't accept CONST_INT or anything similar
1113 if the caller wants something floating. */
1114 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1115 && GET_MODE_CLASS (mode) != MODE_INT
1116 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1117 return 0;
1118
1119 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1120 && (mode == VOIDmode || GET_MODE (op) == mode
1121 || GET_MODE (op) == VOIDmode));
1122 }
1123
1124 /* Return 1 if OP is a general operand that is not an immediate operand. */
1125
1126 int
1127 nonimmediate_operand (op, mode)
1128 register rtx op;
1129 enum machine_mode mode;
1130 {
1131 return (general_operand (op, mode) && ! CONSTANT_P (op));
1132 }
1133
1134 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1135
1136 int
1137 nonmemory_operand (op, mode)
1138 register rtx op;
1139 enum machine_mode mode;
1140 {
1141 if (CONSTANT_P (op))
1142 {
1143 /* Don't accept CONST_INT or anything similar
1144 if the caller wants something floating. */
1145 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1146 && GET_MODE_CLASS (mode) != MODE_INT
1147 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1148 return 0;
1149
1150 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1151 #ifdef LEGITIMATE_PIC_OPERAND_P
1152 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1153 #endif
1154 && LEGITIMATE_CONSTANT_P (op));
1155 }
1156
1157 if (GET_MODE (op) != mode && mode != VOIDmode)
1158 return 0;
1159
1160 if (GET_CODE (op) == SUBREG)
1161 {
1162 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1163 because it is guaranteed to be reloaded into one.
1164 Just make sure the MEM is valid in itself.
1165 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1166 but currently it does result from (SUBREG (REG)...) where the
1167 reg went on the stack.) */
1168 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1169 return general_operand (op, mode);
1170 op = SUBREG_REG (op);
1171 }
1172
1173 /* We don't consider registers whose class is NO_REGS
1174 to be a register operand. */
1175 return (GET_CODE (op) == REG
1176 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1177 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1178 }
1179
1180 /* Return 1 if OP is a valid operand that stands for pushing a
1181 value of mode MODE onto the stack.
1182
1183 The main use of this function is as a predicate in match_operand
1184 expressions in the machine description. */
1185
1186 int
1187 push_operand (op, mode)
1188 rtx op;
1189 enum machine_mode mode;
1190 {
1191 if (GET_CODE (op) != MEM)
1192 return 0;
1193
1194 if (GET_MODE (op) != mode)
1195 return 0;
1196
1197 op = XEXP (op, 0);
1198
1199 if (GET_CODE (op) != STACK_PUSH_CODE)
1200 return 0;
1201
1202 return XEXP (op, 0) == stack_pointer_rtx;
1203 }
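/* Illustrative sketch, not part of this file: construct the kind of MEM
   that push_operand accepts, i.e. the operand of a push instruction whose
   address uses STACK_PUSH_CODE on the stack pointer.  The use of the
   variadic gen_rtx here is only an assumption about how such an rtx
   would be built.  */
#if 0
static rtx
example_push_mem (mode)
     enum machine_mode mode;
{
  return gen_rtx_MEM (mode,
                      gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx));
}
#endif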
1204
1205 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1206
1207 int
1208 memory_address_p (mode, addr)
1209 enum machine_mode mode;
1210 register rtx addr;
1211 {
1212 if (GET_CODE (addr) == ADDRESSOF)
1213 return 1;
1214
1215 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1216 return 0;
1217
1218 win:
1219 return 1;
1220 }
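/* Illustrative sketch, not part of this file: check a register-plus-offset
   address with memory_address_p before wrapping it in a MEM.  BASE and
   OFFSET are hypothetical caller values; a real caller would legitimize
   the address instead of giving up.  */
#if 0
static rtx
maybe_make_mem (mode, base, offset)
     enum machine_mode mode;
     rtx base;
     int offset;
{
  rtx addr = plus_constant (base, offset);

  if (! memory_address_p (mode, addr))
    return NULL_RTX;
  return gen_rtx_MEM (mode, addr);
}
#endif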
1221
1222 /* Return 1 if OP is a valid memory reference with mode MODE,
1223 including a valid address.
1224
1225 The main use of this function is as a predicate in match_operand
1226 expressions in the machine description. */
1227
1228 int
1229 memory_operand (op, mode)
1230 register rtx op;
1231 enum machine_mode mode;
1232 {
1233 rtx inner;
1234
1235 if (! reload_completed)
1236 /* Note that no SUBREG is a memory operand before end of reload pass,
1237 because (SUBREG (MEM...)) forces reloading into a register. */
1238 return GET_CODE (op) == MEM && general_operand (op, mode);
1239
1240 if (mode != VOIDmode && GET_MODE (op) != mode)
1241 return 0;
1242
1243 inner = op;
1244 if (GET_CODE (inner) == SUBREG)
1245 inner = SUBREG_REG (inner);
1246
1247 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1248 }
1249
1250 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1251 that is, a memory reference whose address is a general_operand. */
1252
1253 int
1254 indirect_operand (op, mode)
1255 register rtx op;
1256 enum machine_mode mode;
1257 {
1258 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1259 if (! reload_completed
1260 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1261 {
1262 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1263 rtx inner = SUBREG_REG (op);
1264
1265 if (BYTES_BIG_ENDIAN)
1266 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1267 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1268
1269 if (mode != VOIDmode && GET_MODE (op) != mode)
1270 return 0;
1271
1272 /* The only way that we can have a general_operand as the resulting
1273 address is if OFFSET is zero and the address already is an operand
1274 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1275 operand. */
1276
1277 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1278 || (GET_CODE (XEXP (inner, 0)) == PLUS
1279 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1280 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1281 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1282 }
1283
1284 return (GET_CODE (op) == MEM
1285 && memory_operand (op, mode)
1286 && general_operand (XEXP (op, 0), Pmode));
1287 }
1288
1289 /* Return 1 if this is a comparison operator. This allows the use of
1290 MATCH_OPERATOR to recognize all the branch insns. */
1291
1292 int
1293 comparison_operator (op, mode)
1294 register rtx op;
1295 enum machine_mode mode;
1296 {
1297 return ((mode == VOIDmode || GET_MODE (op) == mode)
1298 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1299 }
1300 \f
1301 /* If BODY is an insn body that uses ASM_OPERANDS,
1302 return the number of operands (both input and output) in the insn.
1303 Otherwise return -1. */
1304
1305 int
1306 asm_noperands (body)
1307 rtx body;
1308 {
1309 if (GET_CODE (body) == ASM_OPERANDS)
1310 /* No output operands: return number of input operands. */
1311 return ASM_OPERANDS_INPUT_LENGTH (body);
1312 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1313 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1314 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1315 else if (GET_CODE (body) == PARALLEL
1316 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1317 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1318 {
1319 /* Multiple output operands, or 1 output plus some clobbers:
1320 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1321 int i;
1322 int n_sets;
1323
1324 /* Count backwards through CLOBBERs to determine number of SETs. */
1325 for (i = XVECLEN (body, 0); i > 0; i--)
1326 {
1327 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1328 break;
1329 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1330 return -1;
1331 }
1332
1333 /* N_SETS is now number of output operands. */
1334 n_sets = i;
1335
1336 /* Verify that all the SETs we have
1337 came from a single original asm_operands insn
1338 (so that invalid combinations are blocked). */
1339 for (i = 0; i < n_sets; i++)
1340 {
1341 rtx elt = XVECEXP (body, 0, i);
1342 if (GET_CODE (elt) != SET)
1343 return -1;
1344 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1345 return -1;
1346 /* If these ASM_OPERANDS rtx's came from different original insns
1347 then they aren't allowed together. */
1348 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1349 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1350 return -1;
1351 }
1352 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1353 + n_sets);
1354 }
1355 else if (GET_CODE (body) == PARALLEL
1356 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1357 {
1358 /* 0 outputs, but some clobbers:
1359 body is [(asm_operands ...) (clobber (reg ...))...]. */
1360 int i;
1361
1362 /* Make sure all the other parallel things really are clobbers. */
1363 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1364 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1365 return -1;
1366
1367 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1368 }
1369 else
1370 return -1;
1371 }
1372
1373 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1374 copy its operands (both input and output) into the vector OPERANDS,
1375 the locations of the operands within the insn into the vector OPERAND_LOCS,
1376 and the constraints for the operands into CONSTRAINTS.
1377 Write the modes of the operands into MODES.
1378 Return the assembler-template.
1379
1380 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1381 we don't store that info. */
1382
1383 char *
1384 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1385 rtx body;
1386 rtx *operands;
1387 rtx **operand_locs;
1388 char **constraints;
1389 enum machine_mode *modes;
1390 {
1391 register int i;
1392 int noperands;
1393 char *template = 0;
1394
1395 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1396 {
1397 rtx asmop = SET_SRC (body);
1398 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1399
1400 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1401
1402 for (i = 1; i < noperands; i++)
1403 {
1404 if (operand_locs)
1405 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1406 if (operands)
1407 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1408 if (constraints)
1409 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1410 if (modes)
1411 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1412 }
1413
1414 /* The output is in the SET.
1415 Its constraint is in the ASM_OPERANDS itself. */
1416 if (operands)
1417 operands[0] = SET_DEST (body);
1418 if (operand_locs)
1419 operand_locs[0] = &SET_DEST (body);
1420 if (constraints)
1421 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1422 if (modes)
1423 modes[0] = GET_MODE (SET_DEST (body));
1424 template = ASM_OPERANDS_TEMPLATE (asmop);
1425 }
1426 else if (GET_CODE (body) == ASM_OPERANDS)
1427 {
1428 rtx asmop = body;
1429 /* No output operands: BODY is (asm_operands ....). */
1430
1431 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1432
1433 /* The input operands are found in the 1st element vector. */
1434 /* Constraints for inputs are in the 2nd element vector. */
1435 for (i = 0; i < noperands; i++)
1436 {
1437 if (operand_locs)
1438 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1439 if (operands)
1440 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1441 if (constraints)
1442 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1443 if (modes)
1444 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1445 }
1446 template = ASM_OPERANDS_TEMPLATE (asmop);
1447 }
1448 else if (GET_CODE (body) == PARALLEL
1449 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1450 {
1451 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1452 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1453 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1454 int nout = 0; /* Does not include CLOBBERs. */
1455
1456 /* At least one output, plus some CLOBBERs. */
1457
1458 /* The outputs are in the SETs.
1459 Their constraints are in the ASM_OPERANDS itself. */
1460 for (i = 0; i < nparallel; i++)
1461 {
1462 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1463 break; /* Past last SET */
1464
1465 if (operands)
1466 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1467 if (operand_locs)
1468 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1469 if (constraints)
1470 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1471 if (modes)
1472 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1473 nout++;
1474 }
1475
1476 for (i = 0; i < nin; i++)
1477 {
1478 if (operand_locs)
1479 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1480 if (operands)
1481 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1482 if (constraints)
1483 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1484 if (modes)
1485 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1486 }
1487
1488 template = ASM_OPERANDS_TEMPLATE (asmop);
1489 }
1490 else if (GET_CODE (body) == PARALLEL
1491 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1492 {
1493 /* No outputs, but some CLOBBERs. */
1494
1495 rtx asmop = XVECEXP (body, 0, 0);
1496 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1497
1498 for (i = 0; i < nin; i++)
1499 {
1500 if (operand_locs)
1501 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1502 if (operands)
1503 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1504 if (constraints)
1505 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1506 if (modes)
1507 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1508 }
1509
1510 template = ASM_OPERANDS_TEMPLATE (asmop);
1511 }
1512
1513 return template;
1514 }
1515
1516 /* Check if an asm_operand matches its constraints. */
1517
1518 int
1519 asm_operand_ok (op, constraint)
1520 rtx op;
1521 const char *constraint;
1522 {
1523 /* Use constrain_operands after reload. */
1524 if (reload_completed)
1525 abort ();
1526
1527 while (*constraint)
1528 {
1529 switch (*constraint++)
1530 {
1531 case '=':
1532 case '+':
1533 case '*':
1534 case '%':
1535 case '?':
1536 case '!':
1537 case '#':
1538 case '&':
1539 case ',':
1540 break;
1541
1542 case '0': case '1': case '2': case '3': case '4':
1543 case '5': case '6': case '7': case '8': case '9':
1544 /* Our caller is supposed to have given us the proper
1545 matching constraint. */
1546 /* abort (); */
1547 break;
1548
1549 case 'p':
1550 if (address_operand (op, VOIDmode))
1551 return 1;
1552 break;
1553
1554 case 'm':
1555 case 'V': /* non-offsettable */
1556 if (memory_operand (op, VOIDmode))
1557 return 1;
1558 break;
1559
1560 case 'o': /* offsettable */
1561 if (offsettable_nonstrict_memref_p (op))
1562 return 1;
1563 break;
1564
1565 case '<':
1566 if (GET_CODE (op) == MEM
1567 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1568 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1569 return 1;
1570 break;
1571
1572 case '>':
1573 if (GET_CODE (op) == MEM
1574 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1575 || GET_CODE (XEXP (op, 0)) == POST_INC))
1576 return 1;
1577 break;
1578
1579 case 'E':
1580 #ifndef REAL_ARITHMETIC
1581 /* Match any floating double constant, but only if
1582 we can examine the bits of it reliably. */
1583 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1584 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1585 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1586 break;
1587 #endif
1588 /* FALLTHRU */
1589
1590 case 'F':
1591 if (GET_CODE (op) == CONST_DOUBLE)
1592 return 1;
1593 break;
1594
1595 case 'G':
1596 if (GET_CODE (op) == CONST_DOUBLE
1597 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1598 return 1;
1599 break;
1600 case 'H':
1601 if (GET_CODE (op) == CONST_DOUBLE
1602 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1603 return 1;
1604 break;
1605
1606 case 's':
1607 if (GET_CODE (op) == CONST_INT
1608 || (GET_CODE (op) == CONST_DOUBLE
1609 && GET_MODE (op) == VOIDmode))
1610 break;
1611 /* FALLTHRU */
1612
1613 case 'i':
1614 if (CONSTANT_P (op)
1615 #ifdef LEGITIMATE_PIC_OPERAND_P
1616 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1617 #endif
1618 )
1619 return 1;
1620 break;
1621
1622 case 'n':
1623 if (GET_CODE (op) == CONST_INT
1624 || (GET_CODE (op) == CONST_DOUBLE
1625 && GET_MODE (op) == VOIDmode))
1626 return 1;
1627 break;
1628
1629 case 'I':
1630 if (GET_CODE (op) == CONST_INT
1631 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1632 return 1;
1633 break;
1634 case 'J':
1635 if (GET_CODE (op) == CONST_INT
1636 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1637 return 1;
1638 break;
1639 case 'K':
1640 if (GET_CODE (op) == CONST_INT
1641 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1642 return 1;
1643 break;
1644 case 'L':
1645 if (GET_CODE (op) == CONST_INT
1646 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1647 return 1;
1648 break;
1649 case 'M':
1650 if (GET_CODE (op) == CONST_INT
1651 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1652 return 1;
1653 break;
1654 case 'N':
1655 if (GET_CODE (op) == CONST_INT
1656 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1657 return 1;
1658 break;
1659 case 'O':
1660 if (GET_CODE (op) == CONST_INT
1661 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1662 return 1;
1663 break;
1664 case 'P':
1665 if (GET_CODE (op) == CONST_INT
1666 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1667 return 1;
1668 break;
1669
1670 case 'X':
1671 return 1;
1672
1673 case 'g':
1674 if (general_operand (op, VOIDmode))
1675 return 1;
1676 break;
1677
1678 #ifdef EXTRA_CONSTRAINT
1679 case 'Q':
1680 if (EXTRA_CONSTRAINT (op, 'Q'))
1681 return 1;
1682 break;
1683 case 'R':
1684 if (EXTRA_CONSTRAINT (op, 'R'))
1685 return 1;
1686 break;
1687 case 'S':
1688 if (EXTRA_CONSTRAINT (op, 'S'))
1689 return 1;
1690 break;
1691 case 'T':
1692 if (EXTRA_CONSTRAINT (op, 'T'))
1693 return 1;
1694 break;
1695 case 'U':
1696 if (EXTRA_CONSTRAINT (op, 'U'))
1697 return 1;
1698 break;
1699 #endif
1700
1701 case 'r':
1702 default:
1703 if (GET_MODE (op) == BLKmode)
1704 break;
1705 if (register_operand (op, VOIDmode))
1706 return 1;
1707 break;
1708 }
1709 }
1710
1711 return 0;
1712 }
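/* Illustrative sketch, not part of this file: before reload, test whether
   OP would satisfy an asm operand written with the constraint "rm"
   (register or memory).  */
#if 0
static int
fits_rm_constraint (op)
     rtx op;
{
  return asm_operand_ok (op, "rm");
}
#endif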
1713 \f
1714 /* Given an rtx *P, if it is a sum containing an integer constant term,
1715 return the location (type rtx *) of the pointer to that constant term.
1716 Otherwise, return a null pointer. */
1717
1718 static rtx *
1719 find_constant_term_loc (p)
1720 rtx *p;
1721 {
1722 register rtx *tem;
1723 register enum rtx_code code = GET_CODE (*p);
1724
1725 /* If *P IS such a constant term, P is its location. */
1726
1727 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1728 || code == CONST)
1729 return p;
1730
1731 /* Otherwise, if not a sum, it has no constant term. */
1732
1733 if (GET_CODE (*p) != PLUS)
1734 return 0;
1735
1736 /* If one of the summands is constant, return its location. */
1737
1738 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1739 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1740 return p;
1741
1742 /* Otherwise, check each summand for containing a constant term. */
1743
1744 if (XEXP (*p, 0) != 0)
1745 {
1746 tem = find_constant_term_loc (&XEXP (*p, 0));
1747 if (tem != 0)
1748 return tem;
1749 }
1750
1751 if (XEXP (*p, 1) != 0)
1752 {
1753 tem = find_constant_term_loc (&XEXP (*p, 1));
1754 if (tem != 0)
1755 return tem;
1756 }
1757
1758 return 0;
1759 }
1760 \f
1761 /* Return 1 if OP is a memory reference
1762 whose address contains no side effects
1763 and remains valid after the addition
1764 of a positive integer less than the
1765 size of the object being referenced.
1766
1767 We assume that the original address is valid and do not check it.
1768
1769 This uses strict_memory_address_p as a subroutine, so
1770 don't use it before reload. */
1771
1772 int
1773 offsettable_memref_p (op)
1774 rtx op;
1775 {
1776 return ((GET_CODE (op) == MEM)
1777 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1778 }
1779
1780 /* Similar, but don't require a strictly valid mem ref:
1781 consider pseudo-regs valid as index or base regs. */
1782
1783 int
1784 offsettable_nonstrict_memref_p (op)
1785 rtx op;
1786 {
1787 return ((GET_CODE (op) == MEM)
1788 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1789 }
1790
1791 /* Return 1 if Y is a memory address which contains no side effects
1792 and would remain valid after the addition of a positive integer
1793 less than the size of that mode.
1794
1795 We assume that the original address is valid and do not check it.
1796 We do check that it is valid for narrower modes.
1797
1798 If STRICTP is nonzero, we require a strictly valid address,
1799 for the sake of use in reload.c. */
1800
1801 int
1802 offsettable_address_p (strictp, mode, y)
1803 int strictp;
1804 enum machine_mode mode;
1805 register rtx y;
1806 {
1807 register enum rtx_code ycode = GET_CODE (y);
1808 register rtx z;
1809 rtx y1 = y;
1810 rtx *y2;
1811 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1812
1813 if (CONSTANT_ADDRESS_P (y))
1814 return 1;
1815
1816 /* Adjusting an offsettable address involves changing to a narrower mode.
1817 Make sure that's OK. */
1818
1819 if (mode_dependent_address_p (y))
1820 return 0;
1821
1822 /* If the expression contains a constant term,
1823 see if it remains valid when max possible offset is added. */
1824
1825 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1826 {
1827 int good;
1828
1829 y1 = *y2;
1830 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1831 /* Use QImode because an odd displacement may be automatically invalid
1832 for any wider mode. But it should be valid for a single byte. */
1833 good = (*addressp) (QImode, y);
1834
1835 /* In any case, restore old contents of memory. */
1836 *y2 = y1;
1837 return good;
1838 }
1839
1840 if (ycode == PRE_DEC || ycode == PRE_INC
1841 || ycode == POST_DEC || ycode == POST_INC)
1842 return 0;
1843
1844 /* The offset added here is chosen as the maximum offset that
1845 any instruction could need to add when operating on something
1846 of the specified mode. We assume that if Y and Y+c are
1847 valid addresses then so is Y+d for all 0<d<c. */
1848
1849 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1850
1851 /* Use QImode because an odd displacement may be automatically invalid
1852 for any wider mode. But it should be valid for a single byte. */
1853 return (*addressp) (QImode, z);
1854 }
1855
1856 /* Return 1 if ADDR is an address-expression whose effect depends
1857 on the mode of the memory reference it is used in.
1858
1859 Autoincrement addressing is a typical example of mode-dependence
1860 because the amount of the increment depends on the mode. */
1861
1862 int
1863 mode_dependent_address_p (addr)
1864 rtx addr;
1865 {
1866 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1867 return 0;
1868 win:
1869 return 1;
1870 }
1871
1872 /* Return 1 if OP is a general operand
1873 other than a memory ref with a mode dependent address. */
1874
1875 int
1876 mode_independent_operand (op, mode)
1877 enum machine_mode mode;
1878 rtx op;
1879 {
1880 rtx addr;
1881
1882 if (! general_operand (op, mode))
1883 return 0;
1884
1885 if (GET_CODE (op) != MEM)
1886 return 1;
1887
1888 addr = XEXP (op, 0);
1889 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1890 return 1;
1891 lose:
1892 return 0;
1893 }
1894
1895 /* Given an operand OP that is a valid memory reference
1896 which satisfies offsettable_memref_p,
1897 return a new memory reference whose address has been adjusted by OFFSET.
1898 OFFSET should be positive and less than the size of the object referenced.
1899 */
1900
1901 rtx
1902 adj_offsettable_operand (op, offset)
1903 rtx op;
1904 int offset;
1905 {
1906 register enum rtx_code code = GET_CODE (op);
1907
1908 if (code == MEM)
1909 {
1910 register rtx y = XEXP (op, 0);
1911 register rtx new;
1912
1913 if (CONSTANT_ADDRESS_P (y))
1914 {
1915 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1916 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1917 return new;
1918 }
1919
1920 if (GET_CODE (y) == PLUS)
1921 {
1922 rtx z = y;
1923 register rtx *const_loc;
1924
1925 op = copy_rtx (op);
1926 z = XEXP (op, 0);
1927 const_loc = find_constant_term_loc (&z);
1928 if (const_loc)
1929 {
1930 *const_loc = plus_constant_for_output (*const_loc, offset);
1931 return op;
1932 }
1933 }
1934
1935 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1936 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1937 return new;
1938 }
1939 abort ();
1940 }
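/* Illustrative sketch, not part of this file: the usual pairing of
   offsettable_memref_p with adj_offsettable_operand, here to form a
   reference to the second word of a hypothetical two-word MEM operand.  */
#if 0
static rtx
second_word_of (op)
     rtx op;
{
  if (! offsettable_memref_p (op))
    abort ();                   /* Caller must have made OP offsettable.  */
  return adj_offsettable_operand (op, UNITS_PER_WORD);
}
#endif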
1941 \f
1942 /* Analyze INSN and compute the variables recog_n_operands, recog_n_dups,
1943 recog_n_alternatives, recog_operand, recog_operand_loc, recog_constraints,
1944 recog_operand_mode, recog_dup_loc and recog_dup_num.
1945 If REGISTER_CONSTRAINTS is not defined, also compute
1946 recog_operand_address_p. */
1947 void
1948 extract_insn (insn)
1949 rtx insn;
1950 {
1951 int i;
1952 int icode;
1953 int noperands;
1954 rtx body = PATTERN (insn);
1955
1956 recog_n_operands = 0;
1957 recog_n_alternatives = 0;
1958 recog_n_dups = 0;
1959
1960 switch (GET_CODE (body))
1961 {
1962 case USE:
1963 case CLOBBER:
1964 case ASM_INPUT:
1965 case ADDR_VEC:
1966 case ADDR_DIFF_VEC:
1967 return;
1968
1969 case SET:
1970 case PARALLEL:
1971 case ASM_OPERANDS:
1972 recog_n_operands = noperands = asm_noperands (body);
1973 if (noperands >= 0)
1974 {
1975 /* This insn is an `asm' with operands. */
1976
1977 /* expand_asm_operands makes sure there aren't too many operands. */
1978 if (noperands > MAX_RECOG_OPERANDS)
1979 abort ();
1980
1981 /* Now get the operand values and constraints out of the insn. */
1982 decode_asm_operands (body, recog_operand, recog_operand_loc,
1983 recog_constraints, recog_operand_mode);
1984 if (noperands > 0)
1985 {
1986 char *p = recog_constraints[0];
1987 recog_n_alternatives = 1;
1988 while (*p)
1989 recog_n_alternatives += (*p++ == ',');
1990 }
1991 #ifndef REGISTER_CONSTRAINTS
1992 bzero (recog_operand_address_p, sizeof recog_operand_address_p);
1993 #endif
1994 break;
1995 }
1996
1997 /* FALLTHROUGH */
1998
1999 default:
2000 /* Ordinary insn: recognize it, get the operands via insn_extract
2001 and get the constraints. */
2002
2003 icode = recog_memoized (insn);
2004 if (icode < 0)
2005 fatal_insn_not_found (insn);
2006
2007 recog_n_operands = noperands = insn_n_operands[icode];
2008 recog_n_alternatives = insn_n_alternatives[icode];
2009 recog_n_dups = insn_n_dups[icode];
2010
2011 insn_extract (insn);
2012
2013 for (i = 0; i < noperands; i++)
2014 {
2015 #ifdef REGISTER_CONSTRAINTS
2016 recog_constraints[i] = insn_operand_constraint[icode][i];
2017 #else
2018 recog_operand_address_p[i] = insn_operand_address_p[icode][i];
2019 #endif
2020 recog_operand_mode[i] = insn_operand_mode[icode][i];
2021 }
2022 }
2023 for (i = 0; i < noperands; i++)
2024 recog_op_type[i] = (recog_constraints[i][0] == '=' ? OP_OUT
2025 : recog_constraints[i][0] == '+' ? OP_INOUT
2026 : OP_IN);
2027
2028 if (recog_n_alternatives > MAX_RECOG_ALTERNATIVES)
2029 abort ();
2030 }
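/* Usage sketch (the helper name is hypothetical): callers such as final
   and the insn-attribute routines run extract_insn and then read the
   recog_* globals that it fills in.  */
#if 0
static void
example_dump_operands (insn)
     rtx insn;
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_n_operands; i++)
    debug_rtx (recog_operand[i]);
}
#endif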
2031
2032 /* After calling extract_insn, you can use this function to extract some
2033 information from the constraint strings into a more usable form.
2034 The collected data is stored in recog_op_alt. */
2035 void
2036 preprocess_constraints ()
2037 {
2038 int i;
2039
2040 for (i = 0; i < recog_n_operands; i++)
2041 {
2042 int j;
2043 struct operand_alternative *op_alt;
2044 char *p = recog_constraints[i];
2045
2046 op_alt = recog_op_alt[i];
2047
2048 for (j = 0; j < recog_n_alternatives; j++)
2049 {
2050 op_alt[j].class = NO_REGS;
2051 op_alt[j].constraint = p;
2052 op_alt[j].matches = -1;
2053 op_alt[j].matched = -1;
2054
2055 if (*p == '\0' || *p == ',')
2056 {
2057 op_alt[j].anything_ok = 1;
2058 continue;
2059 }
2060
2061 for (;;)
2062 {
2063 char c = *p++;
2064 if (c == '#')
2065 do
2066 c = *p++;
2067 while (c != ',' && c != '\0');
2068 if (c == ',' || c == '\0')
2069 break;
2070
2071 switch (c)
2072 {
2073 case '=': case '+': case '*': case '%':
2074 case 'E': case 'F': case 'G': case 'H':
2075 case 's': case 'i': case 'n':
2076 case 'I': case 'J': case 'K': case 'L':
2077 case 'M': case 'N': case 'O': case 'P':
2078 #ifdef EXTRA_CONSTRAINT
2079 case 'Q': case 'R': case 'S': case 'T': case 'U':
2080 #endif
2081 /* These don't say anything we care about. */
2082 break;
2083
2084 case '?':
2085 op_alt[j].reject += 6;
2086 break;
2087 case '!':
2088 op_alt[j].reject += 600;
2089 break;
2090 case '&':
2091 op_alt[j].earlyclobber = 1;
2092 break;
2093
2094 case '0': case '1': case '2': case '3': case '4':
2095 case '5': case '6': case '7': case '8': case '9':
2096 op_alt[j].matches = c - '0';
2097 recog_op_alt[op_alt[j].matches][j].matched = i;
2098 break;
2099
2100 case 'm':
2101 op_alt[j].memory_ok = 1;
2102 break;
2103 case '<':
2104 op_alt[j].decmem_ok = 1;
2105 break;
2106 case '>':
2107 op_alt[j].incmem_ok = 1;
2108 break;
2109 case 'V':
2110 op_alt[j].nonoffmem_ok = 1;
2111 break;
2112 case 'o':
2113 op_alt[j].offmem_ok = 1;
2114 break;
2115 case 'X':
2116 op_alt[j].anything_ok = 1;
2117 break;
2118
2119 case 'p':
2120 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2121 break;
2122
2123 case 'g': case 'r':
2124 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2125 break;
2126
2127 default:
2128 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2129 break;
2130 }
2131 }
2132 }
2133 }
2134 }
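/* Usage sketch (the helper name is hypothetical): once extract_insn and
   preprocess_constraints have run, the parsed per-alternative data can
   be read back from recog_op_alt, for example to ask whether operand
   OPNO may be a memory reference in alternative ALT.  */
#if 0
static int
example_operand_allows_mem_p (opno, alt)
     int opno, alt;
{
  struct operand_alternative *op_alt = &recog_op_alt[opno][alt];

  return op_alt->memory_ok || op_alt->offmem_ok
	 || op_alt->nonoffmem_ok || op_alt->anything_ok;
}
#endif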
2135
2136 #ifdef REGISTER_CONSTRAINTS
2137
2138 /* Check the operands of an insn against the insn's operand constraints
2139 and return 1 if they are valid.
2140 The information about the insn's operands, constraints, operand modes
2141 etc. is obtained from the global variables set up by extract_insn.
2142
2143 WHICH_ALTERNATIVE is set to a number which indicates which
2144 alternative of constraints was matched: 0 for the first alternative,
2145 1 for the next, etc.
2146
2147 In addition, when two operands are required to match
2148 and it happens that the output operand is (reg) while the
2149 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2150 make the output operand look like the input.
2151 This is because the output operand is the one the template will print.
2152
2153 This is used in final, just before printing the assembler code and by
2154 the routines that determine an insn's attribute.
2155
2156 If STRICT is a positive non-zero value, it means that we have been
2157 called after reload has been completed. In that case, we must
2158 do all checks strictly. If it is zero, it means that we have been called
2159 before reload has completed. In that case, we first try to see if we can
2160 find an alternative that matches strictly. If not, we try again, this
2161 time assuming that reload will fix up the insn. This provides a "best
2162 guess" for the alternative and is used to compute attributes of insns prior
2163 to reload. A negative value of STRICT is used for this internal call. */
2164
2165 struct funny_match
2166 {
2167 int this, other;
2168 };
2169
2170 int
2171 constrain_operands (strict)
2172 int strict;
2173 {
2174 char *constraints[MAX_RECOG_OPERANDS];
2175 int matching_operands[MAX_RECOG_OPERANDS];
2176 int earlyclobber[MAX_RECOG_OPERANDS];
2177 register int c;
2178
2179 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2180 int funny_match_index;
2181
2182 if (recog_n_operands == 0 || recog_n_alternatives == 0)
2183 return 1;
2184
2185 for (c = 0; c < recog_n_operands; c++)
2186 {
2187 constraints[c] = recog_constraints[c];
2188 matching_operands[c] = -1;
2189 }
2190
2191 which_alternative = 0;
2192
2193 while (which_alternative < recog_n_alternatives)
2194 {
2195 register int opno;
2196 int lose = 0;
2197 funny_match_index = 0;
2198
2199 for (opno = 0; opno < recog_n_operands; opno++)
2200 {
2201 register rtx op = recog_operand[opno];
2202 enum machine_mode mode = GET_MODE (op);
2203 register char *p = constraints[opno];
2204 int offset = 0;
2205 int win = 0;
2206 int val;
2207
2208 earlyclobber[opno] = 0;
2209
2210 /* A unary operator may be accepted by the predicate, but it
2211 is irrelevant for matching constraints. */
2212 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2213 op = XEXP (op, 0);
2214
2215 if (GET_CODE (op) == SUBREG)
2216 {
2217 if (GET_CODE (SUBREG_REG (op)) == REG
2218 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2219 offset = SUBREG_WORD (op);
2220 op = SUBREG_REG (op);
2221 }
2222
2223 /* An empty constraint or empty alternative
2224 allows anything which matched the pattern. */
2225 if (*p == 0 || *p == ',')
2226 win = 1;
2227
2228 while (*p && (c = *p++) != ',')
2229 switch (c)
2230 {
2231 case '?':
2232 case '!':
2233 case '*':
2234 case '%':
2235 case '=':
2236 case '+':
2237 break;
2238
2239 case '#':
2240 /* Ignore rest of this alternative as far as
2241 constraint checking is concerned. */
2242 while (*p && *p != ',')
2243 p++;
2244 break;
2245
2246 case '&':
2247 earlyclobber[opno] = 1;
2248 break;
2249
2250 case '0':
2251 case '1':
2252 case '2':
2253 case '3':
2254 case '4':
2255 /* This operand must be the same as a previous one.
2256 This kind of constraint is used for instructions such
2257 as add when they take only two operands.
2258
2259 Note that the lower-numbered operand is passed first.
2260
2261 If we are not testing strictly, assume that this constraint
2262 will be satisfied. */
2263 if (strict < 0)
2264 val = 1;
2265 else
2266 val = operands_match_p (recog_operand[c - '0'],
2267 recog_operand[opno]);
2268
2269 matching_operands[opno] = c - '0';
2270 matching_operands[c - '0'] = opno;
2271
2272 if (val != 0)
2273 win = 1;
2274 /* If output is *x and input is *--x,
2275 arrange later to change the output to *--x as well,
2276 since the output op is the one that will be printed. */
2277 if (val == 2 && strict > 0)
2278 {
2279 funny_match[funny_match_index].this = opno;
2280 funny_match[funny_match_index++].other = c - '0';
2281 }
2282 break;
2283
2284 case 'p':
2285 /* p is used for address_operands. When we are called by
2286 gen_reload, no one will have checked that the address is
2287 strictly valid, i.e., that all pseudos requiring hard regs
2288 have gotten them. */
2289 if (strict <= 0
2290 || (strict_memory_address_p (recog_operand_mode[opno],
2291 op)))
2292 win = 1;
2293 break;
2294
2295 /* No need to check general_operand again;
2296 it was done in insn-recog.c. */
2297 case 'g':
2298 /* Anything goes unless it is a REG and really has a hard reg
2299 but the hard reg is not in the class GENERAL_REGS. */
2300 if (strict < 0
2301 || GENERAL_REGS == ALL_REGS
2302 || GET_CODE (op) != REG
2303 || (reload_in_progress
2304 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2305 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2306 win = 1;
2307 break;
2308
2309 case 'r':
2310 if (strict < 0
2311 || (strict == 0
2312 && GET_CODE (op) == REG
2313 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2314 || (strict == 0 && GET_CODE (op) == SCRATCH)
2315 || (GET_CODE (op) == REG
2316 && ((GENERAL_REGS == ALL_REGS
2317 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2318 || reg_fits_class_p (op, GENERAL_REGS,
2319 offset, mode))))
2320 win = 1;
2321 break;
2322
2323 case 'X':
2324 /* This is used for a MATCH_SCRATCH in the cases when
2325 we don't actually need anything. So anything goes
2326 any time. */
2327 win = 1;
2328 break;
2329
2330 case 'm':
2331 if (GET_CODE (op) == MEM
2332 /* Before reload, accept what reload can turn into mem. */
2333 || (strict < 0 && CONSTANT_P (op))
2334 /* During reload, accept a pseudo */
2335 || (reload_in_progress && GET_CODE (op) == REG
2336 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2337 win = 1;
2338 break;
2339
2340 case '<':
2341 if (GET_CODE (op) == MEM
2342 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2343 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2344 win = 1;
2345 break;
2346
2347 case '>':
2348 if (GET_CODE (op) == MEM
2349 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2350 || GET_CODE (XEXP (op, 0)) == POST_INC))
2351 win = 1;
2352 break;
2353
2354 case 'E':
2355 #ifndef REAL_ARITHMETIC
2356 /* Match any CONST_DOUBLE, but only if
2357 we can examine the bits of it reliably. */
2358 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2359 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2360 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2361 break;
2362 #endif
2363 if (GET_CODE (op) == CONST_DOUBLE)
2364 win = 1;
2365 break;
2366
2367 case 'F':
2368 if (GET_CODE (op) == CONST_DOUBLE)
2369 win = 1;
2370 break;
2371
2372 case 'G':
2373 case 'H':
2374 if (GET_CODE (op) == CONST_DOUBLE
2375 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2376 win = 1;
2377 break;
2378
2379 case 's':
2380 if (GET_CODE (op) == CONST_INT
2381 || (GET_CODE (op) == CONST_DOUBLE
2382 && GET_MODE (op) == VOIDmode))
2383 break;
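/* An explicit integer fails `s'; anything else falls through
   and is accepted by the `i' test below.  */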
2384 case 'i':
2385 if (CONSTANT_P (op))
2386 win = 1;
2387 break;
2388
2389 case 'n':
2390 if (GET_CODE (op) == CONST_INT
2391 || (GET_CODE (op) == CONST_DOUBLE
2392 && GET_MODE (op) == VOIDmode))
2393 win = 1;
2394 break;
2395
2396 case 'I':
2397 case 'J':
2398 case 'K':
2399 case 'L':
2400 case 'M':
2401 case 'N':
2402 case 'O':
2403 case 'P':
2404 if (GET_CODE (op) == CONST_INT
2405 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2406 win = 1;
2407 break;
2408
2409 #ifdef EXTRA_CONSTRAINT
2410 case 'Q':
2411 case 'R':
2412 case 'S':
2413 case 'T':
2414 case 'U':
2415 if (EXTRA_CONSTRAINT (op, c))
2416 win = 1;
2417 break;
2418 #endif
2419
2420 case 'V':
2421 if (GET_CODE (op) == MEM
2422 && ((strict > 0 && ! offsettable_memref_p (op))
2423 || (strict < 0
2424 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2425 || (reload_in_progress
2426 && !(GET_CODE (op) == REG
2427 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2428 win = 1;
2429 break;
2430
2431 case 'o':
2432 if ((strict > 0 && offsettable_memref_p (op))
2433 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2434 /* Before reload, accept what reload can handle. */
2435 || (strict < 0
2436 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2437 /* During reload, accept a pseudo */
2438 || (reload_in_progress && GET_CODE (op) == REG
2439 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2440 win = 1;
2441 break;
2442
2443 default:
2444 if (strict < 0
2445 || (strict == 0
2446 && GET_CODE (op) == REG
2447 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2448 || (strict == 0 && GET_CODE (op) == SCRATCH)
2449 || (GET_CODE (op) == REG
2450 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2451 offset, mode)))
2452 win = 1;
2453 }
2454
2455 constraints[opno] = p;
2456 /* If this operand did not win somehow,
2457 this alternative loses. */
2458 if (! win)
2459 lose = 1;
2460 }
2461 /* This alternative won; the operands are ok.
2462 Change whichever operands this alternative says to change. */
2463 if (! lose)
2464 {
2465 int opno, eopno;
2466
2467 /* See if any earlyclobber operand conflicts with some other
2468 operand. */
2469
2470 if (strict > 0)
2471 for (eopno = 0; eopno < recog_n_operands; eopno++)
2472 /* Ignore earlyclobber operands now in memory,
2473 because we would often report failure when we have
2474 two memory operands, one of which was formerly a REG. */
2475 if (earlyclobber[eopno]
2476 && GET_CODE (recog_operand[eopno]) == REG)
2477 for (opno = 0; opno < recog_n_operands; opno++)
2478 if ((GET_CODE (recog_operand[opno]) == MEM
2479 || recog_op_type[opno] != OP_OUT)
2480 && opno != eopno
2481 /* Ignore things like match_operator operands. */
2482 && *recog_constraints[opno] != 0
2483 && ! (matching_operands[opno] == eopno
2484 && operands_match_p (recog_operand[opno],
2485 recog_operand[eopno]))
2486 && ! safe_from_earlyclobber (recog_operand[opno],
2487 recog_operand[eopno]))
2488 lose = 1;
2489
2490 if (! lose)
2491 {
2492 while (--funny_match_index >= 0)
2493 {
2494 recog_operand[funny_match[funny_match_index].other]
2495 = recog_operand[funny_match[funny_match_index].this];
2496 }
2497
2498 return 1;
2499 }
2500 }
2501
2502 which_alternative++;
2503 }
2504
2505 /* If we are about to reject this, but we are not to test strictly,
2506 try a very loose test. Only return failure if it fails also. */
2507 if (strict == 0)
2508 return constrain_operands (-1);
2509 else
2510 return 0;
2511 }
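/* Usage sketch (the helper name is hypothetical): after reload, a
   strict call such as the one made from final either fails or leaves
   the index of the matching alternative in which_alternative.  */
#if 0
static int
example_which_alternative_matches (insn)
     rtx insn;
{
  extract_insn (insn);
  if (! constrain_operands (1))
    return -1;
  return which_alternative;
}
#endif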
2512
2513 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2514 is a hard reg in class CLASS when its regno is offset by OFFSET
2515 and changed to mode MODE.
2516 If REG occupies multiple hard regs, all of them must be in CLASS. */
2517
2518 int
2519 reg_fits_class_p (operand, class, offset, mode)
2520 rtx operand;
2521 register enum reg_class class;
2522 int offset;
2523 enum machine_mode mode;
2524 {
2525 register int regno = REGNO (operand);
2526 if (regno < FIRST_PSEUDO_REGISTER
2527 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2528 regno + offset))
2529 {
2530 register int sr;
2531 regno += offset;
2532 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2533 sr > 0; sr--)
2534 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2535 regno + sr))
2536 break;
2537 return sr == 0;
2538 }
2539
2540 return 0;
2541 }
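/* Usage sketch (the helper name is hypothetical): a caller checking an
   `r' style constraint for a hard register operand would use this
   roughly as follows.  */
#if 0
static int
example_hard_reg_is_general_p (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return GET_CODE (op) == REG
	 && REGNO (op) < FIRST_PSEUDO_REGISTER
	 && reg_fits_class_p (op, GENERAL_REGS, 0, mode);
}
#endif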
2542
2543 #endif /* REGISTER_CONSTRAINTS */