/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 that passes false for the UNSHARE
   argument.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that passes true for the UNSHARE
   argument.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
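
/* As an illustrative sketch (not code used elsewhere in this file):
   a caller that wants to try one change and keep it only if the insn
   is still recognized can write

       if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_rtx, 0))
         ... the change was valid and has been confirmed ...

   assuming the pattern is a SET, while several dependent changes can be
   queued with IN_GROUP nonzero and then committed or rejected atomically
   by apply_change_group.  */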


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
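
/* For example (a sketch): if a prior replacement has just turned X into
   (plus (const_int 4) (reg)), which is non-canonical because the constant
   comes first, the two queued swaps above restore
   (plus (reg) (const_int 4)).  */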


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added in order to
   make the insn match will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
397 and it is recognized. In the latter case, if reload has completed,
398 we also require that the operands meet the constraints for
399 the insn. */
400
401 for (i = num; i < num_changes; i++)
402 {
403 rtx object = changes[i].object;
404
405 /* If there is no object to test or if it is the same as the one we
406 already tested, ignore it. */
407 if (object == 0 || object == last_validated)
408 continue;
409
410 if (MEM_P (object))
411 {
412 if (! memory_address_addr_space_p (GET_MODE (object),
413 XEXP (object, 0),
414 MEM_ADDR_SPACE (object)))
415 break;
416 }
417 else if (/* changes[i].old might be zero, e.g. when putting a
418 REG_FRAME_RELATED_EXPR into a previously empty list. */
419 changes[i].old
420 && REG_P (changes[i].old)
421 && asm_noperands (PATTERN (object)) > 0
422 && REG_EXPR (changes[i].old) != NULL_TREE
423 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
424 && DECL_REGISTER (REG_EXPR (changes[i].old)))
425 {
426 /* Don't allow changes of hard register operands to inline
427 assemblies if they have been defined as register asm ("x"). */
428 break;
429 }
430 else if (DEBUG_INSN_P (object))
431 continue;
432 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
433 {
434 rtx pat = PATTERN (object);
435
436 /* Perhaps we couldn't recognize the insn because there were
437 extra CLOBBERs at the end. If so, try to re-recognize
438 without the last CLOBBER (later iterations will cause each of
439 them to be eliminated, in turn). But don't do this if we
440 have an ASM_OPERAND. */
441 if (GET_CODE (pat) == PARALLEL
442 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
443 && asm_noperands (PATTERN (object)) < 0)
444 {
445 rtx newpat;
446
447 if (XVECLEN (pat, 0) == 2)
448 newpat = XVECEXP (pat, 0, 0);
449 else
450 {
451 int j;
452
453 newpat
454 = gen_rtx_PARALLEL (VOIDmode,
455 rtvec_alloc (XVECLEN (pat, 0) - 1));
456 for (j = 0; j < XVECLEN (newpat, 0); j++)
457 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
458 }
459
460 /* Add a new change to this group to replace the pattern
461 with this new pattern. Then consider this change
462 as having succeeded. The change we added will
463 cause the entire call to fail if things remain invalid.
464
465 Note that this can lose if a later change than the one
466 we are processing specified &XVECEXP (PATTERN (object), 0, X)
467 but this shouldn't occur. */
468
469 validate_change (object, &PATTERN (object), newpat, 1);
470 continue;
471 }
472 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
473 || GET_CODE (pat) == VAR_LOCATION)
474 /* If this insn is a CLOBBER or USE, it is always valid, but is
475 never recognized. */
476 continue;
477 else
478 break;
479 }
480 last_validated = object;
481 }
482
483 return (i == num_changes);
484 }
485
486 /* A group of changes has previously been issued with validate_change
487 and verified with verify_changes. Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
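
/* A typical grouped use, as an illustrative sketch: queue the changes,
   then keep or drop them all at once:

       validate_change (insn, loc1, x1, 1);
       validate_change (insn, loc2, x2, 1);
       if (! apply_change_group ())
         ... every queued change was cancelled ...

   Callers that need partial rollback can instead record
   num_validated_changes () before queuing and later pass that number to
   verify_changes and cancel_changes.  */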


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X) -> (subreg (reg X)),
     so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove pass is no more, so is this aberration still
     necessary?  */
  /* Allow the substituted expression to have a different mode.  This was
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
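
/* For instance, as an illustrative sketch, a pass that has proved that
   pseudo REG always holds the value 42 at INSN could try

       if (validate_replace_rtx (reg, GEN_INT (42), insn))
         ... INSN now uses the constant and still matches a pattern ...

   where REG, the value 42 and INSN are hypothetical stand-ins.  */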

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insns with invalid addresses, which are made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
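
/* As illustrative examples (not an exhaustive list): (reg:SI 100),
   (subreg:SI (reg:DI 101) 0) and a (mem:SI ...) with a valid address all
   satisfy general_operand for SImode, as does (const_int 42) when the
   target accepts it as a legitimate constant, while a volatile MEM is
   rejected whenever volatile_ok is zero.  */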
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif

/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
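
/* On a target where the stack grows downward, a typical SImode push
   operand therefore looks like
       (mem:SI (pre_dec:P (reg:P sp)))
   or, when PUSH_ROUNDING pads the slot, like
       (mem:SI (pre_modify:P (reg:P sp)
                             (plus:P (reg:P sp) (const_int -8))))
   where the -8 is only an example of a rounded size.  */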

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
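
/* Thus the operand vectors filled in here are laid out with all outputs
   first, then all inputs, then all label operands.  For example, for
       asm ("add %0,%1,%2" : "=r" (a) : "r" (b), "r" (c));
   operand 0 is the output A and operands 1 and 2 are the inputs B and C
   (the template and the names A, B, C are hypothetical).  */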

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

#ifdef AUTO_INC_DEC
        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
#endif
        default:
          cn = lookup_constraint (constraint);
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              result = result || memory_operand (op, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
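
/* As a rough illustration, asm_operand_ok (GEN_INT (5), "ir", NULL)
   should return nonzero: the CONST_INT satisfies the "i" alternative
   even though it is not a register.  */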
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
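
/* For example, for P pointing at (plus (reg X) (const_int 4)) the
   recursion above returns the location of the (const_int 4) summand,
   while for (plus (reg X) (reg Y)) it returns a null pointer.  */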
1914 \f
1915 /* Return 1 if OP is a memory reference
1916 whose address contains no side effects
1917 and remains valid after the addition
1918 of a positive integer less than the
1919 size of the object being referenced.
1920
1921 We assume that the original address is valid and do not check it.
1922
1923 This uses strict_memory_address_p as a subroutine, so
1924 don't use it before reload. */
1925
1926 int
1927 offsettable_memref_p (rtx op)
1928 {
1929 return ((MEM_P (op))
1930 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1931 MEM_ADDR_SPACE (op)));
1932 }
1933
1934 /* Similar, but don't require a strictly valid mem ref:
1935 consider pseudo-regs valid as index or base regs. */
1936
1937 int
1938 offsettable_nonstrict_memref_p (rtx op)
1939 {
1940 return ((MEM_P (op))
1941 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1942 MEM_ADDR_SPACE (op)));
1943 }
1944
1945 /* Return 1 if Y is a memory address which contains no side effects
1946 and would remain valid for address space AS after the addition of
1947 a positive integer less than the size of that mode.
1948
1949 We assume that the original address is valid and do not check it.
1950 We do check that it is valid for narrower modes.
1951
1952 If STRICTP is nonzero, we require a strictly valid address,
1953 for the sake of use in reload.c. */
1954
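/* Sketch of the check performed below (hypothetical example): for a
   4-byte MODE and Y == (plus:SI (reg:SI 100) (const_int 8)), the
   constant term is temporarily rewritten to (const_int 11), i.e.
   8 + mode_sz - 1, and the result is tested as a QImode address;
   the original constant is restored before returning. */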
1955 int
1956 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1957 addr_space_t as)
1958 {
1959 enum rtx_code ycode = GET_CODE (y);
1960 rtx z;
1961 rtx y1 = y;
1962 rtx *y2;
1963 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1964 (strictp ? strict_memory_address_addr_space_p
1965 : memory_address_addr_space_p);
1966 unsigned int mode_sz = GET_MODE_SIZE (mode);
1967
1968 if (CONSTANT_ADDRESS_P (y))
1969 return 1;
1970
1971 /* Adjusting an offsettable address involves changing to a narrower mode.
1972 Make sure that's OK. */
1973
1974 if (mode_dependent_address_p (y, as))
1975 return 0;
1976
1977 enum machine_mode address_mode = GET_MODE (y);
1978 if (address_mode == VOIDmode)
1979 address_mode = targetm.addr_space.address_mode (as);
1980 #ifdef POINTERS_EXTEND_UNSIGNED
1981 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1982 #endif
1983
1984 /* ??? How much offset does an offsettable BLKmode reference need?
1985 Clearly that depends on the situation in which it's being used.
1986 However, the current situation in which we test 0xffffffff is
1987 less than ideal. Caveat user. */
1988 if (mode_sz == 0)
1989 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1990
1991 /* If the expression contains a constant term,
1992 see if it remains valid when max possible offset is added. */
1993
1994 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1995 {
1996 int good;
1997
1998 y1 = *y2;
1999 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2000 /* Use QImode because an odd displacement may be automatically invalid
2001 for any wider mode. But it should be valid for a single byte. */
2002 good = (*addressp) (QImode, y, as);
2003
2004 /* In any case, restore old contents of memory. */
2005 *y2 = y1;
2006 return good;
2007 }
2008
2009 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2010 return 0;
2011
2012 /* The offset added here is chosen as the maximum offset that
2013 any instruction could need to add when operating on something
2014 of the specified mode. We assume that if Y and Y+c are
2015 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2016 go inside a LO_SUM here, so we do so as well. */
2017 if (GET_CODE (y) == LO_SUM
2018 && mode != BLKmode
2019 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2020 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2021 plus_constant (address_mode, XEXP (y, 1),
2022 mode_sz - 1));
2023 #ifdef POINTERS_EXTEND_UNSIGNED
2024 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2025 else if (POINTERS_EXTEND_UNSIGNED > 0
2026 && GET_CODE (y) == ZERO_EXTEND
2027 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2028 z = gen_rtx_ZERO_EXTEND (address_mode,
2029 plus_constant (pointer_mode, XEXP (y, 0),
2030 mode_sz - 1));
2031 #endif
2032 else
2033 z = plus_constant (address_mode, y, mode_sz - 1);
2034
2035 /* Use QImode because an odd displacement may be automatically invalid
2036 for any wider mode. But it should be valid for a single byte. */
2037 return (*addressp) (QImode, z, as);
2038 }
2039
2040 /* Return true if ADDR is an address-expression whose effect depends
2041 on the mode of the memory reference it is used in.
2042
2043 ADDRSPACE is the address space associated with the address.
2044
2045 Autoincrement addressing is a typical example of mode-dependence
2046 because the amount of the increment depends on the mode. */
2047
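/* For example: (post_inc:SI (reg:SI 100)) increments by the size of
   the mode of the containing MEM (4 bytes for an SImode reference,
   2 for an HImode one), so any such address is rejected here before
   the target hook is even consulted. */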
2048 bool
2049 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2050 {
2051 /* Auto-increment addressing with anything other than post_modify
2052 or pre_modify always introduces a mode dependency. Catch such
2053 cases now instead of deferring to the target. */
2054 if (GET_CODE (addr) == PRE_INC
2055 || GET_CODE (addr) == POST_INC
2056 || GET_CODE (addr) == PRE_DEC
2057 || GET_CODE (addr) == POST_DEC)
2058 return true;
2059
2060 return targetm.mode_dependent_address_p (addr, addrspace);
2061 }
2062 \f
2063 /* Return the mask of operand alternatives that are allowed for INSN.
2064 This mask depends only on INSN and on the current target; it does not
2065 depend on things like the values of operands. */
2066
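/* The returned mask has bit I set when alternative I is enabled.
   Hypothetical example: an insn with three alternatives whose
   "enabled" attribute rejects alternative 1 yields the mask 5
   (binary 101). */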
2067 alternative_mask
2068 get_enabled_alternatives (rtx_insn *insn)
2069 {
2070 /* Quick exit for asms and for targets that don't use the "enabled"
2071 attribute. */
2072 int code = INSN_CODE (insn);
2073 if (code < 0 || !HAVE_ATTR_enabled)
2074 return ALL_ALTERNATIVES;
2075
2076 /* Calling get_attr_enabled can be expensive, so cache the mask
2077 for speed. */
2078 if (this_target_recog->x_enabled_alternatives[code])
2079 return this_target_recog->x_enabled_alternatives[code];
2080
2081 /* Temporarily install enough information for get_attr_enabled to assume
2082 that the insn operands are already cached. As above, the attribute
2083 mustn't depend on the values of operands, so we don't provide their
2084 real values here. */
2085 rtx old_insn = recog_data.insn;
2086 int old_alternative = which_alternative;
2087
2088 recog_data.insn = insn;
2089 alternative_mask enabled = ALL_ALTERNATIVES;
2090 int n_alternatives = insn_data[code].n_alternatives;
2091 for (int i = 0; i < n_alternatives; i++)
2092 {
2093 which_alternative = i;
2094 if (!get_attr_enabled (insn))
2095 enabled &= ~ALTERNATIVE_BIT (i);
2096 }
2097
2098 recog_data.insn = old_insn;
2099 which_alternative = old_alternative;
2100
2101 this_target_recog->x_enabled_alternatives[code] = enabled;
2102 return enabled;
2103 }
2104
2105 /* Like extract_insn, but save the extracted insn and don't extract it again
2106 when called again for the same insn, expecting that recog_data still
2107 contains valid information. This is used primarily by the gen_attr
2108 infrastructure, which often extracts the same insn again and again. */
2109 void
2110 extract_insn_cached (rtx_insn *insn)
2111 {
2112 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2113 return;
2114 extract_insn (insn);
2115 recog_data.insn = insn;
2116 }
2117
2118 /* Do cached extract_insn, constrain_operands and complain about failures.
2119 Used by insn_attrtab. */
2120 void
2121 extract_constrain_insn_cached (rtx_insn *insn)
2122 {
2123 extract_insn_cached (insn);
2124 if (which_alternative == -1
2125 && !constrain_operands (reload_completed))
2126 fatal_insn_not_found (insn);
2127 }
2128
2129 /* Do cached constrain_operands and complain about failures. */
2130 int
2131 constrain_operands_cached (int strict)
2132 {
2133 if (which_alternative == -1)
2134 return constrain_operands (strict);
2135 else
2136 return 1;
2137 }
2138 \f
2139 /* Analyze INSN and fill in recog_data. */
2140
2141 void
2142 extract_insn (rtx_insn *insn)
2143 {
2144 int i;
2145 int icode;
2146 int noperands;
2147 rtx body = PATTERN (insn);
2148
2149 recog_data.n_operands = 0;
2150 recog_data.n_alternatives = 0;
2151 recog_data.n_dups = 0;
2152 recog_data.is_asm = false;
2153
2154 switch (GET_CODE (body))
2155 {
2156 case USE:
2157 case CLOBBER:
2158 case ASM_INPUT:
2159 case ADDR_VEC:
2160 case ADDR_DIFF_VEC:
2161 case VAR_LOCATION:
2162 return;
2163
2164 case SET:
2165 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2166 goto asm_insn;
2167 else
2168 goto normal_insn;
2169 case PARALLEL:
2170 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2171 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2172 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2173 goto asm_insn;
2174 else
2175 goto normal_insn;
2176 case ASM_OPERANDS:
2177 asm_insn:
2178 recog_data.n_operands = noperands = asm_noperands (body);
2179 if (noperands >= 0)
2180 {
2181 /* This insn is an `asm' with operands. */
2182
2183 /* expand_asm_operands makes sure there aren't too many operands. */
2184 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2185
2186 /* Now get the operand values and constraints out of the insn. */
2187 decode_asm_operands (body, recog_data.operand,
2188 recog_data.operand_loc,
2189 recog_data.constraints,
2190 recog_data.operand_mode, NULL);
2191 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2192 if (noperands > 0)
2193 {
2194 const char *p = recog_data.constraints[0];
2195 recog_data.n_alternatives = 1;
2196 while (*p)
2197 recog_data.n_alternatives += (*p++ == ',');
2198 }
2199 recog_data.is_asm = true;
2200 break;
2201 }
2202 fatal_insn_not_found (insn);
2203
2204 default:
2205 normal_insn:
2206 /* Ordinary insn: recognize it, get the operands via insn_extract
2207 and get the constraints. */
2208
2209 icode = recog_memoized (insn);
2210 if (icode < 0)
2211 fatal_insn_not_found (insn);
2212
2213 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2214 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2215 recog_data.n_dups = insn_data[icode].n_dups;
2216
2217 insn_extract (insn);
2218
2219 for (i = 0; i < noperands; i++)
2220 {
2221 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2222 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2223 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2224 /* VOIDmode match_operands get their mode from the real operand. */
2225 if (recog_data.operand_mode[i] == VOIDmode)
2226 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2227 }
2228 }
2229 for (i = 0; i < noperands; i++)
2230 recog_data.operand_type[i]
2231 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2232 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2233 : OP_IN);
2234
2235 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2236
2237 recog_data.enabled_alternatives = get_enabled_alternatives (insn);
2238
2239 recog_data.insn = NULL;
2240 which_alternative = -1;
2241 }
2242
2243 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2244 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2245 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2246 has N_OPERANDS entries. */
2247
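/* Worked example (hypothetical): for an operand whose constraint
   string is "r,m", alternative 0 records cl == GENERAL_REGS and
   alternative 1 records memory_ok == 1; for the string "0", the
   operand records matches == 0 and operand 0 has matched set to
   this operand's index. */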
2248 void
2249 preprocess_constraints (int n_operands, int n_alternatives,
2250 const char **constraints,
2251 operand_alternative *op_alt_base)
2252 {
2253 for (int i = 0; i < n_operands; i++)
2254 {
2255 int j;
2256 struct operand_alternative *op_alt;
2257 const char *p = constraints[i];
2258
2259 op_alt = op_alt_base;
2260
2261 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2262 {
2263 op_alt[i].cl = NO_REGS;
2264 op_alt[i].constraint = p;
2265 op_alt[i].matches = -1;
2266 op_alt[i].matched = -1;
2267
2268 if (*p == '\0' || *p == ',')
2269 {
2270 op_alt[i].anything_ok = 1;
2271 continue;
2272 }
2273
2274 for (;;)
2275 {
2276 char c = *p;
2277 if (c == '#')
2278 do
2279 c = *++p;
2280 while (c != ',' && c != '\0');
2281 if (c == ',' || c == '\0')
2282 {
2283 p++;
2284 break;
2285 }
2286
2287 switch (c)
2288 {
2289 case '?':
2290 op_alt[i].reject += 6;
2291 break;
2292 case '!':
2293 op_alt[i].reject += 600;
2294 break;
2295 case '&':
2296 op_alt[i].earlyclobber = 1;
2297 break;
2298
2299 case '0': case '1': case '2': case '3': case '4':
2300 case '5': case '6': case '7': case '8': case '9':
2301 {
2302 char *end;
2303 op_alt[i].matches = strtoul (p, &end, 10);
2304 op_alt[op_alt[i].matches].matched = i;
2305 p = end;
2306 }
2307 continue;
2308
2309 case 'X':
2310 op_alt[i].anything_ok = 1;
2311 break;
2312
2313 case 'g':
2314 op_alt[i].cl =
2315 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2316 break;
2317
2318 default:
2319 enum constraint_num cn = lookup_constraint (p);
2320 enum reg_class cl;
2321 switch (get_constraint_type (cn))
2322 {
2323 case CT_REGISTER:
2324 cl = reg_class_for_constraint (cn);
2325 if (cl != NO_REGS)
2326 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2327 break;
2328
2329 case CT_CONST_INT:
2330 break;
2331
2332 case CT_MEMORY:
2333 op_alt[i].memory_ok = 1;
2334 break;
2335
2336 case CT_ADDRESS:
2337 op_alt[i].is_address = 1;
2338 op_alt[i].cl
2339 = (reg_class_subunion
2340 [(int) op_alt[i].cl]
2341 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2342 ADDRESS, SCRATCH)]);
2343 break;
2344
2345 case CT_FIXED_FORM:
2346 break;
2347 }
2348 break;
2349 }
2350 p += CONSTRAINT_LEN (c, p);
2351 }
2352 }
2353 }
2354 }
2355
2356 /* Return an array of operand_alternative structures for
2357 instruction ICODE. */
2358
2359 const operand_alternative *
2360 preprocess_insn_constraints (int icode)
2361 {
2362 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2363 if (this_target_recog->x_op_alt[icode])
2364 return this_target_recog->x_op_alt[icode];
2365
2366 int n_operands = insn_data[icode].n_operands;
2367 if (n_operands == 0)
2368 return 0;
2369 /* Always provide at least one alternative so that which_op_alt ()
2370 works correctly. If the instruction has 0 alternatives (i.e. all
2371 constraint strings are empty) then each operand in this alternative
2372 will have anything_ok set. */
2373 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2374 int n_entries = n_operands * n_alternatives;
2375
2376 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2377 const char **constraints = XALLOCAVEC (const char *, n_operands);
2378
2379 for (int i = 0; i < n_operands; ++i)
2380 constraints[i] = insn_data[icode].operand[i].constraint;
2381 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2382
2383 this_target_recog->x_op_alt[icode] = op_alt;
2384 return op_alt;
2385 }
2386
2387 /* After calling extract_insn, you can use this function to extract some
2388 information from the constraint strings into a more usable form.
2389 The collected data is stored in recog_op_alt. */
2390
2391 void
2392 preprocess_constraints (rtx insn)
2393 {
2394 int icode = INSN_CODE (insn);
2395 if (icode >= 0)
2396 recog_op_alt = preprocess_insn_constraints (icode);
2397 else
2398 {
2399 int n_operands = recog_data.n_operands;
2400 int n_alternatives = recog_data.n_alternatives;
2401 int n_entries = n_operands * n_alternatives;
2402 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2403 preprocess_constraints (n_operands, n_alternatives,
2404 recog_data.constraints, asm_op_alt);
2405 recog_op_alt = asm_op_alt;
2406 }
2407 }
2408
2409 /* Check the operands of an insn against the insn's operand constraints
2410 and return 1 if they are valid.
2411 The information about the insn's operands, constraints, operand modes
2412 etc. is obtained from the global variables set up by extract_insn.
2413
2414 WHICH_ALTERNATIVE is set to a number which indicates which
2415 alternative of constraints was matched: 0 for the first alternative,
2416 1 for the next, etc.
2417
2418 In addition, when two operands are required to match
2419 and it happens that the output operand is (reg) while the
2420 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2421 make the output operand look like the input.
2422 This is because the output operand is the one the template will print.
2423
2424 This is used in final, just before printing the assembler code and by
2425 the routines that determine an insn's attribute.
2426
2427 If STRICT is positive, it means that we have been
2428 called after reload has been completed. In that case, we must
2429 do all checks strictly. If it is zero, it means that we have been called
2430 before reload has completed. In that case, we first try to see if we can
2431 find an alternative that matches strictly. If not, we try again, this
2432 time assuming that reload will fix up the insn. This provides a "best
2433 guess" for the alternative and is used to compute attributes of insns prior
2434 to reload. A negative value of STRICT is used for this internal call. */
2435
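/* Hypothetical example of a matching constraint handled below: for a
   two-address add whose input operand has the constraint "0", that
   operand must be the same as operand 0. Under strict checking this
   is verified with operands_match_p; with STRICT < 0 it is simply
   assumed that reload can make the operands match. */
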
2436 struct funny_match
2437 {
2438 int this_op, other;
2439 };
2440
2441 int
2442 constrain_operands (int strict)
2443 {
2444 const char *constraints[MAX_RECOG_OPERANDS];
2445 int matching_operands[MAX_RECOG_OPERANDS];
2446 int earlyclobber[MAX_RECOG_OPERANDS];
2447 int c;
2448
2449 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2450 int funny_match_index;
2451
2452 which_alternative = 0;
2453 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2454 return 1;
2455
2456 for (c = 0; c < recog_data.n_operands; c++)
2457 {
2458 constraints[c] = recog_data.constraints[c];
2459 matching_operands[c] = -1;
2460 }
2461
2462 do
2463 {
2464 int seen_earlyclobber_at = -1;
2465 int opno;
2466 int lose = 0;
2467 funny_match_index = 0;
2468
2469 if (!TEST_BIT (recog_data.enabled_alternatives, which_alternative))
2470 {
2471 int i;
2472
2473 for (i = 0; i < recog_data.n_operands; i++)
2474 constraints[i] = skip_alternative (constraints[i]);
2475
2476 which_alternative++;
2477 continue;
2478 }
2479
2480 for (opno = 0; opno < recog_data.n_operands; opno++)
2481 {
2482 rtx op = recog_data.operand[opno];
2483 enum machine_mode mode = GET_MODE (op);
2484 const char *p = constraints[opno];
2485 int offset = 0;
2486 int win = 0;
2487 int val;
2488 int len;
2489
2490 earlyclobber[opno] = 0;
2491
2492 /* A unary operator may be accepted by the predicate, but it
2493 is irrelevant for matching constraints. */
2494 if (UNARY_P (op))
2495 op = XEXP (op, 0);
2496
2497 if (GET_CODE (op) == SUBREG)
2498 {
2499 if (REG_P (SUBREG_REG (op))
2500 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2501 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2502 GET_MODE (SUBREG_REG (op)),
2503 SUBREG_BYTE (op),
2504 GET_MODE (op));
2505 op = SUBREG_REG (op);
2506 }
2507
2508 /* An empty constraint or empty alternative
2509 allows anything which matched the pattern. */
2510 if (*p == 0 || *p == ',')
2511 win = 1;
2512
2513 do
2514 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2515 {
2516 case '\0':
2517 len = 0;
2518 break;
2519 case ',':
2520 c = '\0';
2521 break;
2522
2523 case '#':
2524 /* Ignore rest of this alternative as far as
2525 constraint checking is concerned. */
2526 do
2527 p++;
2528 while (*p && *p != ',');
2529 len = 0;
2530 break;
2531
2532 case '&':
2533 earlyclobber[opno] = 1;
2534 if (seen_earlyclobber_at < 0)
2535 seen_earlyclobber_at = opno;
2536 break;
2537
2538 case '0': case '1': case '2': case '3': case '4':
2539 case '5': case '6': case '7': case '8': case '9':
2540 {
2541 /* This operand must be the same as a previous one.
2542 This kind of constraint is used for instructions such
2543 as add when they take only two operands.
2544
2545 Note that the lower-numbered operand is passed first.
2546
2547 If we are not testing strictly, assume that this
2548 constraint will be satisfied. */
2549
2550 char *end;
2551 int match;
2552
2553 match = strtoul (p, &end, 10);
2554 p = end;
2555
2556 if (strict < 0)
2557 val = 1;
2558 else
2559 {
2560 rtx op1 = recog_data.operand[match];
2561 rtx op2 = recog_data.operand[opno];
2562
2563 /* A unary operator may be accepted by the predicate,
2564 but it is irrelevant for matching constraints. */
2565 if (UNARY_P (op1))
2566 op1 = XEXP (op1, 0);
2567 if (UNARY_P (op2))
2568 op2 = XEXP (op2, 0);
2569
2570 val = operands_match_p (op1, op2);
2571 }
2572
2573 matching_operands[opno] = match;
2574 matching_operands[match] = opno;
2575
2576 if (val != 0)
2577 win = 1;
2578
2579 /* If output is *x and input is *--x, arrange later
2580 to change the output to *--x as well, since the
2581 output op is the one that will be printed. */
2582 if (val == 2 && strict > 0)
2583 {
2584 funny_match[funny_match_index].this_op = opno;
2585 funny_match[funny_match_index++].other = match;
2586 }
2587 }
2588 len = 0;
2589 break;
2590
2591 case 'p':
2592 /* p is used for address_operands. When we are called by
2593 gen_reload, no one will have checked that the address is
2594 strictly valid, i.e., that all pseudos requiring hard regs
2595 have gotten them. */
2596 if (strict <= 0
2597 || (strict_memory_address_p (recog_data.operand_mode[opno],
2598 op)))
2599 win = 1;
2600 break;
2601
2602 /* No need to check general_operand again;
2603 it was done in insn-recog.c. Well, except that reload
2604 doesn't check the validity of its replacements, but
2605 that should only matter when there's a bug. */
2606 case 'g':
2607 /* Anything goes unless it is a REG and really has a hard reg
2608 but the hard reg is not in the class GENERAL_REGS. */
2609 if (REG_P (op))
2610 {
2611 if (strict < 0
2612 || GENERAL_REGS == ALL_REGS
2613 || (reload_in_progress
2614 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2615 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2616 win = 1;
2617 }
2618 else if (strict < 0 || general_operand (op, mode))
2619 win = 1;
2620 break;
2621
2622 default:
2623 {
2624 enum constraint_num cn = lookup_constraint (p);
2625 enum reg_class cl = reg_class_for_constraint (cn);
2626 if (cl != NO_REGS)
2627 {
2628 if (strict < 0
2629 || (strict == 0
2630 && REG_P (op)
2631 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2632 || (strict == 0 && GET_CODE (op) == SCRATCH)
2633 || (REG_P (op)
2634 && reg_fits_class_p (op, cl, offset, mode)))
2635 win = 1;
2636 }
2637
2638 else if (constraint_satisfied_p (op, cn))
2639 win = 1;
2640
2641 else if (insn_extra_memory_constraint (cn)
2642 /* Every memory operand can be reloaded to fit. */
2643 && ((strict < 0 && MEM_P (op))
2644 /* Before reload, accept what reload can turn
2645 into mem. */
2646 || (strict < 0 && CONSTANT_P (op))
2647 /* During reload, accept a pseudo. */
2648 || (reload_in_progress && REG_P (op)
2649 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2650 win = 1;
2651 else if (insn_extra_address_constraint (cn)
2652 /* Every address operand can be reloaded to fit. */
2653 && strict < 0)
2654 win = 1;
2655 /* Cater to architectures like IA-64 that define extra memory
2656 constraints without using define_memory_constraint. */
2657 else if (reload_in_progress
2658 && REG_P (op)
2659 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2660 && reg_renumber[REGNO (op)] < 0
2661 && reg_equiv_mem (REGNO (op)) != 0
2662 && constraint_satisfied_p
2663 (reg_equiv_mem (REGNO (op)), cn))
2664 win = 1;
2665 break;
2666 }
2667 }
2668 while (p += len, c);
2669
2670 constraints[opno] = p;
2671 /* If this operand did not win somehow,
2672 this alternative loses. */
2673 if (! win)
2674 lose = 1;
2675 }
2676 /* This alternative won; the operands are ok.
2677 Change whichever operands this alternative says to change. */
2678 if (! lose)
2679 {
2680 int opno, eopno;
2681
2682 /* See if any earlyclobber operand conflicts with some other
2683 operand. */
2684
2685 if (strict > 0 && seen_earlyclobber_at >= 0)
2686 for (eopno = seen_earlyclobber_at;
2687 eopno < recog_data.n_operands;
2688 eopno++)
2689 /* Ignore earlyclobber operands now in memory,
2690 because we would often report failure when we have
2691 two memory operands, one of which was formerly a REG. */
2692 if (earlyclobber[eopno]
2693 && REG_P (recog_data.operand[eopno]))
2694 for (opno = 0; opno < recog_data.n_operands; opno++)
2695 if ((MEM_P (recog_data.operand[opno])
2696 || recog_data.operand_type[opno] != OP_OUT)
2697 && opno != eopno
2698 /* Ignore things like match_operator operands. */
2699 && *recog_data.constraints[opno] != 0
2700 && ! (matching_operands[opno] == eopno
2701 && operands_match_p (recog_data.operand[opno],
2702 recog_data.operand[eopno]))
2703 && ! safe_from_earlyclobber (recog_data.operand[opno],
2704 recog_data.operand[eopno]))
2705 lose = 1;
2706
2707 if (! lose)
2708 {
2709 while (--funny_match_index >= 0)
2710 {
2711 recog_data.operand[funny_match[funny_match_index].other]
2712 = recog_data.operand[funny_match[funny_match_index].this_op];
2713 }
2714
2715 #ifdef AUTO_INC_DEC
2716 /* For operands without < or > constraints, reject side-effects. */
2717 if (recog_data.is_asm)
2718 {
2719 for (opno = 0; opno < recog_data.n_operands; opno++)
2720 if (MEM_P (recog_data.operand[opno]))
2721 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2722 {
2723 case PRE_INC:
2724 case POST_INC:
2725 case PRE_DEC:
2726 case POST_DEC:
2727 case PRE_MODIFY:
2728 case POST_MODIFY:
2729 if (strchr (recog_data.constraints[opno], '<') == NULL
2730 && strchr (recog_data.constraints[opno], '>')
2731 == NULL)
2732 return 0;
2733 break;
2734 default:
2735 break;
2736 }
2737 }
2738 #endif
2739 return 1;
2740 }
2741 }
2742
2743 which_alternative++;
2744 }
2745 while (which_alternative < recog_data.n_alternatives);
2746
2747 which_alternative = -1;
2748 /* If we are about to reject this, but we are not to test strictly,
2749 try a very loose test. Only return failure if it fails also. */
2750 if (strict == 0)
2751 return constrain_operands (-1);
2752 else
2753 return 0;
2754 }
2755
2756 /* Return true iff OPERAND (assumed to be a REG rtx)
2757 is a hard reg in class CL when its regno is offset by OFFSET
2758 and changed to mode MODE.
2759 If REG occupies multiple hard regs, all of them must be in CLASS. */
2760
2761 bool
2762 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2763 enum machine_mode mode)
2764 {
2765 unsigned int regno = REGNO (operand);
2766
2767 if (cl == NO_REGS)
2768 return false;
2769
2770 /* Regno must not be a pseudo register. Offset may be negative. */
2771 return (HARD_REGISTER_NUM_P (regno)
2772 && HARD_REGISTER_NUM_P (regno + offset)
2773 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2774 regno + offset));
2775 }
2776 \f
2777 /* Split single instruction. Helper function for split_all_insns and
2778 split_all_insns_noflow. Return last insn in the sequence if successful,
2779 or NULL if unsuccessful. */
2780
2781 static rtx
2782 split_insn (rtx_insn *insn)
2783 {
2784 /* Split insns here to get max fine-grain parallelism. */
2785 rtx_insn *first = PREV_INSN (insn);
2786 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2787 rtx insn_set, last_set, note;
2788
2789 if (last == insn)
2790 return NULL_RTX;
2791
2792 /* If the original instruction was a single set that was known to be
2793 equivalent to a constant, see if we can say the same about the last
2794 instruction in the split sequence. The two instructions must set
2795 the same destination. */
2796 insn_set = single_set (insn);
2797 if (insn_set)
2798 {
2799 last_set = single_set (last);
2800 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2801 {
2802 note = find_reg_equal_equiv_note (insn);
2803 if (note && CONSTANT_P (XEXP (note, 0)))
2804 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2805 else if (CONSTANT_P (SET_SRC (insn_set)))
2806 set_unique_reg_note (last, REG_EQUAL,
2807 copy_rtx (SET_SRC (insn_set)));
2808 }
2809 }
2810
2811 /* try_split returns the NOTE that INSN became. */
2812 SET_INSN_DELETED (insn);
2813
2814 /* ??? Coddle to md files that generate subregs in post-reload
2815 splitters instead of computing the proper hard register. */
2816 if (reload_completed && first != last)
2817 {
2818 first = NEXT_INSN (first);
2819 for (;;)
2820 {
2821 if (INSN_P (first))
2822 cleanup_subreg_operands (first);
2823 if (first == last)
2824 break;
2825 first = NEXT_INSN (first);
2826 }
2827 }
2828
2829 return last;
2830 }
2831
2832 /* Split all insns in the function. */
2833
2834 void
2835 split_all_insns (void)
2836 {
2837 sbitmap blocks;
2838 bool changed;
2839 basic_block bb;
2840
2841 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2842 bitmap_clear (blocks);
2843 changed = false;
2844
2845 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2846 {
2847 rtx_insn *insn, *next;
2848 bool finish = false;
2849
2850 rtl_profile_for_bb (bb);
2851 for (insn = BB_HEAD (bb); !finish ; insn = next)
2852 {
2853 /* Can't use `next_real_insn' because that might go across
2854 CODE_LABELS and short-out basic blocks. */
2855 next = NEXT_INSN (insn);
2856 finish = (insn == BB_END (bb));
2857 if (INSN_P (insn))
2858 {
2859 rtx set = single_set (insn);
2860
2861 /* Don't split no-op move insns. These should silently
2862 disappear later in final. Splitting such insns would
2863 break the code that handles LIBCALL blocks. */
2864 if (set && set_noop_p (set))
2865 {
2866 /* Nops get in the way while scheduling, so delete them
2867 now if register allocation has already been done. It
2868 is too risky to try to do this before register
2869 allocation, and there are unlikely to be very many
2870 nops then anyway. */
2871 if (reload_completed)
2872 delete_insn_and_edges (insn);
2873 }
2874 else
2875 {
2876 if (split_insn (insn))
2877 {
2878 bitmap_set_bit (blocks, bb->index);
2879 changed = true;
2880 }
2881 }
2882 }
2883 }
2884 }
2885
2886 default_rtl_profile ();
2887 if (changed)
2888 find_many_sub_basic_blocks (blocks);
2889
2890 #ifdef ENABLE_CHECKING
2891 verify_flow_info ();
2892 #endif
2893
2894 sbitmap_free (blocks);
2895 }
2896
2897 /* Same as split_all_insns, but do not expect CFG to be available.
2898 Used by machine dependent reorg passes. */
2899
2900 unsigned int
2901 split_all_insns_noflow (void)
2902 {
2903 rtx_insn *next, *insn;
2904
2905 for (insn = get_insns (); insn; insn = next)
2906 {
2907 next = NEXT_INSN (insn);
2908 if (INSN_P (insn))
2909 {
2910 /* Don't split no-op move insns. These should silently
2911 disappear later in final. Splitting such insns would
2912 break the code that handles LIBCALL blocks. */
2913 rtx set = single_set (insn);
2914 if (set && set_noop_p (set))
2915 {
2916 /* Nops get in the way while scheduling, so delete them
2917 now if register allocation has already been done. It
2918 is too risky to try to do this before register
2919 allocation, and there are unlikely to be very many
2920 nops then anyway.
2921
2922 ??? Should we use delete_insn when the CFG isn't valid? */
2923 if (reload_completed)
2924 delete_insn_and_edges (insn);
2925 }
2926 else
2927 split_insn (insn);
2928 }
2929 }
2930 return 0;
2931 }
2932 \f
2933 #ifdef HAVE_peephole2
2934 struct peep2_insn_data
2935 {
2936 rtx insn;
2937 regset live_before;
2938 };
2939
2940 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2941 static int peep2_current;
2942
2943 static bool peep2_do_rebuild_jump_labels;
2944 static bool peep2_do_cleanup_cfg;
2945
2946 /* The number of instructions available to match a peep2. */
2947 int peep2_current_count;
2948
2949 /* A non-insn marker indicating the last insn of the block.
2950 The live_before regset for this element is correct, indicating
2951 DF_LIVE_OUT for the block. */
2952 #define PEEP2_EOB pc_rtx
2953
2954 /* Wrap N to fit into the peep2_insn_data buffer. */
2955
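/* E.g., assuming MAX_INSNS_PER_PEEP2 == 25 (so the buffer holds 26
   entries), peep2_buf_position (26) == 0 and peep2_buf_position (27)
   == 1: a simple ring-buffer wrap without division. */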
2956 static int
2957 peep2_buf_position (int n)
2958 {
2959 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2960 n -= MAX_INSNS_PER_PEEP2 + 1;
2961 return n;
2962 }
2963
2964 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2965 does not exist. Used by the recognizer to find the next insn to match
2966 in a multi-insn pattern. */
2967
2968 rtx
2969 peep2_next_insn (int n)
2970 {
2971 gcc_assert (n <= peep2_current_count);
2972
2973 n = peep2_buf_position (peep2_current + n);
2974
2975 return peep2_insn_data[n].insn;
2976 }
2977
2978 /* Return true if REGNO is dead before the Nth non-note insn
2979 after `current'. */
2980
2981 int
2982 peep2_regno_dead_p (int ofs, int regno)
2983 {
2984 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2985
2986 ofs = peep2_buf_position (peep2_current + ofs);
2987
2988 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2989
2990 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2991 }
2992
2993 /* Similarly for a REG. */
2994
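/* E.g. (hypothetical 32-bit target): for a DImode REG occupying two
   hard registers, both registers must be absent from the live-before
   set for this to return 1. */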
2995 int
2996 peep2_reg_dead_p (int ofs, rtx reg)
2997 {
2998 int regno, n;
2999
3000 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3001
3002 ofs = peep2_buf_position (peep2_current + ofs);
3003
3004 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3005
3006 regno = REGNO (reg);
3007 n = hard_regno_nregs[regno][GET_MODE (reg)];
3008 while (--n >= 0)
3009 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3010 return 0;
3011 return 1;
3012 }
3013
3014 /* Regno offset to be used in the register search. */
3015 static int search_ofs;
3016
3017 /* Try to find a hard register of mode MODE, matching the register class in
3018 CLASS_STR, which is available at the beginning of the insn at peephole
3019 buffer position FROM and remains available until the insn at buffer
3020 position TO. FROM and TO are offsets from the current peephole position,
3021 wrapped as by peep2_buf_position.
3022 Registers that already have bits set in REG_SET will not be considered.
3023
3024 If an appropriate register is available, it will be returned and the
3025 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3026 returned. */
3027
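/* A hypothetical use, as it might appear in a target's
   define_peephole2 condition:

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode,
                                             &regs_allocated);

   asks for a general register that is free across the first two
   insns of the matched sequence; REGS_ALLOCATED is assumed to be a
   HARD_REG_SET cleared beforehand. */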
3028 rtx
3029 peep2_find_free_register (int from, int to, const char *class_str,
3030 enum machine_mode mode, HARD_REG_SET *reg_set)
3031 {
3032 enum reg_class cl;
3033 HARD_REG_SET live;
3034 df_ref def;
3035 int i;
3036
3037 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3038 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3039
3040 from = peep2_buf_position (peep2_current + from);
3041 to = peep2_buf_position (peep2_current + to);
3042
3043 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3044 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3045
3046 while (from != to)
3047 {
3048 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3049
3050 /* Don't use registers set or clobbered by the insn. */
3051 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3052 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3053
3054 from = peep2_buf_position (from + 1);
3055 }
3056
3057 cl = reg_class_for_constraint (lookup_constraint (class_str));
3058
3059 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3060 {
3061 int raw_regno, regno, success, j;
3062
3063 /* Distribute the free registers as much as possible. */
3064 raw_regno = search_ofs + i;
3065 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3066 raw_regno -= FIRST_PSEUDO_REGISTER;
3067 #ifdef REG_ALLOC_ORDER
3068 regno = reg_alloc_order[raw_regno];
3069 #else
3070 regno = raw_regno;
3071 #endif
3072
3073 /* Can it support the mode we need? */
3074 if (! HARD_REGNO_MODE_OK (regno, mode))
3075 continue;
3076
3077 success = 1;
3078 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3079 {
3080 /* Don't allocate fixed registers. */
3081 if (fixed_regs[regno + j])
3082 {
3083 success = 0;
3084 break;
3085 }
3086 /* Don't allocate global registers. */
3087 if (global_regs[regno + j])
3088 {
3089 success = 0;
3090 break;
3091 }
3092 /* Make sure the register is of the right class. */
3093 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3094 {
3095 success = 0;
3096 break;
3097 }
3098 /* And that we don't create an extra save/restore. */
3099 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3100 {
3101 success = 0;
3102 break;
3103 }
3104
3105 if (! targetm.hard_regno_scratch_ok (regno + j))
3106 {
3107 success = 0;
3108 break;
3109 }
3110
3111 /* And we don't clobber traceback for noreturn functions. */
3112 if ((regno + j == FRAME_POINTER_REGNUM
3113 || regno + j == HARD_FRAME_POINTER_REGNUM)
3114 && (! reload_completed || frame_pointer_needed))
3115 {
3116 success = 0;
3117 break;
3118 }
3119
3120 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3121 || TEST_HARD_REG_BIT (live, regno + j))
3122 {
3123 success = 0;
3124 break;
3125 }
3126 }
3127
3128 if (success)
3129 {
3130 add_to_hard_reg_set (reg_set, mode, regno);
3131
3132 /* Start the next search with the next register. */
3133 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3134 raw_regno = 0;
3135 search_ofs = raw_regno;
3136
3137 return gen_rtx_REG (mode, regno);
3138 }
3139 }
3140
3141 search_ofs = 0;
3142 return NULL_RTX;
3143 }
3144
3145 /* Forget all currently tracked instructions; only remember the current
3146 LIVE regset. */
3147
3148 static void
3149 peep2_reinit_state (regset live)
3150 {
3151 int i;
3152
3153 /* Indicate that all slots except the last hold invalid data. */
3154 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3155 peep2_insn_data[i].insn = NULL_RTX;
3156 peep2_current_count = 0;
3157
3158 /* Indicate that the last slot contains live_after data. */
3159 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3160 peep2_current = MAX_INSNS_PER_PEEP2;
3161
3162 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3163 }
3164
3165 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3166 starting at INSN. Perform the replacement, removing the old insns and
3167 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3168 if the replacement is rejected. */
3169
3170 static rtx_insn *
3171 peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt)
3172 {
3173 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3174 int i;
3175 rtx_insn *last, *before_try, *x;
3176 rtx eh_note, as_note;
3177 rtx_insn *old_insn;
3178 rtx_insn *new_insn;
3179 bool was_call = false;
3180
3181 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3182 match more than one insn, or to be split into more than one insn. */
3183 old_insn = as_a <rtx_insn *> (peep2_insn_data[peep2_current].insn);
3184 if (RTX_FRAME_RELATED_P (old_insn))
3185 {
3186 bool any_note = false;
3187 rtx note;
3188
3189 if (match_len != 0)
3190 return NULL;
3191
3192 /* Look for one "active" insn. That is, ignore any "clobber" insns that
3193 may be in the stream for the purpose of register allocation. */
3194 if (active_insn_p (attempt))
3195 new_insn = attempt;
3196 else
3197 new_insn = next_active_insn (attempt);
3198 if (next_active_insn (new_insn))
3199 return NULL;
3200
3201 /* We have a 1-1 replacement. Copy over any frame-related info. */
3202 RTX_FRAME_RELATED_P (new_insn) = 1;
3203
3204 /* Allow the backend to fill in a note during the split. */
3205 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3206 switch (REG_NOTE_KIND (note))
3207 {
3208 case REG_FRAME_RELATED_EXPR:
3209 case REG_CFA_DEF_CFA:
3210 case REG_CFA_ADJUST_CFA:
3211 case REG_CFA_OFFSET:
3212 case REG_CFA_REGISTER:
3213 case REG_CFA_EXPRESSION:
3214 case REG_CFA_RESTORE:
3215 case REG_CFA_SET_VDRAP:
3216 any_note = true;
3217 break;
3218 default:
3219 break;
3220 }
3221
3222 /* If the backend didn't supply a note, copy one over. */
3223 if (!any_note)
3224 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3225 switch (REG_NOTE_KIND (note))
3226 {
3227 case REG_FRAME_RELATED_EXPR:
3228 case REG_CFA_DEF_CFA:
3229 case REG_CFA_ADJUST_CFA:
3230 case REG_CFA_OFFSET:
3231 case REG_CFA_REGISTER:
3232 case REG_CFA_EXPRESSION:
3233 case REG_CFA_RESTORE:
3234 case REG_CFA_SET_VDRAP:
3235 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3236 any_note = true;
3237 break;
3238 default:
3239 break;
3240 }
3241
3242 /* If there still isn't a note, make sure the unwind info sees the
3243 same expression as before the split. */
3244 if (!any_note)
3245 {
3246 rtx old_set, new_set;
3247
3248 /* The old insn had better have been simple, or annotated. */
3249 old_set = single_set (old_insn);
3250 gcc_assert (old_set != NULL);
3251
3252 new_set = single_set (new_insn);
3253 if (!new_set || !rtx_equal_p (new_set, old_set))
3254 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3255 }
3256
3257 /* Copy prologue/epilogue status. This is required in order to keep
3258 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3259 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3260 }
3261
3262 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3263 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3264 cfg-related call notes. */
3265 for (i = 0; i <= match_len; ++i)
3266 {
3267 int j;
3268 rtx note;
3269
3270 j = peep2_buf_position (peep2_current + i);
3271 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3272 if (!CALL_P (old_insn))
3273 continue;
3274 was_call = true;
3275
3276 new_insn = attempt;
3277 while (new_insn != NULL_RTX)
3278 {
3279 if (CALL_P (new_insn))
3280 break;
3281 new_insn = NEXT_INSN (new_insn);
3282 }
3283
3284 gcc_assert (new_insn != NULL_RTX);
3285
3286 CALL_INSN_FUNCTION_USAGE (new_insn)
3287 = CALL_INSN_FUNCTION_USAGE (old_insn);
3288 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3289
3290 for (note = REG_NOTES (old_insn);
3291 note;
3292 note = XEXP (note, 1))
3293 switch (REG_NOTE_KIND (note))
3294 {
3295 case REG_NORETURN:
3296 case REG_SETJMP:
3297 case REG_TM:
3298 add_reg_note (new_insn, REG_NOTE_KIND (note),
3299 XEXP (note, 0));
3300 break;
3301 default:
3302 /* Discard all other reg notes. */
3303 break;
3304 }
3305
3306 /* Croak if there is another call in the sequence. */
3307 while (++i <= match_len)
3308 {
3309 j = peep2_buf_position (peep2_current + i);
3310 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3311 gcc_assert (!CALL_P (old_insn));
3312 }
3313 break;
3314 }
3315
3316 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3317 move those notes over to the new sequence. */
3318 as_note = NULL;
3319 for (i = match_len; i >= 0; --i)
3320 {
3321 int j = peep2_buf_position (peep2_current + i);
3322 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3323
3324 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3325 if (as_note)
3326 break;
3327 }
3328
3329 i = peep2_buf_position (peep2_current + match_len);
3330 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3331
3332 /* Replace the old sequence with the new. */
3333 rtx_insn *peepinsn = as_a <rtx_insn *> (peep2_insn_data[i].insn);
3334 last = emit_insn_after_setloc (attempt,
3335 peep2_insn_data[i].insn,
3336 INSN_LOCATION (peepinsn));
3337 before_try = PREV_INSN (insn);
3338 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3339
3340 /* Re-insert the EH_REGION notes. */
3341 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3342 {
3343 edge eh_edge;
3344 edge_iterator ei;
3345
3346 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3347 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3348 break;
3349
3350 if (eh_note)
3351 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3352
3353 if (eh_edge)
3354 for (x = last; x != before_try; x = PREV_INSN (x))
3355 if (x != BB_END (bb)
3356 && (can_throw_internal (x)
3357 || can_nonlocal_goto (x)))
3358 {
3359 edge nfte, nehe;
3360 int flags;
3361
3362 nfte = split_block (bb, x);
3363 flags = (eh_edge->flags
3364 & (EDGE_EH | EDGE_ABNORMAL));
3365 if (CALL_P (x))
3366 flags |= EDGE_ABNORMAL_CALL;
3367 nehe = make_edge (nfte->src, eh_edge->dest,
3368 flags);
3369
3370 nehe->probability = eh_edge->probability;
3371 nfte->probability
3372 = REG_BR_PROB_BASE - nehe->probability;
3373
3374 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3375 bb = nfte->src;
3376 eh_edge = nehe;
3377 }
3378
3379 /* A possibly trapping insn may have been turned into a non-trapping
3380 one; zap any now-dummy outgoing edges. */
3381 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3382 }
3383
3384 /* Re-insert the ARGS_SIZE notes. */
3385 if (as_note)
3386 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3387
3388 /* If we generated a jump instruction, it won't have
3389 JUMP_LABEL set. Recompute after we're done. */
3390 for (x = last; x != before_try; x = PREV_INSN (x))
3391 if (JUMP_P (x))
3392 {
3393 peep2_do_rebuild_jump_labels = true;
3394 break;
3395 }
3396
3397 return last;
3398 }
3399
3400 /* After performing a replacement in basic block BB, fix up the life
3401 information in our buffer. LAST is the last of the insns that we
3402 emitted as a replacement. PREV is the insn before the start of
3403 the replacement. MATCH_LEN is the number of instructions that were
3404 matched, and which now need to be replaced in the buffer. */
3405
3406 static void
3407 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3408 rtx_insn *prev)
3409 {
3410 int i = peep2_buf_position (peep2_current + match_len + 1);
3411 rtx_insn *x;
3412 regset_head live;
3413
3414 INIT_REG_SET (&live);
3415 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3416
3417 gcc_assert (peep2_current_count >= match_len + 1);
3418 peep2_current_count -= match_len + 1;
3419
3420 x = last;
3421 do
3422 {
3423 if (INSN_P (x))
3424 {
3425 df_insn_rescan (x);
3426 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3427 {
3428 peep2_current_count++;
3429 if (--i < 0)
3430 i = MAX_INSNS_PER_PEEP2;
3431 peep2_insn_data[i].insn = x;
3432 df_simulate_one_insn_backwards (bb, x, &live);
3433 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3434 }
3435 }
3436 x = PREV_INSN (x);
3437 }
3438 while (x != prev);
3439 CLEAR_REG_SET (&live);
3440
3441 peep2_current = i;
3442 }
3443
3444 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3445 Return true if we added it, false otherwise. The caller will try to match
3446 peepholes against the buffer if we return false; otherwise it will try to
3447 add more instructions to the buffer. */
3448
3449 static bool
3450 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3451 {
3452 int pos;
3453
3454 /* Once we have filled the maximum number of insns the buffer can hold,
3455 allow the caller to match the insns against peepholes. We wait until
3456 the buffer is full in case the target has similar peepholes of different
3457 length; we always want to match the longest if possible. */
3458 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3459 return false;
3460
3461 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3462 any other pattern, lest it change the semantics of the frame info. */
3463 if (RTX_FRAME_RELATED_P (insn))
3464 {
3465 /* Let the buffer drain first. */
3466 if (peep2_current_count > 0)
3467 return false;
3468 /* Now the insn will be the only thing in the buffer. */
3469 }
3470
3471 pos = peep2_buf_position (peep2_current + peep2_current_count);
3472 peep2_insn_data[pos].insn = insn;
3473 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3474 peep2_current_count++;
3475
3476 df_simulate_one_insn_forwards (bb, as_a <rtx_insn *> (insn), live);
3477 return true;
3478 }
3479
3480 /* Perform the peephole2 optimization pass. */
3481
3482 static void
3483 peephole2_optimize (void)
3484 {
3485 rtx_insn *insn;
3486 bitmap live;
3487 int i;
3488 basic_block bb;
3489
3490 peep2_do_cleanup_cfg = false;
3491 peep2_do_rebuild_jump_labels = false;
3492
3493 df_set_flags (DF_LR_RUN_DCE);
3494 df_note_add_problem ();
3495 df_analyze ();
3496
3497 /* Initialize the regsets we're going to use. */
3498 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3499 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3500 search_ofs = 0;
3501 live = BITMAP_ALLOC (&reg_obstack);
3502
3503 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3504 {
3505 bool past_end = false;
3506 int pos;
3507
3508 rtl_profile_for_bb (bb);
3509
3510 /* Start up propagation. */
3511 bitmap_copy (live, DF_LR_IN (bb));
3512 df_simulate_initialize_forwards (bb, live);
3513 peep2_reinit_state (live);
3514
3515 insn = BB_HEAD (bb);
3516 for (;;)
3517 {
3518 rtx_insn *attempt;
3519 rtx head;
3520 int match_len;
3521
3522 if (!past_end && !NONDEBUG_INSN_P (insn))
3523 {
3524 next_insn:
3525 insn = NEXT_INSN (insn);
3526 if (insn == NEXT_INSN (BB_END (bb)))
3527 past_end = true;
3528 continue;
3529 }
3530 if (!past_end && peep2_fill_buffer (bb, insn, live))
3531 goto next_insn;
3532
3533 /* If we did not fill an empty buffer, it signals the end of the
3534 block. */
3535 if (peep2_current_count == 0)
3536 break;
3537
3538 /* The buffer filled to the current maximum, so try to match. */
3539
3540 pos = peep2_buf_position (peep2_current + peep2_current_count);
3541 peep2_insn_data[pos].insn = PEEP2_EOB;
3542 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3543
3544 /* Match the peephole. */
3545 head = peep2_insn_data[peep2_current].insn;
3546 attempt = safe_as_a <rtx_insn *> (
3547 peephole2_insns (PATTERN (head), head, &match_len));
3548 if (attempt != NULL)
3549 {
3550 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3551 if (last)
3552 {
3553 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3554 continue;
3555 }
3556 }
3557
3558 /* No match: advance the buffer by one insn. */
3559 peep2_current = peep2_buf_position (peep2_current + 1);
3560 peep2_current_count--;
3561 }
3562 }
3563
3564 default_rtl_profile ();
3565 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3566 BITMAP_FREE (peep2_insn_data[i].live_before);
3567 BITMAP_FREE (live);
3568 if (peep2_do_rebuild_jump_labels)
3569 rebuild_jump_labels (get_insns ());
3570 if (peep2_do_cleanup_cfg)
3571 cleanup_cfg (CLEANUP_CFG_CHANGED);
3572 }
3573 #endif /* HAVE_peephole2 */
3574
3575 /* Common predicates for use with define_bypass. */
3576
3577 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3578 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3579 must be either a single_set or a PARALLEL with SETs inside. */
3580
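/* Hypothetical example: with IN_INSN == (set (mem:SI (reg:SI 1))
   (reg:SI 2)), an OUT_INSN setting (reg:SI 2) feeds only the store
   data, so this returns true; an OUT_INSN setting (reg:SI 1) feeds
   the address, so this returns false. */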
3581 int
3582 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3583 {
3584 rtx out_set, in_set;
3585 rtx out_pat, in_pat;
3586 rtx out_exp, in_exp;
3587 int i, j;
3588
3589 in_set = single_set (in_insn);
3590 if (in_set)
3591 {
3592 if (!MEM_P (SET_DEST (in_set)))
3593 return false;
3594
3595 out_set = single_set (out_insn);
3596 if (out_set)
3597 {
3598 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3599 return false;
3600 }
3601 else
3602 {
3603 out_pat = PATTERN (out_insn);
3604
3605 if (GET_CODE (out_pat) != PARALLEL)
3606 return false;
3607
3608 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3609 {
3610 out_exp = XVECEXP (out_pat, 0, i);
3611
3612 if (GET_CODE (out_exp) == CLOBBER)
3613 continue;
3614
3615 gcc_assert (GET_CODE (out_exp) == SET);
3616
3617 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3618 return false;
3619 }
3620 }
3621 }
3622 else
3623 {
3624 in_pat = PATTERN (in_insn);
3625 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3626
3627 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3628 {
3629 in_exp = XVECEXP (in_pat, 0, i);
3630
3631 if (GET_CODE (in_exp) == CLOBBER)
3632 continue;
3633
3634 gcc_assert (GET_CODE (in_exp) == SET);
3635
3636 if (!MEM_P (SET_DEST (in_exp)))
3637 return false;
3638
3639 out_set = single_set (out_insn);
3640 if (out_set)
3641 {
3642 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3643 return false;
3644 }
3645 else
3646 {
3647 out_pat = PATTERN (out_insn);
3648 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3649
3650 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3651 {
3652 out_exp = XVECEXP (out_pat, 0, j);
3653
3654 if (GET_CODE (out_exp) == CLOBBER)
3655 continue;
3656
3657 gcc_assert (GET_CODE (out_exp) == SET);
3658
3659 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3660 return false;
3661 }
3662 }
3663 }
3664 }
3665
3666 return true;
3667 }
3668
3669 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3670 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3671 or multiple set; IN_INSN should be a single_set for the result to be
3672 meaningful, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3673
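/* Hypothetical example: with IN_INSN ==
     (set (reg:SI 1) (if_then_else:SI (eq (reg:SI 2) (const_int 0))
                                      (reg:SI 3) (reg:SI 4)))
   an OUT_INSN setting (reg:SI 2) feeds only the condition, so this
   returns true; one setting (reg:SI 3) or (reg:SI 4) feeds an arm,
   so this returns false. */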
3674 int
3675 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3676 {
3677 rtx out_set, in_set;
3678
3679 in_set = single_set (in_insn);
3680 if (! in_set)
3681 {
3682 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3683 return false;
3684 }
3685
3686 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3687 return false;
3688 in_set = SET_SRC (in_set);
3689
3690 out_set = single_set (out_insn);
3691 if (out_set)
3692 {
3693 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3694 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3695 return false;
3696 }
3697 else
3698 {
3699 rtx out_pat;
3700 int i;
3701
3702 out_pat = PATTERN (out_insn);
3703 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3704
3705 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3706 {
3707 rtx exp = XVECEXP (out_pat, 0, i);
3708
3709 if (GET_CODE (exp) == CLOBBER)
3710 continue;
3711
3712 gcc_assert (GET_CODE (exp) == SET);
3713
3714 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3715 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3716 return false;
3717 }
3718 }
3719
3720 return true;
3721 }
3722 \f
3723 static unsigned int
3724 rest_of_handle_peephole2 (void)
3725 {
3726 #ifdef HAVE_peephole2
3727 peephole2_optimize ();
3728 #endif
3729 return 0;
3730 }
3731
3732 namespace {
3733
3734 const pass_data pass_data_peephole2 =
3735 {
3736 RTL_PASS, /* type */
3737 "peephole2", /* name */
3738 OPTGROUP_NONE, /* optinfo_flags */
3739 TV_PEEPHOLE2, /* tv_id */
3740 0, /* properties_required */
3741 0, /* properties_provided */
3742 0, /* properties_destroyed */
3743 0, /* todo_flags_start */
3744 TODO_df_finish, /* todo_flags_finish */
3745 };
3746
3747 class pass_peephole2 : public rtl_opt_pass
3748 {
3749 public:
3750 pass_peephole2 (gcc::context *ctxt)
3751 : rtl_opt_pass (pass_data_peephole2, ctxt)
3752 {}
3753
3754 /* opt_pass methods: */
3755 /* The epiphany backend creates a second instance of this pass, so we need
3756 a clone method. */
3757 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3758 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3759 virtual unsigned int execute (function *)
3760 {
3761 return rest_of_handle_peephole2 ();
3762 }
3763
3764 }; // class pass_peephole2
3765
3766 } // anon namespace
3767
3768 rtl_opt_pass *
3769 make_pass_peephole2 (gcc::context *ctxt)
3770 {
3771 return new pass_peephole2 (ctxt);
3772 }
3773
3774 namespace {
3775
3776 const pass_data pass_data_split_all_insns =
3777 {
3778 RTL_PASS, /* type */
3779 "split1", /* name */
3780 OPTGROUP_NONE, /* optinfo_flags */
3781 TV_NONE, /* tv_id */
3782 0, /* properties_required */
3783 0, /* properties_provided */
3784 0, /* properties_destroyed */
3785 0, /* todo_flags_start */
3786 0, /* todo_flags_finish */
3787 };
3788
3789 class pass_split_all_insns : public rtl_opt_pass
3790 {
3791 public:
3792 pass_split_all_insns (gcc::context *ctxt)
3793 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3794 {}
3795
3796 /* opt_pass methods: */
3797 /* The epiphany backend creates a second instance of this pass, so
3798 we need a clone method. */
3799 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3800 virtual unsigned int execute (function *)
3801 {
3802 split_all_insns ();
3803 return 0;
3804 }
3805
3806 }; // class pass_split_all_insns
3807
3808 } // anon namespace
3809
3810 rtl_opt_pass *
3811 make_pass_split_all_insns (gcc::context *ctxt)
3812 {
3813 return new pass_split_all_insns (ctxt);
3814 }
3815
3816 static unsigned int
3817 rest_of_handle_split_after_reload (void)
3818 {
3819 /* If optimizing, then go ahead and split insns now. */
3820 #ifndef STACK_REGS
3821 if (optimize > 0)
3822 #endif
3823 split_all_insns ();
3824 return 0;
3825 }
3826
3827 namespace {
3828
3829 const pass_data pass_data_split_after_reload =
3830 {
3831 RTL_PASS, /* type */
3832 "split2", /* name */
3833 OPTGROUP_NONE, /* optinfo_flags */
3834 TV_NONE, /* tv_id */
3835 0, /* properties_required */
3836 0, /* properties_provided */
3837 0, /* properties_destroyed */
3838 0, /* todo_flags_start */
3839 0, /* todo_flags_finish */
3840 };
3841
3842 class pass_split_after_reload : public rtl_opt_pass
3843 {
3844 public:
3845 pass_split_after_reload (gcc::context *ctxt)
3846 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3847 {}
3848
3849 /* opt_pass methods: */
3850 virtual unsigned int execute (function *)
3851 {
3852 return rest_of_handle_split_after_reload ();
3853 }
3854
3855 }; // class pass_split_after_reload
3856
3857 } // anon namespace
3858
3859 rtl_opt_pass *
3860 make_pass_split_after_reload (gcc::context *ctxt)
3861 {
3862 return new pass_split_after_reload (ctxt);
3863 }
3864
3865 namespace {
3866
3867 const pass_data pass_data_split_before_regstack =
3868 {
3869 RTL_PASS, /* type */
3870 "split3", /* name */
3871 OPTGROUP_NONE, /* optinfo_flags */
3872 TV_NONE, /* tv_id */
3873 0, /* properties_required */
3874 0, /* properties_provided */
3875 0, /* properties_destroyed */
3876 0, /* todo_flags_start */
3877 0, /* todo_flags_finish */
3878 };
3879
3880 class pass_split_before_regstack : public rtl_opt_pass
3881 {
3882 public:
3883 pass_split_before_regstack (gcc::context *ctxt)
3884 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3885 {}
3886
3887 /* opt_pass methods: */
3888 virtual bool gate (function *);
3889 virtual unsigned int execute (function *)
3890 {
3891 split_all_insns ();
3892 return 0;
3893 }
3894
3895 }; // class pass_split_before_regstack
3896
3897 bool
3898 pass_split_before_regstack::gate (function *)
3899 {
3900 #if HAVE_ATTR_length && defined (STACK_REGS)
3901 /* If flow2 creates new instructions which need splitting, and
3902 scheduling after reload is not done, they might not be split
3903 until final, which doesn't allow splitting when
3904 HAVE_ATTR_length is defined. */
3905 # ifdef INSN_SCHEDULING
3906 return (optimize && !flag_schedule_insns_after_reload);
3907 # else
3908 return (optimize);
3909 # endif
3910 #else
3911 return 0;
3912 #endif
3913 }
3914
3915 } // anon namespace
3916
3917 rtl_opt_pass *
3918 make_pass_split_before_regstack (gcc::context *ctxt)
3919 {
3920 return new pass_split_before_regstack (ctxt);
3921 }
3922
3923 static unsigned int
3924 rest_of_handle_split_before_sched2 (void)
3925 {
3926 #ifdef INSN_SCHEDULING
3927 split_all_insns ();
3928 #endif
3929 return 0;
3930 }
3931
3932 namespace {
3933
3934 const pass_data pass_data_split_before_sched2 =
3935 {
3936 RTL_PASS, /* type */
3937 "split4", /* name */
3938 OPTGROUP_NONE, /* optinfo_flags */
3939 TV_NONE, /* tv_id */
3940 0, /* properties_required */
3941 0, /* properties_provided */
3942 0, /* properties_destroyed */
3943 0, /* todo_flags_start */
3944 0, /* todo_flags_finish */
3945 };
3946
3947 class pass_split_before_sched2 : public rtl_opt_pass
3948 {
3949 public:
3950 pass_split_before_sched2 (gcc::context *ctxt)
3951 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
3952 {}
3953
3954 /* opt_pass methods: */
3955 virtual bool gate (function *)
3956 {
3957 #ifdef INSN_SCHEDULING
3958 return optimize > 0 && flag_schedule_insns_after_reload;
3959 #else
3960 return false;
3961 #endif
3962 }
3963
3964 virtual unsigned int execute (function *)
3965 {
3966 return rest_of_handle_split_before_sched2 ();
3967 }
3968
3969 }; // class pass_split_before_sched2
3970
3971 } // anon namespace
3972
3973 rtl_opt_pass *
3974 make_pass_split_before_sched2 (gcc::context *ctxt)
3975 {
3976 return new pass_split_before_sched2 (ctxt);
3977 }
3978
3979 namespace {
3980
3981 const pass_data pass_data_split_for_shorten_branches =
3982 {
3983 RTL_PASS, /* type */
3984 "split5", /* name */
3985 OPTGROUP_NONE, /* optinfo_flags */
3986 TV_NONE, /* tv_id */
3987 0, /* properties_required */
3988 0, /* properties_provided */
3989 0, /* properties_destroyed */
3990 0, /* todo_flags_start */
3991 0, /* todo_flags_finish */
3992 };
3993
3994 class pass_split_for_shorten_branches : public rtl_opt_pass
3995 {
3996 public:
3997 pass_split_for_shorten_branches (gcc::context *ctxt)
3998 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
3999 {}
4000
4001 /* opt_pass methods: */
4002 virtual bool gate (function *)
4003 {
4004 /* The placement of the splitting that we do for shorten_branches
4005 depends on whether regstack is used by the target or not. */
4006 #if HAVE_ATTR_length && !defined (STACK_REGS)
4007 return true;
4008 #else
4009 return false;
4010 #endif
4011 }
4012
4013 virtual unsigned int execute (function *)
4014 {
4015 return split_all_insns_noflow ();
4016 }
4017
4018 }; // class pass_split_for_shorten_branches
4019
4020 } // anon namespace
4021
4022 rtl_opt_pass *
4023 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4024 {
4025 return new pass_split_for_shorten_branches (ctxt);
4026 }
4027
4028 /* (Re)initialize the target information after a change in target. */
4029
4030 void
4031 recog_init ()
4032 {
4033 /* The information is zero-initialized, so we don't need to do anything
4034 the first time round. */
4035 if (!this_target_recog->x_initialized)
4036 {
4037 this_target_recog->x_initialized = true;
4038 return;
4039 }
4040 memset (this_target_recog->x_enabled_alternatives, 0,
4041 sizeof (this_target_recog->x_enabled_alternatives));
4042 for (int i = 0; i < LAST_INSN_CODE; ++i)
4043 if (this_target_recog->x_op_alt[i])
4044 {
4045 free (this_target_recog->x_op_alt[i]);
4046 this_target_recog->x_op_alt[i] = 0;
4047 }
4048 }