[gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "hash-set.h"
26 #include "vec.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "inchash.h"
31 #include "tree.h"
32 #include "rtl-error.h"
33 #include "tm_p.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 #include "hard-reg-set.h"
37 #include "recog.h"
38 #include "regs.h"
39 #include "addresses.h"
40 #include "hashtab.h"
41 #include "function.h"
42 #include "rtl.h"
43 #include "flags.h"
44 #include "statistics.h"
45 #include "expmed.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "emit-rtl.h"
50 #include "varasm.h"
51 #include "stmt.h"
52 #include "expr.h"
53 #include "predict.h"
54 #include "dominance.h"
55 #include "cfg.h"
56 #include "cfgrtl.h"
57 #include "cfgbuild.h"
58 #include "cfgcleanup.h"
59 #include "basic-block.h"
60 #include "reload.h"
61 #include "target.h"
62 #include "tree-pass.h"
63 #include "df.h"
64 #include "insn-codes.h"
65
66 #ifndef STACK_POP_CODE
67 #if STACK_GROWS_DOWNWARD
68 #define STACK_POP_CODE POST_INC
69 #else
70 #define STACK_POP_CODE POST_DEC
71 #endif
72 #endif
73
74 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
75 static void validate_replace_src_1 (rtx *, void *);
76 static rtx_insn *split_insn (rtx_insn *);
77
78 struct target_recog default_target_recog;
79 #if SWITCHABLE_TARGET
80 struct target_recog *this_target_recog = &default_target_recog;
81 #endif
82
83 /* Nonzero means allow operands to be volatile.
84 This should be 0 if you are generating rtl, such as if you are calling
85 the functions in optabs.c and expmed.c (most of the time).
86 This should be 1 if all valid insns need to be recognized,
87 such as in reginfo.c and final.c and reload.c.
88
89 init_recog and init_recog_no_volatile are responsible for setting this. */
90
91 int volatile_ok;
92
93 struct recog_data_d recog_data;
94
95 /* Contains a vector of operand_alternative structures, such that
96 operand OP of alternative A is at index A * n_operands + OP.
97 Set up by preprocess_constraints. */
98 const operand_alternative *recog_op_alt;
99
100 /* Used to provide recog_op_alt for asms. */
101 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
102 * MAX_RECOG_ALTERNATIVES];
103
104 /* On return from `constrain_operands', indicate which alternative
105 was satisfied. */
106
107 int which_alternative;
108
109 /* Nonzero after end of reload pass.
110 Set to 1 or 0 by toplev.c.
111 Controls the significance of (SUBREG (MEM)). */
112
113 int reload_completed;
114
115 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
116 int epilogue_completed;
117
118 /* Initialize data used by the function `recog'.
119 This must be called once in the compilation of a function
120 before any insn recognition may be done in the function. */
121
122 void
123 init_recog_no_volatile (void)
124 {
125 volatile_ok = 0;
126 }
127
128 void
129 init_recog (void)
130 {
131 volatile_ok = 1;
132 }
133
134 \f
135 /* Return true if labels in asm operands BODY are LABEL_REFs. */
136
137 static bool
138 asm_labels_ok (rtx body)
139 {
140 rtx asmop;
141 int i;
142
143 asmop = extract_asm_operands (body);
144 if (asmop == NULL_RTX)
145 return true;
146
147 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
148 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
149 return false;
150
151 return true;
152 }
153
154 /* Check that X is an insn-body for an `asm' with operands
155 and that the operands mentioned in it are legitimate. */
156
157 int
158 check_asm_operands (rtx x)
159 {
160 int noperands;
161 rtx *operands;
162 const char **constraints;
163 int i;
164
165 if (!asm_labels_ok (x))
166 return 0;
167
168 /* Post-reload, be more strict with things. */
169 if (reload_completed)
170 {
171 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
172 rtx_insn *insn = make_insn_raw (x);
173 extract_insn (insn);
174 constrain_operands (1, get_enabled_alternatives (insn));
175 return which_alternative >= 0;
176 }
177
178 noperands = asm_noperands (x);
179 if (noperands < 0)
180 return 0;
181 if (noperands == 0)
182 return 1;
183
184 operands = XALLOCAVEC (rtx, noperands);
185 constraints = XALLOCAVEC (const char *, noperands);
186
187 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
188
189 for (i = 0; i < noperands; i++)
190 {
191 const char *c = constraints[i];
192 if (c[0] == '%')
193 c++;
194 if (! asm_operand_ok (operands[i], c, constraints))
195 return 0;
196 }
197
198 return 1;
199 }
200 \f
201 /* Static data for the next two routines. */
202
203 typedef struct change_t
204 {
205 rtx object;
206 int old_code;
207 rtx *loc;
208 rtx old;
209 bool unshare;
210 } change_t;
211
212 static change_t *changes;
213 static int changes_allocated;
214
215 static int num_changes = 0;
216
217 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
218 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
219 the change is simply made.
220
221 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
222 will be called with the address and mode as parameters. If OBJECT is
223 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
224 the change in place.
225
226 IN_GROUP is nonzero if this is part of a group of changes that must be
227 performed as a group. In that case, the changes will be stored. The
228 function `apply_change_group' will validate and apply the changes.
229
230 If IN_GROUP is zero, this is a single change. Try to recognize the insn
231 or validate the memory reference with the change applied. If the result
232 is not valid for the machine, suppress the change and return zero.
233 Otherwise, perform the change and return 1. */
234
235 static bool
236 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
237 {
238 rtx old = *loc;
239
240 if (old == new_rtx || rtx_equal_p (old, new_rtx))
241 return 1;
242
243 gcc_assert (in_group != 0 || num_changes == 0);
244
245 *loc = new_rtx;
246
247 /* Save the information describing this change. */
248 if (num_changes >= changes_allocated)
249 {
250 if (changes_allocated == 0)
251 /* This value allows for repeated substitutions inside complex
252 indexed addresses, or changes in up to 5 insns. */
253 changes_allocated = MAX_RECOG_OPERANDS * 5;
254 else
255 changes_allocated *= 2;
256
257 changes = XRESIZEVEC (change_t, changes, changes_allocated);
258 }
259
260 changes[num_changes].object = object;
261 changes[num_changes].loc = loc;
262 changes[num_changes].old = old;
263 changes[num_changes].unshare = unshare;
264
265 if (object && !MEM_P (object))
266 {
267 /* Set INSN_CODE to force rerecognition of insn. Save old code in
268 case invalid. */
269 changes[num_changes].old_code = INSN_CODE (object);
270 INSN_CODE (object) = -1;
271 }
272
273 num_changes++;
274
275 /* If we are making a group of changes, return 1. Otherwise, validate the
276 change group we made. */
277
278 if (in_group)
279 return 1;
280 else
281 return apply_change_group ();
282 }
283
284 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
285 UNSHARE to false. */
286
287 bool
288 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
289 {
290 return validate_change_1 (object, loc, new_rtx, in_group, false);
291 }
292
293 /* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
294 UNSHARE to true. */
295
296 bool
297 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
298 {
299 return validate_change_1 (object, loc, new_rtx, in_group, true);
300 }
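
/* Illustrative sketch, not part of recog.c: the usual calling pattern for a
   group of tentative changes.  It assumes INSN has a single_set whose source
   is a binary expression; both operand slots are rewritten, then the whole
   group is validated at once.  The sketch_* name is made up.  */

static bool
sketch_replace_binop_operands (rtx_insn *insn, rtx new0, rtx new1)
{
  rtx set = single_set (insn);
  if (!set || !BINARY_P (SET_SRC (set)))
    return false;

  rtx src = SET_SRC (set);
  validate_change (insn, &XEXP (src, 0), new0, 1);	/* queued only */
  validate_change (insn, &XEXP (src, 1), new1, 1);	/* queued only */

  /* Re-recognize INSN with both edits in place; on success the group is
     confirmed, otherwise every queued change is rolled back.  */
  return apply_change_group () != 0;
}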
301
302
303 /* Keep X canonicalized if some changes have made it non-canonical; only
304 modifies the operands of X, not (for example) its code. Simplifications
305 are not the job of this routine.
306
307 Return true if anything was changed. */
308 bool
309 canonicalize_change_group (rtx_insn *insn, rtx x)
310 {
311 if (COMMUTATIVE_P (x)
312 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
313 {
314 /* Oops, the caller has made X no longer canonical.
315 Let's redo the changes in the correct order. */
316 rtx tem = XEXP (x, 0);
317 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
318 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
319 return true;
320 }
321 else
322 return false;
323 }
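
/* Illustrative sketch, not part of recog.c: after queueing a change that puts
   a constant into the first operand of a commutative expression inside INSN,
   re-canonicalize so the constant ends up second, as the rest of the compiler
   expects.  X is assumed to be a commutative subexpression of INSN's pattern;
   the sketch_* name is made up.  */

static void
sketch_substitute_and_canonicalize (rtx_insn *insn, rtx x, rtx cst)
{
  validate_change (insn, &XEXP (x, 0), cst, 1);
  /* Queues the operand swap, in the same change group, if X has become
     non-canonical, e.g. (plus (const_int 4) (reg)).  */
  canonicalize_change_group (insn, x);
}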
324
325
326 /* This subroutine of apply_change_group verifies whether the changes to INSN
327 were valid; i.e. whether INSN can still be recognized.
328
329 If IN_GROUP is true clobbers which have to be added in order to
330 match the instructions will be added to the current change group.
331 Otherwise the changes will take effect immediately. */
332
333 int
334 insn_invalid_p (rtx_insn *insn, bool in_group)
335 {
336 rtx pat = PATTERN (insn);
337 int num_clobbers = 0;
338 /* If we are before reload and the pattern is a SET, see if we can add
339 clobbers. */
340 int icode = recog (pat, insn,
341 (GET_CODE (pat) == SET
342 && ! reload_completed
343 && ! reload_in_progress)
344 ? &num_clobbers : 0);
345 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
346
347
348 /* If this is an asm and the operands aren't legal, then fail. Likewise if
349 this is not an asm and the insn wasn't recognized. */
350 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
351 || (!is_asm && icode < 0))
352 return 1;
353
354 /* If we have to add CLOBBERs, fail if we have to add ones that reference
355 hard registers since our callers can't know if they are live or not.
356 Otherwise, add them. */
357 if (num_clobbers > 0)
358 {
359 rtx newpat;
360
361 if (added_clobbers_hard_reg_p (icode))
362 return 1;
363
364 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
365 XVECEXP (newpat, 0, 0) = pat;
366 add_clobbers (newpat, icode);
367 if (in_group)
368 validate_change (insn, &PATTERN (insn), newpat, 1);
369 else
370 PATTERN (insn) = pat = newpat;
371 }
372
373 /* After reload, verify that all constraints are satisfied. */
374 if (reload_completed)
375 {
376 extract_insn (insn);
377
378 if (! constrain_operands (1, get_preferred_alternatives (insn)))
379 return 1;
380 }
381
382 INSN_CODE (insn) = icode;
383 return 0;
384 }
385
386 /* Return number of changes made and not validated yet. */
387 int
388 num_changes_pending (void)
389 {
390 return num_changes;
391 }
392
393 /* Tentatively apply the changes numbered NUM and up.
394 Return 1 if all changes are valid, zero otherwise. */
395
396 int
397 verify_changes (int num)
398 {
399 int i;
400 rtx last_validated = NULL_RTX;
401
402 /* The changes have been applied and all INSN_CODEs have been reset to force
403 rerecognition.
404
405 The changes are valid if we aren't given an object, or if we are
406 given a MEM and it still is a valid address, or if this is an insn
407 and it is recognized. In the latter case, if reload has completed,
408 we also require that the operands meet the constraints for
409 the insn. */
410
411 for (i = num; i < num_changes; i++)
412 {
413 rtx object = changes[i].object;
414
415 /* If there is no object to test or if it is the same as the one we
416 already tested, ignore it. */
417 if (object == 0 || object == last_validated)
418 continue;
419
420 if (MEM_P (object))
421 {
422 if (! memory_address_addr_space_p (GET_MODE (object),
423 XEXP (object, 0),
424 MEM_ADDR_SPACE (object)))
425 break;
426 }
427 else if (/* changes[i].old might be zero, e.g. when putting a
428 REG_FRAME_RELATED_EXPR into a previously empty list. */
429 changes[i].old
430 && REG_P (changes[i].old)
431 && asm_noperands (PATTERN (object)) > 0
432 && REG_EXPR (changes[i].old) != NULL_TREE
433 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
434 && DECL_REGISTER (REG_EXPR (changes[i].old)))
435 {
436 /* Don't allow changes of hard register operands to inline
437 assemblies if they have been defined as register asm ("x"). */
438 break;
439 }
440 else if (DEBUG_INSN_P (object))
441 continue;
442 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
443 {
444 rtx pat = PATTERN (object);
445
446 /* Perhaps we couldn't recognize the insn because there were
447 extra CLOBBERs at the end. If so, try to re-recognize
448 without the last CLOBBER (later iterations will cause each of
449 them to be eliminated, in turn). But don't do this if we
450 have an ASM_OPERAND. */
451 if (GET_CODE (pat) == PARALLEL
452 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
453 && asm_noperands (PATTERN (object)) < 0)
454 {
455 rtx newpat;
456
457 if (XVECLEN (pat, 0) == 2)
458 newpat = XVECEXP (pat, 0, 0);
459 else
460 {
461 int j;
462
463 newpat
464 = gen_rtx_PARALLEL (VOIDmode,
465 rtvec_alloc (XVECLEN (pat, 0) - 1));
466 for (j = 0; j < XVECLEN (newpat, 0); j++)
467 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
468 }
469
470 /* Add a new change to this group to replace the pattern
471 with this new pattern. Then consider this change
472 as having succeeded. The change we added will
473 cause the entire call to fail if things remain invalid.
474
475 Note that this can lose if a later change than the one
476 we are processing specified &XVECEXP (PATTERN (object), 0, X)
477 but this shouldn't occur. */
478
479 validate_change (object, &PATTERN (object), newpat, 1);
480 continue;
481 }
482 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
483 || GET_CODE (pat) == VAR_LOCATION)
484 /* If this insn is a CLOBBER or USE, it is always valid, but is
485 never recognized. */
486 continue;
487 else
488 break;
489 }
490 last_validated = object;
491 }
492
493 return (i == num_changes);
494 }
495
496 /* A group of changes has previously been issued with validate_change
497 and verified with verify_changes. Call df_insn_rescan for each of
498 the insn changed and clear num_changes. */
499
500 void
501 confirm_change_group (void)
502 {
503 int i;
504 rtx last_object = NULL;
505
506 for (i = 0; i < num_changes; i++)
507 {
508 rtx object = changes[i].object;
509
510 if (changes[i].unshare)
511 *changes[i].loc = copy_rtx (*changes[i].loc);
512
513 /* Avoid unnecessary rescanning when multiple changes to the same instruction
514 are made. */
515 if (object)
516 {
517 if (object != last_object && last_object && INSN_P (last_object))
518 df_insn_rescan (as_a <rtx_insn *> (last_object));
519 last_object = object;
520 }
521 }
522
523 if (last_object && INSN_P (last_object))
524 df_insn_rescan (as_a <rtx_insn *> (last_object));
525 num_changes = 0;
526 }
527
528 /* Apply a group of changes previously issued with `validate_change'.
529 If all changes are valid, call confirm_change_group and return 1,
530 otherwise, call cancel_changes and return 0. */
531
532 int
533 apply_change_group (void)
534 {
535 if (verify_changes (0))
536 {
537 confirm_change_group ();
538 return 1;
539 }
540 else
541 {
542 cancel_changes (0);
543 return 0;
544 }
545 }
546
547
548 /* Return the number of changes so far in the current group. */
549
550 int
551 num_validated_changes (void)
552 {
553 return num_changes;
554 }
555
556 /* Retract the changes numbered NUM and up. */
557
558 void
559 cancel_changes (int num)
560 {
561 int i;
562
563 /* Back out all the changes. Do this in the opposite order in which
564 they were made. */
565 for (i = num_changes - 1; i >= num; i--)
566 {
567 *changes[i].loc = changes[i].old;
568 if (changes[i].object && !MEM_P (changes[i].object))
569 INSN_CODE (changes[i].object) = changes[i].old_code;
570 }
571 num_changes = num;
572 }
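
/* Illustrative sketch, not part of recog.c: a caller that already has
   verified changes queued can try one further speculative change and drop
   only that one on failure, using num_validated_changes, verify_changes and
   cancel_changes directly instead of apply_change_group.  The surviving
   group still has to be confirmed with confirm_change_group eventually.
   The sketch_* name is made up.  */

static bool
sketch_try_one_more_change (rtx_insn *insn, rtx *loc, rtx new_rtx)
{
  int base = num_validated_changes ();
  validate_change (insn, loc, new_rtx, 1);
  if (verify_changes (base))
    return true;
  /* Undo only the changes queued at or after BASE.  */
  cancel_changes (base);
  return false;
}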
573
574 /* Reduce conditional compilation elsewhere. */
575 #ifndef HAVE_extv
576 #define HAVE_extv 0
577 #define CODE_FOR_extv CODE_FOR_nothing
578 #endif
579 #ifndef HAVE_extzv
580 #define HAVE_extzv 0
581 #define CODE_FOR_extzv CODE_FOR_nothing
582 #endif
583
584 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
585 rtx. */
586
587 static void
588 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
589 machine_mode op0_mode)
590 {
591 rtx x = *loc;
592 enum rtx_code code = GET_CODE (x);
593 rtx new_rtx = NULL_RTX;
594
595 if (SWAPPABLE_OPERANDS_P (x)
596 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
597 {
598 validate_unshare_change (object, loc,
599 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
600 : swap_condition (code),
601 GET_MODE (x), XEXP (x, 1),
602 XEXP (x, 0)), 1);
603 x = *loc;
604 code = GET_CODE (x);
605 }
606
607 /* Canonicalize arithmetics with all constant operands. */
608 switch (GET_RTX_CLASS (code))
609 {
610 case RTX_UNARY:
611 if (CONSTANT_P (XEXP (x, 0)))
612 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
613 op0_mode);
614 break;
615 case RTX_COMM_ARITH:
616 case RTX_BIN_ARITH:
617 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
618 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
619 XEXP (x, 1));
620 break;
621 case RTX_COMPARE:
622 case RTX_COMM_COMPARE:
623 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
624 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
625 XEXP (x, 0), XEXP (x, 1));
626 break;
627 default:
628 break;
629 }
630 if (new_rtx)
631 {
632 validate_change (object, loc, new_rtx, 1);
633 return;
634 }
635
636 switch (code)
637 {
638 case PLUS:
639 /* If we have a PLUS whose second operand is now a CONST_INT, use
640 simplify_gen_binary to try to simplify it.
641 ??? We may want later to remove this, once simplification is
642 separated from this function. */
643 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
644 validate_change (object, loc,
645 simplify_gen_binary
646 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
647 break;
648 case MINUS:
649 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
650 validate_change (object, loc,
651 simplify_gen_binary
652 (PLUS, GET_MODE (x), XEXP (x, 0),
653 simplify_gen_unary (NEG,
654 GET_MODE (x), XEXP (x, 1),
655 GET_MODE (x))), 1);
656 break;
657 case ZERO_EXTEND:
658 case SIGN_EXTEND:
659 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
660 {
661 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
662 op0_mode);
663 /* If any of the above failed, substitute in something that
664 we know won't be recognized. */
665 if (!new_rtx)
666 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
667 validate_change (object, loc, new_rtx, 1);
668 }
669 break;
670 case SUBREG:
671 /* All subregs possible to simplify should be simplified. */
672 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
673 SUBREG_BYTE (x));
674
675 /* Subregs of VOIDmode operands are incorrect. */
676 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
677 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
678 if (new_rtx)
679 validate_change (object, loc, new_rtx, 1);
680 break;
681 case ZERO_EXTRACT:
682 case SIGN_EXTRACT:
683 /* If we are replacing a register with memory, try to change the memory
684 to be the mode required for memory in extract operations (this isn't
685 likely to be an insertion operation; if it was, nothing bad will
686 happen, we might just fail in some cases). */
687
688 if (MEM_P (XEXP (x, 0))
689 && CONST_INT_P (XEXP (x, 1))
690 && CONST_INT_P (XEXP (x, 2))
691 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
692 MEM_ADDR_SPACE (XEXP (x, 0)))
693 && !MEM_VOLATILE_P (XEXP (x, 0)))
694 {
695 machine_mode wanted_mode = VOIDmode;
696 machine_mode is_mode = GET_MODE (XEXP (x, 0));
697 int pos = INTVAL (XEXP (x, 2));
698
699 if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
700 {
701 wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
702 if (wanted_mode == VOIDmode)
703 wanted_mode = word_mode;
704 }
705 else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
706 {
707 wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
708 if (wanted_mode == VOIDmode)
709 wanted_mode = word_mode;
710 }
711
712 /* If we have a narrower mode, we can do something. */
713 if (wanted_mode != VOIDmode
714 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
715 {
716 int offset = pos / BITS_PER_UNIT;
717 rtx newmem;
718
719 /* If the bytes and bits are counted differently, we
720 must adjust the offset. */
721 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
722 offset =
723 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
724 offset);
725
726 gcc_assert (GET_MODE_PRECISION (wanted_mode)
727 == GET_MODE_BITSIZE (wanted_mode));
728 pos %= GET_MODE_BITSIZE (wanted_mode);
729
730 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
731
732 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
733 validate_change (object, &XEXP (x, 0), newmem, 1);
734 }
735 }
736
737 break;
738
739 default:
740 break;
741 }
742 }
743
744 /* Replace every occurrence of FROM in X with TO. Mark each change with
745 validate_change passing OBJECT. */
746
747 static void
748 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
749 bool simplify)
750 {
751 int i, j;
752 const char *fmt;
753 rtx x = *loc;
754 enum rtx_code code;
755 machine_mode op0_mode = VOIDmode;
756 int prev_changes = num_changes;
757
758 if (!x)
759 return;
760
761 code = GET_CODE (x);
762 fmt = GET_RTX_FORMAT (code);
763 if (fmt[0] == 'e')
764 op0_mode = GET_MODE (XEXP (x, 0));
765
766 /* X matches FROM if it is the same rtx or they are both referring to the
767 same register in the same mode. Avoid calling rtx_equal_p unless the
768 operands look similar. */
769
770 if (x == from
771 || (REG_P (x) && REG_P (from)
772 && GET_MODE (x) == GET_MODE (from)
773 && REGNO (x) == REGNO (from))
774 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
775 && rtx_equal_p (x, from)))
776 {
777 validate_unshare_change (object, loc, to, 1);
778 return;
779 }
780
781 /* Call ourself recursively to perform the replacements.
782 We must not replace inside already replaced expression, otherwise we
783 get infinite recursion for replacements like (reg X)->(subreg (reg X))
784 so we must special case shared ASM_OPERANDS. */
785
786 if (GET_CODE (x) == PARALLEL)
787 {
788 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
789 {
790 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
791 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
792 {
793 /* Verify that operands are really shared. */
794 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
795 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
796 (x, 0, j))));
797 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
798 from, to, object, simplify);
799 }
800 else
801 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
802 simplify);
803 }
804 }
805 else
806 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
807 {
808 if (fmt[i] == 'e')
809 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
810 else if (fmt[i] == 'E')
811 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
812 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
813 simplify);
814 }
815
816 /* If we didn't substitute, there is nothing more to do. */
817 if (num_changes == prev_changes)
818 return;
819
820 /* ??? The regmove is no more, so is this aberration still necessary? */
821 /* Allow the substituted expression to have a different mode. This was used by
822 regmove to change the mode of a pseudo register. */
823 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
824 op0_mode = GET_MODE (XEXP (x, 0));
825
826 /* Do changes needed to keep rtx consistent. Don't do any other
827 simplifications, as it is not our job. */
828 if (simplify)
829 simplify_while_replacing (loc, to, object, op0_mode);
830 }
831
832 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
833 with TO. After all changes have been made, validate by seeing
834 if INSN is still valid. */
835
836 int
837 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
838 {
839 validate_replace_rtx_1 (loc, from, to, insn, true);
840 return apply_change_group ();
841 }
842
843 /* Try replacing every occurrence of FROM in INSN with TO. After all
844 changes have been made, validate by seeing if INSN is still valid. */
845
846 int
847 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
848 {
849 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
850 return apply_change_group ();
851 }
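
/* Illustrative sketch, not part of recog.c: propagate a known constant value
   of REG into one insn.  The replacement survives only if the rewritten insn
   is still recognizable; otherwise validate_replace_rtx cancels everything
   and returns 0.  The sketch_* name is made up.  */

static bool
sketch_propagate_constant (rtx_insn *insn, rtx reg, HOST_WIDE_INT value)
{
  rtx cst = gen_int_mode (value, GET_MODE (reg));
  return validate_replace_rtx (reg, cst, insn) != 0;
}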
852
853 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
854 is a part of INSN. After all changes have been made, validate by seeing if
855 INSN is still valid.
856 validate_replace_rtx (from, to, insn) is equivalent to
857 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
858
859 int
860 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
861 {
862 validate_replace_rtx_1 (where, from, to, insn, true);
863 return apply_change_group ();
864 }
865
866 /* Same as above, but do not simplify rtx afterwards. */
867 int
868 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
869 rtx_insn *insn)
870 {
871 validate_replace_rtx_1 (where, from, to, insn, false);
872 return apply_change_group ();
873
874 }
875
876 /* Try replacing every occurrence of FROM in INSN with TO. This also
877 will replace in REG_EQUAL and REG_EQUIV notes. */
878
879 void
880 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
881 {
882 rtx note;
883 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
884 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
885 if (REG_NOTE_KIND (note) == REG_EQUAL
886 || REG_NOTE_KIND (note) == REG_EQUIV)
887 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
888 }
889
890 /* Function called by note_uses to replace used subexpressions. */
891 struct validate_replace_src_data
892 {
893 rtx from; /* Old RTX */
894 rtx to; /* New RTX */
895 rtx_insn *insn; /* Insn in which substitution is occurring. */
896 };
897
898 static void
899 validate_replace_src_1 (rtx *x, void *data)
900 {
901 struct validate_replace_src_data *d
902 = (struct validate_replace_src_data *) data;
903
904 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
905 }
906
907 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
908 SET_DESTs. */
909
910 void
911 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
912 {
913 struct validate_replace_src_data d;
914
915 d.from = from;
916 d.to = to;
917 d.insn = insn;
918 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
919 }
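
/* Illustrative sketch, not part of recog.c: forward-substitute TO for FROM
   only where FROM is used (note_uses skips SET_DESTs), so an insn that both
   reads and writes FROM keeps its destination intact.  The caller validates
   the whole group afterwards.  The sketch_* name is made up.  */

static bool
sketch_substitute_uses_only (rtx_insn *insn, rtx from, rtx to)
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group () != 0;
}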
920
921 /* Try to simplify INSN.
922 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
923 pattern and return true if something was simplified. */
924
925 bool
926 validate_simplify_insn (rtx_insn *insn)
927 {
928 int i;
929 rtx pat = NULL;
930 rtx newpat = NULL;
931
932 pat = PATTERN (insn);
933
934 if (GET_CODE (pat) == SET)
935 {
936 newpat = simplify_rtx (SET_SRC (pat));
937 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
938 validate_change (insn, &SET_SRC (pat), newpat, 1);
939 newpat = simplify_rtx (SET_DEST (pat));
940 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
941 validate_change (insn, &SET_DEST (pat), newpat, 1);
942 }
943 else if (GET_CODE (pat) == PARALLEL)
944 for (i = 0; i < XVECLEN (pat, 0); i++)
945 {
946 rtx s = XVECEXP (pat, 0, i);
947
948 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
949 {
950 newpat = simplify_rtx (SET_SRC (s));
951 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
952 validate_change (insn, &SET_SRC (s), newpat, 1);
953 newpat = simplify_rtx (SET_DEST (s));
954 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
955 validate_change (insn, &SET_DEST (s), newpat, 1);
956 }
957 }
958 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
959 }
960 \f
961 /* Return 1 if the insn using CC0 set by INSN does not contain
962 any ordered tests applied to the condition codes.
963 EQ and NE tests do not count. */
964
965 int
966 next_insn_tests_no_inequality (rtx_insn *insn)
967 {
968 rtx_insn *next = next_cc0_user (insn);
969
970 /* If there is no next insn, we have to take the conservative choice. */
971 if (next == 0)
972 return 0;
973
974 return (INSN_P (next)
975 && ! inequality_comparisons_p (PATTERN (next)));
976 }
977 \f
978 /* Return 1 if OP is a valid general operand for machine mode MODE.
979 This is either a register reference, a memory reference,
980 or a constant. In the case of a memory reference, the address
981 is checked for general validity for the target machine.
982
983 Register and memory references must have mode MODE in order to be valid,
984 but some constants have no machine mode and are valid for any mode.
985
986 If MODE is VOIDmode, OP is checked for validity for whatever mode
987 it has.
988
989 The main use of this function is as a predicate in match_operand
990 expressions in the machine description. */
991
992 int
993 general_operand (rtx op, machine_mode mode)
994 {
995 enum rtx_code code = GET_CODE (op);
996
997 if (mode == VOIDmode)
998 mode = GET_MODE (op);
999
1000 /* Don't accept CONST_INT or anything similar
1001 if the caller wants something floating. */
1002 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1003 && GET_MODE_CLASS (mode) != MODE_INT
1004 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1005 return 0;
1006
1007 if (CONST_INT_P (op)
1008 && mode != VOIDmode
1009 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1010 return 0;
1011
1012 if (CONSTANT_P (op))
1013 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1014 || mode == VOIDmode)
1015 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1016 && targetm.legitimate_constant_p (mode == VOIDmode
1017 ? GET_MODE (op)
1018 : mode, op));
1019
1020 /* Except for certain constants with VOIDmode, already checked for,
1021 OP's mode must match MODE if MODE specifies a mode. */
1022
1023 if (GET_MODE (op) != mode)
1024 return 0;
1025
1026 if (code == SUBREG)
1027 {
1028 rtx sub = SUBREG_REG (op);
1029
1030 #ifdef INSN_SCHEDULING
1031 /* On machines that have insn scheduling, we want all memory
1032 references to be explicit, so outlaw paradoxical SUBREGs.
1033 However, we must allow them after reload so that they can
1034 get cleaned up by cleanup_subreg_operands. */
1035 if (!reload_completed && MEM_P (sub)
1036 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
1037 return 0;
1038 #endif
1039 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1040 may result in an incorrect reference. We should simplify all valid
1041 subregs of MEM anyway. But allow this after reload because we
1042 might be called from cleanup_subreg_operands.
1043
1044 ??? This is a kludge. */
1045 if (!reload_completed && SUBREG_BYTE (op) != 0
1046 && MEM_P (sub))
1047 return 0;
1048
1049 #ifdef CANNOT_CHANGE_MODE_CLASS
1050 if (REG_P (sub)
1051 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1052 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1053 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1054 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1055 /* LRA can generate some invalid SUBREGS just for matched
1056 operand reload presentation. LRA needs to treat them as
1057 valid. */
1058 && ! LRA_SUBREG_P (op))
1059 return 0;
1060 #endif
1061
1062 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1063 create such rtl, and we must reject it. */
1064 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1065 /* LRA can use subreg to store a floating point value in an
1066 integer mode. Although the floating point and the
1067 integer modes need the same number of hard registers, the
1068 size of floating point mode can be less than the integer
1069 mode. */
1070 && ! lra_in_progress
1071 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1072 return 0;
1073
1074 op = sub;
1075 code = GET_CODE (op);
1076 }
1077
1078 if (code == REG)
1079 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1080 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1081
1082 if (code == MEM)
1083 {
1084 rtx y = XEXP (op, 0);
1085
1086 if (! volatile_ok && MEM_VOLATILE_P (op))
1087 return 0;
1088
1089 /* Use the mem's mode, since it will be reloaded thus. LRA can
1090 generate move insn with invalid addresses which is made valid
1091 and efficiently calculated by LRA through further numerous
1092 transformations. */
1093 if (lra_in_progress
1094 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1095 return 1;
1096 }
1097
1098 return 0;
1099 }
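
/* Illustrative sketch, not part of recog.c: besides serving as a
   match_operand predicate, general_operand can be called directly while
   expanding rtl, e.g. to decide whether X must first be copied into a pseudo
   before being used where only a register, a valid MEM or a legitimate
   constant is acceptable.  Assumes an expander context in which force_reg
   may create pseudos; the sketch_* name is made up.  */

static rtx
sketch_force_general_operand (rtx x, machine_mode mode)
{
  if (general_operand (x, mode))
    return x;
  return force_reg (mode, x);
}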
1100 \f
1101 /* Return 1 if OP is a valid memory address for a memory reference
1102 of mode MODE.
1103
1104 The main use of this function is as a predicate in match_operand
1105 expressions in the machine description. */
1106
1107 int
1108 address_operand (rtx op, machine_mode mode)
1109 {
1110 return memory_address_p (mode, op);
1111 }
1112
1113 /* Return 1 if OP is a register reference of mode MODE.
1114 If MODE is VOIDmode, accept a register in any mode.
1115
1116 The main use of this function is as a predicate in match_operand
1117 expressions in the machine description. */
1118
1119 int
1120 register_operand (rtx op, machine_mode mode)
1121 {
1122 if (GET_CODE (op) == SUBREG)
1123 {
1124 rtx sub = SUBREG_REG (op);
1125
1126 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1127 because it is guaranteed to be reloaded into one.
1128 Just make sure the MEM is valid in itself.
1129 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1130 but currently it does result from (SUBREG (REG)...) where the
1131 reg went on the stack.) */
1132 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1133 return 0;
1134 }
1135 else if (!REG_P (op))
1136 return 0;
1137 return general_operand (op, mode);
1138 }
1139
1140 /* Return 1 for a register in Pmode; ignore the tested mode. */
1141
1142 int
1143 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1144 {
1145 return register_operand (op, Pmode);
1146 }
1147
1148 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1149 or a hard register. */
1150
1151 int
1152 scratch_operand (rtx op, machine_mode mode)
1153 {
1154 if (GET_MODE (op) != mode && mode != VOIDmode)
1155 return 0;
1156
1157 return (GET_CODE (op) == SCRATCH
1158 || (REG_P (op)
1159 && (lra_in_progress
1160 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1161 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1162 }
1163
1164 /* Return 1 if OP is a valid immediate operand for mode MODE.
1165
1166 The main use of this function is as a predicate in match_operand
1167 expressions in the machine description. */
1168
1169 int
1170 immediate_operand (rtx op, machine_mode mode)
1171 {
1172 /* Don't accept CONST_INT or anything similar
1173 if the caller wants something floating. */
1174 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1175 && GET_MODE_CLASS (mode) != MODE_INT
1176 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1177 return 0;
1178
1179 if (CONST_INT_P (op)
1180 && mode != VOIDmode
1181 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1182 return 0;
1183
1184 return (CONSTANT_P (op)
1185 && (GET_MODE (op) == mode || mode == VOIDmode
1186 || GET_MODE (op) == VOIDmode)
1187 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1188 && targetm.legitimate_constant_p (mode == VOIDmode
1189 ? GET_MODE (op)
1190 : mode, op));
1191 }
1192
1193 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1194
1195 int
1196 const_int_operand (rtx op, machine_mode mode)
1197 {
1198 if (!CONST_INT_P (op))
1199 return 0;
1200
1201 if (mode != VOIDmode
1202 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1203 return 0;
1204
1205 return 1;
1206 }
1207
1208 #if TARGET_SUPPORTS_WIDE_INT
1209 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1210 of mode MODE. */
1211 int
1212 const_scalar_int_operand (rtx op, machine_mode mode)
1213 {
1214 if (!CONST_SCALAR_INT_P (op))
1215 return 0;
1216
1217 if (CONST_INT_P (op))
1218 return const_int_operand (op, mode);
1219
1220 if (mode != VOIDmode)
1221 {
1222 int prec = GET_MODE_PRECISION (mode);
1223 int bitsize = GET_MODE_BITSIZE (mode);
1224
1225 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1226 return 0;
1227
1228 if (prec == bitsize)
1229 return 1;
1230 else
1231 {
1232 /* Multiword partial int. */
1233 HOST_WIDE_INT x
1234 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1235 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1236 }
1237 }
1238 return 1;
1239 }
1240
1241 /* Returns 1 if OP is an operand that is a constant integer or constant
1242 floating-point number of MODE. */
1243
1244 int
1245 const_double_operand (rtx op, machine_mode mode)
1246 {
1247 return (GET_CODE (op) == CONST_DOUBLE)
1248 && (GET_MODE (op) == mode || mode == VOIDmode);
1249 }
1250 #else
1251 /* Returns 1 if OP is an operand that is a constant integer or constant
1252 floating-point number of MODE. */
1253
1254 int
1255 const_double_operand (rtx op, machine_mode mode)
1256 {
1257 /* Don't accept CONST_INT or anything similar
1258 if the caller wants something floating. */
1259 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1260 && GET_MODE_CLASS (mode) != MODE_INT
1261 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1262 return 0;
1263
1264 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1265 && (mode == VOIDmode || GET_MODE (op) == mode
1266 || GET_MODE (op) == VOIDmode));
1267 }
1268 #endif
1269 /* Return 1 if OP is a general operand that is not an immediate
1270 operand of mode MODE. */
1271
1272 int
1273 nonimmediate_operand (rtx op, machine_mode mode)
1274 {
1275 return (general_operand (op, mode) && ! CONSTANT_P (op));
1276 }
1277
1278 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1279
1280 int
1281 nonmemory_operand (rtx op, machine_mode mode)
1282 {
1283 if (CONSTANT_P (op))
1284 return immediate_operand (op, mode);
1285 return register_operand (op, mode);
1286 }
1287
1288 /* Return 1 if OP is a valid operand that stands for pushing a
1289 value of mode MODE onto the stack.
1290
1291 The main use of this function is as a predicate in match_operand
1292 expressions in the machine description. */
1293
1294 int
1295 push_operand (rtx op, machine_mode mode)
1296 {
1297 unsigned int rounded_size = GET_MODE_SIZE (mode);
1298
1299 #ifdef PUSH_ROUNDING
1300 rounded_size = PUSH_ROUNDING (rounded_size);
1301 #endif
1302
1303 if (!MEM_P (op))
1304 return 0;
1305
1306 if (mode != VOIDmode && GET_MODE (op) != mode)
1307 return 0;
1308
1309 op = XEXP (op, 0);
1310
1311 if (rounded_size == GET_MODE_SIZE (mode))
1312 {
1313 if (GET_CODE (op) != STACK_PUSH_CODE)
1314 return 0;
1315 }
1316 else
1317 {
1318 if (GET_CODE (op) != PRE_MODIFY
1319 || GET_CODE (XEXP (op, 1)) != PLUS
1320 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1321 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1322 || INTVAL (XEXP (XEXP (op, 1), 1))
1323 != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
1324 return 0;
1325 }
1326
1327 return XEXP (op, 0) == stack_pointer_rtx;
1328 }
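
/* Illustrative sketch, not part of recog.c: on a target whose simple pushes
   are written as a PRE_DEC of the stack pointer (i.e. STACK_PUSH_CODE is
   PRE_DEC and PUSH_ROUNDING does not pad SImode), the MEM built below is the
   form push_operand accepts.  The sketch_* name is made up.  */

static bool
sketch_build_and_check_push (void)
{
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx_MEM (SImode, addr);
  return push_operand (mem, SImode) != 0;
}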
1329
1330 /* Return 1 if OP is a valid operand that stands for popping a
1331 value of mode MODE off the stack.
1332
1333 The main use of this function is as a predicate in match_operand
1334 expressions in the machine description. */
1335
1336 int
1337 pop_operand (rtx op, machine_mode mode)
1338 {
1339 if (!MEM_P (op))
1340 return 0;
1341
1342 if (mode != VOIDmode && GET_MODE (op) != mode)
1343 return 0;
1344
1345 op = XEXP (op, 0);
1346
1347 if (GET_CODE (op) != STACK_POP_CODE)
1348 return 0;
1349
1350 return XEXP (op, 0) == stack_pointer_rtx;
1351 }
1352
1353 /* Return 1 if ADDR is a valid memory address
1354 for mode MODE in address space AS. */
1355
1356 int
1357 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1358 rtx addr, addr_space_t as)
1359 {
1360 #ifdef GO_IF_LEGITIMATE_ADDRESS
1361 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1362 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1363 return 0;
1364
1365 win:
1366 return 1;
1367 #else
1368 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1369 #endif
1370 }
1371
1372 /* Return 1 if OP is a valid memory reference with mode MODE,
1373 including a valid address.
1374
1375 The main use of this function is as a predicate in match_operand
1376 expressions in the machine description. */
1377
1378 int
1379 memory_operand (rtx op, machine_mode mode)
1380 {
1381 rtx inner;
1382
1383 if (! reload_completed)
1384 /* Note that no SUBREG is a memory operand before end of reload pass,
1385 because (SUBREG (MEM...)) forces reloading into a register. */
1386 return MEM_P (op) && general_operand (op, mode);
1387
1388 if (mode != VOIDmode && GET_MODE (op) != mode)
1389 return 0;
1390
1391 inner = op;
1392 if (GET_CODE (inner) == SUBREG)
1393 inner = SUBREG_REG (inner);
1394
1395 return (MEM_P (inner) && general_operand (op, mode));
1396 }
1397
1398 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1399 that is, a memory reference whose address is a general_operand. */
1400
1401 int
1402 indirect_operand (rtx op, machine_mode mode)
1403 {
1404 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1405 if (! reload_completed
1406 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1407 {
1408 int offset = SUBREG_BYTE (op);
1409 rtx inner = SUBREG_REG (op);
1410
1411 if (mode != VOIDmode && GET_MODE (op) != mode)
1412 return 0;
1413
1414 /* The only way that we can have a general_operand as the resulting
1415 address is if OFFSET is zero and the address already is an operand
1416 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1417 operand. */
1418
1419 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1420 || (GET_CODE (XEXP (inner, 0)) == PLUS
1421 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1422 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1423 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1424 }
1425
1426 return (MEM_P (op)
1427 && memory_operand (op, mode)
1428 && general_operand (XEXP (op, 0), Pmode));
1429 }
1430
1431 /* Return 1 if this is an ordered comparison operator (not including
1432 ORDERED and UNORDERED). */
1433
1434 int
1435 ordered_comparison_operator (rtx op, machine_mode mode)
1436 {
1437 if (mode != VOIDmode && GET_MODE (op) != mode)
1438 return false;
1439 switch (GET_CODE (op))
1440 {
1441 case EQ:
1442 case NE:
1443 case LT:
1444 case LTU:
1445 case LE:
1446 case LEU:
1447 case GT:
1448 case GTU:
1449 case GE:
1450 case GEU:
1451 return true;
1452 default:
1453 return false;
1454 }
1455 }
1456
1457 /* Return 1 if this is a comparison operator. This allows the use of
1458 MATCH_OPERATOR to recognize all the branch insns. */
1459
1460 int
1461 comparison_operator (rtx op, machine_mode mode)
1462 {
1463 return ((mode == VOIDmode || GET_MODE (op) == mode)
1464 && COMPARISON_P (op));
1465 }
1466 \f
1467 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1468
1469 rtx
1470 extract_asm_operands (rtx body)
1471 {
1472 rtx tmp;
1473 switch (GET_CODE (body))
1474 {
1475 case ASM_OPERANDS:
1476 return body;
1477
1478 case SET:
1479 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1480 tmp = SET_SRC (body);
1481 if (GET_CODE (tmp) == ASM_OPERANDS)
1482 return tmp;
1483 break;
1484
1485 case PARALLEL:
1486 tmp = XVECEXP (body, 0, 0);
1487 if (GET_CODE (tmp) == ASM_OPERANDS)
1488 return tmp;
1489 if (GET_CODE (tmp) == SET)
1490 {
1491 tmp = SET_SRC (tmp);
1492 if (GET_CODE (tmp) == ASM_OPERANDS)
1493 return tmp;
1494 }
1495 break;
1496
1497 default:
1498 break;
1499 }
1500 return NULL;
1501 }
1502
1503 /* If BODY is an insn body that uses ASM_OPERANDS,
1504 return the number of operands (both input and output) in the insn.
1505 Otherwise return -1. */
1506
1507 int
1508 asm_noperands (const_rtx body)
1509 {
1510 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1511 int n_sets = 0;
1512
1513 if (asm_op == NULL)
1514 return -1;
1515
1516 if (GET_CODE (body) == SET)
1517 n_sets = 1;
1518 else if (GET_CODE (body) == PARALLEL)
1519 {
1520 int i;
1521 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1522 {
1523 /* Multiple output operands, or 1 output plus some clobbers:
1524 body is
1525 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1526 /* Count backwards through CLOBBERs to determine number of SETs. */
1527 for (i = XVECLEN (body, 0); i > 0; i--)
1528 {
1529 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1530 break;
1531 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1532 return -1;
1533 }
1534
1535 /* N_SETS is now the number of output operands. */
1536 n_sets = i;
1537
1538 /* Verify that all the SETs we have
1539 came from a single original asm_operands insn
1540 (so that invalid combinations are blocked). */
1541 for (i = 0; i < n_sets; i++)
1542 {
1543 rtx elt = XVECEXP (body, 0, i);
1544 if (GET_CODE (elt) != SET)
1545 return -1;
1546 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1547 return -1;
1548 /* If these ASM_OPERANDS rtx's came from different original insns
1549 then they aren't allowed together. */
1550 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1551 != ASM_OPERANDS_INPUT_VEC (asm_op))
1552 return -1;
1553 }
1554 }
1555 else
1556 {
1557 /* 0 outputs, but some clobbers:
1558 body is [(asm_operands ...) (clobber (reg ...))...]. */
1559 /* Make sure all the other parallel things really are clobbers. */
1560 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1561 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1562 return -1;
1563 }
1564 }
1565
1566 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1567 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1568 }
1569
1570 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1571 copy its operands (both input and output) into the vector OPERANDS,
1572 the locations of the operands within the insn into the vector OPERAND_LOCS,
1573 and the constraints for the operands into CONSTRAINTS.
1574 Write the modes of the operands into MODES.
1575 Return the assembler-template.
1576
1577 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1578 we don't store that info. */
1579
1580 const char *
1581 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1582 const char **constraints, machine_mode *modes,
1583 location_t *loc)
1584 {
1585 int nbase = 0, n, i;
1586 rtx asmop;
1587
1588 switch (GET_CODE (body))
1589 {
1590 case ASM_OPERANDS:
1591 /* Zero output asm: BODY is (asm_operands ...). */
1592 asmop = body;
1593 break;
1594
1595 case SET:
1596 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1597 asmop = SET_SRC (body);
1598
1599 /* The output is in the SET.
1600 Its constraint is in the ASM_OPERANDS itself. */
1601 if (operands)
1602 operands[0] = SET_DEST (body);
1603 if (operand_locs)
1604 operand_locs[0] = &SET_DEST (body);
1605 if (constraints)
1606 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1607 if (modes)
1608 modes[0] = GET_MODE (SET_DEST (body));
1609 nbase = 1;
1610 break;
1611
1612 case PARALLEL:
1613 {
1614 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1615
1616 asmop = XVECEXP (body, 0, 0);
1617 if (GET_CODE (asmop) == SET)
1618 {
1619 asmop = SET_SRC (asmop);
1620
1621 /* At least one output, plus some CLOBBERs. The outputs are in
1622 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1623 for (i = 0; i < nparallel; i++)
1624 {
1625 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1626 break; /* Past last SET */
1627 if (operands)
1628 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1629 if (operand_locs)
1630 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1631 if (constraints)
1632 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1633 if (modes)
1634 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1635 }
1636 nbase = i;
1637 }
1638 break;
1639 }
1640
1641 default:
1642 gcc_unreachable ();
1643 }
1644
1645 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1646 for (i = 0; i < n; i++)
1647 {
1648 if (operand_locs)
1649 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1650 if (operands)
1651 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1652 if (constraints)
1653 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1654 if (modes)
1655 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1656 }
1657 nbase += n;
1658
1659 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1660 for (i = 0; i < n; i++)
1661 {
1662 if (operand_locs)
1663 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1664 if (operands)
1665 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1666 if (constraints)
1667 constraints[nbase + i] = "";
1668 if (modes)
1669 modes[nbase + i] = Pmode;
1670 }
1671
1672 if (loc)
1673 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1674
1675 return ASM_OPERANDS_TEMPLATE (asmop);
1676 }
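
/* Illustrative sketch, not part of recog.c: the usual calling sequence around
   asm_noperands / decode_asm_operands, mirroring what check_asm_operands does
   above.  BODY is assumed to be the PATTERN of an asm insn; the sketch_* name
   is made up.  */

static const char *
sketch_walk_asm_operands (rtx body, int *n_outputs)
{
  int noperands = asm_noperands (body);
  *n_outputs = 0;
  if (noperands <= 0)
    return NULL;

  rtx *operands = XALLOCAVEC (rtx, noperands);
  const char **constraints = XALLOCAVEC (const char *, noperands);
  machine_mode *modes = XALLOCAVEC (machine_mode, noperands);
  location_t loc;

  /* Fills the arrays with outputs first, then inputs, then labels, and
     returns the assembler template string.  */
  const char *templ
    = decode_asm_operands (body, operands, NULL, constraints, modes, &loc);

  /* Output constraints keep their leading '=' or '+'; label operands have
     an empty constraint string, so this count only sees real outputs.  */
  for (int i = 0; i < noperands; i++)
    if (constraints[i][0] == '=' || constraints[i][0] == '+')
      (*n_outputs)++;
  return templ;
}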
1677
1678 /* Parse inline assembly string STRING and determine which operands are
1679 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1680 to true if operand I is referenced.
1681
1682 This is intended to distinguish barrier-like asms such as:
1683
1684 asm ("" : "=m" (...));
1685
1686 from real references such as:
1687
1688 asm ("sw\t$0, %0" : "=m" (...)); */
1689
1690 void
1691 get_referenced_operands (const char *string, bool *used,
1692 unsigned int noperands)
1693 {
1694 memset (used, 0, sizeof (bool) * noperands);
1695 const char *p = string;
1696 while (*p)
1697 switch (*p)
1698 {
1699 case '%':
1700 p += 1;
1701 /* A letter followed by a digit indicates an operand number. */
1702 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1703 p += 1;
1704 if (ISDIGIT (*p))
1705 {
1706 char *endptr;
1707 unsigned long opnum = strtoul (p, &endptr, 10);
1708 if (endptr != p && opnum < noperands)
1709 used[opnum] = true;
1710 p = endptr;
1711 }
1712 else
1713 p += 1;
1714 break;
1715
1716 default:
1717 p++;
1718 break;
1719 }
1720 }
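
/* Illustrative sketch, not part of recog.c: distinguishing a barrier-like asm
   from one whose template really mentions operand 0.  The template strings
   below are made-up examples.  */

static void
sketch_referenced_operands_example (void)
{
  bool used[1];

  get_referenced_operands ("", used, 1);
  gcc_assert (!used[0]);			/* barrier-like asm */

  get_referenced_operands ("sw\t$0, %0", used, 1);
  gcc_assert (used[0]);				/* operand 0 is referenced */
}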
1721
1722 /* Check if an asm_operand matches its constraints.
1723 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1724
1725 int
1726 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1727 {
1728 int result = 0;
1729 #ifdef AUTO_INC_DEC
1730 bool incdec_ok = false;
1731 #endif
1732
1733 /* Use constrain_operands after reload. */
1734 gcc_assert (!reload_completed);
1735
1736 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1737 many alternatives as required to match the other operands. */
1738 if (*constraint == '\0')
1739 result = 1;
1740
1741 while (*constraint)
1742 {
1743 enum constraint_num cn;
1744 char c = *constraint;
1745 int len;
1746 switch (c)
1747 {
1748 case ',':
1749 constraint++;
1750 continue;
1751
1752 case '0': case '1': case '2': case '3': case '4':
1753 case '5': case '6': case '7': case '8': case '9':
1754 /* If caller provided constraints pointer, look up
1755 the matching constraint. Otherwise, our caller should have
1756 given us the proper matching constraint, but we can't
1757 actually fail the check if they didn't. Indicate that
1758 results are inconclusive. */
1759 if (constraints)
1760 {
1761 char *end;
1762 unsigned long match;
1763
1764 match = strtoul (constraint, &end, 10);
1765 if (!result)
1766 result = asm_operand_ok (op, constraints[match], NULL);
1767 constraint = (const char *) end;
1768 }
1769 else
1770 {
1771 do
1772 constraint++;
1773 while (ISDIGIT (*constraint));
1774 if (! result)
1775 result = -1;
1776 }
1777 continue;
1778
1779 /* The rest of the compiler assumes that reloading the address
1780 of a MEM into a register will make it fit an 'o' constraint.
1781 That is, if it sees a MEM operand for an 'o' constraint,
1782 it assumes that (mem (base-reg)) will fit.
1783
1784 That assumption fails on targets that don't have offsettable
1785 addresses at all. We therefore need to treat 'o' asm
1786 constraints as a special case and only accept operands that
1787 are already offsettable, thus proving that at least one
1788 offsettable address exists. */
1789 case 'o': /* offsettable */
1790 if (offsettable_nonstrict_memref_p (op))
1791 result = 1;
1792 break;
1793
1794 case 'g':
1795 if (general_operand (op, VOIDmode))
1796 result = 1;
1797 break;
1798
1799 #ifdef AUTO_INC_DEC
1800 case '<':
1801 case '>':
1802 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1803 to exist, excepting those that expand_call created. Further,
1804 on some machines which do not have generalized auto inc/dec,
1805 an inc/dec is not a memory_operand.
1806
1807 Match any memory and hope things are resolved after reload. */
1808 incdec_ok = true;
1809 #endif
1810 default:
1811 cn = lookup_constraint (constraint);
1812 switch (get_constraint_type (cn))
1813 {
1814 case CT_REGISTER:
1815 if (!result
1816 && reg_class_for_constraint (cn) != NO_REGS
1817 && GET_MODE (op) != BLKmode
1818 && register_operand (op, VOIDmode))
1819 result = 1;
1820 break;
1821
1822 case CT_CONST_INT:
1823 if (!result
1824 && CONST_INT_P (op)
1825 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1826 result = 1;
1827 break;
1828
1829 case CT_MEMORY:
1830 /* Every memory operand can be reloaded to fit. */
1831 result = result || memory_operand (op, VOIDmode);
1832 break;
1833
1834 case CT_ADDRESS:
1835 /* Every address operand can be reloaded to fit. */
1836 result = result || address_operand (op, VOIDmode);
1837 break;
1838
1839 case CT_FIXED_FORM:
1840 result = result || constraint_satisfied_p (op, cn);
1841 break;
1842 }
1843 break;
1844 }
1845 len = CONSTRAINT_LEN (c, constraint);
1846 do
1847 constraint++;
1848 while (--len && *constraint);
1849 if (len)
1850 return 0;
1851 }
1852
1853 #ifdef AUTO_INC_DEC
1854 /* For operands without < or > constraints, reject side-effects. */
1855 if (!incdec_ok && result && MEM_P (op))
1856 switch (GET_CODE (XEXP (op, 0)))
1857 {
1858 case PRE_INC:
1859 case POST_INC:
1860 case PRE_DEC:
1861 case POST_DEC:
1862 case PRE_MODIFY:
1863 case POST_MODIFY:
1864 return 0;
1865 default:
1866 break;
1867 }
1868 #endif
1869
1870 return result;
1871 }
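
/* Illustrative sketch, not part of recog.c: a pre-reload caller asking whether
   OP already satisfies a plain register constraint.  A result > 0 means it
   fits, 0 means it does not, and < 0 would mean the check was inconclusive
   (only possible with matching-digit constraints).  The sketch_* name is
   made up.  */

static bool
sketch_operand_fits_r (rtx op)
{
  return asm_operand_ok (op, "r", NULL) > 0;
}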
1872 \f
1873 /* Given an rtx *P, if it is a sum containing an integer constant term,
1874 return the location (type rtx *) of the pointer to that constant term.
1875 Otherwise, return a null pointer. */
1876
1877 rtx *
1878 find_constant_term_loc (rtx *p)
1879 {
1880 rtx *tem;
1881 enum rtx_code code = GET_CODE (*p);
1882
1883 /* If *P IS such a constant term, P is its location. */
1884
1885 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1886 || code == CONST)
1887 return p;
1888
1889 /* Otherwise, if not a sum, it has no constant term. */
1890
1891 if (GET_CODE (*p) != PLUS)
1892 return 0;
1893
1894 /* If one of the summands is constant, return its location. */
1895
1896 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1897 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1898 return p;
1899
1900 /* Otherwise, check each summand for containing a constant term. */
1901
1902 if (XEXP (*p, 0) != 0)
1903 {
1904 tem = find_constant_term_loc (&XEXP (*p, 0));
1905 if (tem != 0)
1906 return tem;
1907 }
1908
1909 if (XEXP (*p, 1) != 0)
1910 {
1911 tem = find_constant_term_loc (&XEXP (*p, 1));
1912 if (tem != 0)
1913 return tem;
1914 }
1915
1916 return 0;
1917 }
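
/* Illustrative sketch, not part of recog.c: checking whether a MEM's address
   contains an explicit CONST_INT displacement, e.g.
   (mem (plus (reg R) (const_int 4))) does, while (mem (reg R)) does not.
   The sketch_* name is made up.  */

static bool
sketch_mem_has_const_displacement (rtx mem)
{
  rtx *term = find_constant_term_loc (&XEXP (mem, 0));
  return term != NULL && CONST_INT_P (*term);
}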
1918 \f
1919 /* Return 1 if OP is a memory reference
1920 whose address contains no side effects
1921 and remains valid after the addition
1922 of a positive integer less than the
1923 size of the object being referenced.
1924
1925 We assume that the original address is valid and do not check it.
1926
1927 This uses strict_memory_address_p as a subroutine, so
1928 don't use it before reload. */
1929
1930 int
1931 offsettable_memref_p (rtx op)
1932 {
1933 return ((MEM_P (op))
1934 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1935 MEM_ADDR_SPACE (op)));
1936 }
1937
1938 /* Similar, but don't require a strictly valid mem ref:
1939 consider pseudo-regs valid as index or base regs. */
1940
1941 int
1942 offsettable_nonstrict_memref_p (rtx op)
1943 {
1944 return ((MEM_P (op))
1945 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1946 MEM_ADDR_SPACE (op)));
1947 }
1948
1949 /* Return 1 if Y is a memory address which contains no side effects
1950 and would remain valid for address space AS after the addition of
1951 a positive integer less than the size of MODE.
1952
1953 We assume that the original address is valid and do not check it.
1954 We do check that it is valid for narrower modes.
1955
1956 If STRICTP is nonzero, we require a strictly valid address,
1957 for the sake of use in reload.c. */
1958
1959 int
1960 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1961 addr_space_t as)
1962 {
1963 enum rtx_code ycode = GET_CODE (y);
1964 rtx z;
1965 rtx y1 = y;
1966 rtx *y2;
1967 int (*addressp) (machine_mode, rtx, addr_space_t) =
1968 (strictp ? strict_memory_address_addr_space_p
1969 : memory_address_addr_space_p);
1970 unsigned int mode_sz = GET_MODE_SIZE (mode);
1971
1972 if (CONSTANT_ADDRESS_P (y))
1973 return 1;
1974
1975 /* Adjusting an offsettable address involves changing to a narrower mode.
1976 Make sure that's OK. */
1977
1978 if (mode_dependent_address_p (y, as))
1979 return 0;
1980
1981 machine_mode address_mode = GET_MODE (y);
1982 if (address_mode == VOIDmode)
1983 address_mode = targetm.addr_space.address_mode (as);
1984 #ifdef POINTERS_EXTEND_UNSIGNED
1985 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1986 #endif
1987
1988 /* ??? How much offset does an offsettable BLKmode reference need?
1989 Clearly that depends on the situation in which it's being used.
1990 However, the current situation in which we test 0xffffffff is
1991 less than ideal. Caveat user. */
1992 if (mode_sz == 0)
1993 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1994
1995 /* If the expression contains a constant term,
1996 see if it remains valid when max possible offset is added. */
1997
1998 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1999 {
2000 int good;
2001
2002 y1 = *y2;
2003 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2004 /* Use QImode because an odd displacement may be automatically invalid
2005 for any wider mode. But it should be valid for a single byte. */
2006 good = (*addressp) (QImode, y, as);
2007
2008 /* In any case, restore old contents of memory. */
2009 *y2 = y1;
2010 return good;
2011 }
2012
2013 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2014 return 0;
2015
2016 /* The offset added here is chosen as the maximum offset that
2017 any instruction could need to add when operating on something
2018 of the specified mode. We assume that if Y and Y+c are
2019 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2020 go inside a LO_SUM here, so we do so as well. */
2021 if (GET_CODE (y) == LO_SUM
2022 && mode != BLKmode
2023 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2024 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2025 plus_constant (address_mode, XEXP (y, 1),
2026 mode_sz - 1));
2027 #ifdef POINTERS_EXTEND_UNSIGNED
2028 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2029 else if (POINTERS_EXTEND_UNSIGNED > 0
2030 && GET_CODE (y) == ZERO_EXTEND
2031 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2032 z = gen_rtx_ZERO_EXTEND (address_mode,
2033 plus_constant (pointer_mode, XEXP (y, 0),
2034 mode_sz - 1));
2035 #endif
2036 else
2037 z = plus_constant (address_mode, y, mode_sz - 1);
2038
2039 /* Use QImode because an odd displacement may be automatically invalid
2040 for any wider mode. But it should be valid for a single byte. */
2041 return (*addressp) (QImode, z, as);
2042 }
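/* Usage sketch (illustrative only; MEM and HIGH_PART are hypothetical
   locals): a caller that wants the second word of a DImode memory
   reference typically checks offsettability before offsetting the address:

     if (offsettable_memref_p (mem))
       high_part = adjust_address (mem, SImode, 4);

   offsettable_memref_p is the strict variant, so a check like this is only
   meaningful after reload. */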
2043
2044 /* Return 1 if ADDR is an address-expression whose effect depends
2045 on the mode of the memory reference it is used in.
2046
2047 ADDRSPACE is the address space associated with the address.
2048
2049 Autoincrement addressing is a typical example of mode-dependence
2050 because the amount of the increment depends on the mode. */
2051
2052 bool
2053 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2054 {
2055 /* Auto-increment addressing with anything other than post_modify
2056 or pre_modify always introduces a mode dependency. Catch such
2057 cases now instead of deferring to the target. */
2058 if (GET_CODE (addr) == PRE_INC
2059 || GET_CODE (addr) == POST_INC
2060 || GET_CODE (addr) == PRE_DEC
2061 || GET_CODE (addr) == POST_DEC)
2062 return true;
2063
2064 return targetm.mode_dependent_address_p (addr, addrspace);
2065 }
2066 \f
2067 /* Return true if boolean attribute ATTR is supported. */
2068
2069 static bool
2070 have_bool_attr (bool_attr attr)
2071 {
2072 switch (attr)
2073 {
2074 case BA_ENABLED:
2075 return HAVE_ATTR_enabled;
2076 case BA_PREFERRED_FOR_SIZE:
2077 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2078 case BA_PREFERRED_FOR_SPEED:
2079 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2080 }
2081 gcc_unreachable ();
2082 }
2083
2084 /* Return the value of ATTR for instruction INSN. */
2085
2086 static bool
2087 get_bool_attr (rtx_insn *insn, bool_attr attr)
2088 {
2089 switch (attr)
2090 {
2091 case BA_ENABLED:
2092 return get_attr_enabled (insn);
2093 case BA_PREFERRED_FOR_SIZE:
2094 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2095 case BA_PREFERRED_FOR_SPEED:
2096 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2097 }
2098 gcc_unreachable ();
2099 }
2100
2101 /* Like get_bool_attr_mask, but don't use the cache. */
2102
2103 static alternative_mask
2104 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2105 {
2106 /* Temporarily install enough information for get_attr_<foo> to assume
2107 that the insn operands are already cached. As above, the attribute
2108 mustn't depend on the values of operands, so we don't provide their
2109 real values here. */
2110 rtx_insn *old_insn = recog_data.insn;
2111 int old_alternative = which_alternative;
2112
2113 recog_data.insn = insn;
2114 alternative_mask mask = ALL_ALTERNATIVES;
2115 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2116 for (int i = 0; i < n_alternatives; i++)
2117 {
2118 which_alternative = i;
2119 if (!get_bool_attr (insn, attr))
2120 mask &= ~ALTERNATIVE_BIT (i);
2121 }
2122
2123 recog_data.insn = old_insn;
2124 which_alternative = old_alternative;
2125 return mask;
2126 }
2127
2128 /* Return the mask of operand alternatives that are allowed for INSN
2129 by boolean attribute ATTR. This mask depends only on INSN and on
2130 the current target; it does not depend on things like the values of
2131 operands. */
2132
2133 static alternative_mask
2134 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2135 {
2136 /* Quick exit for asms and for targets that don't use these attributes. */
2137 int code = INSN_CODE (insn);
2138 if (code < 0 || !have_bool_attr (attr))
2139 return ALL_ALTERNATIVES;
2140
2141 /* Calling get_attr_<foo> can be expensive, so cache the mask
2142 for speed. */
2143 if (!this_target_recog->x_bool_attr_masks[code][attr])
2144 this_target_recog->x_bool_attr_masks[code][attr]
2145 = get_bool_attr_mask_uncached (insn, attr);
2146 return this_target_recog->x_bool_attr_masks[code][attr];
2147 }
2148
2149 /* Return the set of alternatives of INSN that are allowed by the current
2150 target. */
2151
2152 alternative_mask
2153 get_enabled_alternatives (rtx_insn *insn)
2154 {
2155 return get_bool_attr_mask (insn, BA_ENABLED);
2156 }
2157
2158 /* Return the set of alternatives of INSN that are allowed by the current
2159 target and are preferred for the current size/speed optimization
2160 choice. */
2161
2162 alternative_mask
2163 get_preferred_alternatives (rtx_insn *insn)
2164 {
2165 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2166 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2167 else
2168 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2169 }
2170
2171 /* Return the set of alternatives of INSN that are allowed by the current
2172 target and are preferred for the size/speed optimization choice
2173 associated with BB. Passing a separate BB is useful if INSN has not
2174 been emitted yet or if we are considering moving it to a different
2175 block. */
2176
2177 alternative_mask
2178 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2179 {
2180 if (optimize_bb_for_speed_p (bb))
2181 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2182 else
2183 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2184 }
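/* Usage sketch (illustrative only): passes that weigh alternatives mask
   their search with these bits, e.g.

     alternative_mask preferred = get_preferred_alternatives (insn);
     if (!TEST_BIT (preferred, which_alternative))
       ...  (treat the current alternative as undesirable)

   constrain_operands below also accepts such a mask directly. */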
2185
2186 /* Assert that the cached boolean attributes for INSN are still accurate.
2187 The backend is required to define these attributes in a way that only
2188 depends on the current target (rather than operands, compiler phase,
2189 etc.). */
2190
2191 bool
2192 check_bool_attrs (rtx_insn *insn)
2193 {
2194 int code = INSN_CODE (insn);
2195 if (code >= 0)
2196 for (int i = 0; i <= BA_LAST; ++i)
2197 {
2198 enum bool_attr attr = (enum bool_attr) i;
2199 if (this_target_recog->x_bool_attr_masks[code][attr])
2200 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2201 == get_bool_attr_mask_uncached (insn, attr));
2202 }
2203 return true;
2204 }
2205
2206 /* Like extract_insn, but save the extracted insn and don't extract again
2207 when called again for the same insn, expecting that recog_data still
2208 contains valid information.  This is used primarily by the gen_attr
2209 infrastructure, which extracts the same insn again and again. */
2210 void
2211 extract_insn_cached (rtx_insn *insn)
2212 {
2213 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2214 return;
2215 extract_insn (insn);
2216 recog_data.insn = insn;
2217 }
2218
2219 /* Do uncached extract_insn, constrain_operands and complain about failures.
2220 This should be used when extracting a pre-existing constrained instruction
2221 if the caller wants to know which alternative was chosen. */
2222 void
2223 extract_constrain_insn (rtx_insn *insn)
2224 {
2225 extract_insn (insn);
2226 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2227 fatal_insn_not_found (insn);
2228 }
2229
2230 /* Do cached extract_insn, constrain_operands and complain about failures.
2231 Used by insn_attrtab. */
2232 void
2233 extract_constrain_insn_cached (rtx_insn *insn)
2234 {
2235 extract_insn_cached (insn);
2236 if (which_alternative == -1
2237 && !constrain_operands (reload_completed,
2238 get_enabled_alternatives (insn)))
2239 fatal_insn_not_found (insn);
2240 }
2241
2242 /* Do cached constrain_operands on INSN and complain about failures. */
2243 int
2244 constrain_operands_cached (rtx_insn *insn, int strict)
2245 {
2246 if (which_alternative == -1)
2247 return constrain_operands (strict, get_enabled_alternatives (insn));
2248 else
2249 return 1;
2250 }
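/* Usage sketch (illustrative only; the template string is made up): output
   routines and attribute getters typically follow this pattern when they
   need to know the chosen alternative:

     extract_constrain_insn_cached (insn);
     if (which_alternative == 1)
       return "add\t%0, %1";

   Once constrain_operands has succeeded, which_alternative remains valid
   until a different insn is extracted. */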
2251 \f
2252 /* Analyze INSN and fill in recog_data. */
2253
2254 void
2255 extract_insn (rtx_insn *insn)
2256 {
2257 int i;
2258 int icode;
2259 int noperands;
2260 rtx body = PATTERN (insn);
2261
2262 recog_data.n_operands = 0;
2263 recog_data.n_alternatives = 0;
2264 recog_data.n_dups = 0;
2265 recog_data.is_asm = false;
2266
2267 switch (GET_CODE (body))
2268 {
2269 case USE:
2270 case CLOBBER:
2271 case ASM_INPUT:
2272 case ADDR_VEC:
2273 case ADDR_DIFF_VEC:
2274 case VAR_LOCATION:
2275 return;
2276
2277 case SET:
2278 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2279 goto asm_insn;
2280 else
2281 goto normal_insn;
2282 case PARALLEL:
2283 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2284 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2285 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2286 goto asm_insn;
2287 else
2288 goto normal_insn;
2289 case ASM_OPERANDS:
2290 asm_insn:
2291 recog_data.n_operands = noperands = asm_noperands (body);
2292 if (noperands >= 0)
2293 {
2294 /* This insn is an `asm' with operands. */
2295
2296 /* expand_asm_operands makes sure there aren't too many operands. */
2297 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2298
2299 /* Now get the operand values and constraints out of the insn. */
2300 decode_asm_operands (body, recog_data.operand,
2301 recog_data.operand_loc,
2302 recog_data.constraints,
2303 recog_data.operand_mode, NULL);
2304 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2305 if (noperands > 0)
2306 {
2307 const char *p = recog_data.constraints[0];
2308 recog_data.n_alternatives = 1;
2309 while (*p)
2310 recog_data.n_alternatives += (*p++ == ',');
2311 }
2312 recog_data.is_asm = true;
2313 break;
2314 }
2315 fatal_insn_not_found (insn);
2316
2317 default:
2318 normal_insn:
2319 /* Ordinary insn: recognize it, get the operands via insn_extract
2320 and get the constraints. */
2321
2322 icode = recog_memoized (insn);
2323 if (icode < 0)
2324 fatal_insn_not_found (insn);
2325
2326 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2327 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2328 recog_data.n_dups = insn_data[icode].n_dups;
2329
2330 insn_extract (insn);
2331
2332 for (i = 0; i < noperands; i++)
2333 {
2334 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2335 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2336 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2337 /* VOIDmode match_operands get their mode from the real operand. */
2338 if (recog_data.operand_mode[i] == VOIDmode)
2339 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2340 }
2341 }
2342 for (i = 0; i < noperands; i++)
2343 recog_data.operand_type[i]
2344 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2345 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2346 : OP_IN);
2347
2348 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2349
2350 recog_data.insn = NULL;
2351 which_alternative = -1;
2352 }
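/* Usage sketch (illustrative only; note_reg_operand is a hypothetical
   callback): a pass that wants to walk the operands of an arbitrary
   recognizable insn can do

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       if (REG_P (recog_data.operand[i]))
         note_reg_operand (recog_data.operand[i],
                           recog_data.operand_type[i] != OP_IN);

   recog_data.operand_loc[i] additionally records where each operand lives
   inside PATTERN (insn), which allows in-place replacement. */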
2353
2354 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2355 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2356 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2357 has N_OPERANDS entries. */
2358
2359 void
2360 preprocess_constraints (int n_operands, int n_alternatives,
2361 const char **constraints,
2362 operand_alternative *op_alt_base)
2363 {
2364 for (int i = 0; i < n_operands; i++)
2365 {
2366 int j;
2367 struct operand_alternative *op_alt;
2368 const char *p = constraints[i];
2369
2370 op_alt = op_alt_base;
2371
2372 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2373 {
2374 op_alt[i].cl = NO_REGS;
2375 op_alt[i].constraint = p;
2376 op_alt[i].matches = -1;
2377 op_alt[i].matched = -1;
2378
2379 if (*p == '\0' || *p == ',')
2380 {
2381 op_alt[i].anything_ok = 1;
2382 continue;
2383 }
2384
2385 for (;;)
2386 {
2387 char c = *p;
2388 if (c == '#')
2389 do
2390 c = *++p;
2391 while (c != ',' && c != '\0');
2392 if (c == ',' || c == '\0')
2393 {
2394 p++;
2395 break;
2396 }
2397
2398 switch (c)
2399 {
2400 case '?':
2401 op_alt[i].reject += 6;
2402 break;
2403 case '!':
2404 op_alt[i].reject += 600;
2405 break;
2406 case '&':
2407 op_alt[i].earlyclobber = 1;
2408 break;
2409
2410 case '0': case '1': case '2': case '3': case '4':
2411 case '5': case '6': case '7': case '8': case '9':
2412 {
2413 char *end;
2414 op_alt[i].matches = strtoul (p, &end, 10);
2415 op_alt[op_alt[i].matches].matched = i;
2416 p = end;
2417 }
2418 continue;
2419
2420 case 'X':
2421 op_alt[i].anything_ok = 1;
2422 break;
2423
2424 case 'g':
2425 op_alt[i].cl =
2426 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2427 break;
2428
2429 default:
2430 enum constraint_num cn = lookup_constraint (p);
2431 enum reg_class cl;
2432 switch (get_constraint_type (cn))
2433 {
2434 case CT_REGISTER:
2435 cl = reg_class_for_constraint (cn);
2436 if (cl != NO_REGS)
2437 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2438 break;
2439
2440 case CT_CONST_INT:
2441 break;
2442
2443 case CT_MEMORY:
2444 op_alt[i].memory_ok = 1;
2445 break;
2446
2447 case CT_ADDRESS:
2448 op_alt[i].is_address = 1;
2449 op_alt[i].cl
2450 = (reg_class_subunion
2451 [(int) op_alt[i].cl]
2452 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2453 ADDRESS, SCRATCH)]);
2454 break;
2455
2456 case CT_FIXED_FORM:
2457 break;
2458 }
2459 break;
2460 }
2461 p += CONSTRAINT_LEN (c, p);
2462 }
2463 }
2464 }
2465 }
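/* Layout example (illustrative only): with 2 operands and 3 alternatives,
   the entry describing operand 1 of alternative 2 is

     op_alt_base[2 * n_operands + 1]

   which is why the loop above advances OP_ALT by N_OPERANDS per alternative
   while I selects the operand within each block. */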
2466
2467 /* Return an array of operand_alternative structures for
2468 instruction ICODE. */
2469
2470 const operand_alternative *
2471 preprocess_insn_constraints (int icode)
2472 {
2473 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2474 if (this_target_recog->x_op_alt[icode])
2475 return this_target_recog->x_op_alt[icode];
2476
2477 int n_operands = insn_data[icode].n_operands;
2478 if (n_operands == 0)
2479 return 0;
2480 /* Always provide at least one alternative so that which_op_alt ()
2481 works correctly. If the instruction has 0 alternatives (i.e. all
2482 constraint strings are empty) then each operand in this alternative
2483 will have anything_ok set. */
2484 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2485 int n_entries = n_operands * n_alternatives;
2486
2487 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2488 const char **constraints = XALLOCAVEC (const char *, n_operands);
2489
2490 for (int i = 0; i < n_operands; ++i)
2491 constraints[i] = insn_data[icode].operand[i].constraint;
2492 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2493
2494 this_target_recog->x_op_alt[icode] = op_alt;
2495 return op_alt;
2496 }
2497
2498 /* After calling extract_insn, you can use this function to extract some
2499 information from the constraint strings into a more usable form.
2500 The collected data is stored in recog_op_alt. */
2501
2502 void
2503 preprocess_constraints (rtx_insn *insn)
2504 {
2505 int icode = INSN_CODE (insn);
2506 if (icode >= 0)
2507 recog_op_alt = preprocess_insn_constraints (icode);
2508 else
2509 {
2510 int n_operands = recog_data.n_operands;
2511 int n_alternatives = recog_data.n_alternatives;
2512 int n_entries = n_operands * n_alternatives;
2513 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2514 preprocess_constraints (n_operands, n_alternatives,
2515 recog_data.constraints, asm_op_alt);
2516 recog_op_alt = asm_op_alt;
2517 }
2518 }
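/* Usage sketch (illustrative only): recog_op_alt is normally consulted
   through which_op_alt () once an alternative has been chosen:

     extract_insn (insn);
     preprocess_constraints (insn);
     if (constrain_operands_cached (insn, reload_completed))
       {
         const operand_alternative *op_alt = which_op_alt ();
         ...
       }

   where op_alt[i] then describes operand I in the chosen alternative. */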
2519
2520 /* Check the operands of an insn against the insn's operand constraints
2521 and return 1 if they match any of the alternatives in ALTERNATIVES.
2522
2523 The information about the insn's operands, constraints, operand modes
2524 etc. is obtained from the global variables set up by extract_insn.
2525
2526 WHICH_ALTERNATIVE is set to a number which indicates which
2527 alternative of constraints was matched: 0 for the first alternative,
2528 1 for the next, etc.
2529
2530 In addition, when two operands are required to match
2531 and it happens that the output operand is (reg) while the
2532 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2533 make the output operand look like the input.
2534 This is because the output operand is the one the template will print.
2535
2536 This is used in final, just before printing the assembler code and by
2537 the routines that determine an insn's attribute.
2538
2539 If STRICT is a positive value, it means that we have been
2540 called after reload has been completed. In that case, we must
2541 do all checks strictly. If it is zero, it means that we have been called
2542 before reload has completed. In that case, we first try to see if we can
2543 find an alternative that matches strictly. If not, we try again, this
2544 time assuming that reload will fix up the insn. This provides a "best
2545 guess" for the alternative and is used to compute attributes of insns prior
2546 to reload. A negative value of STRICT is used for this internal call. */
2547
2548 struct funny_match
2549 {
2550 int this_op, other;
2551 };
2552
2553 int
2554 constrain_operands (int strict, alternative_mask alternatives)
2555 {
2556 const char *constraints[MAX_RECOG_OPERANDS];
2557 int matching_operands[MAX_RECOG_OPERANDS];
2558 int earlyclobber[MAX_RECOG_OPERANDS];
2559 int c;
2560
2561 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2562 int funny_match_index;
2563
2564 which_alternative = 0;
2565 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2566 return 1;
2567
2568 for (c = 0; c < recog_data.n_operands; c++)
2569 {
2570 constraints[c] = recog_data.constraints[c];
2571 matching_operands[c] = -1;
2572 }
2573
2574 do
2575 {
2576 int seen_earlyclobber_at = -1;
2577 int opno;
2578 int lose = 0;
2579 funny_match_index = 0;
2580
2581 if (!TEST_BIT (alternatives, which_alternative))
2582 {
2583 int i;
2584
2585 for (i = 0; i < recog_data.n_operands; i++)
2586 constraints[i] = skip_alternative (constraints[i]);
2587
2588 which_alternative++;
2589 continue;
2590 }
2591
2592 for (opno = 0; opno < recog_data.n_operands; opno++)
2593 {
2594 rtx op = recog_data.operand[opno];
2595 machine_mode mode = GET_MODE (op);
2596 const char *p = constraints[opno];
2597 int offset = 0;
2598 int win = 0;
2599 int val;
2600 int len;
2601
2602 earlyclobber[opno] = 0;
2603
2604 /* A unary operator may be accepted by the predicate, but it
2605 is irrelevant for matching constraints. */
2606 if (UNARY_P (op))
2607 op = XEXP (op, 0);
2608
2609 if (GET_CODE (op) == SUBREG)
2610 {
2611 if (REG_P (SUBREG_REG (op))
2612 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2613 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2614 GET_MODE (SUBREG_REG (op)),
2615 SUBREG_BYTE (op),
2616 GET_MODE (op));
2617 op = SUBREG_REG (op);
2618 }
2619
2620 /* An empty constraint or empty alternative
2621 allows anything which matched the pattern. */
2622 if (*p == 0 || *p == ',')
2623 win = 1;
2624
2625 do
2626 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2627 {
2628 case '\0':
2629 len = 0;
2630 break;
2631 case ',':
2632 c = '\0';
2633 break;
2634
2635 case '#':
2636 /* Ignore rest of this alternative as far as
2637 constraint checking is concerned. */
2638 do
2639 p++;
2640 while (*p && *p != ',');
2641 len = 0;
2642 break;
2643
2644 case '&':
2645 earlyclobber[opno] = 1;
2646 if (seen_earlyclobber_at < 0)
2647 seen_earlyclobber_at = opno;
2648 break;
2649
2650 case '0': case '1': case '2': case '3': case '4':
2651 case '5': case '6': case '7': case '8': case '9':
2652 {
2653 /* This operand must be the same as a previous one.
2654 This kind of constraint is used for instructions such
2655 as add when they take only two operands.
2656
2657 Note that the lower-numbered operand is passed first.
2658
2659 If we are not testing strictly, assume that this
2660 constraint will be satisfied. */
2661
2662 char *end;
2663 int match;
2664
2665 match = strtoul (p, &end, 10);
2666 p = end;
2667
2668 if (strict < 0)
2669 val = 1;
2670 else
2671 {
2672 rtx op1 = recog_data.operand[match];
2673 rtx op2 = recog_data.operand[opno];
2674
2675 /* A unary operator may be accepted by the predicate,
2676 but it is irrelevant for matching constraints. */
2677 if (UNARY_P (op1))
2678 op1 = XEXP (op1, 0);
2679 if (UNARY_P (op2))
2680 op2 = XEXP (op2, 0);
2681
2682 val = operands_match_p (op1, op2);
2683 }
2684
2685 matching_operands[opno] = match;
2686 matching_operands[match] = opno;
2687
2688 if (val != 0)
2689 win = 1;
2690
2691 /* If output is *x and input is *--x, arrange later
2692 to change the output to *--x as well, since the
2693 output op is the one that will be printed. */
2694 if (val == 2 && strict > 0)
2695 {
2696 funny_match[funny_match_index].this_op = opno;
2697 funny_match[funny_match_index++].other = match;
2698 }
2699 }
2700 len = 0;
2701 break;
2702
2703 case 'p':
2704 /* p is used for address_operands. When we are called by
2705 gen_reload, no one will have checked that the address is
2706 strictly valid, i.e., that all pseudos requiring hard regs
2707 have gotten them. */
2708 if (strict <= 0
2709 || (strict_memory_address_p (recog_data.operand_mode[opno],
2710 op)))
2711 win = 1;
2712 break;
2713
2714 /* No need to check general_operand again;
2715 it was done in insn-recog.c. Well, except that reload
2716 doesn't check the validity of its replacements, but
2717 that should only matter when there's a bug. */
2718 case 'g':
2719 /* Anything goes unless it is a REG and really has a hard reg
2720 but the hard reg is not in the class GENERAL_REGS. */
2721 if (REG_P (op))
2722 {
2723 if (strict < 0
2724 || GENERAL_REGS == ALL_REGS
2725 || (reload_in_progress
2726 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2727 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2728 win = 1;
2729 }
2730 else if (strict < 0 || general_operand (op, mode))
2731 win = 1;
2732 break;
2733
2734 default:
2735 {
2736 enum constraint_num cn = lookup_constraint (p);
2737 enum reg_class cl = reg_class_for_constraint (cn);
2738 if (cl != NO_REGS)
2739 {
2740 if (strict < 0
2741 || (strict == 0
2742 && REG_P (op)
2743 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2744 || (strict == 0 && GET_CODE (op) == SCRATCH)
2745 || (REG_P (op)
2746 && reg_fits_class_p (op, cl, offset, mode)))
2747 win = 1;
2748 }
2749
2750 else if (constraint_satisfied_p (op, cn))
2751 win = 1;
2752
2753 else if (insn_extra_memory_constraint (cn)
2754 /* Every memory operand can be reloaded to fit. */
2755 && ((strict < 0 && MEM_P (op))
2756 /* Before reload, accept what reload can turn
2757 into a mem. */
2758 || (strict < 0 && CONSTANT_P (op))
2759 /* Before reload, accept a pseudo,
2760 since LRA can turn it into a mem. */
2761 || (strict < 0 && targetm.lra_p () && REG_P (op)
2762 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2763 /* During reload, accept a pseudo. */
2764 || (reload_in_progress && REG_P (op)
2765 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2766 win = 1;
2767 else if (insn_extra_address_constraint (cn)
2768 /* Every address operand can be reloaded to fit. */
2769 && strict < 0)
2770 win = 1;
2771 /* Cater to architectures like IA-64 that define extra memory
2772 constraints without using define_memory_constraint. */
2773 else if (reload_in_progress
2774 && REG_P (op)
2775 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2776 && reg_renumber[REGNO (op)] < 0
2777 && reg_equiv_mem (REGNO (op)) != 0
2778 && constraint_satisfied_p
2779 (reg_equiv_mem (REGNO (op)), cn))
2780 win = 1;
2781 break;
2782 }
2783 }
2784 while (p += len, c);
2785
2786 constraints[opno] = p;
2787 /* If this operand did not win somehow,
2788 this alternative loses. */
2789 if (! win)
2790 lose = 1;
2791 }
2792 /* This alternative won; the operands are ok.
2793 Change whichever operands this alternative says to change. */
2794 if (! lose)
2795 {
2796 int opno, eopno;
2797
2798 /* See if any earlyclobber operand conflicts with some other
2799 operand. */
2800
2801 if (strict > 0 && seen_earlyclobber_at >= 0)
2802 for (eopno = seen_earlyclobber_at;
2803 eopno < recog_data.n_operands;
2804 eopno++)
2805 /* Ignore earlyclobber operands now in memory,
2806 because we would often report failure when we have
2807 two memory operands, one of which was formerly a REG. */
2808 if (earlyclobber[eopno]
2809 && REG_P (recog_data.operand[eopno]))
2810 for (opno = 0; opno < recog_data.n_operands; opno++)
2811 if ((MEM_P (recog_data.operand[opno])
2812 || recog_data.operand_type[opno] != OP_OUT)
2813 && opno != eopno
2814 /* Ignore things like match_operator operands. */
2815 && *recog_data.constraints[opno] != 0
2816 && ! (matching_operands[opno] == eopno
2817 && operands_match_p (recog_data.operand[opno],
2818 recog_data.operand[eopno]))
2819 && ! safe_from_earlyclobber (recog_data.operand[opno],
2820 recog_data.operand[eopno]))
2821 lose = 1;
2822
2823 if (! lose)
2824 {
2825 while (--funny_match_index >= 0)
2826 {
2827 recog_data.operand[funny_match[funny_match_index].other]
2828 = recog_data.operand[funny_match[funny_match_index].this_op];
2829 }
2830
2831 #ifdef AUTO_INC_DEC
2832 /* For operands without < or > constraints reject side-effects. */
2833 if (recog_data.is_asm)
2834 {
2835 for (opno = 0; opno < recog_data.n_operands; opno++)
2836 if (MEM_P (recog_data.operand[opno]))
2837 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2838 {
2839 case PRE_INC:
2840 case POST_INC:
2841 case PRE_DEC:
2842 case POST_DEC:
2843 case PRE_MODIFY:
2844 case POST_MODIFY:
2845 if (strchr (recog_data.constraints[opno], '<') == NULL
2846 && strchr (recog_data.constraints[opno], '>')
2847 == NULL)
2848 return 0;
2849 break;
2850 default:
2851 break;
2852 }
2853 }
2854 #endif
2855 return 1;
2856 }
2857 }
2858
2859 which_alternative++;
2860 }
2861 while (which_alternative < recog_data.n_alternatives);
2862
2863 which_alternative = -1;
2864 /* If we are about to reject this, but we are not to test strictly,
2865 try a very loose test. Only return failure if it fails also. */
2866 if (strict == 0)
2867 return constrain_operands (-1, alternatives);
2868 else
2869 return 0;
2870 }
2871
2872 /* Return true iff OPERAND (assumed to be a REG rtx)
2873 is a hard reg in class CL when its regno is offset by OFFSET
2874 and changed to mode MODE.
2875 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2876
2877 bool
2878 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2879 machine_mode mode)
2880 {
2881 unsigned int regno = REGNO (operand);
2882
2883 if (cl == NO_REGS)
2884 return false;
2885
2886 /* Regno must not be a pseudo register. Offset may be negative. */
2887 return (HARD_REGISTER_NUM_P (regno)
2888 && HARD_REGISTER_NUM_P (regno + offset)
2889 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2890 regno + offset));
2891 }
2892 \f
2893 /* Split single instruction. Helper function for split_all_insns and
2894 split_all_insns_noflow. Return last insn in the sequence if successful,
2895 or NULL if unsuccessful. */
2896
2897 static rtx_insn *
2898 split_insn (rtx_insn *insn)
2899 {
2900 /* Split insns here to get max fine-grain parallelism. */
2901 rtx_insn *first = PREV_INSN (insn);
2902 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2903 rtx insn_set, last_set, note;
2904
2905 if (last == insn)
2906 return NULL;
2907
2908 /* If the original instruction was a single set that was known to be
2909 equivalent to a constant, see if we can say the same about the last
2910 instruction in the split sequence. The two instructions must set
2911 the same destination. */
2912 insn_set = single_set (insn);
2913 if (insn_set)
2914 {
2915 last_set = single_set (last);
2916 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2917 {
2918 note = find_reg_equal_equiv_note (insn);
2919 if (note && CONSTANT_P (XEXP (note, 0)))
2920 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2921 else if (CONSTANT_P (SET_SRC (insn_set)))
2922 set_unique_reg_note (last, REG_EQUAL,
2923 copy_rtx (SET_SRC (insn_set)));
2924 }
2925 }
2926
2927 /* try_split returns the NOTE that INSN became. */
2928 SET_INSN_DELETED (insn);
2929
2930 /* ??? Coddle to md files that generate subregs in post-reload
2931 splitters instead of computing the proper hard register. */
2932 if (reload_completed && first != last)
2933 {
2934 first = NEXT_INSN (first);
2935 for (;;)
2936 {
2937 if (INSN_P (first))
2938 cleanup_subreg_operands (first);
2939 if (first == last)
2940 break;
2941 first = NEXT_INSN (first);
2942 }
2943 }
2944
2945 return last;
2946 }
2947
2948 /* Split all insns in the function. */
2949
2950 void
2951 split_all_insns (void)
2952 {
2953 sbitmap blocks;
2954 bool changed;
2955 basic_block bb;
2956
2957 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2958 bitmap_clear (blocks);
2959 changed = false;
2960
2961 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2962 {
2963 rtx_insn *insn, *next;
2964 bool finish = false;
2965
2966 rtl_profile_for_bb (bb);
2967 for (insn = BB_HEAD (bb); !finish ; insn = next)
2968 {
2969 /* Can't use `next_real_insn' because that might go across
2970 CODE_LABELS and short-out basic blocks. */
2971 next = NEXT_INSN (insn);
2972 finish = (insn == BB_END (bb));
2973 if (INSN_P (insn))
2974 {
2975 rtx set = single_set (insn);
2976
2977 /* Don't split no-op move insns. These should silently
2978 disappear later in final. Splitting such insns would
2979 break the code that handles LIBCALL blocks. */
2980 if (set && set_noop_p (set))
2981 {
2982 /* Nops get in the way while scheduling, so delete them
2983 now if register allocation has already been done. It
2984 is too risky to try to do this before register
2985 allocation, and there are unlikely to be very many
2986 nops then anyway. */
2987 if (reload_completed)
2988 delete_insn_and_edges (insn);
2989 }
2990 else
2991 {
2992 if (split_insn (insn))
2993 {
2994 bitmap_set_bit (blocks, bb->index);
2995 changed = true;
2996 }
2997 }
2998 }
2999 }
3000 }
3001
3002 default_rtl_profile ();
3003 if (changed)
3004 find_many_sub_basic_blocks (blocks);
3005
3006 #ifdef ENABLE_CHECKING
3007 verify_flow_info ();
3008 #endif
3009
3010 sbitmap_free (blocks);
3011 }
3012
3013 /* Same as split_all_insns, but do not expect CFG to be available.
3014 Used by machine dependent reorg passes. */
3015
3016 unsigned int
3017 split_all_insns_noflow (void)
3018 {
3019 rtx_insn *next, *insn;
3020
3021 for (insn = get_insns (); insn; insn = next)
3022 {
3023 next = NEXT_INSN (insn);
3024 if (INSN_P (insn))
3025 {
3026 /* Don't split no-op move insns. These should silently
3027 disappear later in final. Splitting such insns would
3028 break the code that handles LIBCALL blocks. */
3029 rtx set = single_set (insn);
3030 if (set && set_noop_p (set))
3031 {
3032 /* Nops get in the way while scheduling, so delete them
3033 now if register allocation has already been done. It
3034 is too risky to try to do this before register
3035 allocation, and there are unlikely to be very many
3036 nops then anyway.
3037
3038 ??? Should we use delete_insn when the CFG isn't valid? */
3039 if (reload_completed)
3040 delete_insn_and_edges (insn);
3041 }
3042 else
3043 split_insn (insn);
3044 }
3045 }
3046 return 0;
3047 }
3048 \f
3049 #ifdef HAVE_peephole2
3050 struct peep2_insn_data
3051 {
3052 rtx_insn *insn;
3053 regset live_before;
3054 };
3055
3056 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3057 static int peep2_current;
3058
3059 static bool peep2_do_rebuild_jump_labels;
3060 static bool peep2_do_cleanup_cfg;
3061
3062 /* The number of instructions available to match a peep2. */
3063 int peep2_current_count;
3064
3065 /* A marker indicating the last insn of the block. The live_before regset
3066 for this element is correct, indicating DF_LIVE_OUT for the block. */
3067 #define PEEP2_EOB invalid_insn_rtx
3068
3069 /* Wrap N to fit into the peep2_insn_data buffer. */
3070
3071 static int
3072 peep2_buf_position (int n)
3073 {
3074 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3075 n -= MAX_INSNS_PER_PEEP2 + 1;
3076 return n;
3077 }
3078
3079 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3080 does not exist. Used by the recognizer to find the next insn to match
3081 in a multi-insn pattern. */
3082
3083 rtx_insn *
3084 peep2_next_insn (int n)
3085 {
3086 gcc_assert (n <= peep2_current_count);
3087
3088 n = peep2_buf_position (peep2_current + n);
3089
3090 return peep2_insn_data[n].insn;
3091 }
3092
3093 /* Return true if REGNO is dead before the Nth non-note insn
3094 after `current'. */
3095
3096 int
3097 peep2_regno_dead_p (int ofs, int regno)
3098 {
3099 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3100
3101 ofs = peep2_buf_position (peep2_current + ofs);
3102
3103 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3104
3105 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3106 }
3107
3108 /* Similarly for a REG. */
3109
3110 int
3111 peep2_reg_dead_p (int ofs, rtx reg)
3112 {
3113 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3114
3115 ofs = peep2_buf_position (peep2_current + ofs);
3116
3117 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3118
3119 unsigned int end_regno = END_REGNO (reg);
3120 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3121 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3122 return 0;
3123 return 1;
3124 }
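/* Usage sketch (illustrative only): backends normally call this from a
   define_peephole2 condition; for a two-insn match,

     peep2_reg_dead_p (2, operands[0])

   is true when operand 0 is dead after both matched insns, i.e. not live
   before whatever follows the matched sequence. */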
3125
3126 /* Regno offset to be used in the register search. */
3127 static int search_ofs;
3128
3129 /* Try to find a hard register of mode MODE, matching the register class in
3130 CLASS_STR, that is available at the start of the insn at peep2 buffer
3131 offset FROM and is not set or clobbered by any buffered insn before the
3132 one at offset TO.
3133
3134 Registers that already have bits set in REG_SET will not be considered.
3135
3136 If an appropriate register is available, it will be returned and the
3137 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3138 returned. */
3139
3140 rtx
3141 peep2_find_free_register (int from, int to, const char *class_str,
3142 machine_mode mode, HARD_REG_SET *reg_set)
3143 {
3144 enum reg_class cl;
3145 HARD_REG_SET live;
3146 df_ref def;
3147 int i;
3148
3149 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3150 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3151
3152 from = peep2_buf_position (peep2_current + from);
3153 to = peep2_buf_position (peep2_current + to);
3154
3155 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3156 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3157
3158 while (from != to)
3159 {
3160 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3161
3162 /* Don't use registers set or clobbered by the insn. */
3163 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3164 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3165
3166 from = peep2_buf_position (from + 1);
3167 }
3168
3169 cl = reg_class_for_constraint (lookup_constraint (class_str));
3170
3171 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3172 {
3173 int raw_regno, regno, success, j;
3174
3175 /* Distribute the free registers as much as possible. */
3176 raw_regno = search_ofs + i;
3177 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3178 raw_regno -= FIRST_PSEUDO_REGISTER;
3179 #ifdef REG_ALLOC_ORDER
3180 regno = reg_alloc_order[raw_regno];
3181 #else
3182 regno = raw_regno;
3183 #endif
3184
3185 /* Can it support the mode we need? */
3186 if (! HARD_REGNO_MODE_OK (regno, mode))
3187 continue;
3188
3189 success = 1;
3190 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3191 {
3192 /* Don't allocate fixed registers. */
3193 if (fixed_regs[regno + j])
3194 {
3195 success = 0;
3196 break;
3197 }
3198 /* Don't allocate global registers. */
3199 if (global_regs[regno + j])
3200 {
3201 success = 0;
3202 break;
3203 }
3204 /* Make sure the register is of the right class. */
3205 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3206 {
3207 success = 0;
3208 break;
3209 }
3210 /* And that we don't create an extra save/restore. */
3211 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3212 {
3213 success = 0;
3214 break;
3215 }
3216
3217 if (! targetm.hard_regno_scratch_ok (regno + j))
3218 {
3219 success = 0;
3220 break;
3221 }
3222
3223 /* And we don't clobber traceback for noreturn functions. */
3224 if ((regno + j == FRAME_POINTER_REGNUM
3225 || regno + j == HARD_FRAME_POINTER_REGNUM)
3226 && (! reload_completed || frame_pointer_needed))
3227 {
3228 success = 0;
3229 break;
3230 }
3231
3232 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3233 || TEST_HARD_REG_BIT (live, regno + j))
3234 {
3235 success = 0;
3236 break;
3237 }
3238 }
3239
3240 if (success)
3241 {
3242 add_to_hard_reg_set (reg_set, mode, regno);
3243
3244 /* Start the next search with the next register. */
3245 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3246 raw_regno = 0;
3247 search_ofs = raw_regno;
3248
3249 return gen_rtx_REG (mode, regno);
3250 }
3251 }
3252
3253 search_ofs = 0;
3254 return NULL_RTX;
3255 }
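/* Usage sketch (illustrative only): the code generated for a
   (match_scratch ...) operand of a define_peephole2 does essentially this:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       ...  (reject this peephole)

   A successful call also marks the register in USED, so a second scratch
   request cannot return the same register. */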
3256
3257 /* Forget all currently tracked instructions; only remember the current
3258 LIVE regset. */
3259
3260 static void
3261 peep2_reinit_state (regset live)
3262 {
3263 int i;
3264
3265 /* Indicate that all slots except the last hold invalid data. */
3266 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3267 peep2_insn_data[i].insn = NULL;
3268 peep2_current_count = 0;
3269
3270 /* Indicate that the last slot contains live_after data. */
3271 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3272 peep2_current = MAX_INSNS_PER_PEEP2;
3273
3274 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3275 }
3276
3277 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3278 starting at INSN. Perform the replacement, removing the old insns and
3279 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3280 if the replacement is rejected. */
3281
3282 static rtx_insn *
3283 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3284 {
3285 int i;
3286 rtx_insn *last, *before_try, *x;
3287 rtx eh_note, as_note;
3288 rtx_insn *old_insn;
3289 rtx_insn *new_insn;
3290 bool was_call = false;
3291
3292 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3293 match more than one insn, or to be split into more than one insn. */
3294 old_insn = peep2_insn_data[peep2_current].insn;
3295 if (RTX_FRAME_RELATED_P (old_insn))
3296 {
3297 bool any_note = false;
3298 rtx note;
3299
3300 if (match_len != 0)
3301 return NULL;
3302
3303 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3304 may be in the stream for the purpose of register allocation. */
3305 if (active_insn_p (attempt))
3306 new_insn = attempt;
3307 else
3308 new_insn = next_active_insn (attempt);
3309 if (next_active_insn (new_insn))
3310 return NULL;
3311
3312 /* We have a 1-1 replacement. Copy over any frame-related info. */
3313 RTX_FRAME_RELATED_P (new_insn) = 1;
3314
3315 /* Allow the backend to fill in a note during the split. */
3316 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3317 switch (REG_NOTE_KIND (note))
3318 {
3319 case REG_FRAME_RELATED_EXPR:
3320 case REG_CFA_DEF_CFA:
3321 case REG_CFA_ADJUST_CFA:
3322 case REG_CFA_OFFSET:
3323 case REG_CFA_REGISTER:
3324 case REG_CFA_EXPRESSION:
3325 case REG_CFA_RESTORE:
3326 case REG_CFA_SET_VDRAP:
3327 any_note = true;
3328 break;
3329 default:
3330 break;
3331 }
3332
3333 /* If the backend didn't supply a note, copy one over. */
3334 if (!any_note)
3335 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3336 switch (REG_NOTE_KIND (note))
3337 {
3338 case REG_FRAME_RELATED_EXPR:
3339 case REG_CFA_DEF_CFA:
3340 case REG_CFA_ADJUST_CFA:
3341 case REG_CFA_OFFSET:
3342 case REG_CFA_REGISTER:
3343 case REG_CFA_EXPRESSION:
3344 case REG_CFA_RESTORE:
3345 case REG_CFA_SET_VDRAP:
3346 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3347 any_note = true;
3348 break;
3349 default:
3350 break;
3351 }
3352
3353 /* If there still isn't a note, make sure the unwind info sees the
3354 same expression as before the split. */
3355 if (!any_note)
3356 {
3357 rtx old_set, new_set;
3358
3359 /* The old insn had better have been simple, or annotated. */
3360 old_set = single_set (old_insn);
3361 gcc_assert (old_set != NULL);
3362
3363 new_set = single_set (new_insn);
3364 if (!new_set || !rtx_equal_p (new_set, old_set))
3365 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3366 }
3367
3368 /* Copy prologue/epilogue status. This is required in order to keep
3369 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3370 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3371 }
3372
3373 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3374 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3375 cfg-related call notes. */
3376 for (i = 0; i <= match_len; ++i)
3377 {
3378 int j;
3379 rtx note;
3380
3381 j = peep2_buf_position (peep2_current + i);
3382 old_insn = peep2_insn_data[j].insn;
3383 if (!CALL_P (old_insn))
3384 continue;
3385 was_call = true;
3386
3387 new_insn = attempt;
3388 while (new_insn != NULL_RTX)
3389 {
3390 if (CALL_P (new_insn))
3391 break;
3392 new_insn = NEXT_INSN (new_insn);
3393 }
3394
3395 gcc_assert (new_insn != NULL_RTX);
3396
3397 CALL_INSN_FUNCTION_USAGE (new_insn)
3398 = CALL_INSN_FUNCTION_USAGE (old_insn);
3399 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3400
3401 for (note = REG_NOTES (old_insn);
3402 note;
3403 note = XEXP (note, 1))
3404 switch (REG_NOTE_KIND (note))
3405 {
3406 case REG_NORETURN:
3407 case REG_SETJMP:
3408 case REG_TM:
3409 add_reg_note (new_insn, REG_NOTE_KIND (note),
3410 XEXP (note, 0));
3411 break;
3412 default:
3413 /* Discard all other reg notes. */
3414 break;
3415 }
3416
3417 /* Croak if there is another call in the sequence. */
3418 while (++i <= match_len)
3419 {
3420 j = peep2_buf_position (peep2_current + i);
3421 old_insn = peep2_insn_data[j].insn;
3422 gcc_assert (!CALL_P (old_insn));
3423 }
3424 break;
3425 }
3426
3427 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3428 move those notes over to the new sequence. */
3429 as_note = NULL;
3430 for (i = match_len; i >= 0; --i)
3431 {
3432 int j = peep2_buf_position (peep2_current + i);
3433 old_insn = peep2_insn_data[j].insn;
3434
3435 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3436 if (as_note)
3437 break;
3438 }
3439
3440 i = peep2_buf_position (peep2_current + match_len);
3441 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3442
3443 /* Replace the old sequence with the new. */
3444 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3445 last = emit_insn_after_setloc (attempt,
3446 peep2_insn_data[i].insn,
3447 INSN_LOCATION (peepinsn));
3448 before_try = PREV_INSN (insn);
3449 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3450
3451 /* Re-insert the EH_REGION notes. */
3452 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3453 {
3454 edge eh_edge;
3455 edge_iterator ei;
3456
3457 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3458 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3459 break;
3460
3461 if (eh_note)
3462 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3463
3464 if (eh_edge)
3465 for (x = last; x != before_try; x = PREV_INSN (x))
3466 if (x != BB_END (bb)
3467 && (can_throw_internal (x)
3468 || can_nonlocal_goto (x)))
3469 {
3470 edge nfte, nehe;
3471 int flags;
3472
3473 nfte = split_block (bb, x);
3474 flags = (eh_edge->flags
3475 & (EDGE_EH | EDGE_ABNORMAL));
3476 if (CALL_P (x))
3477 flags |= EDGE_ABNORMAL_CALL;
3478 nehe = make_edge (nfte->src, eh_edge->dest,
3479 flags);
3480
3481 nehe->probability = eh_edge->probability;
3482 nfte->probability
3483 = REG_BR_PROB_BASE - nehe->probability;
3484
3485 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3486 bb = nfte->src;
3487 eh_edge = nehe;
3488 }
3489
3490 /* The replacement may have turned a possibly trapping insn into a
3491 non-trapping one.  Zap any dummy outgoing edges. */
3492 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3493 }
3494
3495 /* Re-insert the ARGS_SIZE notes. */
3496 if (as_note)
3497 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3498
3499 /* If we generated a jump instruction, it won't have
3500 JUMP_LABEL set. Recompute after we're done. */
3501 for (x = last; x != before_try; x = PREV_INSN (x))
3502 if (JUMP_P (x))
3503 {
3504 peep2_do_rebuild_jump_labels = true;
3505 break;
3506 }
3507
3508 return last;
3509 }
3510
3511 /* After performing a replacement in basic block BB, fix up the life
3512 information in our buffer. LAST is the last of the insns that we
3513 emitted as a replacement. PREV is the insn before the start of
3514 the replacement. MATCH_LEN is the number of instructions that were
3515 matched, and which now need to be replaced in the buffer. */
3516
3517 static void
3518 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3519 rtx_insn *prev)
3520 {
3521 int i = peep2_buf_position (peep2_current + match_len + 1);
3522 rtx_insn *x;
3523 regset_head live;
3524
3525 INIT_REG_SET (&live);
3526 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3527
3528 gcc_assert (peep2_current_count >= match_len + 1);
3529 peep2_current_count -= match_len + 1;
3530
3531 x = last;
3532 do
3533 {
3534 if (INSN_P (x))
3535 {
3536 df_insn_rescan (x);
3537 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3538 {
3539 peep2_current_count++;
3540 if (--i < 0)
3541 i = MAX_INSNS_PER_PEEP2;
3542 peep2_insn_data[i].insn = x;
3543 df_simulate_one_insn_backwards (bb, x, &live);
3544 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3545 }
3546 }
3547 x = PREV_INSN (x);
3548 }
3549 while (x != prev);
3550 CLEAR_REG_SET (&live);
3551
3552 peep2_current = i;
3553 }
3554
3555 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3556 Return true if we added it, false otherwise. The caller will try to match
3557 peepholes against the buffer if we return false; otherwise it will try to
3558 add more instructions to the buffer. */
3559
3560 static bool
3561 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3562 {
3563 int pos;
3564
3565 /* Once we have filled the maximum number of insns the buffer can hold,
3566 allow the caller to match the insns against peepholes. We wait until
3567 the buffer is full in case the target has similar peepholes of different
3568 length; we always want to match the longest if possible. */
3569 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3570 return false;
3571
3572 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3573 any other pattern, lest it change the semantics of the frame info. */
3574 if (RTX_FRAME_RELATED_P (insn))
3575 {
3576 /* Let the buffer drain first. */
3577 if (peep2_current_count > 0)
3578 return false;
3579 /* Now the insn will be the only thing in the buffer. */
3580 }
3581
3582 pos = peep2_buf_position (peep2_current + peep2_current_count);
3583 peep2_insn_data[pos].insn = insn;
3584 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3585 peep2_current_count++;
3586
3587 df_simulate_one_insn_forwards (bb, insn, live);
3588 return true;
3589 }
3590
3591 /* Perform the peephole2 optimization pass. */
3592
3593 static void
3594 peephole2_optimize (void)
3595 {
3596 rtx_insn *insn;
3597 bitmap live;
3598 int i;
3599 basic_block bb;
3600
3601 peep2_do_cleanup_cfg = false;
3602 peep2_do_rebuild_jump_labels = false;
3603
3604 df_set_flags (DF_LR_RUN_DCE);
3605 df_note_add_problem ();
3606 df_analyze ();
3607
3608 /* Initialize the regsets we're going to use. */
3609 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3610 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3611 search_ofs = 0;
3612 live = BITMAP_ALLOC (&reg_obstack);
3613
3614 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3615 {
3616 bool past_end = false;
3617 int pos;
3618
3619 rtl_profile_for_bb (bb);
3620
3621 /* Start up propagation. */
3622 bitmap_copy (live, DF_LR_IN (bb));
3623 df_simulate_initialize_forwards (bb, live);
3624 peep2_reinit_state (live);
3625
3626 insn = BB_HEAD (bb);
3627 for (;;)
3628 {
3629 rtx_insn *attempt, *head;
3630 int match_len;
3631
3632 if (!past_end && !NONDEBUG_INSN_P (insn))
3633 {
3634 next_insn:
3635 insn = NEXT_INSN (insn);
3636 if (insn == NEXT_INSN (BB_END (bb)))
3637 past_end = true;
3638 continue;
3639 }
3640 if (!past_end && peep2_fill_buffer (bb, insn, live))
3641 goto next_insn;
3642
3643 /* If we did not fill an empty buffer, it signals the end of the
3644 block. */
3645 if (peep2_current_count == 0)
3646 break;
3647
3648 /* The buffer filled to the current maximum, so try to match. */
3649
3650 pos = peep2_buf_position (peep2_current + peep2_current_count);
3651 peep2_insn_data[pos].insn = PEEP2_EOB;
3652 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3653
3654 /* Match the peephole. */
3655 head = peep2_insn_data[peep2_current].insn;
3656 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3657 if (attempt != NULL)
3658 {
3659 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3660 if (last)
3661 {
3662 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3663 continue;
3664 }
3665 }
3666
3667 /* No match: advance the buffer by one insn. */
3668 peep2_current = peep2_buf_position (peep2_current + 1);
3669 peep2_current_count--;
3670 }
3671 }
3672
3673 default_rtl_profile ();
3674 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3675 BITMAP_FREE (peep2_insn_data[i].live_before);
3676 BITMAP_FREE (live);
3677 if (peep2_do_rebuild_jump_labels)
3678 rebuild_jump_labels (get_insns ());
3679 if (peep2_do_cleanup_cfg)
3680 cleanup_cfg (CLEANUP_CFG_CHANGED);
3681 }
3682 #endif /* HAVE_peephole2 */
3683
3684 /* Common predicates for use with define_bypass. */
3685
3686 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3687 data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3688 must be either a single_set or a PARALLEL with SETs inside. */
3689
3690 int
3691 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3692 {
3693 rtx out_set, in_set;
3694 rtx out_pat, in_pat;
3695 rtx out_exp, in_exp;
3696 int i, j;
3697
3698 in_set = single_set (in_insn);
3699 if (in_set)
3700 {
3701 if (!MEM_P (SET_DEST (in_set)))
3702 return false;
3703
3704 out_set = single_set (out_insn);
3705 if (out_set)
3706 {
3707 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3708 return false;
3709 }
3710 else
3711 {
3712 out_pat = PATTERN (out_insn);
3713
3714 if (GET_CODE (out_pat) != PARALLEL)
3715 return false;
3716
3717 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3718 {
3719 out_exp = XVECEXP (out_pat, 0, i);
3720
3721 if (GET_CODE (out_exp) == CLOBBER)
3722 continue;
3723
3724 gcc_assert (GET_CODE (out_exp) == SET);
3725
3726 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3727 return false;
3728 }
3729 }
3730 }
3731 else
3732 {
3733 in_pat = PATTERN (in_insn);
3734 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3735
3736 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3737 {
3738 in_exp = XVECEXP (in_pat, 0, i);
3739
3740 if (GET_CODE (in_exp) == CLOBBER)
3741 continue;
3742
3743 gcc_assert (GET_CODE (in_exp) == SET);
3744
3745 if (!MEM_P (SET_DEST (in_exp)))
3746 return false;
3747
3748 out_set = single_set (out_insn);
3749 if (out_set)
3750 {
3751 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3752 return false;
3753 }
3754 else
3755 {
3756 out_pat = PATTERN (out_insn);
3757 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3758
3759 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3760 {
3761 out_exp = XVECEXP (out_pat, 0, j);
3762
3763 if (GET_CODE (out_exp) == CLOBBER)
3764 continue;
3765
3766 gcc_assert (GET_CODE (out_exp) == SET);
3767
3768 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3769 return false;
3770 }
3771 }
3772 }
3773 }
3774
3775 return true;
3776 }
3777
3778 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3779 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3780 or multiple set; IN_INSN should be single_set for truth, but for convenience
3781 of insn categorization may be any JUMP or CALL insn. */
3782
3783 int
3784 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3785 {
3786 rtx out_set, in_set;
3787
3788 in_set = single_set (in_insn);
3789 if (! in_set)
3790 {
3791 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3792 return false;
3793 }
3794
3795 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3796 return false;
3797 in_set = SET_SRC (in_set);
3798
3799 out_set = single_set (out_insn);
3800 if (out_set)
3801 {
3802 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3803 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3804 return false;
3805 }
3806 else
3807 {
3808 rtx out_pat;
3809 int i;
3810
3811 out_pat = PATTERN (out_insn);
3812 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3813
3814 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3815 {
3816 rtx exp = XVECEXP (out_pat, 0, i);
3817
3818 if (GET_CODE (exp) == CLOBBER)
3819 continue;
3820
3821 gcc_assert (GET_CODE (exp) == SET);
3822
3823 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3824 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3825 return false;
3826 }
3827 }
3828
3829 return true;
3830 }
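/* Usage sketch (illustrative only; the reservation names are made up):
   these predicates are intended to be named as the guard of a
   define_bypass in a machine description's pipeline model, e.g.

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   so that the reduced latency applies only when the dependence is on the
   stored data rather than on the address calculation. */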
3831 \f
3832 static unsigned int
3833 rest_of_handle_peephole2 (void)
3834 {
3835 #ifdef HAVE_peephole2
3836 peephole2_optimize ();
3837 #endif
3838 return 0;
3839 }
3840
3841 namespace {
3842
3843 const pass_data pass_data_peephole2 =
3844 {
3845 RTL_PASS, /* type */
3846 "peephole2", /* name */
3847 OPTGROUP_NONE, /* optinfo_flags */
3848 TV_PEEPHOLE2, /* tv_id */
3849 0, /* properties_required */
3850 0, /* properties_provided */
3851 0, /* properties_destroyed */
3852 0, /* todo_flags_start */
3853 TODO_df_finish, /* todo_flags_finish */
3854 };
3855
3856 class pass_peephole2 : public rtl_opt_pass
3857 {
3858 public:
3859 pass_peephole2 (gcc::context *ctxt)
3860 : rtl_opt_pass (pass_data_peephole2, ctxt)
3861 {}
3862
3863 /* opt_pass methods: */
3864 /* The epiphany backend creates a second instance of this pass, so we need
3865 a clone method. */
3866 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3867 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3868 virtual unsigned int execute (function *)
3869 {
3870 return rest_of_handle_peephole2 ();
3871 }
3872
3873 }; // class pass_peephole2
3874
3875 } // anon namespace
3876
3877 rtl_opt_pass *
3878 make_pass_peephole2 (gcc::context *ctxt)
3879 {
3880 return new pass_peephole2 (ctxt);
3881 }
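/* Illustrative note, not part of the original source: make_pass_peephole2
   and the other make_pass_* factories below are the passes' only public
   interface; the pass manager instantiates each one from its entry in
   passes.def, along the lines of:

     NEXT_PASS (pass_peephole2);
*/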
3882
3883 namespace {
3884
3885 const pass_data pass_data_split_all_insns =
3886 {
3887 RTL_PASS, /* type */
3888 "split1", /* name */
3889 OPTGROUP_NONE, /* optinfo_flags */
3890 TV_NONE, /* tv_id */
3891 0, /* properties_required */
3892 0, /* properties_provided */
3893 0, /* properties_destroyed */
3894 0, /* todo_flags_start */
3895 0, /* todo_flags_finish */
3896 };
3897
3898 class pass_split_all_insns : public rtl_opt_pass
3899 {
3900 public:
3901 pass_split_all_insns (gcc::context *ctxt)
3902 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3903 {}
3904
3905 /* opt_pass methods: */
3906 /* The epiphany backend creates a second instance of this pass, so
3907 we need a clone method. */
3908 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3909 virtual unsigned int execute (function *)
3910 {
3911 split_all_insns ();
3912 return 0;
3913 }
3914
3915 }; // class pass_split_all_insns
3916
3917 } // anon namespace
3918
3919 rtl_opt_pass *
3920 make_pass_split_all_insns (gcc::context *ctxt)
3921 {
3922 return new pass_split_all_insns (ctxt);
3923 }
3924
3925 static unsigned int
3926 rest_of_handle_split_after_reload (void)
3927 {
3928 /* If optimizing, then go ahead and split insns now. */
3929 #ifndef STACK_REGS
3930 if (optimize > 0)
3931 #endif
3932 split_all_insns ();
3933 return 0;
3934 }
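/* Note added for clarity, not in the original source: on STACK_REGS targets
   the #ifndef above removes the "if (optimize > 0)" guard, so insns are
   split after reload even at -O0; on other targets the split is done only
   when optimizing.  */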
3935
3936 namespace {
3937
3938 const pass_data pass_data_split_after_reload =
3939 {
3940 RTL_PASS, /* type */
3941 "split2", /* name */
3942 OPTGROUP_NONE, /* optinfo_flags */
3943 TV_NONE, /* tv_id */
3944 0, /* properties_required */
3945 0, /* properties_provided */
3946 0, /* properties_destroyed */
3947 0, /* todo_flags_start */
3948 0, /* todo_flags_finish */
3949 };
3950
3951 class pass_split_after_reload : public rtl_opt_pass
3952 {
3953 public:
3954 pass_split_after_reload (gcc::context *ctxt)
3955 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3956 {}
3957
3958 /* opt_pass methods: */
3959 virtual unsigned int execute (function *)
3960 {
3961 return rest_of_handle_split_after_reload ();
3962 }
3963
3964 }; // class pass_split_after_reload
3965
3966 } // anon namespace
3967
3968 rtl_opt_pass *
3969 make_pass_split_after_reload (gcc::context *ctxt)
3970 {
3971 return new pass_split_after_reload (ctxt);
3972 }
3973
3974 namespace {
3975
3976 const pass_data pass_data_split_before_regstack =
3977 {
3978 RTL_PASS, /* type */
3979 "split3", /* name */
3980 OPTGROUP_NONE, /* optinfo_flags */
3981 TV_NONE, /* tv_id */
3982 0, /* properties_required */
3983 0, /* properties_provided */
3984 0, /* properties_destroyed */
3985 0, /* todo_flags_start */
3986 0, /* todo_flags_finish */
3987 };
3988
3989 class pass_split_before_regstack : public rtl_opt_pass
3990 {
3991 public:
3992 pass_split_before_regstack (gcc::context *ctxt)
3993 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3994 {}
3995
3996 /* opt_pass methods: */
3997 virtual bool gate (function *);
3998 virtual unsigned int execute (function *)
3999 {
4000 split_all_insns ();
4001 return 0;
4002 }
4003
4004 }; // class pass_split_before_regstack
4005
4006 bool
4007 pass_split_before_regstack::gate (function *)
4008 {
4009 #if HAVE_ATTR_length && defined (STACK_REGS)
4010 /* If flow2 creates new instructions which need splitting,
4011 and scheduling after reload is not done, they might not be
4012 split until final, which does not allow splitting
4013 when HAVE_ATTR_length is defined. */
4014 # ifdef INSN_SCHEDULING
4015 return (optimize && !flag_schedule_insns_after_reload);
4016 # else
4017 return (optimize);
4018 # endif
4019 #else
4020 return 0;
4021 #endif
4022 }
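/* Clarifying note, not part of the original source: this gate is the
   STACK_REGS counterpart of the pass_split_before_sched2 gate further
   below.  Under the assumptions stated there (optimizing, HAVE_ATTR_length,
   STACK_REGS), one of the two split passes is enabled depending on
   flag_schedule_insns_after_reload, so insns that need splitting are
   handled before final either way.  */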
4023
4024 } // anon namespace
4025
4026 rtl_opt_pass *
4027 make_pass_split_before_regstack (gcc::context *ctxt)
4028 {
4029 return new pass_split_before_regstack (ctxt);
4030 }
4031
4032 static unsigned int
4033 rest_of_handle_split_before_sched2 (void)
4034 {
4035 #ifdef INSN_SCHEDULING
4036 split_all_insns ();
4037 #endif
4038 return 0;
4039 }
4040
4041 namespace {
4042
4043 const pass_data pass_data_split_before_sched2 =
4044 {
4045 RTL_PASS, /* type */
4046 "split4", /* name */
4047 OPTGROUP_NONE, /* optinfo_flags */
4048 TV_NONE, /* tv_id */
4049 0, /* properties_required */
4050 0, /* properties_provided */
4051 0, /* properties_destroyed */
4052 0, /* todo_flags_start */
4053 0, /* todo_flags_finish */
4054 };
4055
4056 class pass_split_before_sched2 : public rtl_opt_pass
4057 {
4058 public:
4059 pass_split_before_sched2 (gcc::context *ctxt)
4060 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4061 {}
4062
4063 /* opt_pass methods: */
4064 virtual bool gate (function *)
4065 {
4066 #ifdef INSN_SCHEDULING
4067 return optimize > 0 && flag_schedule_insns_after_reload;
4068 #else
4069 return false;
4070 #endif
4071 }
4072
4073 virtual unsigned int execute (function *)
4074 {
4075 return rest_of_handle_split_before_sched2 ();
4076 }
4077
4078 }; // class pass_split_before_sched2
4079
4080 } // anon namespace
4081
4082 rtl_opt_pass *
4083 make_pass_split_before_sched2 (gcc::context *ctxt)
4084 {
4085 return new pass_split_before_sched2 (ctxt);
4086 }
4087
4088 namespace {
4089
4090 const pass_data pass_data_split_for_shorten_branches =
4091 {
4092 RTL_PASS, /* type */
4093 "split5", /* name */
4094 OPTGROUP_NONE, /* optinfo_flags */
4095 TV_NONE, /* tv_id */
4096 0, /* properties_required */
4097 0, /* properties_provided */
4098 0, /* properties_destroyed */
4099 0, /* todo_flags_start */
4100 0, /* todo_flags_finish */
4101 };
4102
4103 class pass_split_for_shorten_branches : public rtl_opt_pass
4104 {
4105 public:
4106 pass_split_for_shorten_branches (gcc::context *ctxt)
4107 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4108 {}
4109
4110 /* opt_pass methods: */
4111 virtual bool gate (function *)
4112 {
4113 /* The placement of the splitting that we do for shorten_branches
4114 depends on whether regstack is used by the target or not. */
4115 #if HAVE_ATTR_length && !defined (STACK_REGS)
4116 return true;
4117 #else
4118 return false;
4119 #endif
4120 }
4121
4122 virtual unsigned int execute (function *)
4123 {
4124 return split_all_insns_noflow ();
4125 }
4126
4127 }; // class pass_split_for_shorten_branches
4128
4129 } // anon namespace
4130
4131 rtl_opt_pass *
4132 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4133 {
4134 return new pass_split_for_shorten_branches (ctxt);
4135 }
4136
4137 /* (Re)initialize the target information after a change in target. */
4138
4139 void
4140 recog_init ()
4141 {
4142 /* The information is zero-initialized, so we don't need to do anything
4143 first time round. */
4144 if (!this_target_recog->x_initialized)
4145 {
4146 this_target_recog->x_initialized = true;
4147 return;
4148 }
4149 memset (this_target_recog->x_bool_attr_masks, 0,
4150 sizeof (this_target_recog->x_bool_attr_masks));
4151 for (int i = 0; i < LAST_INSN_CODE; ++i)
4152 if (this_target_recog->x_op_alt[i])
4153 {
4154 free (this_target_recog->x_op_alt[i]);
4155 this_target_recog->x_op_alt[i] = 0;
4156 }
4157 }
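/* Illustrative note, not part of the original source: the tables cleared
   above are rebuilt lazily rather than here.  For instance, the per-insn
   operand-alternative data freed from x_op_alt is recomputed the next time
   the constraint preprocessing code (preprocess_insn_constraints) is asked
   about that insn code for the new target, so recog_init only has to
   invalidate the caches, never repopulate them.  */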