/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

\f
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 with the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 with the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
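
/* Usage sketch (illustrative, not part of the original sources; INSN, A
   and B are hypothetical values supplied by a caller).  A caller that
   wants several replacements to succeed or fail atomically queues them
   with IN_GROUP nonzero and then validates the whole group at once:

     rtx set = single_set (insn);
     validate_change (insn, &SET_SRC (set), a, 1);   // queued, not yet checked
     validate_change (insn, &SET_DEST (set), b, 1);  // queued, not yet checked
     if (!apply_change_group ())
       ;  // both changes have been backed out; INSN is unmodified
*/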


/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise
     if this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
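
/* Usage sketch (illustrative, not part of the original sources): passes
   that may want to back out only part of a group call verify_changes,
   confirm_change_group and cancel_changes directly rather than going
   through apply_change_group.  NUM is the hypothetical count saved
   before queueing further changes:

     int num = num_validated_changes ();
     // ... queue more changes with validate_change (..., 1) ...
     if (verify_changes (num))
       confirm_change_group ();
     else
       cancel_changes (num);  // retract only changes numbered NUM and up
*/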

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv 0
#define CODE_FOR_extv CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv 0
#define CODE_FOR_extzv CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
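
/* Usage sketch (illustrative, not part of the original sources; INSN,
   OLD_REG and NEW_REG are hypothetical): replacing one pseudo with
   another throughout an insn, keeping the insn only if it still
   recognizes:

     if (!validate_replace_rtx (old_reg, new_reg, insn))
       ;  // either nothing was replaced, or the result failed to
          // re-recognize; every queued change has been cancelled
*/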

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;    /* Old RTX */
  rtx to;      /* New RTX */
  rtx insn;    /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insn with invalid addresses which is made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
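
/* Usage sketch (illustrative, not part of the original sources): these
   predicates are referenced by name from match_operand expressions in a
   target's .md file; a hypothetical move pattern might look like

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "register_operand" "=r")
             (match_operand:SI 1 "general_operand" "rmi"))]
       ...)

   and the generated recognizer then makes calls such as
   general_operand (operands[1], SImode) when matching an insn.  */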
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif

/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
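
/* Illustrative sketch (not part of the original sources; P stands for
   the target's Pmode): on a target where the stack grows downward and
   STACK_PUSH_CODE is PRE_DEC, a push of one SImode word matches this
   predicate as

     (mem:SI (pre_dec:P (reg:P sp)))

   while a PUSH_ROUNDING target that rounds the 4-byte size up to 8
   instead uses

     (mem:SI (pre_modify:P (reg:P sp)
                           (plus:P (reg:P sp) (const_int -8)))).  */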

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
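
/* Illustrative sketch (not part of the original sources): for a
   hypothetical two-output, one-input asm such as

     asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs whose sources share a single
   ASM_OPERANDS, followed by any clobbers, and asm_noperands returns
   1 input + 0 labels + 2 sets = 3.  */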

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;        /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

#ifdef AUTO_INC_DEC
        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
#endif
        default:
          cn = lookup_constraint (constraint);
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              result = result || memory_operand (op, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
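
/* Usage sketch (illustrative, not part of the original sources):
   check_asm_operands above drives this routine, but a direct call is
   also possible.  For a hypothetical operand OP tested against the
   two-alternative constraint string "r,m":

     int ok = asm_operand_ok (op, "r,m", NULL);
     // ok > 0: some alternative accepts OP; ok == 0: definitely bad;
     // ok < 0: inconclusive, e.g. a matching-digit constraint with no
     // CONSTRAINTS array to resolve it against.  */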
\f
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
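
/* Illustrative sketch (not part of the original sources): for an
   address such as

     addr = (plus (reg) (const_int 4))

   find_constant_term_loc (&addr) returns &XEXP (addr, 1), the location
   holding the (const_int 4).  offsettable_address_addr_space_p below
   uses this to temporarily bump the constant term and re-test the
   address.  */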
1915 \f
1916 /* Return 1 if OP is a memory reference
1917 whose address contains no side effects
1918 and remains valid after the addition
1919 of a positive integer less than the
1920 size of the object being referenced.
1921
1922 We assume that the original address is valid and do not check it.
1923
1924 This uses strict_memory_address_p as a subroutine, so
1925 don't use it before reload. */
1926
1927 int
1928 offsettable_memref_p (rtx op)
1929 {
1930 return ((MEM_P (op))
1931 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1932 MEM_ADDR_SPACE (op)));
1933 }
1934
1935 /* Similar, but don't require a strictly valid mem ref:
1936 consider pseudo-regs valid as index or base regs. */
1937
1938 int
1939 offsettable_nonstrict_memref_p (rtx op)
1940 {
1941 return ((MEM_P (op))
1942 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1943 MEM_ADDR_SPACE (op)));
1944 }
1945
1946 /* Return 1 if Y is a memory address which contains no side effects
1947 and would remain valid for address space AS after the addition of
1948 a positive integer less than the size of that mode.
1949
1950 We assume that the original address is valid and do not check it.
1951 We do check that it is valid for narrower modes.
1952
1953 If STRICTP is nonzero, we require a strictly valid address,
1954 for the sake of use in reload.c. */
1955
1956 int
1957 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1958 addr_space_t as)
1959 {
1960 enum rtx_code ycode = GET_CODE (y);
1961 rtx z;
1962 rtx y1 = y;
1963 rtx *y2;
1964 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1965 (strictp ? strict_memory_address_addr_space_p
1966 : memory_address_addr_space_p);
1967 unsigned int mode_sz = GET_MODE_SIZE (mode);
1968
1969 if (CONSTANT_ADDRESS_P (y))
1970 return 1;
1971
1972 /* Adjusting an offsettable address involves changing to a narrower mode.
1973 Make sure that's OK. */
1974
1975 if (mode_dependent_address_p (y, as))
1976 return 0;
1977
1978 enum machine_mode address_mode = GET_MODE (y);
1979 if (address_mode == VOIDmode)
1980 address_mode = targetm.addr_space.address_mode (as);
1981 #ifdef POINTERS_EXTEND_UNSIGNED
1982 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1983 #endif
1984
1985 /* ??? How much offset does an offsettable BLKmode reference need?
1986 Clearly that depends on the situation in which it's being used.
1987 However, the current situation in which we test 0xffffffff is
1988 less than ideal. Caveat user. */
1989 if (mode_sz == 0)
1990 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1991
1992 /* If the expression contains a constant term,
1993 see if it remains valid when max possible offset is added. */
1994
1995 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1996 {
1997 int good;
1998
1999 y1 = *y2;
2000 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2001 /* Use QImode because an odd displacement may be automatically invalid
2002 for any wider mode. But it should be valid for a single byte. */
2003 good = (*addressp) (QImode, y, as);
2004
2005 /* In any case, restore old contents of memory. */
2006 *y2 = y1;
2007 return good;
2008 }
2009
2010 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2011 return 0;
2012
2013 /* The offset added here is chosen as the maximum offset that
2014 any instruction could need to add when operating on something
2015 of the specified mode. We assume that if Y and Y+c are
2016 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2017 go inside a LO_SUM here, so we do so as well. */
2018 if (GET_CODE (y) == LO_SUM
2019 && mode != BLKmode
2020 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2021 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2022 plus_constant (address_mode, XEXP (y, 1),
2023 mode_sz - 1));
2024 #ifdef POINTERS_EXTEND_UNSIGNED
2025 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2026 else if (POINTERS_EXTEND_UNSIGNED > 0
2027 && GET_CODE (y) == ZERO_EXTEND
2028 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2029 z = gen_rtx_ZERO_EXTEND (address_mode,
2030 plus_constant (pointer_mode, XEXP (y, 0),
2031 mode_sz - 1));
2032 #endif
2033 else
2034 z = plus_constant (address_mode, y, mode_sz - 1);
2035
2036 /* Use QImode because an odd displacement may be automatically invalid
2037 for any wider mode. But it should be valid for a single byte. */
2038 return (*addressp) (QImode, z, as);
2039 }
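/* Editorial worked example (an illustrative sketch; R_REG is a
   hypothetical base-register rtx): for a 4-byte SImode access at
   (plus (reg R) (const_int 8)), the code above rewrites the constant
   term to 8 + (4 - 1) = 11 and asks the target whether
   (plus (reg R) (const_int 11)) is still a valid QImode address; if
   so, every byte of the access is individually addressable:

     rtx addr = plus_constant (Pmode, r_reg, 8);
     int ok = offsettable_address_addr_space_p (1, SImode, addr,
                                                ADDR_SPACE_GENERIC);
*/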
2040
2041 /* Return true if ADDR is an address-expression whose effect depends
2042 on the mode of the memory reference it is used in.
2043
2044 ADDRSPACE is the address space associated with the address.
2045
2046 Autoincrement addressing is a typical example of mode-dependence
2047 because the amount of the increment depends on the mode. */
2048
2049 bool
2050 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2051 {
2052 /* Auto-increment addressing with anything other than post_modify
2053 or pre_modify always introduces a mode dependency. Catch such
2054 cases now instead of deferring to the target. */
2055 if (GET_CODE (addr) == PRE_INC
2056 || GET_CODE (addr) == POST_INC
2057 || GET_CODE (addr) == PRE_DEC
2058 || GET_CODE (addr) == POST_DEC)
2059 return true;
2060
2061 return targetm.mode_dependent_address_p (addr, addrspace);
2062 }
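/* Editorial example: (post_inc (reg)) is always mode-dependent,
   because the step it applies equals the size of the accessed mode,
   so the same address rtx means different things in QImode and in
   SImode. A minimal check, using the stack pointer for concreteness:

     rtx addr = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
     gcc_assert (mode_dependent_address_p (addr, ADDR_SPACE_GENERIC));
*/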
2063 \f
2064 /* Return true if boolean attribute ATTR is supported. */
2065
2066 static bool
2067 have_bool_attr (bool_attr attr)
2068 {
2069 switch (attr)
2070 {
2071 case BA_ENABLED:
2072 return HAVE_ATTR_enabled;
2073 case BA_PREFERRED_FOR_SIZE:
2074 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2075 case BA_PREFERRED_FOR_SPEED:
2076 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2077 }
2078 gcc_unreachable ();
2079 }
2080
2081 /* Return the value of ATTR for instruction INSN. */
2082
2083 static bool
2084 get_bool_attr (rtx_insn *insn, bool_attr attr)
2085 {
2086 switch (attr)
2087 {
2088 case BA_ENABLED:
2089 return get_attr_enabled (insn);
2090 case BA_PREFERRED_FOR_SIZE:
2091 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2092 case BA_PREFERRED_FOR_SPEED:
2093 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2094 }
2095 gcc_unreachable ();
2096 }
2097
2098 /* Like get_bool_attr_mask, but don't use the cache. */
2099
2100 static alternative_mask
2101 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2102 {
2103 /* Temporarily install enough information for get_attr_<foo> to assume
2104 that the insn operands are already cached. The attribute mustn't
2105 depend on the values of the operands, so we don't provide their
2106 real values here. */
2107 rtx old_insn = recog_data.insn;
2108 int old_alternative = which_alternative;
2109
2110 recog_data.insn = insn;
2111 alternative_mask mask = ALL_ALTERNATIVES;
2112 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2113 for (int i = 0; i < n_alternatives; i++)
2114 {
2115 which_alternative = i;
2116 if (!get_bool_attr (insn, attr))
2117 mask &= ~ALTERNATIVE_BIT (i);
2118 }
2119
2120 recog_data.insn = old_insn;
2121 which_alternative = old_alternative;
2122 return mask;
2123 }
2124
2125 /* Return the mask of operand alternatives that are allowed for INSN
2126 by boolean attribute ATTR. This mask depends only on INSN and on
2127 the current target; it does not depend on things like the values of
2128 operands. */
2129
2130 static alternative_mask
2131 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2132 {
2133 /* Quick exit for asms and for targets that don't use these attributes. */
2134 int code = INSN_CODE (insn);
2135 if (code < 0 || !have_bool_attr (attr))
2136 return ALL_ALTERNATIVES;
2137
2138 /* Calling get_attr_<foo> can be expensive, so cache the mask
2139 for speed. */
2140 if (!this_target_recog->x_bool_attr_masks[code][attr])
2141 this_target_recog->x_bool_attr_masks[code][attr]
2142 = get_bool_attr_mask_uncached (insn, attr);
2143 return this_target_recog->x_bool_attr_masks[code][attr];
2144 }
2145
2146 /* Return the set of alternatives of INSN that are allowed by the current
2147 target. */
2148
2149 alternative_mask
2150 get_enabled_alternatives (rtx_insn *insn)
2151 {
2152 return get_bool_attr_mask (insn, BA_ENABLED);
2153 }
2154
2155 /* Return the set of alternatives of INSN that are allowed by the current
2156 target and are preferred for the current size/speed optimization
2157 choice. */
2158
2159 alternative_mask
2160 get_preferred_alternatives (rtx_insn *insn)
2161 {
2162 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2163 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2164 else
2165 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2166 }
2167
2168 /* Return the set of alternatives of INSN that are allowed by the current
2169 target and are preferred for the size/speed optimization choice
2170 associated with BB. Passing a separate BB is useful if INSN has not
2171 been emitted yet or if we are considering moving it to a different
2172 block. */
2173
2174 alternative_mask
2175 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2176 {
2177 if (optimize_bb_for_speed_p (bb))
2178 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2179 else
2180 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2181 }
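/* Editorial usage sketch (ALT is a hypothetical alternative index):
   consumers of these masks typically test a single bit, e.g.

     alternative_mask preferred = get_preferred_alternatives (insn);
     if (TEST_BIT (preferred, alt))
       ...the alternative may be used for this size/speed choice...
*/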
2182
2183 /* Assert that the cached boolean attributes for INSN are still accurate.
2184 The backend is required to define these attributes in a way that only
2185 depends on the current target (rather than operands, compiler phase,
2186 etc.). */
2187
2188 bool
2189 check_bool_attrs (rtx_insn *insn)
2190 {
2191 int code = INSN_CODE (insn);
2192 if (code >= 0)
2193 for (int i = 0; i <= BA_LAST; ++i)
2194 {
2195 enum bool_attr attr = (enum bool_attr) i;
2196 if (this_target_recog->x_bool_attr_masks[code][attr])
2197 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2198 == get_bool_attr_mask_uncached (insn, attr));
2199 }
2200 return true;
2201 }
2202
2203 /* Like extract_insn, but record the extracted insn and don't extract again
2204 when called again for the same insn, expecting that recog_data still
2205 contains the valid information. This is used primarily by the gen_attr
2206 infrastructure, which often extracts the same insn again and again. */
2207 void
2208 extract_insn_cached (rtx_insn *insn)
2209 {
2210 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2211 return;
2212 extract_insn (insn);
2213 recog_data.insn = insn;
2214 }
2215
2216 /* Do uncached extract_insn, constrain_operands and complain about failures.
2217 This should be used when extracting a pre-existing constrained instruction
2218 if the caller wants to know which alternative was chosen. */
2219 void
2220 extract_constrain_insn (rtx_insn *insn)
2221 {
2222 extract_insn (insn);
2223 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2224 fatal_insn_not_found (insn);
2225 }
2226
2227 /* Do cached extract_insn, constrain_operands and complain about failures.
2228 Used by insn_attrtab. */
2229 void
2230 extract_constrain_insn_cached (rtx_insn *insn)
2231 {
2232 extract_insn_cached (insn);
2233 if (which_alternative == -1
2234 && !constrain_operands (reload_completed,
2235 get_enabled_alternatives (insn)))
2236 fatal_insn_not_found (insn);
2237 }
2238
2239 /* Do cached constrain_operands on INSN and complain about failures. */
2240 int
2241 constrain_operands_cached (rtx_insn *insn, int strict)
2242 {
2243 if (which_alternative == -1)
2244 return constrain_operands (strict, get_enabled_alternatives (insn));
2245 else
2246 return 1;
2247 }
2248 \f
2249 /* Analyze INSN and fill in recog_data. */
2250
2251 void
2252 extract_insn (rtx_insn *insn)
2253 {
2254 int i;
2255 int icode;
2256 int noperands;
2257 rtx body = PATTERN (insn);
2258
2259 recog_data.n_operands = 0;
2260 recog_data.n_alternatives = 0;
2261 recog_data.n_dups = 0;
2262 recog_data.is_asm = false;
2263
2264 switch (GET_CODE (body))
2265 {
2266 case USE:
2267 case CLOBBER:
2268 case ASM_INPUT:
2269 case ADDR_VEC:
2270 case ADDR_DIFF_VEC:
2271 case VAR_LOCATION:
2272 return;
2273
2274 case SET:
2275 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2276 goto asm_insn;
2277 else
2278 goto normal_insn;
2279 case PARALLEL:
2280 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2281 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2282 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2283 goto asm_insn;
2284 else
2285 goto normal_insn;
2286 case ASM_OPERANDS:
2287 asm_insn:
2288 recog_data.n_operands = noperands = asm_noperands (body);
2289 if (noperands >= 0)
2290 {
2291 /* This insn is an `asm' with operands. */
2292
2293 /* expand_asm_operands makes sure there aren't too many operands. */
2294 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2295
2296 /* Now get the operand values and constraints out of the insn. */
2297 decode_asm_operands (body, recog_data.operand,
2298 recog_data.operand_loc,
2299 recog_data.constraints,
2300 recog_data.operand_mode, NULL);
2301 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2302 if (noperands > 0)
2303 {
2304 const char *p = recog_data.constraints[0];
2305 recog_data.n_alternatives = 1;
2306 while (*p)
2307 recog_data.n_alternatives += (*p++ == ',');
2308 }
2309 recog_data.is_asm = true;
2310 break;
2311 }
2312 fatal_insn_not_found (insn);
2313
2314 default:
2315 normal_insn:
2316 /* Ordinary insn: recognize it, get the operands via insn_extract
2317 and get the constraints. */
2318
2319 icode = recog_memoized (insn);
2320 if (icode < 0)
2321 fatal_insn_not_found (insn);
2322
2323 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2324 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2325 recog_data.n_dups = insn_data[icode].n_dups;
2326
2327 insn_extract (insn);
2328
2329 for (i = 0; i < noperands; i++)
2330 {
2331 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2332 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2333 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2334 /* A VOIDmode match_operand gets its mode from its real operand. */
2335 if (recog_data.operand_mode[i] == VOIDmode)
2336 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2337 }
2338 }
2339 for (i = 0; i < noperands; i++)
2340 recog_data.operand_type[i]
2341 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2342 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2343 : OP_IN);
2344
2345 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2346
2347 recog_data.insn = NULL;
2348 which_alternative = -1;
2349 }
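/* Editorial sketch of the usual calling pattern (PROCESS_OPERAND is a
   hypothetical callback; any pass that walks operands looks roughly
   like this):

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       process_operand (recog_data.operand[i],
                        recog_data.operand_type[i],
                        recog_data.operand_mode[i]);
*/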
2350
2351 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2352 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2353 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2354 has N_OPERANDS entries. */
2355
2356 void
2357 preprocess_constraints (int n_operands, int n_alternatives,
2358 const char **constraints,
2359 operand_alternative *op_alt_base)
2360 {
2361 for (int i = 0; i < n_operands; i++)
2362 {
2363 int j;
2364 struct operand_alternative *op_alt;
2365 const char *p = constraints[i];
2366
2367 op_alt = op_alt_base;
2368
2369 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2370 {
2371 op_alt[i].cl = NO_REGS;
2372 op_alt[i].constraint = p;
2373 op_alt[i].matches = -1;
2374 op_alt[i].matched = -1;
2375
2376 if (*p == '\0' || *p == ',')
2377 {
2378 op_alt[i].anything_ok = 1;
2379 continue;
2380 }
2381
2382 for (;;)
2383 {
2384 char c = *p;
2385 if (c == '#')
2386 do
2387 c = *++p;
2388 while (c != ',' && c != '\0');
2389 if (c == ',' || c == '\0')
2390 {
2391 p++;
2392 break;
2393 }
2394
2395 switch (c)
2396 {
2397 case '?':
2398 op_alt[i].reject += 6;
2399 break;
2400 case '!':
2401 op_alt[i].reject += 600;
2402 break;
2403 case '&':
2404 op_alt[i].earlyclobber = 1;
2405 break;
2406
2407 case '0': case '1': case '2': case '3': case '4':
2408 case '5': case '6': case '7': case '8': case '9':
2409 {
2410 char *end;
2411 op_alt[i].matches = strtoul (p, &end, 10);
2412 op_alt[op_alt[i].matches].matched = i;
2413 p = end;
2414 }
2415 continue;
2416
2417 case 'X':
2418 op_alt[i].anything_ok = 1;
2419 break;
2420
2421 case 'g':
2422 op_alt[i].cl =
2423 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2424 break;
2425
2426 default:
2427 enum constraint_num cn = lookup_constraint (p);
2428 enum reg_class cl;
2429 switch (get_constraint_type (cn))
2430 {
2431 case CT_REGISTER:
2432 cl = reg_class_for_constraint (cn);
2433 if (cl != NO_REGS)
2434 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2435 break;
2436
2437 case CT_CONST_INT:
2438 break;
2439
2440 case CT_MEMORY:
2441 op_alt[i].memory_ok = 1;
2442 break;
2443
2444 case CT_ADDRESS:
2445 op_alt[i].is_address = 1;
2446 op_alt[i].cl
2447 = (reg_class_subunion
2448 [(int) op_alt[i].cl]
2449 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2450 ADDRESS, SCRATCH)]);
2451 break;
2452
2453 case CT_FIXED_FORM:
2454 break;
2455 }
2456 break;
2457 }
2458 p += CONSTRAINT_LEN (c, p);
2459 }
2460 }
2461 }
2462 }
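/* Editorial worked example (constraint text is illustrative): for a
   two-operand insn with constraints "r,m" and "r,r" (two
   alternatives), the alternative-major layout above produces:

     op_alt_base[0]  alt 0, operand 0:  cl = GENERAL_REGS
     op_alt_base[1]  alt 0, operand 1:  cl = GENERAL_REGS
     op_alt_base[2]  alt 1, operand 0:  memory_ok = 1
     op_alt_base[3]  alt 1, operand 1:  cl = GENERAL_REGS

   i.e. the entry for operand OP in alternative A lives at index
   A * n_operands + OP. */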
2463
2464 /* Return an array of operand_alternative structures for
2465 instruction ICODE. */
2466
2467 const operand_alternative *
2468 preprocess_insn_constraints (int icode)
2469 {
2470 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2471 if (this_target_recog->x_op_alt[icode])
2472 return this_target_recog->x_op_alt[icode];
2473
2474 int n_operands = insn_data[icode].n_operands;
2475 if (n_operands == 0)
2476 return 0;
2477 /* Always provide at least one alternative so that which_op_alt ()
2478 works correctly. If the instruction has 0 alternatives (i.e. all
2479 constraint strings are empty) then each operand in this alternative
2480 will have anything_ok set. */
2481 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2482 int n_entries = n_operands * n_alternatives;
2483
2484 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2485 const char **constraints = XALLOCAVEC (const char *, n_operands);
2486
2487 for (int i = 0; i < n_operands; ++i)
2488 constraints[i] = insn_data[icode].operand[i].constraint;
2489 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2490
2491 this_target_recog->x_op_alt[icode] = op_alt;
2492 return op_alt;
2493 }
2494
2495 /* After calling extract_insn, you can use this function to extract some
2496 information from the constraint strings into a more usable form.
2497 The collected data is stored in recog_op_alt. */
2498
2499 void
2500 preprocess_constraints (rtx insn)
2501 {
2502 int icode = INSN_CODE (insn);
2503 if (icode >= 0)
2504 recog_op_alt = preprocess_insn_constraints (icode);
2505 else
2506 {
2507 int n_operands = recog_data.n_operands;
2508 int n_alternatives = recog_data.n_alternatives;
2509 int n_entries = n_operands * n_alternatives;
2510 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2511 preprocess_constraints (n_operands, n_alternatives,
2512 recog_data.constraints, asm_op_alt);
2513 recog_op_alt = asm_op_alt;
2514 }
2515 }
2516
2517 /* Check the operands of an insn against the insn's operand constraints
2518 and return 1 if they match any of the alternatives in ALTERNATIVES.
2519
2520 The information about the insn's operands, constraints, operand modes
2521 etc. is obtained from the global variables set up by extract_insn.
2522
2523 WHICH_ALTERNATIVE is set to a number which indicates which
2524 alternative of constraints was matched: 0 for the first alternative,
2525 1 for the next, etc.
2526
2527 In addition, when two operands are required to match
2528 and it happens that the output operand is (reg) while the
2529 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2530 make the output operand look like the input.
2531 This is because the output operand is the one the template will print.
2532
2533 This is used in final, just before printing the assembler code and by
2534 the routines that determine an insn's attribute.
2535
2536 If STRICT is positive, it means that we have been
2537 called after reload has been completed. In that case, we must
2538 do all checks strictly. If it is zero, it means that we have been called
2539 before reload has completed. In that case, we first try to see if we can
2540 find an alternative that matches strictly. If not, we try again, this
2541 time assuming that reload will fix up the insn. This provides a "best
2542 guess" for the alternative and is used to compute attributes of insns prior
2543 to reload. A negative value of STRICT is used for this internal call. */
2544
2545 struct funny_match
2546 {
2547 int this_op, other;
2548 };
2549
2550 int
2551 constrain_operands (int strict, alternative_mask alternatives)
2552 {
2553 const char *constraints[MAX_RECOG_OPERANDS];
2554 int matching_operands[MAX_RECOG_OPERANDS];
2555 int earlyclobber[MAX_RECOG_OPERANDS];
2556 int c;
2557
2558 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2559 int funny_match_index;
2560
2561 which_alternative = 0;
2562 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2563 return 1;
2564
2565 for (c = 0; c < recog_data.n_operands; c++)
2566 {
2567 constraints[c] = recog_data.constraints[c];
2568 matching_operands[c] = -1;
2569 }
2570
2571 do
2572 {
2573 int seen_earlyclobber_at = -1;
2574 int opno;
2575 int lose = 0;
2576 funny_match_index = 0;
2577
2578 if (!TEST_BIT (alternatives, which_alternative))
2579 {
2580 int i;
2581
2582 for (i = 0; i < recog_data.n_operands; i++)
2583 constraints[i] = skip_alternative (constraints[i]);
2584
2585 which_alternative++;
2586 continue;
2587 }
2588
2589 for (opno = 0; opno < recog_data.n_operands; opno++)
2590 {
2591 rtx op = recog_data.operand[opno];
2592 enum machine_mode mode = GET_MODE (op);
2593 const char *p = constraints[opno];
2594 int offset = 0;
2595 int win = 0;
2596 int val;
2597 int len;
2598
2599 earlyclobber[opno] = 0;
2600
2601 /* A unary operator may be accepted by the predicate, but it
2602 is irrelevant for matching constraints. */
2603 if (UNARY_P (op))
2604 op = XEXP (op, 0);
2605
2606 if (GET_CODE (op) == SUBREG)
2607 {
2608 if (REG_P (SUBREG_REG (op))
2609 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2610 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2611 GET_MODE (SUBREG_REG (op)),
2612 SUBREG_BYTE (op),
2613 GET_MODE (op));
2614 op = SUBREG_REG (op);
2615 }
2616
2617 /* An empty constraint or empty alternative
2618 allows anything which matched the pattern. */
2619 if (*p == 0 || *p == ',')
2620 win = 1;
2621
2622 do
2623 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2624 {
2625 case '\0':
2626 len = 0;
2627 break;
2628 case ',':
2629 c = '\0';
2630 break;
2631
2632 case '#':
2633 /* Ignore rest of this alternative as far as
2634 constraint checking is concerned. */
2635 do
2636 p++;
2637 while (*p && *p != ',');
2638 len = 0;
2639 break;
2640
2641 case '&':
2642 earlyclobber[opno] = 1;
2643 if (seen_earlyclobber_at < 0)
2644 seen_earlyclobber_at = opno;
2645 break;
2646
2647 case '0': case '1': case '2': case '3': case '4':
2648 case '5': case '6': case '7': case '8': case '9':
2649 {
2650 /* This operand must be the same as a previous one.
2651 This kind of constraint is used for instructions such
2652 as add when they take only two operands.
2653
2654 Note that the lower-numbered operand is passed first.
2655
2656 If we are not testing strictly, assume that this
2657 constraint will be satisfied. */
2658
2659 char *end;
2660 int match;
2661
2662 match = strtoul (p, &end, 10);
2663 p = end;
2664
2665 if (strict < 0)
2666 val = 1;
2667 else
2668 {
2669 rtx op1 = recog_data.operand[match];
2670 rtx op2 = recog_data.operand[opno];
2671
2672 /* A unary operator may be accepted by the predicate,
2673 but it is irrelevant for matching constraints. */
2674 if (UNARY_P (op1))
2675 op1 = XEXP (op1, 0);
2676 if (UNARY_P (op2))
2677 op2 = XEXP (op2, 0);
2678
2679 val = operands_match_p (op1, op2);
2680 }
2681
2682 matching_operands[opno] = match;
2683 matching_operands[match] = opno;
2684
2685 if (val != 0)
2686 win = 1;
2687
2688 /* If output is *x and input is *--x, arrange later
2689 to change the output to *--x as well, since the
2690 output op is the one that will be printed. */
2691 if (val == 2 && strict > 0)
2692 {
2693 funny_match[funny_match_index].this_op = opno;
2694 funny_match[funny_match_index++].other = match;
2695 }
2696 }
2697 len = 0;
2698 break;
2699
2700 case 'p':
2701 /* p is used for address_operands. When we are called by
2702 gen_reload, no one will have checked that the address is
2703 strictly valid, i.e., that all pseudos requiring hard regs
2704 have gotten them. */
2705 if (strict <= 0
2706 || (strict_memory_address_p (recog_data.operand_mode[opno],
2707 op)))
2708 win = 1;
2709 break;
2710
2711 /* No need to check general_operand again;
2712 it was done in insn-recog.c. Well, except that reload
2713 doesn't check the validity of its replacements, but
2714 that should only matter when there's a bug. */
2715 case 'g':
2716 /* Anything goes unless it is a REG and really has a hard reg
2717 but the hard reg is not in the class GENERAL_REGS. */
2718 if (REG_P (op))
2719 {
2720 if (strict < 0
2721 || GENERAL_REGS == ALL_REGS
2722 || (reload_in_progress
2723 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2724 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2725 win = 1;
2726 }
2727 else if (strict < 0 || general_operand (op, mode))
2728 win = 1;
2729 break;
2730
2731 default:
2732 {
2733 enum constraint_num cn = lookup_constraint (p);
2734 enum reg_class cl = reg_class_for_constraint (cn);
2735 if (cl != NO_REGS)
2736 {
2737 if (strict < 0
2738 || (strict == 0
2739 && REG_P (op)
2740 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2741 || (strict == 0 && GET_CODE (op) == SCRATCH)
2742 || (REG_P (op)
2743 && reg_fits_class_p (op, cl, offset, mode)))
2744 win = 1;
2745 }
2746
2747 else if (constraint_satisfied_p (op, cn))
2748 win = 1;
2749
2750 else if (insn_extra_memory_constraint (cn)
2751 /* Every memory operand can be reloaded to fit. */
2752 && ((strict < 0 && MEM_P (op))
2753 /* Before reload, accept what reload can turn
2754 into mem. */
2755 || (strict < 0 && CONSTANT_P (op))
2756 /* During reload, accept a pseudo. */
2757 || (reload_in_progress && REG_P (op)
2758 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2759 win = 1;
2760 else if (insn_extra_address_constraint (cn)
2761 /* Every address operand can be reloaded to fit. */
2762 && strict < 0)
2763 win = 1;
2764 /* Cater to architectures like IA-64 that define extra memory
2765 constraints without using define_memory_constraint. */
2766 else if (reload_in_progress
2767 && REG_P (op)
2768 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2769 && reg_renumber[REGNO (op)] < 0
2770 && reg_equiv_mem (REGNO (op)) != 0
2771 && constraint_satisfied_p
2772 (reg_equiv_mem (REGNO (op)), cn))
2773 win = 1;
2774 break;
2775 }
2776 }
2777 while (p += len, c);
2778
2779 constraints[opno] = p;
2780 /* If this operand did not win somehow,
2781 this alternative loses. */
2782 if (! win)
2783 lose = 1;
2784 }
2785 /* This alternative won; the operands are ok.
2786 Change whichever operands this alternative says to change. */
2787 if (! lose)
2788 {
2789 int opno, eopno;
2790
2791 /* See if any earlyclobber operand conflicts with some other
2792 operand. */
2793
2794 if (strict > 0 && seen_earlyclobber_at >= 0)
2795 for (eopno = seen_earlyclobber_at;
2796 eopno < recog_data.n_operands;
2797 eopno++)
2798 /* Ignore earlyclobber operands now in memory,
2799 because we would often report failure when we have
2800 two memory operands, one of which was formerly a REG. */
2801 if (earlyclobber[eopno]
2802 && REG_P (recog_data.operand[eopno]))
2803 for (opno = 0; opno < recog_data.n_operands; opno++)
2804 if ((MEM_P (recog_data.operand[opno])
2805 || recog_data.operand_type[opno] != OP_OUT)
2806 && opno != eopno
2807 /* Ignore things like match_operator operands. */
2808 && *recog_data.constraints[opno] != 0
2809 && ! (matching_operands[opno] == eopno
2810 && operands_match_p (recog_data.operand[opno],
2811 recog_data.operand[eopno]))
2812 && ! safe_from_earlyclobber (recog_data.operand[opno],
2813 recog_data.operand[eopno]))
2814 lose = 1;
2815
2816 if (! lose)
2817 {
2818 while (--funny_match_index >= 0)
2819 {
2820 recog_data.operand[funny_match[funny_match_index].other]
2821 = recog_data.operand[funny_match[funny_match_index].this_op];
2822 }
2823
2824 #ifdef AUTO_INC_DEC
2825 /* For operands without < or > constraints reject side-effects. */
2826 if (recog_data.is_asm)
2827 {
2828 for (opno = 0; opno < recog_data.n_operands; opno++)
2829 if (MEM_P (recog_data.operand[opno]))
2830 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2831 {
2832 case PRE_INC:
2833 case POST_INC:
2834 case PRE_DEC:
2835 case POST_DEC:
2836 case PRE_MODIFY:
2837 case POST_MODIFY:
2838 if (strchr (recog_data.constraints[opno], '<') == NULL
2839 && strchr (recog_data.constraints[opno], '>')
2840 == NULL)
2841 return 0;
2842 break;
2843 default:
2844 break;
2845 }
2846 }
2847 #endif
2848 return 1;
2849 }
2850 }
2851
2852 which_alternative++;
2853 }
2854 while (which_alternative < recog_data.n_alternatives);
2855
2856 which_alternative = -1;
2857 /* If we are about to reject this, but we are not to test strictly,
2858 try a very loose test. Only return failure if it fails also. */
2859 if (strict == 0)
2860 return constrain_operands (-1, alternatives);
2861 else
2862 return 0;
2863 }
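/* Editorial usage sketch, mirroring extract_constrain_insn above:

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
                              get_enabled_alternatives (insn)))
       fatal_insn_not_found (insn);
     gcc_assert (which_alternative >= 0);

   Passing STRICT == 0 before reload gets the built-in retry with
   STRICT == -1, so a "best guess" alternative is still chosen. */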
2864
2865 /* Return true iff OPERAND (assumed to be a REG rtx)
2866 is a hard reg in class CLASS when its regno is offset by OFFSET
2867 and changed to mode MODE.
2868 If REG occupies multiple hard regs, all of them must be in CLASS. */
2869
2870 bool
2871 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2872 enum machine_mode mode)
2873 {
2874 unsigned int regno = REGNO (operand);
2875
2876 if (cl == NO_REGS)
2877 return false;
2878
2879 /* Regno must not be a pseudo register. Offset may be negative. */
2880 return (HARD_REGISTER_NUM_P (regno)
2881 && HARD_REGISTER_NUM_P (regno + offset)
2882 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2883 regno + offset));
2884 }
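/* Editorial example (hard register numbers and word size are
   target-specific and purely illustrative): for a DImode value in
   32-bit hard reg 0,

     reg_fits_class_p (gen_rtx_REG (DImode, 0), GENERAL_REGS, 0, DImode)

   is true only if both hard regs 0 and 1, which the DImode value
   occupies, are members of GENERAL_REGS. */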
2885 \f
2886 /* Split single instruction. Helper function for split_all_insns and
2887 split_all_insns_noflow. Return last insn in the sequence if successful,
2888 or NULL if unsuccessful. */
2889
2890 static rtx
2891 split_insn (rtx_insn *insn)
2892 {
2893 /* Split insns here to get max fine-grain parallelism. */
2894 rtx_insn *first = PREV_INSN (insn);
2895 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2896 rtx insn_set, last_set, note;
2897
2898 if (last == insn)
2899 return NULL_RTX;
2900
2901 /* If the original instruction was a single set that was known to be
2902 equivalent to a constant, see if we can say the same about the last
2903 instruction in the split sequence. The two instructions must set
2904 the same destination. */
2905 insn_set = single_set (insn);
2906 if (insn_set)
2907 {
2908 last_set = single_set (last);
2909 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2910 {
2911 note = find_reg_equal_equiv_note (insn);
2912 if (note && CONSTANT_P (XEXP (note, 0)))
2913 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2914 else if (CONSTANT_P (SET_SRC (insn_set)))
2915 set_unique_reg_note (last, REG_EQUAL,
2916 copy_rtx (SET_SRC (insn_set)));
2917 }
2918 }
2919
2920 /* try_split returns the NOTE that INSN became. */
2921 SET_INSN_DELETED (insn);
2922
2923 /* ??? Coddle to md files that generate subregs in post-reload
2924 splitters instead of computing the proper hard register. */
2925 if (reload_completed && first != last)
2926 {
2927 first = NEXT_INSN (first);
2928 for (;;)
2929 {
2930 if (INSN_P (first))
2931 cleanup_subreg_operands (first);
2932 if (first == last)
2933 break;
2934 first = NEXT_INSN (first);
2935 }
2936 }
2937
2938 return last;
2939 }
2940
2941 /* Split all insns in the function. */
2942
2943 void
2944 split_all_insns (void)
2945 {
2946 sbitmap blocks;
2947 bool changed;
2948 basic_block bb;
2949
2950 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2951 bitmap_clear (blocks);
2952 changed = false;
2953
2954 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2955 {
2956 rtx_insn *insn, *next;
2957 bool finish = false;
2958
2959 rtl_profile_for_bb (bb);
2960 for (insn = BB_HEAD (bb); !finish ; insn = next)
2961 {
2962 /* Can't use `next_real_insn' because that might go across
2963 CODE_LABELS and short-out basic blocks. */
2964 next = NEXT_INSN (insn);
2965 finish = (insn == BB_END (bb));
2966 if (INSN_P (insn))
2967 {
2968 rtx set = single_set (insn);
2969
2970 /* Don't split no-op move insns. These should silently
2971 disappear later in final. Splitting such insns would
2972 break the code that handles LIBCALL blocks. */
2973 if (set && set_noop_p (set))
2974 {
2975 /* Nops get in the way while scheduling, so delete them
2976 now if register allocation has already been done. It
2977 is too risky to try to do this before register
2978 allocation, and there are unlikely to be very many
2979 nops then anyway. */
2980 if (reload_completed)
2981 delete_insn_and_edges (insn);
2982 }
2983 else
2984 {
2985 if (split_insn (insn))
2986 {
2987 bitmap_set_bit (blocks, bb->index);
2988 changed = true;
2989 }
2990 }
2991 }
2992 }
2993 }
2994
2995 default_rtl_profile ();
2996 if (changed)
2997 find_many_sub_basic_blocks (blocks);
2998
2999 #ifdef ENABLE_CHECKING
3000 verify_flow_info ();
3001 #endif
3002
3003 sbitmap_free (blocks);
3004 }
3005
3006 /* Same as split_all_insns, but do not expect CFG to be available.
3007 Used by machine dependent reorg passes. */
3008
3009 unsigned int
3010 split_all_insns_noflow (void)
3011 {
3012 rtx_insn *next, *insn;
3013
3014 for (insn = get_insns (); insn; insn = next)
3015 {
3016 next = NEXT_INSN (insn);
3017 if (INSN_P (insn))
3018 {
3019 /* Don't split no-op move insns. These should silently
3020 disappear later in final. Splitting such insns would
3021 break the code that handles LIBCALL blocks. */
3022 rtx set = single_set (insn);
3023 if (set && set_noop_p (set))
3024 {
3025 /* Nops get in the way while scheduling, so delete them
3026 now if register allocation has already been done. It
3027 is too risky to try to do this before register
3028 allocation, and there are unlikely to be very many
3029 nops then anyway.
3030
3031 ??? Should we use delete_insn when the CFG isn't valid? */
3032 if (reload_completed)
3033 delete_insn_and_edges (insn);
3034 }
3035 else
3036 split_insn (insn);
3037 }
3038 }
3039 return 0;
3040 }
3041 \f
3042 #ifdef HAVE_peephole2
3043 struct peep2_insn_data
3044 {
3045 rtx insn;
3046 regset live_before;
3047 };
3048
3049 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3050 static int peep2_current;
3051
3052 static bool peep2_do_rebuild_jump_labels;
3053 static bool peep2_do_cleanup_cfg;
3054
3055 /* The number of instructions available to match a peep2. */
3056 int peep2_current_count;
3057
3058 /* A non-insn marker indicating the last insn of the block.
3059 The live_before regset for this element is correct, indicating
3060 DF_LIVE_OUT for the block. */
3061 #define PEEP2_EOB pc_rtx
3062
3063 /* Wrap N to fit into the peep2_insn_data buffer. */
3064
3065 static int
3066 peep2_buf_position (int n)
3067 {
3068 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3069 n -= MAX_INSNS_PER_PEEP2 + 1;
3070 return n;
3071 }
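/* Editorial example: the insn buffer is a ring of
   MAX_INSNS_PER_PEEP2 + 1 slots. If MAX_INSNS_PER_PEEP2 were 5, the
   ring would have 6 slots and an index one window past the end wraps:

     peep2_buf_position (6) == 0;  peep2_buf_position (7) == 1;

   Callers only ever add at most one window length, so a single
   subtraction suffices. */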
3072
3073 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3074 does not exist. Used by the recognizer to find the next insn to match
3075 in a multi-insn pattern. */
3076
3077 rtx
3078 peep2_next_insn (int n)
3079 {
3080 gcc_assert (n <= peep2_current_count);
3081
3082 n = peep2_buf_position (peep2_current + n);
3083
3084 return peep2_insn_data[n].insn;
3085 }
3086
3087 /* Return true if REGNO is dead before the Nth non-note insn
3088 after `current'. */
3089
3090 int
3091 peep2_regno_dead_p (int ofs, int regno)
3092 {
3093 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3094
3095 ofs = peep2_buf_position (peep2_current + ofs);
3096
3097 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3098
3099 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3100 }
3101
3102 /* Similarly for a REG. */
3103
3104 int
3105 peep2_reg_dead_p (int ofs, rtx reg)
3106 {
3107 int regno, n;
3108
3109 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3110
3111 ofs = peep2_buf_position (peep2_current + ofs);
3112
3113 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3114
3115 regno = REGNO (reg);
3116 n = hard_regno_nregs[regno][GET_MODE (reg)];
3117 while (--n >= 0)
3118 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3119 return 0;
3120 return 1;
3121 }
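/* Editorial usage sketch: in a define_peephole2, the C condition
   string commonly guards on liveness (operand numbering is
   hypothetical), e.g.

     "peep2_reg_dead_p (2, operands[0])"

   meaning operand 0 is dead before the second non-note insn after the
   start of the match, so the replacement may clobber it. */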
3122
3123 /* Regno offset to be used in the register search. */
3124 static int search_ofs;
3125
3126 /* Try to find a hard register of mode MODE, matching the register class in
3127 CLASS_STR, which is available at the beginning of the insn at peep2
3128 buffer position FROM and is not set or clobbered by any insn before
3129 buffer position TO. FROM and TO are offsets from the current position
3130 in the peep2 instruction buffer.
3131 Registers that already have bits set in REG_SET will not be considered.
3132
3133 If an appropriate register is available, it will be returned and the
3134 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3135 returned. */
3136
3137 rtx
3138 peep2_find_free_register (int from, int to, const char *class_str,
3139 enum machine_mode mode, HARD_REG_SET *reg_set)
3140 {
3141 enum reg_class cl;
3142 HARD_REG_SET live;
3143 df_ref def;
3144 int i;
3145
3146 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3147 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3148
3149 from = peep2_buf_position (peep2_current + from);
3150 to = peep2_buf_position (peep2_current + to);
3151
3152 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3153 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3154
3155 while (from != to)
3156 {
3157 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3158
3159 /* Don't use registers set or clobbered by the insn. */
3160 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3161 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3162
3163 from = peep2_buf_position (from + 1);
3164 }
3165
3166 cl = reg_class_for_constraint (lookup_constraint (class_str));
3167
3168 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3169 {
3170 int raw_regno, regno, success, j;
3171
3172 /* Distribute the free registers as much as possible. */
3173 raw_regno = search_ofs + i;
3174 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3175 raw_regno -= FIRST_PSEUDO_REGISTER;
3176 #ifdef REG_ALLOC_ORDER
3177 regno = reg_alloc_order[raw_regno];
3178 #else
3179 regno = raw_regno;
3180 #endif
3181
3182 /* Can it support the mode we need? */
3183 if (! HARD_REGNO_MODE_OK (regno, mode))
3184 continue;
3185
3186 success = 1;
3187 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3188 {
3189 /* Don't allocate fixed registers. */
3190 if (fixed_regs[regno + j])
3191 {
3192 success = 0;
3193 break;
3194 }
3195 /* Don't allocate global registers. */
3196 if (global_regs[regno + j])
3197 {
3198 success = 0;
3199 break;
3200 }
3201 /* Make sure the register is of the right class. */
3202 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3203 {
3204 success = 0;
3205 break;
3206 }
3207 /* And that we don't create an extra save/restore. */
3208 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3209 {
3210 success = 0;
3211 break;
3212 }
3213
3214 if (! targetm.hard_regno_scratch_ok (regno + j))
3215 {
3216 success = 0;
3217 break;
3218 }
3219
3220 /* And we don't clobber traceback for noreturn functions. */
3221 if ((regno + j == FRAME_POINTER_REGNUM
3222 || regno + j == HARD_FRAME_POINTER_REGNUM)
3223 && (! reload_completed || frame_pointer_needed))
3224 {
3225 success = 0;
3226 break;
3227 }
3228
3229 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3230 || TEST_HARD_REG_BIT (live, regno + j))
3231 {
3232 success = 0;
3233 break;
3234 }
3235 }
3236
3237 if (success)
3238 {
3239 add_to_hard_reg_set (reg_set, mode, regno);
3240
3241 /* Start the next search with the next register. */
3242 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3243 raw_regno = 0;
3244 search_ofs = raw_regno;
3245
3246 return gen_rtx_REG (mode, regno);
3247 }
3248 }
3249
3250 search_ofs = 0;
3251 return NULL_RTX;
3252 }
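/* Editorial usage sketch (all names hypothetical): a define_peephole2
   needing a scratch register typically does, in its preparation
   statements:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   i.e. ask for a GENERAL_REGS register that is free across buffered
   insns 0 and 1; FAIL is only meaningful inside an md template. */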
3253
3254 /* Forget all currently tracked instructions, only remember current
3255 LIVE regset. */
3256
3257 static void
3258 peep2_reinit_state (regset live)
3259 {
3260 int i;
3261
3262 /* Indicate that all slots except the last hold invalid data. */
3263 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3264 peep2_insn_data[i].insn = NULL_RTX;
3265 peep2_current_count = 0;
3266
3267 /* Indicate that the last slot contains live_after data. */
3268 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3269 peep2_current = MAX_INSNS_PER_PEEP2;
3270
3271 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3272 }
3273
3274 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3275 starting at INSN. Perform the replacement, removing the old insns and
3276 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3277 if the replacement is rejected. */
3278
3279 static rtx_insn *
3280 peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt)
3281 {
3282 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3283 int i;
3284 rtx_insn *last, *before_try, *x;
3285 rtx eh_note, as_note;
3286 rtx_insn *old_insn;
3287 rtx_insn *new_insn;
3288 bool was_call = false;
3289
3290 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3291 match more than one insn, or to be split into more than one insn. */
3292 old_insn = as_a <rtx_insn *> (peep2_insn_data[peep2_current].insn);
3293 if (RTX_FRAME_RELATED_P (old_insn))
3294 {
3295 bool any_note = false;
3296 rtx note;
3297
3298 if (match_len != 0)
3299 return NULL;
3300
3301 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3302 may be in the stream for the purpose of register allocation. */
3303 if (active_insn_p (attempt))
3304 new_insn = attempt;
3305 else
3306 new_insn = next_active_insn (attempt);
3307 if (next_active_insn (new_insn))
3308 return NULL;
3309
3310 /* We have a 1-1 replacement. Copy over any frame-related info. */
3311 RTX_FRAME_RELATED_P (new_insn) = 1;
3312
3313 /* Allow the backend to fill in a note during the split. */
3314 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3315 switch (REG_NOTE_KIND (note))
3316 {
3317 case REG_FRAME_RELATED_EXPR:
3318 case REG_CFA_DEF_CFA:
3319 case REG_CFA_ADJUST_CFA:
3320 case REG_CFA_OFFSET:
3321 case REG_CFA_REGISTER:
3322 case REG_CFA_EXPRESSION:
3323 case REG_CFA_RESTORE:
3324 case REG_CFA_SET_VDRAP:
3325 any_note = true;
3326 break;
3327 default:
3328 break;
3329 }
3330
3331 /* If the backend didn't supply a note, copy one over. */
3332 if (!any_note)
3333 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3334 switch (REG_NOTE_KIND (note))
3335 {
3336 case REG_FRAME_RELATED_EXPR:
3337 case REG_CFA_DEF_CFA:
3338 case REG_CFA_ADJUST_CFA:
3339 case REG_CFA_OFFSET:
3340 case REG_CFA_REGISTER:
3341 case REG_CFA_EXPRESSION:
3342 case REG_CFA_RESTORE:
3343 case REG_CFA_SET_VDRAP:
3344 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3345 any_note = true;
3346 break;
3347 default:
3348 break;
3349 }
3350
3351 /* If there still isn't a note, make sure the unwind info sees the
3352 same expression as before the split. */
3353 if (!any_note)
3354 {
3355 rtx old_set, new_set;
3356
3357 /* The old insn had better have been simple, or annotated. */
3358 old_set = single_set (old_insn);
3359 gcc_assert (old_set != NULL);
3360
3361 new_set = single_set (new_insn);
3362 if (!new_set || !rtx_equal_p (new_set, old_set))
3363 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3364 }
3365
3366 /* Copy prologue/epilogue status. This is required in order to keep
3367 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3368 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3369 }
3370
3371 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3372 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3373 cfg-related call notes. */
3374 for (i = 0; i <= match_len; ++i)
3375 {
3376 int j;
3377 rtx note;
3378
3379 j = peep2_buf_position (peep2_current + i);
3380 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3381 if (!CALL_P (old_insn))
3382 continue;
3383 was_call = true;
3384
3385 new_insn = attempt;
3386 while (new_insn != NULL_RTX)
3387 {
3388 if (CALL_P (new_insn))
3389 break;
3390 new_insn = NEXT_INSN (new_insn);
3391 }
3392
3393 gcc_assert (new_insn != NULL_RTX);
3394
3395 CALL_INSN_FUNCTION_USAGE (new_insn)
3396 = CALL_INSN_FUNCTION_USAGE (old_insn);
3397 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3398
3399 for (note = REG_NOTES (old_insn);
3400 note;
3401 note = XEXP (note, 1))
3402 switch (REG_NOTE_KIND (note))
3403 {
3404 case REG_NORETURN:
3405 case REG_SETJMP:
3406 case REG_TM:
3407 add_reg_note (new_insn, REG_NOTE_KIND (note),
3408 XEXP (note, 0));
3409 break;
3410 default:
3411 /* Discard all other reg notes. */
3412 break;
3413 }
3414
3415 /* Croak if there is another call in the sequence. */
3416 while (++i <= match_len)
3417 {
3418 j = peep2_buf_position (peep2_current + i);
3419 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3420 gcc_assert (!CALL_P (old_insn));
3421 }
3422 break;
3423 }
3424
3425 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3426 move those notes over to the new sequence. */
3427 as_note = NULL;
3428 for (i = match_len; i >= 0; --i)
3429 {
3430 int j = peep2_buf_position (peep2_current + i);
3431 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3432
3433 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3434 if (as_note)
3435 break;
3436 }
3437
3438 i = peep2_buf_position (peep2_current + match_len);
3439 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3440
3441 /* Replace the old sequence with the new. */
3442 rtx_insn *peepinsn = as_a <rtx_insn *> (peep2_insn_data[i].insn);
3443 last = emit_insn_after_setloc (attempt,
3444 peep2_insn_data[i].insn,
3445 INSN_LOCATION (peepinsn));
3446 before_try = PREV_INSN (insn);
3447 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3448
3449 /* Re-insert the EH_REGION notes. */
3450 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3451 {
3452 edge eh_edge;
3453 edge_iterator ei;
3454
3455 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3456 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3457 break;
3458
3459 if (eh_note)
3460 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3461
3462 if (eh_edge)
3463 for (x = last; x != before_try; x = PREV_INSN (x))
3464 if (x != BB_END (bb)
3465 && (can_throw_internal (x)
3466 || can_nonlocal_goto (x)))
3467 {
3468 edge nfte, nehe;
3469 int flags;
3470
3471 nfte = split_block (bb, x);
3472 flags = (eh_edge->flags
3473 & (EDGE_EH | EDGE_ABNORMAL));
3474 if (CALL_P (x))
3475 flags |= EDGE_ABNORMAL_CALL;
3476 nehe = make_edge (nfte->src, eh_edge->dest,
3477 flags);
3478
3479 nehe->probability = eh_edge->probability;
3480 nfte->probability
3481 = REG_BR_PROB_BASE - nehe->probability;
3482
3483 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3484 bb = nfte->src;
3485 eh_edge = nehe;
3486 }
3487
3488 /* The replacement may have turned a possibly trapping insn into a
3489 non-trapping one. Zap any outgoing edges that are now dead. */
3490 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3491 }
3492
3493 /* Re-insert the ARGS_SIZE notes. */
3494 if (as_note)
3495 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3496
3497 /* If we generated a jump instruction, it won't have
3498 JUMP_LABEL set. Recompute after we're done. */
3499 for (x = last; x != before_try; x = PREV_INSN (x))
3500 if (JUMP_P (x))
3501 {
3502 peep2_do_rebuild_jump_labels = true;
3503 break;
3504 }
3505
3506 return last;
3507 }
3508
3509 /* After performing a replacement in basic block BB, fix up the life
3510 information in our buffer. LAST is the last of the insns that we
3511 emitted as a replacement. PREV is the insn before the start of
3512 the replacement. MATCH_LEN is the number of instructions that were
3513 matched, and which now need to be replaced in the buffer. */
3514
3515 static void
3516 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3517 rtx_insn *prev)
3518 {
3519 int i = peep2_buf_position (peep2_current + match_len + 1);
3520 rtx_insn *x;
3521 regset_head live;
3522
3523 INIT_REG_SET (&live);
3524 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3525
3526 gcc_assert (peep2_current_count >= match_len + 1);
3527 peep2_current_count -= match_len + 1;
3528
3529 x = last;
3530 do
3531 {
3532 if (INSN_P (x))
3533 {
3534 df_insn_rescan (x);
3535 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3536 {
3537 peep2_current_count++;
3538 if (--i < 0)
3539 i = MAX_INSNS_PER_PEEP2;
3540 peep2_insn_data[i].insn = x;
3541 df_simulate_one_insn_backwards (bb, x, &live);
3542 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3543 }
3544 }
3545 x = PREV_INSN (x);
3546 }
3547 while (x != prev);
3548 CLEAR_REG_SET (&live);
3549
3550 peep2_current = i;
3551 }
3552
3553 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3554 Return true if we added it, false otherwise. The caller will try to match
3555 peepholes against the buffer if we return false; otherwise it will try to
3556 add more instructions to the buffer. */
3557
3558 static bool
3559 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3560 {
3561 int pos;
3562
3563 /* Once we have filled the maximum number of insns the buffer can hold,
3564 allow the caller to match the insns against peepholes. We wait until
3565 the buffer is full in case the target has similar peepholes of different
3566 length; we always want to match the longest if possible. */
3567 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3568 return false;
3569
3570 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3571 any other pattern, lest it change the semantics of the frame info. */
3572 if (RTX_FRAME_RELATED_P (insn))
3573 {
3574 /* Let the buffer drain first. */
3575 if (peep2_current_count > 0)
3576 return false;
3577 /* Now the insn will be the only thing in the buffer. */
3578 }
3579
3580 pos = peep2_buf_position (peep2_current + peep2_current_count);
3581 peep2_insn_data[pos].insn = insn;
3582 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3583 peep2_current_count++;
3584
3585 df_simulate_one_insn_forwards (bb, as_a <rtx_insn *> (insn), live);
3586 return true;
3587 }
3588
3589 /* Perform the peephole2 optimization pass. */
3590
3591 static void
3592 peephole2_optimize (void)
3593 {
3594 rtx_insn *insn;
3595 bitmap live;
3596 int i;
3597 basic_block bb;
3598
3599 peep2_do_cleanup_cfg = false;
3600 peep2_do_rebuild_jump_labels = false;
3601
3602 df_set_flags (DF_LR_RUN_DCE);
3603 df_note_add_problem ();
3604 df_analyze ();
3605
3606 /* Initialize the regsets we're going to use. */
3607 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3608 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3609 search_ofs = 0;
3610 live = BITMAP_ALLOC (&reg_obstack);
3611
3612 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3613 {
3614 bool past_end = false;
3615 int pos;
3616
3617 rtl_profile_for_bb (bb);
3618
3619 /* Start up propagation. */
3620 bitmap_copy (live, DF_LR_IN (bb));
3621 df_simulate_initialize_forwards (bb, live);
3622 peep2_reinit_state (live);
3623
3624 insn = BB_HEAD (bb);
3625 for (;;)
3626 {
3627 rtx_insn *attempt;
3628 rtx head;
3629 int match_len;
3630
3631 if (!past_end && !NONDEBUG_INSN_P (insn))
3632 {
3633 next_insn:
3634 insn = NEXT_INSN (insn);
3635 if (insn == NEXT_INSN (BB_END (bb)))
3636 past_end = true;
3637 continue;
3638 }
3639 if (!past_end && peep2_fill_buffer (bb, insn, live))
3640 goto next_insn;
3641
3642 /* If we did not fill an empty buffer, it signals the end of the
3643 block. */
3644 if (peep2_current_count == 0)
3645 break;
3646
3647 /* The buffer filled to the current maximum, so try to match. */
3648
3649 pos = peep2_buf_position (peep2_current + peep2_current_count);
3650 peep2_insn_data[pos].insn = PEEP2_EOB;
3651 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3652
3653 /* Match the peephole. */
3654 head = peep2_insn_data[peep2_current].insn;
3655 attempt = safe_as_a <rtx_insn *> (
3656 peephole2_insns (PATTERN (head), head, &match_len));
3657 if (attempt != NULL)
3658 {
3659 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3660 if (last)
3661 {
3662 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3663 continue;
3664 }
3665 }
3666
3667 /* No match: advance the buffer by one insn. */
3668 peep2_current = peep2_buf_position (peep2_current + 1);
3669 peep2_current_count--;
3670 }
3671 }
3672
3673 default_rtl_profile ();
3674 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3675 BITMAP_FREE (peep2_insn_data[i].live_before);
3676 BITMAP_FREE (live);
3677 if (peep2_do_rebuild_jump_labels)
3678 rebuild_jump_labels (get_insns ());
3679 if (peep2_do_cleanup_cfg)
3680 cleanup_cfg (CLEANUP_CFG_CHANGED);
3681 }
3682 #endif /* HAVE_peephole2 */
3683
3684 /* Common predicates for use with define_bypass. */
3685
3686 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3687 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3688 must be either a single_set or a PARALLEL with SETs inside. */
3689
3690 int
3691 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3692 {
3693 rtx out_set, in_set;
3694 rtx out_pat, in_pat;
3695 rtx out_exp, in_exp;
3696 int i, j;
3697
3698 in_set = single_set (in_insn);
3699 if (in_set)
3700 {
3701 if (!MEM_P (SET_DEST (in_set)))
3702 return false;
3703
3704 out_set = single_set (out_insn);
3705 if (out_set)
3706 {
3707 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3708 return false;
3709 }
3710 else
3711 {
3712 out_pat = PATTERN (out_insn);
3713
3714 if (GET_CODE (out_pat) != PARALLEL)
3715 return false;
3716
3717 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3718 {
3719 out_exp = XVECEXP (out_pat, 0, i);
3720
3721 if (GET_CODE (out_exp) == CLOBBER)
3722 continue;
3723
3724 gcc_assert (GET_CODE (out_exp) == SET);
3725
3726 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3727 return false;
3728 }
3729 }
3730 }
3731 else
3732 {
3733 in_pat = PATTERN (in_insn);
3734 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3735
3736 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3737 {
3738 in_exp = XVECEXP (in_pat, 0, i);
3739
3740 if (GET_CODE (in_exp) == CLOBBER)
3741 continue;
3742
3743 gcc_assert (GET_CODE (in_exp) == SET);
3744
3745 if (!MEM_P (SET_DEST (in_exp)))
3746 return false;
3747
3748 out_set = single_set (out_insn);
3749 if (out_set)
3750 {
3751 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3752 return false;
3753 }
3754 else
3755 {
3756 out_pat = PATTERN (out_insn);
3757 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3758
3759 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3760 {
3761 out_exp = XVECEXP (out_pat, 0, j);
3762
3763 if (GET_CODE (out_exp) == CLOBBER)
3764 continue;
3765
3766 gcc_assert (GET_CODE (out_exp) == SET);
3767
3768 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3769 return false;
3770 }
3771 }
3772 }
3773 }
3774
3775 return true;
3776 }
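/* Editorial sketch: this predicate is intended as the guard of a
   define_bypass in a target's scheduling description (reservation
   names hypothetical):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   granting the shorter latency only when the ALU result feeds the
   stored data rather than the store's address. */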
3777
3778 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3779 condition, and not the THEN or ELSE branch. OUT_INSN may be a single set
3780 or a PARALLEL of sets; IN_INSN should be a single_set for a meaningful
3781 result, but for convenience of insn categorization may be any JUMP or CALL insn. */
3782
3783 int
3784 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3785 {
3786 rtx out_set, in_set;
3787
3788 in_set = single_set (in_insn);
3789 if (! in_set)
3790 {
3791 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3792 return false;
3793 }
3794
3795 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3796 return false;
3797 in_set = SET_SRC (in_set);
3798
3799 out_set = single_set (out_insn);
3800 if (out_set)
3801 {
3802 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3803 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3804 return false;
3805 }
3806 else
3807 {
3808 rtx out_pat;
3809 int i;
3810
3811 out_pat = PATTERN (out_insn);
3812 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3813
3814 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3815 {
3816 rtx exp = XVECEXP (out_pat, 0, i);
3817
3818 if (GET_CODE (exp) == CLOBBER)
3819 continue;
3820
3821 gcc_assert (GET_CODE (exp) == SET);
3822
3823 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3824 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3825 return false;
3826 }
3827 }
3828
3829 return true;
3830 }
\f
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, split insns now.  On targets with register stacks
     (STACK_REGS defined), the split is done unconditionally.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_after_reload ();
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting, and
     scheduling after reload is not done, they might not be split
     until final, which does not allow splitting if HAVE_ATTR_length
     is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}

/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  /* Clear the cached bool-attribute masks and free the cached
     operand-alternative data so that they are recomputed for the
     new target.  */
  memset (this_target_recog->x_bool_attr_masks, 0,
	  sizeof (this_target_recog->x_bool_attr_masks));
  for (int i = 0; i < LAST_INSN_CODE; ++i)
    if (this_target_recog->x_op_alt[i])
      {
	free (this_target_recog->x_op_alt[i]);
	this_target_recog->x_op_alt[i] = 0;
      }
}