1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 #include "function-abi.h"
44
45 #ifndef STACK_POP_CODE
46 #if STACK_GROWS_DOWNWARD
47 #define STACK_POP_CODE POST_INC
48 #else
49 #define STACK_POP_CODE POST_DEC
50 #endif
51 #endif
52
53 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
54 static void validate_replace_src_1 (rtx *, void *);
55 static rtx_insn *split_insn (rtx_insn *);
56
57 struct target_recog default_target_recog;
58 #if SWITCHABLE_TARGET
59 struct target_recog *this_target_recog = &default_target_recog;
60 #endif
61
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in reginfo.c and final.c and reload.c.
67
68 init_recog and init_recog_no_volatile are responsible for setting this. */
69
70 int volatile_ok;
71
72 struct recog_data_d recog_data;
73
74 /* Contains a vector of operand_alternative structures, such that
75 operand OP of alternative A is at index A * n_operands + OP.
76 Set up by preprocess_constraints. */
77 const operand_alternative *recog_op_alt;
78
79 /* Used to provide recog_op_alt for asms. */
80 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
81 * MAX_RECOG_ALTERNATIVES];
82
83 /* On return from `constrain_operands', indicate which alternative
84 was satisfied. */
85
86 int which_alternative;
87
88 /* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.c.
90 Controls the significance of (SUBREG (MEM)). */
91
92 int reload_completed;
93
94 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
95 int epilogue_completed;
96
97 /* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
100
101 void
102 init_recog_no_volatile (void)
103 {
104 volatile_ok = 0;
105 }
106
107 void
108 init_recog (void)
109 {
110 volatile_ok = 1;
111 }
112
113 \f
114 /* Return true if labels in asm operands BODY are LABEL_REFs. */
115
116 static bool
117 asm_labels_ok (rtx body)
118 {
119 rtx asmop;
120 int i;
121
122 asmop = extract_asm_operands (body);
123 if (asmop == NULL_RTX)
124 return true;
125
126 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
127 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
128 return false;
129
130 return true;
131 }
132
133 /* Check that X is an insn-body for an `asm' with operands
134 and that the operands mentioned in it are legitimate. */
135
136 int
137 check_asm_operands (rtx x)
138 {
139 int noperands;
140 rtx *operands;
141 const char **constraints;
142 int i;
143
144 if (!asm_labels_ok (x))
145 return 0;
146
147 /* Post-reload, be more strict with things. */
148 if (reload_completed)
149 {
150 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
151 rtx_insn *insn = make_insn_raw (x);
152 extract_insn (insn);
153 constrain_operands (1, get_enabled_alternatives (insn));
154 return which_alternative >= 0;
155 }
156
157 noperands = asm_noperands (x);
158 if (noperands < 0)
159 return 0;
160 if (noperands == 0)
161 return 1;
162
163 operands = XALLOCAVEC (rtx, noperands);
164 constraints = XALLOCAVEC (const char *, noperands);
165
166 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
167
168 for (i = 0; i < noperands; i++)
169 {
170 const char *c = constraints[i];
171 if (c[0] == '%')
172 c++;
173 if (! asm_operand_ok (operands[i], c, constraints))
174 return 0;
175 }
176
177 return 1;
178 }
179 \f
180 /* Static data for the next two routines. */
181
182 struct change_t
183 {
184 rtx object;
185 int old_code;
186 bool unshare;
187 rtx *loc;
188 rtx old;
189 };
190
191 static change_t *changes;
192 static int changes_allocated;
193
194 static int num_changes = 0;
195
196 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
197 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
198 the change is simply made.
199
200 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
201 will be called with the address and mode as parameters. If OBJECT is
202 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
203 the change in place.
204
205 IN_GROUP is nonzero if this is part of a group of changes that must be
206 performed as a group. In that case, the changes will be stored. The
207 function `apply_change_group' will validate and apply the changes.
208
209 If IN_GROUP is zero, this is a single change. Try to recognize the insn
210 or validate the memory reference with the change applied. If the result
211 is not valid for the machine, suppress the change and return zero.
212 Otherwise, perform the change and return 1. */
213
214 static bool
215 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
216 {
217 rtx old = *loc;
218
219 if (old == new_rtx || rtx_equal_p (old, new_rtx))
220 return 1;
221
222 gcc_assert (in_group != 0 || num_changes == 0);
223
224 *loc = new_rtx;
225
226 /* Save the information describing this change. */
227 if (num_changes >= changes_allocated)
228 {
229 if (changes_allocated == 0)
230 /* This value allows for repeated substitutions inside complex
231 indexed addresses, or changes in up to 5 insns. */
232 changes_allocated = MAX_RECOG_OPERANDS * 5;
233 else
234 changes_allocated *= 2;
235
236 changes = XRESIZEVEC (change_t, changes, changes_allocated);
237 }
238
239 changes[num_changes].object = object;
240 changes[num_changes].loc = loc;
241 changes[num_changes].old = old;
242 changes[num_changes].unshare = unshare;
243
244 if (object && !MEM_P (object))
245 {
246 /* Set INSN_CODE to force rerecognition of insn. Save old code in
247 case invalid. */
248 changes[num_changes].old_code = INSN_CODE (object);
249 INSN_CODE (object) = -1;
250 }
251
252 num_changes++;
253
254 /* If we are making a group of changes, return 1. Otherwise, validate the
255 change group we made. */
256
257 if (in_group)
258 return 1;
259 else
260 return apply_change_group ();
261 }
262
 263 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 264 defaulting UNSHARE to false. */
265
266 bool
267 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
268 {
269 return validate_change_1 (object, loc, new_rtx, in_group, false);
270 }
271
 272 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 273 defaulting UNSHARE to true. */
274
275 bool
276 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
277 {
278 return validate_change_1 (object, loc, new_rtx, in_group, true);
279 }
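
#if 0
/* Illustrative sketch (not part of the original recog.c): the typical
   grouped-change idiom.  Both replacements below are queued with
   IN_GROUP == 1, and apply_change_group then either keeps both (if INSN
   is still recognizable) or backs both out.  INSN, A_LOC and B_LOC stand
   for caller-provided locations inside PATTERN (INSN).  */
static bool
example_swap_operands (rtx_insn *insn, rtx *a_loc, rtx *b_loc)
{
  rtx a = *a_loc, b = *b_loc;
  validate_unshare_change (insn, a_loc, b, 1);	/* Queue first change.  */
  validate_unshare_change (insn, b_loc, a, 1);	/* Queue second change.  */
  return apply_change_group ();			/* Validate and commit.  */
}
#endif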
280
281
282 /* Keep X canonicalized if some changes have made it non-canonical; only
283 modifies the operands of X, not (for example) its code. Simplifications
284 are not the job of this routine.
285
286 Return true if anything was changed. */
287 bool
288 canonicalize_change_group (rtx_insn *insn, rtx x)
289 {
290 if (COMMUTATIVE_P (x)
291 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
292 {
293 /* Oops, the caller has made X no longer canonical.
294 Let's redo the changes in the correct order. */
295 rtx tem = XEXP (x, 0);
296 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
297 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
298 return true;
299 }
300 else
301 return false;
302 }
303
304
305 /* This subroutine of apply_change_group verifies whether the changes to INSN
306 were valid; i.e. whether INSN can still be recognized.
307
 308 If IN_GROUP is true, clobbers that have to be added in order to
 309 match the instruction will be added to the current change group.
310 Otherwise the changes will take effect immediately. */
311
312 int
313 insn_invalid_p (rtx_insn *insn, bool in_group)
314 {
315 rtx pat = PATTERN (insn);
316 int num_clobbers = 0;
317 /* If we are before reload and the pattern is a SET, see if we can add
318 clobbers. */
319 int icode = recog (pat, insn,
320 (GET_CODE (pat) == SET
321 && ! reload_completed
322 && ! reload_in_progress)
323 ? &num_clobbers : 0);
324 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
325
326
 327 /* If this is an asm and the operands aren't legal, then fail. Likewise if
 328 this is not an asm and the insn wasn't recognized. */
329 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
330 || (!is_asm && icode < 0))
331 return 1;
332
333 /* If we have to add CLOBBERs, fail if we have to add ones that reference
334 hard registers since our callers can't know if they are live or not.
335 Otherwise, add them. */
336 if (num_clobbers > 0)
337 {
338 rtx newpat;
339
340 if (added_clobbers_hard_reg_p (icode))
341 return 1;
342
343 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
344 XVECEXP (newpat, 0, 0) = pat;
345 add_clobbers (newpat, icode);
346 if (in_group)
347 validate_change (insn, &PATTERN (insn), newpat, 1);
348 else
349 PATTERN (insn) = pat = newpat;
350 }
351
352 /* After reload, verify that all constraints are satisfied. */
353 if (reload_completed)
354 {
355 extract_insn (insn);
356
357 if (! constrain_operands (1, get_preferred_alternatives (insn)))
358 return 1;
359 }
360
361 INSN_CODE (insn) = icode;
362 return 0;
363 }
364
365 /* Return number of changes made and not validated yet. */
366 int
367 num_changes_pending (void)
368 {
369 return num_changes;
370 }
371
372 /* Tentatively apply the changes numbered NUM and up.
373 Return 1 if all changes are valid, zero otherwise. */
374
375 int
376 verify_changes (int num)
377 {
378 int i;
379 rtx last_validated = NULL_RTX;
380
381 /* The changes have been applied and all INSN_CODEs have been reset to force
382 rerecognition.
383
384 The changes are valid if we aren't given an object, or if we are
 385 given a MEM and it still is a valid address, or if this is an insn
386 and it is recognized. In the latter case, if reload has completed,
387 we also require that the operands meet the constraints for
388 the insn. */
389
390 for (i = num; i < num_changes; i++)
391 {
392 rtx object = changes[i].object;
393
394 /* If there is no object to test or if it is the same as the one we
395 already tested, ignore it. */
396 if (object == 0 || object == last_validated)
397 continue;
398
399 if (MEM_P (object))
400 {
401 if (! memory_address_addr_space_p (GET_MODE (object),
402 XEXP (object, 0),
403 MEM_ADDR_SPACE (object)))
404 break;
405 }
406 else if (/* changes[i].old might be zero, e.g. when putting a
407 REG_FRAME_RELATED_EXPR into a previously empty list. */
408 changes[i].old
409 && REG_P (changes[i].old)
410 && asm_noperands (PATTERN (object)) > 0
411 && REG_EXPR (changes[i].old) != NULL_TREE
412 && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
413 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
414 && DECL_REGISTER (REG_EXPR (changes[i].old)))
415 {
416 /* Don't allow changes of hard register operands to inline
417 assemblies if they have been defined as register asm ("x"). */
418 break;
419 }
420 else if (DEBUG_INSN_P (object))
421 continue;
422 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
423 {
424 rtx pat = PATTERN (object);
425
426 /* Perhaps we couldn't recognize the insn because there were
427 extra CLOBBERs at the end. If so, try to re-recognize
428 without the last CLOBBER (later iterations will cause each of
429 them to be eliminated, in turn). But don't do this if we
430 have an ASM_OPERAND. */
431 if (GET_CODE (pat) == PARALLEL
432 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
433 && asm_noperands (PATTERN (object)) < 0)
434 {
435 rtx newpat;
436
437 if (XVECLEN (pat, 0) == 2)
438 newpat = XVECEXP (pat, 0, 0);
439 else
440 {
441 int j;
442
443 newpat
444 = gen_rtx_PARALLEL (VOIDmode,
445 rtvec_alloc (XVECLEN (pat, 0) - 1));
446 for (j = 0; j < XVECLEN (newpat, 0); j++)
447 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
448 }
449
450 /* Add a new change to this group to replace the pattern
451 with this new pattern. Then consider this change
452 as having succeeded. The change we added will
453 cause the entire call to fail if things remain invalid.
454
455 Note that this can lose if a later change than the one
456 we are processing specified &XVECEXP (PATTERN (object), 0, X)
457 but this shouldn't occur. */
458
459 validate_change (object, &PATTERN (object), newpat, 1);
460 continue;
461 }
462 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
463 || GET_CODE (pat) == VAR_LOCATION)
464 /* If this insn is a CLOBBER or USE, it is always valid, but is
465 never recognized. */
466 continue;
467 else
468 break;
469 }
470 last_validated = object;
471 }
472
473 return (i == num_changes);
474 }
475
476 /* A group of changes has previously been issued with validate_change
477 and verified with verify_changes. Call df_insn_rescan for each of
 478 the insns changed and clear num_changes. */
479
480 void
481 confirm_change_group (void)
482 {
483 int i;
484 rtx last_object = NULL;
485
486 for (i = 0; i < num_changes; i++)
487 {
488 rtx object = changes[i].object;
489
490 if (changes[i].unshare)
491 *changes[i].loc = copy_rtx (*changes[i].loc);
492
 493 /* Avoid unnecessary rescanning when multiple changes to the same
 494 instruction are made. */
495 if (object)
496 {
497 if (object != last_object && last_object && INSN_P (last_object))
498 df_insn_rescan (as_a <rtx_insn *> (last_object));
499 last_object = object;
500 }
501 }
502
503 if (last_object && INSN_P (last_object))
504 df_insn_rescan (as_a <rtx_insn *> (last_object));
505 num_changes = 0;
506 }
507
508 /* Apply a group of changes previously issued with `validate_change'.
509 If all changes are valid, call confirm_change_group and return 1,
510 otherwise, call cancel_changes and return 0. */
511
512 int
513 apply_change_group (void)
514 {
515 if (verify_changes (0))
516 {
517 confirm_change_group ();
518 return 1;
519 }
520 else
521 {
522 cancel_changes (0);
523 return 0;
524 }
525 }
526
527
528 /* Return the number of changes so far in the current group. */
529
530 int
531 num_validated_changes (void)
532 {
533 return num_changes;
534 }
535
536 /* Retract the changes numbered NUM and up. */
537
538 void
539 cancel_changes (int num)
540 {
541 int i;
542
543 /* Back out all the changes. Do this in the opposite order in which
544 they were made. */
545 for (i = num_changes - 1; i >= num; i--)
546 {
547 *changes[i].loc = changes[i].old;
548 if (changes[i].object && !MEM_P (changes[i].object))
549 INSN_CODE (changes[i].object) = changes[i].old_code;
550 }
551 num_changes = num;
552 }
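
#if 0
/* Illustrative sketch (not part of the original recog.c): rolling back
   only the speculative tail of a change group.  The checkpoint taken with
   num_validated_changes lets a caller queue an extra change, test it with
   verify_changes, and cancel just that tail while keeping earlier queued
   changes.  INSN, LOC and SPECULATIVE are caller-provided.  */
static bool
example_try_speculative_change (rtx_insn *insn, rtx *loc, rtx speculative)
{
  int checkpoint = num_validated_changes ();
  validate_change (insn, loc, speculative, 1);
  if (verify_changes (checkpoint))
    return true;
  /* Back out only the changes numbered CHECKPOINT and up.  */
  cancel_changes (checkpoint);
  return false;
}
#endif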
553
554 /* Reduce conditional compilation elsewhere. */
555 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
556 rtx. */
557
558 static void
559 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
560 machine_mode op0_mode)
561 {
562 rtx x = *loc;
563 enum rtx_code code = GET_CODE (x);
564 rtx new_rtx = NULL_RTX;
565 scalar_int_mode is_mode;
566
567 if (SWAPPABLE_OPERANDS_P (x)
568 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
569 {
570 validate_unshare_change (object, loc,
571 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
572 : swap_condition (code),
573 GET_MODE (x), XEXP (x, 1),
574 XEXP (x, 0)), 1);
575 x = *loc;
576 code = GET_CODE (x);
577 }
578
579 /* Canonicalize arithmetics with all constant operands. */
580 switch (GET_RTX_CLASS (code))
581 {
582 case RTX_UNARY:
583 if (CONSTANT_P (XEXP (x, 0)))
584 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
585 op0_mode);
586 break;
587 case RTX_COMM_ARITH:
588 case RTX_BIN_ARITH:
589 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
590 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
591 XEXP (x, 1));
592 break;
593 case RTX_COMPARE:
594 case RTX_COMM_COMPARE:
595 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
596 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
597 XEXP (x, 0), XEXP (x, 1));
598 break;
599 default:
600 break;
601 }
602 if (new_rtx)
603 {
604 validate_change (object, loc, new_rtx, 1);
605 return;
606 }
607
608 switch (code)
609 {
610 case PLUS:
611 /* If we have a PLUS whose second operand is now a CONST_INT, use
612 simplify_gen_binary to try to simplify it.
613 ??? We may want later to remove this, once simplification is
614 separated from this function. */
615 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
616 validate_change (object, loc,
617 simplify_gen_binary
618 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
619 break;
620 case MINUS:
621 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
622 validate_change (object, loc,
623 simplify_gen_binary
624 (PLUS, GET_MODE (x), XEXP (x, 0),
625 simplify_gen_unary (NEG,
626 GET_MODE (x), XEXP (x, 1),
627 GET_MODE (x))), 1);
628 break;
629 case ZERO_EXTEND:
630 case SIGN_EXTEND:
631 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
632 {
633 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
634 op0_mode);
635 /* If any of the above failed, substitute in something that
636 we know won't be recognized. */
637 if (!new_rtx)
638 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
639 validate_change (object, loc, new_rtx, 1);
640 }
641 break;
642 case SUBREG:
643 /* All subregs possible to simplify should be simplified. */
644 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
645 SUBREG_BYTE (x));
646
647 /* Subregs of VOIDmode operands are incorrect. */
648 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
649 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
650 if (new_rtx)
651 validate_change (object, loc, new_rtx, 1);
652 break;
653 case ZERO_EXTRACT:
654 case SIGN_EXTRACT:
655 /* If we are replacing a register with memory, try to change the memory
656 to be the mode required for memory in extract operations (this isn't
657 likely to be an insertion operation; if it was, nothing bad will
658 happen, we might just fail in some cases). */
659
660 if (MEM_P (XEXP (x, 0))
661 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
662 && CONST_INT_P (XEXP (x, 1))
663 && CONST_INT_P (XEXP (x, 2))
664 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
665 MEM_ADDR_SPACE (XEXP (x, 0)))
666 && !MEM_VOLATILE_P (XEXP (x, 0)))
667 {
668 int pos = INTVAL (XEXP (x, 2));
669 machine_mode new_mode = is_mode;
670 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
671 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
672 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
673 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
674 scalar_int_mode wanted_mode = (new_mode == VOIDmode
675 ? word_mode
676 : as_a <scalar_int_mode> (new_mode));
677
678 /* If we have a narrower mode, we can do something. */
679 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
680 {
681 int offset = pos / BITS_PER_UNIT;
682 rtx newmem;
683
684 /* If the bytes and bits are counted differently, we
685 must adjust the offset. */
686 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
687 offset =
688 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
689 offset);
690
691 gcc_assert (GET_MODE_PRECISION (wanted_mode)
692 == GET_MODE_BITSIZE (wanted_mode));
693 pos %= GET_MODE_BITSIZE (wanted_mode);
694
695 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
696
697 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
698 validate_change (object, &XEXP (x, 0), newmem, 1);
699 }
700 }
701
702 break;
703
704 default:
705 break;
706 }
707 }
708
709 /* Replace every occurrence of FROM in X with TO. Mark each change with
710 validate_change passing OBJECT. */
711
712 static void
713 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
714 bool simplify)
715 {
716 int i, j;
717 const char *fmt;
718 rtx x = *loc;
719 enum rtx_code code;
720 machine_mode op0_mode = VOIDmode;
721 int prev_changes = num_changes;
722
723 if (!x)
724 return;
725
726 code = GET_CODE (x);
727 fmt = GET_RTX_FORMAT (code);
728 if (fmt[0] == 'e')
729 op0_mode = GET_MODE (XEXP (x, 0));
730
731 /* X matches FROM if it is the same rtx or they are both referring to the
732 same register in the same mode. Avoid calling rtx_equal_p unless the
733 operands look similar. */
734
735 if (x == from
736 || (REG_P (x) && REG_P (from)
737 && GET_MODE (x) == GET_MODE (from)
738 && REGNO (x) == REGNO (from))
739 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
740 && rtx_equal_p (x, from)))
741 {
742 validate_unshare_change (object, loc, to, 1);
743 return;
744 }
745
 746 /* Call ourselves recursively to perform the replacements.
 747 We must not replace inside an already replaced expression, otherwise we
 748 get infinite recursion for replacements like (reg X)->(subreg (reg X)),
 749 so we must special-case shared ASM_OPERANDS. */
750
751 if (GET_CODE (x) == PARALLEL)
752 {
753 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
754 {
755 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
756 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
757 {
758 /* Verify that operands are really shared. */
759 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
760 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
761 (x, 0, j))));
762 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
763 from, to, object, simplify);
764 }
765 else
766 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
767 simplify);
768 }
769 }
770 else
771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
772 {
773 if (fmt[i] == 'e')
774 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
775 else if (fmt[i] == 'E')
776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
777 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
778 simplify);
779 }
780
781 /* If we didn't substitute, there is nothing more to do. */
782 if (num_changes == prev_changes)
783 return;
784
785 /* ??? The regmove is no more, so is this aberration still necessary? */
786 /* Allow substituted expression to have different mode. This is used by
787 regmove to change mode of pseudo register. */
788 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
789 op0_mode = GET_MODE (XEXP (x, 0));
790
791 /* Do changes needed to keep rtx consistent. Don't do any other
792 simplifications, as it is not our job. */
793 if (simplify)
794 simplify_while_replacing (loc, to, object, op0_mode);
795 }
796
797 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
798 with TO. After all changes have been made, validate by seeing
799 if INSN is still valid. */
800
801 int
802 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
803 {
804 validate_replace_rtx_1 (loc, from, to, insn, true);
805 return apply_change_group ();
806 }
807
808 /* Try replacing every occurrence of FROM in INSN with TO. After all
809 changes have been made, validate by seeing if INSN is still valid. */
810
811 int
812 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
813 {
814 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
815 return apply_change_group ();
816 }
817
818 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
819 is a part of INSN. After all changes have been made, validate by seeing if
820 INSN is still valid.
821 validate_replace_rtx (from, to, insn) is equivalent to
822 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
823
824 int
825 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
826 {
827 validate_replace_rtx_1 (where, from, to, insn, true);
828 return apply_change_group ();
829 }
830
831 /* Same as above, but do not simplify rtx afterwards. */
832 int
833 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
834 rtx_insn *insn)
835 {
836 validate_replace_rtx_1 (where, from, to, insn, false);
837 return apply_change_group ();
838
839 }
840
 841 /* Try replacing every occurrence of FROM in INSN with TO. This will
 842 also replace occurrences in REG_EQUAL and REG_EQUIV notes. */
843
844 void
845 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
846 {
847 rtx note;
848 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
849 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
850 if (REG_NOTE_KIND (note) == REG_EQUAL
851 || REG_NOTE_KIND (note) == REG_EQUIV)
852 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
853 }
854
855 /* Function called by note_uses to replace used subexpressions. */
856 struct validate_replace_src_data
857 {
858 rtx from; /* Old RTX */
859 rtx to; /* New RTX */
860 rtx_insn *insn; /* Insn in which substitution is occurring. */
861 };
862
863 static void
864 validate_replace_src_1 (rtx *x, void *data)
865 {
866 struct validate_replace_src_data *d
867 = (struct validate_replace_src_data *) data;
868
869 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
870 }
871
872 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
873 SET_DESTs. */
874
875 void
876 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
877 {
878 struct validate_replace_src_data d;
879
880 d.from = from;
881 d.to = to;
882 d.insn = insn;
883 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
884 }
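
#if 0
/* Illustrative sketch (not part of the original recog.c): the *_group
   replacement routines only queue changes; the caller still has to apply
   them.  Here every use (but not the SET_DESTs) of REG in INSN is
   rewritten to VAL, and the rewrite is kept only if INSN remains
   recognizable.  REG and VAL are caller-provided rtxes.  */
static bool
example_replace_uses (rtx_insn *insn, rtx reg, rtx val)
{
  validate_replace_src_group (reg, val, insn);
  return apply_change_group ();
}
#endif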
885
 886 /* Try to simplify INSN.
887 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
888 pattern and return true if something was simplified. */
889
890 bool
891 validate_simplify_insn (rtx_insn *insn)
892 {
893 int i;
894 rtx pat = NULL;
895 rtx newpat = NULL;
896
897 pat = PATTERN (insn);
898
899 if (GET_CODE (pat) == SET)
900 {
901 newpat = simplify_rtx (SET_SRC (pat));
902 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
903 validate_change (insn, &SET_SRC (pat), newpat, 1);
904 newpat = simplify_rtx (SET_DEST (pat));
905 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
906 validate_change (insn, &SET_DEST (pat), newpat, 1);
907 }
908 else if (GET_CODE (pat) == PARALLEL)
909 for (i = 0; i < XVECLEN (pat, 0); i++)
910 {
911 rtx s = XVECEXP (pat, 0, i);
912
913 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
914 {
915 newpat = simplify_rtx (SET_SRC (s));
916 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
917 validate_change (insn, &SET_SRC (s), newpat, 1);
918 newpat = simplify_rtx (SET_DEST (s));
919 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
920 validate_change (insn, &SET_DEST (s), newpat, 1);
921 }
922 }
923 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
924 }
925
926 /* Check whether INSN matches a specific alternative of an .md pattern. */
927
928 bool
929 valid_insn_p (rtx_insn *insn)
930 {
931 recog_memoized (insn);
932 if (INSN_CODE (insn) < 0)
933 return false;
934 extract_insn (insn);
935 /* We don't know whether the insn will be in code that is optimized
936 for size or speed, so consider all enabled alternatives. */
937 if (!constrain_operands (1, get_enabled_alternatives (insn)))
938 return false;
939 return true;
940 }
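
#if 0
/* Illustrative sketch (not part of the original recog.c): checking whether
   a tentatively built pattern would be accepted by the target before
   actually emitting it, in the same spirit as the post-reload path of
   check_asm_operands above.  PAT is a caller-built insn pattern.  */
static bool
example_pattern_ok (rtx pat)
{
  rtx_insn *insn = make_insn_raw (pat);
  return valid_insn_p (insn);
}
#endif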
941
942 /* Return 1 if OP is a valid general operand for machine mode MODE.
943 This is either a register reference, a memory reference,
944 or a constant. In the case of a memory reference, the address
945 is checked for general validity for the target machine.
946
947 Register and memory references must have mode MODE in order to be valid,
948 but some constants have no machine mode and are valid for any mode.
949
950 If MODE is VOIDmode, OP is checked for validity for whatever mode
951 it has.
952
953 The main use of this function is as a predicate in match_operand
954 expressions in the machine description. */
955
956 int
957 general_operand (rtx op, machine_mode mode)
958 {
959 enum rtx_code code = GET_CODE (op);
960
961 if (mode == VOIDmode)
962 mode = GET_MODE (op);
963
964 /* Don't accept CONST_INT or anything similar
965 if the caller wants something floating. */
966 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
967 && GET_MODE_CLASS (mode) != MODE_INT
968 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
969 return 0;
970
971 if (CONST_INT_P (op)
972 && mode != VOIDmode
973 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
974 return 0;
975
976 if (CONSTANT_P (op))
977 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
978 || mode == VOIDmode)
979 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
980 && targetm.legitimate_constant_p (mode == VOIDmode
981 ? GET_MODE (op)
982 : mode, op));
983
984 /* Except for certain constants with VOIDmode, already checked for,
985 OP's mode must match MODE if MODE specifies a mode. */
986
987 if (GET_MODE (op) != mode)
988 return 0;
989
990 if (code == SUBREG)
991 {
992 rtx sub = SUBREG_REG (op);
993
994 #ifdef INSN_SCHEDULING
995 /* On machines that have insn scheduling, we want all memory
 996 references to be explicit, so outlaw paradoxical SUBREGs.
997 However, we must allow them after reload so that they can
998 get cleaned up by cleanup_subreg_operands. */
999 if (!reload_completed && MEM_P (sub)
1000 && paradoxical_subreg_p (op))
1001 return 0;
1002 #endif
1003 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
 1004 may result in an incorrect reference. We should simplify all valid
1005 subregs of MEM anyway. But allow this after reload because we
1006 might be called from cleanup_subreg_operands.
1007
1008 ??? This is a kludge. */
1009 if (!reload_completed
1010 && maybe_ne (SUBREG_BYTE (op), 0)
1011 && MEM_P (sub))
1012 return 0;
1013
1014 if (REG_P (sub)
1015 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1016 && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1017 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1018 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1019 /* LRA can generate some invalid SUBREGS just for matched
1020 operand reload presentation. LRA needs to treat them as
1021 valid. */
1022 && ! LRA_SUBREG_P (op))
1023 return 0;
1024
1025 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1026 create such rtl, and we must reject it. */
1027 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1028 /* LRA can use subreg to store a floating point value in an
1029 integer mode. Although the floating point and the
1030 integer modes need the same number of hard registers, the
1031 size of floating point mode can be less than the integer
1032 mode. */
1033 && ! lra_in_progress
1034 && paradoxical_subreg_p (op))
1035 return 0;
1036
1037 op = sub;
1038 code = GET_CODE (op);
1039 }
1040
1041 if (code == REG)
1042 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1043 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1044
1045 if (code == MEM)
1046 {
1047 rtx y = XEXP (op, 0);
1048
1049 if (! volatile_ok && MEM_VOLATILE_P (op))
1050 return 0;
1051
 1052 /* Use the mem's mode, since it will be reloaded thus. LRA can
 1053 generate move insns with invalid addresses, which are made valid
 1054 and calculated efficiently by LRA through further numerous
 1055 transformations. */
1056 if (lra_in_progress
1057 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1058 return 1;
1059 }
1060
1061 return 0;
1062 }
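
/* For example (illustrative, not original recog.c text), a machine
   description typically uses this predicate as

     (match_operand:SI 0 "general_operand" "g")

   to accept any SImode register, memory reference or legitimate
   constant as operand 0.  */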
1063 \f
1064 /* Return 1 if OP is a valid memory address for a memory reference
1065 of mode MODE.
1066
1067 The main use of this function is as a predicate in match_operand
1068 expressions in the machine description. */
1069
1070 int
1071 address_operand (rtx op, machine_mode mode)
1072 {
1073 /* Wrong mode for an address expr. */
1074 if (GET_MODE (op) != VOIDmode
1075 && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1076 return false;
1077
1078 return memory_address_p (mode, op);
1079 }
1080
1081 /* Return 1 if OP is a register reference of mode MODE.
1082 If MODE is VOIDmode, accept a register in any mode.
1083
1084 The main use of this function is as a predicate in match_operand
1085 expressions in the machine description. */
1086
1087 int
1088 register_operand (rtx op, machine_mode mode)
1089 {
1090 if (GET_CODE (op) == SUBREG)
1091 {
1092 rtx sub = SUBREG_REG (op);
1093
1094 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1095 because it is guaranteed to be reloaded into one.
1096 Just make sure the MEM is valid in itself.
1097 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1098 but currently it does result from (SUBREG (REG)...) where the
1099 reg went on the stack.) */
1100 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1101 return 0;
1102 }
1103 else if (!REG_P (op))
1104 return 0;
1105 return general_operand (op, mode);
1106 }
1107
1108 /* Return 1 for a register in Pmode; ignore the tested mode. */
1109
1110 int
1111 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1112 {
1113 return register_operand (op, Pmode);
1114 }
1115
1116 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1117 or a hard register. */
1118
1119 int
1120 scratch_operand (rtx op, machine_mode mode)
1121 {
1122 if (GET_MODE (op) != mode && mode != VOIDmode)
1123 return 0;
1124
1125 return (GET_CODE (op) == SCRATCH
1126 || (REG_P (op)
1127 && (lra_in_progress
1128 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1129 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1130 }
1131
1132 /* Return 1 if OP is a valid immediate operand for mode MODE.
1133
1134 The main use of this function is as a predicate in match_operand
1135 expressions in the machine description. */
1136
1137 int
1138 immediate_operand (rtx op, machine_mode mode)
1139 {
1140 /* Don't accept CONST_INT or anything similar
1141 if the caller wants something floating. */
1142 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1143 && GET_MODE_CLASS (mode) != MODE_INT
1144 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1145 return 0;
1146
1147 if (CONST_INT_P (op)
1148 && mode != VOIDmode
1149 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1150 return 0;
1151
1152 return (CONSTANT_P (op)
1153 && (GET_MODE (op) == mode || mode == VOIDmode
1154 || GET_MODE (op) == VOIDmode)
1155 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1156 && targetm.legitimate_constant_p (mode == VOIDmode
1157 ? GET_MODE (op)
1158 : mode, op));
1159 }
1160
1161 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1162
1163 int
1164 const_int_operand (rtx op, machine_mode mode)
1165 {
1166 if (!CONST_INT_P (op))
1167 return 0;
1168
1169 if (mode != VOIDmode
1170 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1171 return 0;
1172
1173 return 1;
1174 }
1175
1176 #if TARGET_SUPPORTS_WIDE_INT
1177 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1178 of mode MODE. */
1179 int
1180 const_scalar_int_operand (rtx op, machine_mode mode)
1181 {
1182 if (!CONST_SCALAR_INT_P (op))
1183 return 0;
1184
1185 if (CONST_INT_P (op))
1186 return const_int_operand (op, mode);
1187
1188 if (mode != VOIDmode)
1189 {
1190 scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1191 int prec = GET_MODE_PRECISION (int_mode);
1192 int bitsize = GET_MODE_BITSIZE (int_mode);
1193
1194 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1195 return 0;
1196
1197 if (prec == bitsize)
1198 return 1;
1199 else
1200 {
1201 /* Multiword partial int. */
1202 HOST_WIDE_INT x
1203 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1204 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1205 }
1206 }
1207 return 1;
1208 }
1209
1210 /* Returns 1 if OP is an operand that is a constant integer or constant
1211 floating-point number of MODE. */
1212
1213 int
1214 const_double_operand (rtx op, machine_mode mode)
1215 {
1216 return (GET_CODE (op) == CONST_DOUBLE)
1217 && (GET_MODE (op) == mode || mode == VOIDmode);
1218 }
1219 #else
1220 /* Returns 1 if OP is an operand that is a constant integer or constant
1221 floating-point number of MODE. */
1222
1223 int
1224 const_double_operand (rtx op, machine_mode mode)
1225 {
1226 /* Don't accept CONST_INT or anything similar
1227 if the caller wants something floating. */
1228 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1229 && GET_MODE_CLASS (mode) != MODE_INT
1230 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1231 return 0;
1232
1233 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1234 && (mode == VOIDmode || GET_MODE (op) == mode
1235 || GET_MODE (op) == VOIDmode));
1236 }
1237 #endif
1238 /* Return 1 if OP is a general operand that is not an immediate
1239 operand of mode MODE. */
1240
1241 int
1242 nonimmediate_operand (rtx op, machine_mode mode)
1243 {
1244 return (general_operand (op, mode) && ! CONSTANT_P (op));
1245 }
1246
1247 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1248
1249 int
1250 nonmemory_operand (rtx op, machine_mode mode)
1251 {
1252 if (CONSTANT_P (op))
1253 return immediate_operand (op, mode);
1254 return register_operand (op, mode);
1255 }
1256
1257 /* Return 1 if OP is a valid operand that stands for pushing a
1258 value of mode MODE onto the stack.
1259
1260 The main use of this function is as a predicate in match_operand
1261 expressions in the machine description. */
1262
1263 int
1264 push_operand (rtx op, machine_mode mode)
1265 {
1266 if (!MEM_P (op))
1267 return 0;
1268
1269 if (mode != VOIDmode && GET_MODE (op) != mode)
1270 return 0;
1271
1272 poly_int64 rounded_size = GET_MODE_SIZE (mode);
1273
1274 #ifdef PUSH_ROUNDING
1275 rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1276 #endif
1277
1278 op = XEXP (op, 0);
1279
1280 if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1281 {
1282 if (GET_CODE (op) != STACK_PUSH_CODE)
1283 return 0;
1284 }
1285 else
1286 {
1287 poly_int64 offset;
1288 if (GET_CODE (op) != PRE_MODIFY
1289 || GET_CODE (XEXP (op, 1)) != PLUS
1290 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1291 || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1292 || (STACK_GROWS_DOWNWARD
1293 ? maybe_ne (offset, -rounded_size)
1294 : maybe_ne (offset, rounded_size)))
1295 return 0;
1296 }
1297
1298 return XEXP (op, 0) == stack_pointer_rtx;
1299 }
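
/* For example (illustrative, not original recog.c text), on a
   STACK_GROWS_DOWNWARD target whose STACK_PUSH_CODE defaults to PRE_DEC,
   push_operand accepts

     (mem:SI (pre_dec:P (reg:P sp)))

   where "sp" stands for stack_pointer_rtx, and, when PUSH_ROUNDING pads
   the slot beyond GET_MODE_SIZE (mode) -- say a 2-byte HImode push
   rounded up to 4 bytes -- the PRE_MODIFY form checked above:

     (mem:HI (pre_modify:P (reg:P sp)
                           (plus:P (reg:P sp) (const_int -4))))  */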
1300
1301 /* Return 1 if OP is a valid operand that stands for popping a
1302 value of mode MODE off the stack.
1303
1304 The main use of this function is as a predicate in match_operand
1305 expressions in the machine description. */
1306
1307 int
1308 pop_operand (rtx op, machine_mode mode)
1309 {
1310 if (!MEM_P (op))
1311 return 0;
1312
1313 if (mode != VOIDmode && GET_MODE (op) != mode)
1314 return 0;
1315
1316 op = XEXP (op, 0);
1317
1318 if (GET_CODE (op) != STACK_POP_CODE)
1319 return 0;
1320
1321 return XEXP (op, 0) == stack_pointer_rtx;
1322 }
1323
1324 /* Return 1 if ADDR is a valid memory address
1325 for mode MODE in address space AS. */
1326
1327 int
1328 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1329 rtx addr, addr_space_t as)
1330 {
1331 #ifdef GO_IF_LEGITIMATE_ADDRESS
1332 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1333 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1334 return 0;
1335
1336 win:
1337 return 1;
1338 #else
1339 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1340 #endif
1341 }
1342
1343 /* Return 1 if OP is a valid memory reference with mode MODE,
1344 including a valid address.
1345
1346 The main use of this function is as a predicate in match_operand
1347 expressions in the machine description. */
1348
1349 int
1350 memory_operand (rtx op, machine_mode mode)
1351 {
1352 rtx inner;
1353
1354 if (! reload_completed)
1355 /* Note that no SUBREG is a memory operand before end of reload pass,
1356 because (SUBREG (MEM...)) forces reloading into a register. */
1357 return MEM_P (op) && general_operand (op, mode);
1358
1359 if (mode != VOIDmode && GET_MODE (op) != mode)
1360 return 0;
1361
1362 inner = op;
1363 if (GET_CODE (inner) == SUBREG)
1364 inner = SUBREG_REG (inner);
1365
1366 return (MEM_P (inner) && general_operand (op, mode));
1367 }
1368
1369 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1370 that is, a memory reference whose address is a general_operand. */
1371
1372 int
1373 indirect_operand (rtx op, machine_mode mode)
1374 {
1375 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1376 if (! reload_completed
1377 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1378 {
1379 if (mode != VOIDmode && GET_MODE (op) != mode)
1380 return 0;
1381
1382 /* The only way that we can have a general_operand as the resulting
1383 address is if OFFSET is zero and the address already is an operand
1384 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1385 operand. */
1386 poly_int64 offset;
1387 rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1388 return (known_eq (offset + SUBREG_BYTE (op), 0)
1389 && general_operand (addr, Pmode));
1390 }
1391
1392 return (MEM_P (op)
1393 && memory_operand (op, mode)
1394 && general_operand (XEXP (op, 0), Pmode));
1395 }
1396
1397 /* Return 1 if this is an ordered comparison operator (not including
1398 ORDERED and UNORDERED). */
1399
1400 int
1401 ordered_comparison_operator (rtx op, machine_mode mode)
1402 {
1403 if (mode != VOIDmode && GET_MODE (op) != mode)
1404 return false;
1405 switch (GET_CODE (op))
1406 {
1407 case EQ:
1408 case NE:
1409 case LT:
1410 case LTU:
1411 case LE:
1412 case LEU:
1413 case GT:
1414 case GTU:
1415 case GE:
1416 case GEU:
1417 return true;
1418 default:
1419 return false;
1420 }
1421 }
1422
1423 /* Return 1 if this is a comparison operator. This allows the use of
1424 MATCH_OPERATOR to recognize all the branch insns. */
1425
1426 int
1427 comparison_operator (rtx op, machine_mode mode)
1428 {
1429 return ((mode == VOIDmode || GET_MODE (op) == mode)
1430 && COMPARISON_P (op));
1431 }
1432 \f
1433 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1434
1435 rtx
1436 extract_asm_operands (rtx body)
1437 {
1438 rtx tmp;
1439 switch (GET_CODE (body))
1440 {
1441 case ASM_OPERANDS:
1442 return body;
1443
1444 case SET:
1445 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1446 tmp = SET_SRC (body);
1447 if (GET_CODE (tmp) == ASM_OPERANDS)
1448 return tmp;
1449 break;
1450
1451 case PARALLEL:
1452 tmp = XVECEXP (body, 0, 0);
1453 if (GET_CODE (tmp) == ASM_OPERANDS)
1454 return tmp;
1455 if (GET_CODE (tmp) == SET)
1456 {
1457 tmp = SET_SRC (tmp);
1458 if (GET_CODE (tmp) == ASM_OPERANDS)
1459 return tmp;
1460 }
1461 break;
1462
1463 default:
1464 break;
1465 }
1466 return NULL;
1467 }
1468
1469 /* If BODY is an insn body that uses ASM_OPERANDS,
1470 return the number of operands (both input and output) in the insn.
1471 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1472 return 0.
1473 Otherwise return -1. */
1474
1475 int
1476 asm_noperands (const_rtx body)
1477 {
1478 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1479 int i, n_sets = 0;
1480
1481 if (asm_op == NULL)
1482 {
1483 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1484 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1485 {
1486 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1487 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1488 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1489 return -1;
1490 return 0;
1491 }
1492 return -1;
1493 }
1494
1495 if (GET_CODE (body) == SET)
1496 n_sets = 1;
1497 else if (GET_CODE (body) == PARALLEL)
1498 {
1499 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1500 {
1501 /* Multiple output operands, or 1 output plus some clobbers:
1502 body is
1503 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1504 /* Count backwards through CLOBBERs to determine number of SETs. */
1505 for (i = XVECLEN (body, 0); i > 0; i--)
1506 {
1507 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1508 break;
1509 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1510 return -1;
1511 }
1512
1513 /* N_SETS is now number of output operands. */
1514 n_sets = i;
1515
1516 /* Verify that all the SETs we have
1517 came from a single original asm_operands insn
1518 (so that invalid combinations are blocked). */
1519 for (i = 0; i < n_sets; i++)
1520 {
1521 rtx elt = XVECEXP (body, 0, i);
1522 if (GET_CODE (elt) != SET)
1523 return -1;
1524 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1525 return -1;
1526 /* If these ASM_OPERANDS rtx's came from different original insns
1527 then they aren't allowed together. */
1528 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1529 != ASM_OPERANDS_INPUT_VEC (asm_op))
1530 return -1;
1531 }
1532 }
1533 else
1534 {
1535 /* 0 outputs, but some clobbers:
1536 body is [(asm_operands ...) (clobber (reg ...))...]. */
1537 /* Make sure all the other parallel things really are clobbers. */
1538 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1539 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1540 return -1;
1541 }
1542 }
1543
1544 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1545 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1546 }
1547
1548 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1549 copy its operands (both input and output) into the vector OPERANDS,
1550 the locations of the operands within the insn into the vector OPERAND_LOCS,
1551 and the constraints for the operands into CONSTRAINTS.
1552 Write the modes of the operands into MODES.
1553 Write the location info into LOC.
1554 Return the assembler-template.
1555 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1556 return the basic assembly string.
1557
1558 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1559 we don't store that info. */
1560
1561 const char *
1562 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1563 const char **constraints, machine_mode *modes,
1564 location_t *loc)
1565 {
1566 int nbase = 0, n, i;
1567 rtx asmop;
1568
1569 switch (GET_CODE (body))
1570 {
1571 case ASM_OPERANDS:
1572 /* Zero output asm: BODY is (asm_operands ...). */
1573 asmop = body;
1574 break;
1575
1576 case SET:
1577 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1578 asmop = SET_SRC (body);
1579
1580 /* The output is in the SET.
1581 Its constraint is in the ASM_OPERANDS itself. */
1582 if (operands)
1583 operands[0] = SET_DEST (body);
1584 if (operand_locs)
1585 operand_locs[0] = &SET_DEST (body);
1586 if (constraints)
1587 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1588 if (modes)
1589 modes[0] = GET_MODE (SET_DEST (body));
1590 nbase = 1;
1591 break;
1592
1593 case PARALLEL:
1594 {
1595 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1596
1597 asmop = XVECEXP (body, 0, 0);
1598 if (GET_CODE (asmop) == SET)
1599 {
1600 asmop = SET_SRC (asmop);
1601
1602 /* At least one output, plus some CLOBBERs. The outputs are in
1603 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1604 for (i = 0; i < nparallel; i++)
1605 {
1606 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1607 break; /* Past last SET */
1608 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
1609 if (operands)
1610 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1611 if (operand_locs)
1612 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1613 if (constraints)
1614 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1615 if (modes)
1616 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1617 }
1618 nbase = i;
1619 }
1620 else if (GET_CODE (asmop) == ASM_INPUT)
1621 {
1622 if (loc)
1623 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1624 return XSTR (asmop, 0);
1625 }
1626 break;
1627 }
1628
1629 default:
1630 gcc_unreachable ();
1631 }
1632
1633 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1634 for (i = 0; i < n; i++)
1635 {
1636 if (operand_locs)
1637 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1638 if (operands)
1639 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1640 if (constraints)
1641 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1642 if (modes)
1643 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1644 }
1645 nbase += n;
1646
1647 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1648 for (i = 0; i < n; i++)
1649 {
1650 if (operand_locs)
1651 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1652 if (operands)
1653 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1654 if (constraints)
1655 constraints[nbase + i] = "";
1656 if (modes)
1657 modes[nbase + i] = Pmode;
1658 }
1659
1660 if (loc)
1661 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1662
1663 return ASM_OPERANDS_TEMPLATE (asmop);
1664 }
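
#if 0
/* Illustrative sketch (not part of the original recog.c): the typical
   caller pattern, mirroring check_asm_operands above.  BODY is an asm
   insn body; the operand count from asm_noperands sizes the scratch
   arrays passed to decode_asm_operands.  */
static void
example_walk_asm_operands (rtx body)
{
  int n = asm_noperands (body);
  if (n <= 0)
    return;
  rtx *operands = XALLOCAVEC (rtx, n);
  const char **constraints = XALLOCAVEC (const char *, n);
  machine_mode *modes = XALLOCAVEC (machine_mode, n);
  const char *templ
    = decode_asm_operands (body, operands, NULL, constraints, modes, NULL);
  /* TEMPL is the assembler template; operand I is OPERANDS[I], with
     constraint CONSTRAINTS[I] and mode MODES[I].  */
  (void) templ;
}
#endif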
1665
1666 /* Parse inline assembly string STRING and determine which operands are
1667 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1668 to true if operand I is referenced.
1669
1670 This is intended to distinguish barrier-like asms such as:
1671
1672 asm ("" : "=m" (...));
1673
1674 from real references such as:
1675
1676 asm ("sw\t$0, %0" : "=m" (...)); */
1677
1678 void
1679 get_referenced_operands (const char *string, bool *used,
1680 unsigned int noperands)
1681 {
1682 memset (used, 0, sizeof (bool) * noperands);
1683 const char *p = string;
1684 while (*p)
1685 switch (*p)
1686 {
1687 case '%':
1688 p += 1;
1689 /* A letter followed by a digit indicates an operand number. */
1690 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1691 p += 1;
1692 if (ISDIGIT (*p))
1693 {
1694 char *endptr;
1695 unsigned long opnum = strtoul (p, &endptr, 10);
1696 if (endptr != p && opnum < noperands)
1697 used[opnum] = true;
1698 p = endptr;
1699 }
1700 else
1701 p += 1;
1702 break;
1703
1704 default:
1705 p++;
1706 break;
1707 }
1708 }
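
#if 0
/* Illustrative sketch (not part of the original recog.c): deciding whether
   an asm template actually references operand 0, i.e. the barrier-like
   versus real-reference distinction described above.  TEMPL is the
   template string, NOPERANDS the operand count.  */
static bool
example_operand_0_used (const char *templ, unsigned int noperands)
{
  bool *used = XALLOCAVEC (bool, noperands);
  get_referenced_operands (templ, used, noperands);
  return noperands > 0 && used[0];
}
#endif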
1709
1710 /* Check if an asm_operand matches its constraints.
1711 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1712
1713 int
1714 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1715 {
1716 int result = 0;
1717 bool incdec_ok = false;
1718
1719 /* Use constrain_operands after reload. */
1720 gcc_assert (!reload_completed);
1721
1722 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1723 many alternatives as required to match the other operands. */
1724 if (*constraint == '\0')
1725 result = 1;
1726
1727 while (*constraint)
1728 {
1729 enum constraint_num cn;
1730 char c = *constraint;
1731 int len;
1732 switch (c)
1733 {
1734 case ',':
1735 constraint++;
1736 continue;
1737
1738 case '0': case '1': case '2': case '3': case '4':
1739 case '5': case '6': case '7': case '8': case '9':
1740 /* If caller provided constraints pointer, look up
1741 the matching constraint. Otherwise, our caller should have
1742 given us the proper matching constraint, but we can't
1743 actually fail the check if they didn't. Indicate that
1744 results are inconclusive. */
1745 if (constraints)
1746 {
1747 char *end;
1748 unsigned long match;
1749
1750 match = strtoul (constraint, &end, 10);
1751 if (!result)
1752 result = asm_operand_ok (op, constraints[match], NULL);
1753 constraint = (const char *) end;
1754 }
1755 else
1756 {
1757 do
1758 constraint++;
1759 while (ISDIGIT (*constraint));
1760 if (! result)
1761 result = -1;
1762 }
1763 continue;
1764
1765 /* The rest of the compiler assumes that reloading the address
1766 of a MEM into a register will make it fit an 'o' constraint.
1767 That is, if it sees a MEM operand for an 'o' constraint,
1768 it assumes that (mem (base-reg)) will fit.
1769
1770 That assumption fails on targets that don't have offsettable
1771 addresses at all. We therefore need to treat 'o' asm
1772 constraints as a special case and only accept operands that
1773 are already offsettable, thus proving that at least one
1774 offsettable address exists. */
1775 case 'o': /* offsettable */
1776 if (offsettable_nonstrict_memref_p (op))
1777 result = 1;
1778 break;
1779
1780 case 'g':
1781 if (general_operand (op, VOIDmode))
1782 result = 1;
1783 break;
1784
1785 case '<':
1786 case '>':
1787 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1788 to exist, excepting those that expand_call created. Further,
1789 on some machines which do not have generalized auto inc/dec,
1790 an inc/dec is not a memory_operand.
1791
1792 Match any memory and hope things are resolved after reload. */
1793 incdec_ok = true;
1794 /* FALLTHRU */
1795 default:
1796 cn = lookup_constraint (constraint);
1797 rtx mem = NULL;
1798 switch (get_constraint_type (cn))
1799 {
1800 case CT_REGISTER:
1801 if (!result
1802 && reg_class_for_constraint (cn) != NO_REGS
1803 && GET_MODE (op) != BLKmode
1804 && register_operand (op, VOIDmode))
1805 result = 1;
1806 break;
1807
1808 case CT_CONST_INT:
1809 if (!result
1810 && CONST_INT_P (op)
1811 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1812 result = 1;
1813 break;
1814
1815 case CT_MEMORY:
1816 mem = op;
1817 /* Fall through. */
1818 case CT_SPECIAL_MEMORY:
1819 /* Every memory operand can be reloaded to fit. */
1820 if (!mem)
1821 mem = extract_mem_from_operand (op);
1822 result = result || memory_operand (mem, VOIDmode);
1823 break;
1824
1825 case CT_ADDRESS:
1826 /* Every address operand can be reloaded to fit. */
1827 result = result || address_operand (op, VOIDmode);
1828 break;
1829
1830 case CT_FIXED_FORM:
1831 result = result || constraint_satisfied_p (op, cn);
1832 break;
1833 }
1834 break;
1835 }
1836 len = CONSTRAINT_LEN (c, constraint);
1837 do
1838 constraint++;
1839 while (--len && *constraint && *constraint != ',');
1840 if (len)
1841 return 0;
1842 }
1843
 1844 /* For operands without < or > constraints, reject side effects. */
1845 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1846 switch (GET_CODE (XEXP (op, 0)))
1847 {
1848 case PRE_INC:
1849 case POST_INC:
1850 case PRE_DEC:
1851 case POST_DEC:
1852 case PRE_MODIFY:
1853 case POST_MODIFY:
1854 return 0;
1855 default:
1856 break;
1857 }
1858
1859 return result;
1860 }
1861 \f
1862 /* Given an rtx *P, if it is a sum containing an integer constant term,
1863 return the location (type rtx *) of the pointer to that constant term.
1864 Otherwise, return a null pointer. */
1865
1866 rtx *
1867 find_constant_term_loc (rtx *p)
1868 {
1869 rtx *tem;
1870 enum rtx_code code = GET_CODE (*p);
1871
1872 /* If *P IS such a constant term, P is its location. */
1873
1874 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1875 || code == CONST)
1876 return p;
1877
1878 /* Otherwise, if not a sum, it has no constant term. */
1879
1880 if (GET_CODE (*p) != PLUS)
1881 return 0;
1882
1883 /* If one of the summands is constant, return its location. */
1884
1885 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1886 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1887 return p;
1888
1889 /* Otherwise, check each summand for containing a constant term. */
1890
1891 if (XEXP (*p, 0) != 0)
1892 {
1893 tem = find_constant_term_loc (&XEXP (*p, 0));
1894 if (tem != 0)
1895 return tem;
1896 }
1897
1898 if (XEXP (*p, 1) != 0)
1899 {
1900 tem = find_constant_term_loc (&XEXP (*p, 1));
1901 if (tem != 0)
1902 return tem;
1903 }
1904
1905 return 0;
1906 }
1907 \f
1908 /* Return 1 if OP is a memory reference
1909 whose address contains no side effects
1910 and remains valid after the addition
1911 of a positive integer less than the
1912 size of the object being referenced.
1913
1914 We assume that the original address is valid and do not check it.
1915
1916 This uses strict_memory_address_p as a subroutine, so
1917 don't use it before reload. */
1918
1919 int
1920 offsettable_memref_p (rtx op)
1921 {
1922 return ((MEM_P (op))
1923 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1924 MEM_ADDR_SPACE (op)));
1925 }
1926
1927 /* Similar, but don't require a strictly valid mem ref:
1928 consider pseudo-regs valid as index or base regs. */
1929
1930 int
1931 offsettable_nonstrict_memref_p (rtx op)
1932 {
1933 return ((MEM_P (op))
1934 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1935 MEM_ADDR_SPACE (op)));
1936 }
1937
1938 /* Return 1 if Y is a memory address which contains no side effects
1939 and would remain valid for address space AS after the addition of
1940    a positive integer less than the size of MODE.
1941
1942 We assume that the original address is valid and do not check it.
1943 We do check that it is valid for narrower modes.
1944
1945 If STRICTP is nonzero, we require a strictly valid address,
1946 for the sake of use in reload.c. */
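/* As an illustration: if MODE is SImode (4 bytes on a typical target) and Y
   is (plus (reg) (const_int 20)), the constant term is temporarily bumped
   to 23 and the result is checked as a QImode address, i.e. we ask whether
   the last byte of the access would still have a valid address.  */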
1947
1948 int
1949 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1950 addr_space_t as)
1951 {
1952 enum rtx_code ycode = GET_CODE (y);
1953 rtx z;
1954 rtx y1 = y;
1955 rtx *y2;
1956 int (*addressp) (machine_mode, rtx, addr_space_t) =
1957 (strictp ? strict_memory_address_addr_space_p
1958 : memory_address_addr_space_p);
1959 poly_int64 mode_sz = GET_MODE_SIZE (mode);
1960
1961 if (CONSTANT_ADDRESS_P (y))
1962 return 1;
1963
1964 /* Adjusting an offsettable address involves changing to a narrower mode.
1965 Make sure that's OK. */
1966
1967 if (mode_dependent_address_p (y, as))
1968 return 0;
1969
1970 machine_mode address_mode = GET_MODE (y);
1971 if (address_mode == VOIDmode)
1972 address_mode = targetm.addr_space.address_mode (as);
1973 #ifdef POINTERS_EXTEND_UNSIGNED
1974 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1975 #endif
1976
1977 /* ??? How much offset does an offsettable BLKmode reference need?
1978 Clearly that depends on the situation in which it's being used.
1979 However, the current situation in which we test 0xffffffff is
1980 less than ideal. Caveat user. */
1981 if (known_eq (mode_sz, 0))
1982 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1983
1984 /* If the expression contains a constant term,
1985 see if it remains valid when max possible offset is added. */
1986
1987 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1988 {
1989 int good;
1990
1991 y1 = *y2;
1992 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1993 /* Use QImode because an odd displacement may be automatically invalid
1994 for any wider mode. But it should be valid for a single byte. */
1995 good = (*addressp) (QImode, y, as);
1996
1997 /* In any case, restore old contents of memory. */
1998 *y2 = y1;
1999 return good;
2000 }
2001
2002 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2003 return 0;
2004
2005 /* The offset added here is chosen as the maximum offset that
2006 any instruction could need to add when operating on something
2007 of the specified mode. We assume that if Y and Y+c are
2008 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2009 go inside a LO_SUM here, so we do so as well. */
2010 if (GET_CODE (y) == LO_SUM
2011 && mode != BLKmode
2012 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2013 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2014 plus_constant (address_mode, XEXP (y, 1),
2015 mode_sz - 1));
2016 #ifdef POINTERS_EXTEND_UNSIGNED
2017 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2018 else if (POINTERS_EXTEND_UNSIGNED > 0
2019 && GET_CODE (y) == ZERO_EXTEND
2020 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2021 z = gen_rtx_ZERO_EXTEND (address_mode,
2022 plus_constant (pointer_mode, XEXP (y, 0),
2023 mode_sz - 1));
2024 #endif
2025 else
2026 z = plus_constant (address_mode, y, mode_sz - 1);
2027
2028 /* Use QImode because an odd displacement may be automatically invalid
2029 for any wider mode. But it should be valid for a single byte. */
2030 return (*addressp) (QImode, z, as);
2031 }
2032
2033 /* Return 1 if ADDR is an address-expression whose effect depends
2034 on the mode of the memory reference it is used in.
2035
2036 ADDRSPACE is the address space associated with the address.
2037
2038 Autoincrement addressing is a typical example of mode-dependence
2039 because the amount of the increment depends on the mode. */
2040
2041 bool
2042 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2043 {
2044 /* Auto-increment addressing with anything other than post_modify
2045 or pre_modify always introduces a mode dependency. Catch such
2046 cases now instead of deferring to the target. */
2047 if (GET_CODE (addr) == PRE_INC
2048 || GET_CODE (addr) == POST_INC
2049 || GET_CODE (addr) == PRE_DEC
2050 || GET_CODE (addr) == POST_DEC)
2051 return true;
2052
2053 return targetm.mode_dependent_address_p (addr, addrspace);
2054 }
2055 \f
2056 /* Return true if boolean attribute ATTR is supported. */
2057
2058 static bool
2059 have_bool_attr (bool_attr attr)
2060 {
2061 switch (attr)
2062 {
2063 case BA_ENABLED:
2064 return HAVE_ATTR_enabled;
2065 case BA_PREFERRED_FOR_SIZE:
2066 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2067 case BA_PREFERRED_FOR_SPEED:
2068 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2069 }
2070 gcc_unreachable ();
2071 }
2072
2073 /* Return the value of ATTR for instruction INSN. */
2074
2075 static bool
2076 get_bool_attr (rtx_insn *insn, bool_attr attr)
2077 {
2078 switch (attr)
2079 {
2080 case BA_ENABLED:
2081 return get_attr_enabled (insn);
2082 case BA_PREFERRED_FOR_SIZE:
2083 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2084 case BA_PREFERRED_FOR_SPEED:
2085 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2086 }
2087 gcc_unreachable ();
2088 }
2089
2090 /* Like get_bool_attr_mask, but don't use the cache. */
2091
2092 static alternative_mask
2093 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2094 {
2095 /* Temporarily install enough information for get_attr_<foo> to assume
2096 that the insn operands are already cached. As above, the attribute
2097 mustn't depend on the values of operands, so we don't provide their
2098 real values here. */
2099 rtx_insn *old_insn = recog_data.insn;
2100 int old_alternative = which_alternative;
2101
2102 recog_data.insn = insn;
2103 alternative_mask mask = ALL_ALTERNATIVES;
2104 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2105 for (int i = 0; i < n_alternatives; i++)
2106 {
2107 which_alternative = i;
2108 if (!get_bool_attr (insn, attr))
2109 mask &= ~ALTERNATIVE_BIT (i);
2110 }
2111
2112 recog_data.insn = old_insn;
2113 which_alternative = old_alternative;
2114 return mask;
2115 }
2116
2117 /* Return the mask of operand alternatives that are allowed for INSN
2118 by boolean attribute ATTR. This mask depends only on INSN and on
2119 the current target; it does not depend on things like the values of
2120 operands. */
2121
2122 static alternative_mask
2123 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2124 {
2125 /* Quick exit for asms and for targets that don't use these attributes. */
2126 int code = INSN_CODE (insn);
2127 if (code < 0 || !have_bool_attr (attr))
2128 return ALL_ALTERNATIVES;
2129
2130 /* Calling get_attr_<foo> can be expensive, so cache the mask
2131 for speed. */
2132 if (!this_target_recog->x_bool_attr_masks[code][attr])
2133 this_target_recog->x_bool_attr_masks[code][attr]
2134 = get_bool_attr_mask_uncached (insn, attr);
2135 return this_target_recog->x_bool_attr_masks[code][attr];
2136 }
2137
2138 /* Return the set of alternatives of INSN that are allowed by the current
2139 target. */
2140
2141 alternative_mask
2142 get_enabled_alternatives (rtx_insn *insn)
2143 {
2144 return get_bool_attr_mask (insn, BA_ENABLED);
2145 }
2146
2147 /* Return the set of alternatives of INSN that are allowed by the current
2148 target and are preferred for the current size/speed optimization
2149 choice. */
2150
2151 alternative_mask
2152 get_preferred_alternatives (rtx_insn *insn)
2153 {
2154 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2155 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2156 else
2157 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2158 }
2159
2160 /* Return the set of alternatives of INSN that are allowed by the current
2161 target and are preferred for the size/speed optimization choice
2162 associated with BB. Passing a separate BB is useful if INSN has not
2163 been emitted yet or if we are considering moving it to a different
2164 block. */
2165
2166 alternative_mask
2167 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2168 {
2169 if (optimize_bb_for_speed_p (bb))
2170 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2171 else
2172 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2173 }
2174
2175 /* Assert that the cached boolean attributes for INSN are still accurate.
2176 The backend is required to define these attributes in a way that only
2177 depends on the current target (rather than operands, compiler phase,
2178 etc.). */
2179
2180 bool
2181 check_bool_attrs (rtx_insn *insn)
2182 {
2183 int code = INSN_CODE (insn);
2184 if (code >= 0)
2185 for (int i = 0; i <= BA_LAST; ++i)
2186 {
2187 enum bool_attr attr = (enum bool_attr) i;
2188 if (this_target_recog->x_bool_attr_masks[code][attr])
2189 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2190 == get_bool_attr_mask_uncached (insn, attr));
2191 }
2192 return true;
2193 }
2194
2195 /* Like extract_insn, but save the extracted insn and don't extract it again
2196    when called again for the same insn, expecting that recog_data still
2197    contains valid information.  This is used primarily by the gen_attr
2198    infrastructure, which often extracts the same insn over and over.  */
2199 void
2200 extract_insn_cached (rtx_insn *insn)
2201 {
2202 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2203 return;
2204 extract_insn (insn);
2205 recog_data.insn = insn;
2206 }
2207
2208 /* Do uncached extract_insn, constrain_operands and complain about failures.
2209 This should be used when extracting a pre-existing constrained instruction
2210 if the caller wants to know which alternative was chosen. */
2211 void
2212 extract_constrain_insn (rtx_insn *insn)
2213 {
2214 extract_insn (insn);
2215 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2216 fatal_insn_not_found (insn);
2217 }
2218
2219 /* Do cached extract_insn, constrain_operands and complain about failures.
2220 Used by insn_attrtab. */
2221 void
2222 extract_constrain_insn_cached (rtx_insn *insn)
2223 {
2224 extract_insn_cached (insn);
2225 if (which_alternative == -1
2226 && !constrain_operands (reload_completed,
2227 get_enabled_alternatives (insn)))
2228 fatal_insn_not_found (insn);
2229 }
2230
2231 /* Do cached constrain_operands on INSN and complain about failures. */
2232 int
2233 constrain_operands_cached (rtx_insn *insn, int strict)
2234 {
2235 if (which_alternative == -1)
2236 return constrain_operands (strict, get_enabled_alternatives (insn));
2237 else
2238 return 1;
2239 }
2240 \f
2241 /* Analyze INSN and fill in recog_data. */
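/* As an illustration: after extracting an insn that matched a three-operand
   define_insn (say an add of a register and an immediate),
   recog_data.n_operands is 3, recog_data.operand[0..2] hold the destination
   and the two sources, recog_data.constraints[] point at the pattern's
   constraint strings, and which_alternative is reset to -1 until
   constrain_operands chooses an alternative.  */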
2242
2243 void
2244 extract_insn (rtx_insn *insn)
2245 {
2246 int i;
2247 int icode;
2248 int noperands;
2249 rtx body = PATTERN (insn);
2250
2251 recog_data.n_operands = 0;
2252 recog_data.n_alternatives = 0;
2253 recog_data.n_dups = 0;
2254 recog_data.is_asm = false;
2255
2256 switch (GET_CODE (body))
2257 {
2258 case USE:
2259 case CLOBBER:
2260 case ASM_INPUT:
2261 case ADDR_VEC:
2262 case ADDR_DIFF_VEC:
2263 case VAR_LOCATION:
2264 case DEBUG_MARKER:
2265 return;
2266
2267 case SET:
2268 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2269 goto asm_insn;
2270 else
2271 goto normal_insn;
2272 case PARALLEL:
2273 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2274 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2275 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2276 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2277 goto asm_insn;
2278 else
2279 goto normal_insn;
2280 case ASM_OPERANDS:
2281 asm_insn:
2282 recog_data.n_operands = noperands = asm_noperands (body);
2283 if (noperands >= 0)
2284 {
2285 /* This insn is an `asm' with operands. */
2286
2287 /* expand_asm_operands makes sure there aren't too many operands. */
2288 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2289
2290 /* Now get the operand values and constraints out of the insn. */
2291 decode_asm_operands (body, recog_data.operand,
2292 recog_data.operand_loc,
2293 recog_data.constraints,
2294 recog_data.operand_mode, NULL);
2295 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2296 if (noperands > 0)
2297 {
2298 const char *p = recog_data.constraints[0];
2299 recog_data.n_alternatives = 1;
2300 while (*p)
2301 recog_data.n_alternatives += (*p++ == ',');
2302 }
2303 recog_data.is_asm = true;
2304 break;
2305 }
2306 fatal_insn_not_found (insn);
2307
2308 default:
2309 normal_insn:
2310 /* Ordinary insn: recognize it, get the operands via insn_extract
2311 and get the constraints. */
2312
2313 icode = recog_memoized (insn);
2314 if (icode < 0)
2315 fatal_insn_not_found (insn);
2316
2317 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2318 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2319 recog_data.n_dups = insn_data[icode].n_dups;
2320
2321 insn_extract (insn);
2322
2323 for (i = 0; i < noperands; i++)
2324 {
2325 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2326 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2327 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2328 	  /* VOIDmode match_operands get their mode from the real operand.  */
2329 if (recog_data.operand_mode[i] == VOIDmode)
2330 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2331 }
2332 }
2333 for (i = 0; i < noperands; i++)
2334 recog_data.operand_type[i]
2335 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2336 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2337 : OP_IN);
2338
2339 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2340
2341 recog_data.insn = NULL;
2342 which_alternative = -1;
2343 }
2344
2345 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2346 operands, N_ALTERNATIVES alternatives and constraint strings
2347 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2348 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2349 if the insn is an asm statement and preprocessing should take the
2350 asm operands into account, e.g. to determine whether they could be
2351 addresses in constraints that require addresses; it should then
2352 point to an array of pointers to each operand. */
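/* For example, given the constraint string "r,m" for operand I, the entry
   for I in alternative 0 ends up with cl == GENERAL_REGS while the entry in
   alternative 1 has memory_ok set; a leading "&" sets earlyclobber, and a
   digit such as "0" records a matching-operand pair via the matches/matched
   fields.  */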
2353
2354 void
2355 preprocess_constraints (int n_operands, int n_alternatives,
2356 const char **constraints,
2357 operand_alternative *op_alt_base,
2358 rtx **oploc)
2359 {
2360 for (int i = 0; i < n_operands; i++)
2361 {
2362 int j;
2363 struct operand_alternative *op_alt;
2364 const char *p = constraints[i];
2365
2366 op_alt = op_alt_base;
2367
2368 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2369 {
2370 op_alt[i].cl = NO_REGS;
2371 op_alt[i].constraint = p;
2372 op_alt[i].matches = -1;
2373 op_alt[i].matched = -1;
2374
2375 if (*p == '\0' || *p == ',')
2376 {
2377 op_alt[i].anything_ok = 1;
2378 continue;
2379 }
2380
2381 for (;;)
2382 {
2383 char c = *p;
2384 if (c == '#')
2385 do
2386 c = *++p;
2387 while (c != ',' && c != '\0');
2388 if (c == ',' || c == '\0')
2389 {
2390 p++;
2391 break;
2392 }
2393
2394 switch (c)
2395 {
2396 case '?':
2397 op_alt[i].reject += 6;
2398 break;
2399 case '!':
2400 op_alt[i].reject += 600;
2401 break;
2402 case '&':
2403 op_alt[i].earlyclobber = 1;
2404 break;
2405
2406 case '0': case '1': case '2': case '3': case '4':
2407 case '5': case '6': case '7': case '8': case '9':
2408 {
2409 char *end;
2410 op_alt[i].matches = strtoul (p, &end, 10);
2411 op_alt[op_alt[i].matches].matched = i;
2412 p = end;
2413 }
2414 continue;
2415
2416 case 'X':
2417 op_alt[i].anything_ok = 1;
2418 break;
2419
2420 case 'g':
2421 op_alt[i].cl =
2422 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2423 break;
2424
2425 default:
2426 enum constraint_num cn = lookup_constraint (p);
2427 enum reg_class cl;
2428 switch (get_constraint_type (cn))
2429 {
2430 case CT_REGISTER:
2431 cl = reg_class_for_constraint (cn);
2432 if (cl != NO_REGS)
2433 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2434 break;
2435
2436 case CT_CONST_INT:
2437 break;
2438
2439 case CT_MEMORY:
2440 case CT_SPECIAL_MEMORY:
2441 op_alt[i].memory_ok = 1;
2442 break;
2443
2444 case CT_ADDRESS:
2445 if (oploc && !address_operand (*oploc[i], VOIDmode))
2446 break;
2447
2448 op_alt[i].is_address = 1;
2449 op_alt[i].cl
2450 = (reg_class_subunion
2451 [(int) op_alt[i].cl]
2452 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2453 ADDRESS, SCRATCH)]);
2454 break;
2455
2456 case CT_FIXED_FORM:
2457 break;
2458 }
2459 break;
2460 }
2461 p += CONSTRAINT_LEN (c, p);
2462 }
2463 }
2464 }
2465 }
2466
2467 /* Return an array of operand_alternative structures for
2468    instruction ICODE.  */
2469
2470 const operand_alternative *
2471 preprocess_insn_constraints (unsigned int icode)
2472 {
2473 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2474 if (this_target_recog->x_op_alt[icode])
2475 return this_target_recog->x_op_alt[icode];
2476
2477 int n_operands = insn_data[icode].n_operands;
2478 if (n_operands == 0)
2479 return 0;
2480 /* Always provide at least one alternative so that which_op_alt ()
2481 works correctly. If the instruction has 0 alternatives (i.e. all
2482 constraint strings are empty) then each operand in this alternative
2483 will have anything_ok set. */
2484 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2485 int n_entries = n_operands * n_alternatives;
2486
2487 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2488 const char **constraints = XALLOCAVEC (const char *, n_operands);
2489
2490 for (int i = 0; i < n_operands; ++i)
2491 constraints[i] = insn_data[icode].operand[i].constraint;
2492 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2493 NULL);
2494
2495 this_target_recog->x_op_alt[icode] = op_alt;
2496 return op_alt;
2497 }
2498
2499 /* After calling extract_insn, you can use this function to extract some
2500 information from the constraint strings into a more usable form.
2501 The collected data is stored in recog_op_alt. */
2502
2503 void
2504 preprocess_constraints (rtx_insn *insn)
2505 {
2506 int icode = INSN_CODE (insn);
2507 if (icode >= 0)
2508 recog_op_alt = preprocess_insn_constraints (icode);
2509 else
2510 {
2511 int n_operands = recog_data.n_operands;
2512 int n_alternatives = recog_data.n_alternatives;
2513 int n_entries = n_operands * n_alternatives;
2514 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2515 preprocess_constraints (n_operands, n_alternatives,
2516 recog_data.constraints, asm_op_alt,
2517 NULL);
2518 recog_op_alt = asm_op_alt;
2519 }
2520 }
2521
2522 /* Check the operands of an insn against the insn's operand constraints
2523 and return 1 if they match any of the alternatives in ALTERNATIVES.
2524
2525 The information about the insn's operands, constraints, operand modes
2526 etc. is obtained from the global variables set up by extract_insn.
2527
2528 WHICH_ALTERNATIVE is set to a number which indicates which
2529 alternative of constraints was matched: 0 for the first alternative,
2530 1 for the next, etc.
2531
2532 In addition, when two operands are required to match
2533 and it happens that the output operand is (reg) while the
2534 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2535 make the output operand look like the input.
2536 This is because the output operand is the one the template will print.
2537
2538 This is used in final, just before printing the assembler code and by
2539 the routines that determine an insn's attribute.
2540
2541    If STRICT is positive, it means that we have been
2542 called after reload has been completed. In that case, we must
2543 do all checks strictly. If it is zero, it means that we have been called
2544 before reload has completed. In that case, we first try to see if we can
2545 find an alternative that matches strictly. If not, we try again, this
2546 time assuming that reload will fix up the insn. This provides a "best
2547 guess" for the alternative and is used to compute attributes of insns prior
2548 to reload. A negative value of STRICT is used for this internal call. */
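/* As a sketch of the matching process: with constraint strings "=r,m" and
   "ri,r", alternative 0 wants operand 0 in a register and operand 1 to be a
   register or immediate, while alternative 1 wants operand 0 in memory and
   operand 1 in a register.  The loop below tries the alternatives in order
   (skipping any not in ALTERNATIVES) and leaves which_alternative at the
   first one whose operands all satisfy their constraints.  */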
2549
2550 struct funny_match
2551 {
2552 int this_op, other;
2553 };
2554
2555 int
2556 constrain_operands (int strict, alternative_mask alternatives)
2557 {
2558 const char *constraints[MAX_RECOG_OPERANDS];
2559 int matching_operands[MAX_RECOG_OPERANDS];
2560 int earlyclobber[MAX_RECOG_OPERANDS];
2561 int c;
2562
2563 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2564 int funny_match_index;
2565
2566 which_alternative = 0;
2567 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2568 return 1;
2569
2570 for (c = 0; c < recog_data.n_operands; c++)
2571 {
2572 constraints[c] = recog_data.constraints[c];
2573 matching_operands[c] = -1;
2574 }
2575
2576 do
2577 {
2578 int seen_earlyclobber_at = -1;
2579 int opno;
2580 int lose = 0;
2581 funny_match_index = 0;
2582
2583 if (!TEST_BIT (alternatives, which_alternative))
2584 {
2585 int i;
2586
2587 for (i = 0; i < recog_data.n_operands; i++)
2588 constraints[i] = skip_alternative (constraints[i]);
2589
2590 which_alternative++;
2591 continue;
2592 }
2593
2594 for (opno = 0; opno < recog_data.n_operands; opno++)
2595 {
2596 rtx op = recog_data.operand[opno];
2597 machine_mode mode = GET_MODE (op);
2598 const char *p = constraints[opno];
2599 int offset = 0;
2600 int win = 0;
2601 int val;
2602 int len;
2603
2604 earlyclobber[opno] = 0;
2605
2606 /* A unary operator may be accepted by the predicate, but it
2607 is irrelevant for matching constraints. */
2608 /* For special_memory_operand, there could be a memory operand inside,
2609 and it would cause a mismatch for constraint_satisfied_p. */
2610 if (UNARY_P (op) && op == extract_mem_from_operand (op))
2611 op = XEXP (op, 0);
2612
2613 if (GET_CODE (op) == SUBREG)
2614 {
2615 if (REG_P (SUBREG_REG (op))
2616 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2617 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2618 GET_MODE (SUBREG_REG (op)),
2619 SUBREG_BYTE (op),
2620 GET_MODE (op));
2621 op = SUBREG_REG (op);
2622 }
2623
2624 /* An empty constraint or empty alternative
2625 allows anything which matched the pattern. */
2626 if (*p == 0 || *p == ',')
2627 win = 1;
2628
2629 do
2630 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2631 {
2632 case '\0':
2633 len = 0;
2634 break;
2635 case ',':
2636 c = '\0';
2637 break;
2638
2639 case '#':
2640 /* Ignore rest of this alternative as far as
2641 constraint checking is concerned. */
2642 do
2643 p++;
2644 while (*p && *p != ',');
2645 len = 0;
2646 break;
2647
2648 case '&':
2649 earlyclobber[opno] = 1;
2650 if (seen_earlyclobber_at < 0)
2651 seen_earlyclobber_at = opno;
2652 break;
2653
2654 case '0': case '1': case '2': case '3': case '4':
2655 case '5': case '6': case '7': case '8': case '9':
2656 {
2657 /* This operand must be the same as a previous one.
2658 This kind of constraint is used for instructions such
2659 as add when they take only two operands.
2660
2661 Note that the lower-numbered operand is passed first.
2662
2663 If we are not testing strictly, assume that this
2664 constraint will be satisfied. */
2665
2666 char *end;
2667 int match;
2668
2669 match = strtoul (p, &end, 10);
2670 p = end;
2671
2672 if (strict < 0)
2673 val = 1;
2674 else
2675 {
2676 rtx op1 = recog_data.operand[match];
2677 rtx op2 = recog_data.operand[opno];
2678
2679 /* A unary operator may be accepted by the predicate,
2680 but it is irrelevant for matching constraints. */
2681 if (UNARY_P (op1))
2682 op1 = XEXP (op1, 0);
2683 if (UNARY_P (op2))
2684 op2 = XEXP (op2, 0);
2685
2686 val = operands_match_p (op1, op2);
2687 }
2688
2689 matching_operands[opno] = match;
2690 matching_operands[match] = opno;
2691
2692 if (val != 0)
2693 win = 1;
2694
2695 /* If output is *x and input is *--x, arrange later
2696 to change the output to *--x as well, since the
2697 output op is the one that will be printed. */
2698 if (val == 2 && strict > 0)
2699 {
2700 funny_match[funny_match_index].this_op = opno;
2701 funny_match[funny_match_index++].other = match;
2702 }
2703 }
2704 len = 0;
2705 break;
2706
2707 case 'p':
2708 /* p is used for address_operands. When we are called by
2709 gen_reload, no one will have checked that the address is
2710 strictly valid, i.e., that all pseudos requiring hard regs
2711 have gotten them. We also want to make sure we have a
2712 valid mode. */
2713 if ((GET_MODE (op) == VOIDmode
2714 || SCALAR_INT_MODE_P (GET_MODE (op)))
2715 && (strict <= 0
2716 || (strict_memory_address_p
2717 (recog_data.operand_mode[opno], op))))
2718 win = 1;
2719 break;
2720
2721 /* No need to check general_operand again;
2722 it was done in insn-recog.c. Well, except that reload
2723 doesn't check the validity of its replacements, but
2724 that should only matter when there's a bug. */
2725 case 'g':
2726 /* Anything goes unless it is a REG and really has a hard reg
2727 but the hard reg is not in the class GENERAL_REGS. */
2728 if (REG_P (op))
2729 {
2730 if (strict < 0
2731 || GENERAL_REGS == ALL_REGS
2732 || (reload_in_progress
2733 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2734 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2735 win = 1;
2736 }
2737 else if (strict < 0 || general_operand (op, mode))
2738 win = 1;
2739 break;
2740
2741 default:
2742 {
2743 enum constraint_num cn = lookup_constraint (p);
2744 enum reg_class cl = reg_class_for_constraint (cn);
2745 if (cl != NO_REGS)
2746 {
2747 if (strict < 0
2748 || (strict == 0
2749 && REG_P (op)
2750 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2751 || (strict == 0 && GET_CODE (op) == SCRATCH)
2752 || (REG_P (op)
2753 && reg_fits_class_p (op, cl, offset, mode)))
2754 win = 1;
2755 }
2756
2757 else if (constraint_satisfied_p (op, cn))
2758 win = 1;
2759
2760 else if (insn_extra_memory_constraint (cn)
2761 /* Every memory operand can be reloaded to fit. */
2762 && ((strict < 0 && MEM_P (op))
2763 /* Before reload, accept what reload can turn
2764 into a mem. */
2765 || (strict < 0 && CONSTANT_P (op))
2766 /* Before reload, accept a pseudo or hard register,
2767 since LRA can turn it into a mem. */
2768 || (strict < 0 && targetm.lra_p () && REG_P (op))
2769 /* During reload, accept a pseudo */
2770 || (reload_in_progress && REG_P (op)
2771 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2772 win = 1;
2773 else if (insn_extra_address_constraint (cn)
2774 /* Every address operand can be reloaded to fit. */
2775 && strict < 0)
2776 win = 1;
2777 /* Cater to architectures like IA-64 that define extra memory
2778 constraints without using define_memory_constraint. */
2779 else if (reload_in_progress
2780 && REG_P (op)
2781 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2782 && reg_renumber[REGNO (op)] < 0
2783 && reg_equiv_mem (REGNO (op)) != 0
2784 && constraint_satisfied_p
2785 (reg_equiv_mem (REGNO (op)), cn))
2786 win = 1;
2787 break;
2788 }
2789 }
2790 while (p += len, c);
2791
2792 constraints[opno] = p;
2793 /* If this operand did not win somehow,
2794 this alternative loses. */
2795 if (! win)
2796 lose = 1;
2797 }
2798 /* This alternative won; the operands are ok.
2799 Change whichever operands this alternative says to change. */
2800 if (! lose)
2801 {
2802 int opno, eopno;
2803
2804 /* See if any earlyclobber operand conflicts with some other
2805 operand. */
2806
2807 if (strict > 0 && seen_earlyclobber_at >= 0)
2808 for (eopno = seen_earlyclobber_at;
2809 eopno < recog_data.n_operands;
2810 eopno++)
2811 /* Ignore earlyclobber operands now in memory,
2812 because we would often report failure when we have
2813 two memory operands, one of which was formerly a REG. */
2814 if (earlyclobber[eopno]
2815 && REG_P (recog_data.operand[eopno]))
2816 for (opno = 0; opno < recog_data.n_operands; opno++)
2817 if ((MEM_P (recog_data.operand[opno])
2818 || recog_data.operand_type[opno] != OP_OUT)
2819 && opno != eopno
2820 /* Ignore things like match_operator operands. */
2821 && *recog_data.constraints[opno] != 0
2822 && ! (matching_operands[opno] == eopno
2823 && operands_match_p (recog_data.operand[opno],
2824 recog_data.operand[eopno]))
2825 && ! safe_from_earlyclobber (recog_data.operand[opno],
2826 recog_data.operand[eopno]))
2827 lose = 1;
2828
2829 if (! lose)
2830 {
2831 while (--funny_match_index >= 0)
2832 {
2833 recog_data.operand[funny_match[funny_match_index].other]
2834 = recog_data.operand[funny_match[funny_match_index].this_op];
2835 }
2836
2837 	  /* For operands without < or > constraints, reject side-effects.  */
2838 if (AUTO_INC_DEC && recog_data.is_asm)
2839 {
2840 for (opno = 0; opno < recog_data.n_operands; opno++)
2841 if (MEM_P (recog_data.operand[opno]))
2842 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2843 {
2844 case PRE_INC:
2845 case POST_INC:
2846 case PRE_DEC:
2847 case POST_DEC:
2848 case PRE_MODIFY:
2849 case POST_MODIFY:
2850 if (strchr (recog_data.constraints[opno], '<') == NULL
2851 && strchr (recog_data.constraints[opno], '>')
2852 == NULL)
2853 return 0;
2854 break;
2855 default:
2856 break;
2857 }
2858 }
2859
2860 return 1;
2861 }
2862 }
2863
2864 which_alternative++;
2865 }
2866 while (which_alternative < recog_data.n_alternatives);
2867
2868 which_alternative = -1;
2869 /* If we are about to reject this, but we are not to test strictly,
2870 try a very loose test. Only return failure if it fails also. */
2871 if (strict == 0)
2872 return constrain_operands (-1, alternatives);
2873 else
2874 return 0;
2875 }
2876
2877 /* Return true iff OPERAND (assumed to be a REG rtx)
2878 is a hard reg in class CLASS when its regno is offset by OFFSET
2879 and changed to mode MODE.
2880 If REG occupies multiple hard regs, all of them must be in CLASS. */
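/* OFFSET is usually 0; constrain_operands passes a nonzero value when the
   operand was a SUBREG of a hard register, using subreg_regno_offset so that
   the class membership test applies to the hard register actually
   accessed.  */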
2881
2882 bool
2883 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2884 machine_mode mode)
2885 {
2886 unsigned int regno = REGNO (operand);
2887
2888 if (cl == NO_REGS)
2889 return false;
2890
2891 /* Regno must not be a pseudo register. Offset may be negative. */
2892 return (HARD_REGISTER_NUM_P (regno)
2893 && HARD_REGISTER_NUM_P (regno + offset)
2894 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2895 regno + offset));
2896 }
2897 \f
2898 /* Split a single instruction.  Helper function for split_all_insns and
2899 split_all_insns_noflow. Return last insn in the sequence if successful,
2900 or NULL if unsuccessful. */
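/* Note that try_split applies the target's define_split and
   define_insn_and_split patterns; if none of them matches, it returns INSN
   itself, which is why LAST == INSN below means "no split happened".  */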
2901
2902 static rtx_insn *
2903 split_insn (rtx_insn *insn)
2904 {
2905 /* Split insns here to get max fine-grain parallelism. */
2906 rtx_insn *first = PREV_INSN (insn);
2907 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2908 rtx insn_set, last_set, note;
2909
2910 if (last == insn)
2911 return NULL;
2912
2913 /* If the original instruction was a single set that was known to be
2914 equivalent to a constant, see if we can say the same about the last
2915 instruction in the split sequence. The two instructions must set
2916 the same destination. */
2917 insn_set = single_set (insn);
2918 if (insn_set)
2919 {
2920 last_set = single_set (last);
2921 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2922 {
2923 note = find_reg_equal_equiv_note (insn);
2924 if (note && CONSTANT_P (XEXP (note, 0)))
2925 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2926 else if (CONSTANT_P (SET_SRC (insn_set)))
2927 set_unique_reg_note (last, REG_EQUAL,
2928 copy_rtx (SET_SRC (insn_set)));
2929 }
2930 }
2931
2932 /* try_split returns the NOTE that INSN became. */
2933 SET_INSN_DELETED (insn);
2934
2935 /* ??? Coddle to md files that generate subregs in post-reload
2936 splitters instead of computing the proper hard register. */
2937 if (reload_completed && first != last)
2938 {
2939 first = NEXT_INSN (first);
2940 for (;;)
2941 {
2942 if (INSN_P (first))
2943 cleanup_subreg_operands (first);
2944 if (first == last)
2945 break;
2946 first = NEXT_INSN (first);
2947 }
2948 }
2949
2950 return last;
2951 }
2952
2953 /* Split all insns in the function.  */
2954
2955 void
2956 split_all_insns (void)
2957 {
2958 bool changed;
2959 bool need_cfg_cleanup = false;
2960 basic_block bb;
2961
2962 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2963 bitmap_clear (blocks);
2964 changed = false;
2965
2966 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2967 {
2968 rtx_insn *insn, *next;
2969 bool finish = false;
2970
2971 rtl_profile_for_bb (bb);
2972 for (insn = BB_HEAD (bb); !finish ; insn = next)
2973 {
2974 /* Can't use `next_real_insn' because that might go across
2975 CODE_LABELS and short-out basic blocks. */
2976 next = NEXT_INSN (insn);
2977 finish = (insn == BB_END (bb));
2978
2979 /* If INSN has a REG_EH_REGION note and we split INSN, the
2980 resulting split may not have/need REG_EH_REGION notes.
2981
2982 If that happens and INSN was the last reference to the
2983 given EH region, then the EH region will become unreachable.
2984 We cannot leave the unreachable blocks in the CFG as that
2985 will trigger a checking failure.
2986
2987 So track if INSN has a REG_EH_REGION note. If so and we
2988 split INSN, then trigger a CFG cleanup. */
2989 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2990 if (INSN_P (insn))
2991 {
2992 rtx set = single_set (insn);
2993
2994 /* Don't split no-op move insns. These should silently
2995 disappear later in final. Splitting such insns would
2996 break the code that handles LIBCALL blocks. */
2997 if (set && set_noop_p (set))
2998 {
2999 /* Nops get in the way while scheduling, so delete them
3000 now if register allocation has already been done. It
3001 is too risky to try to do this before register
3002 allocation, and there are unlikely to be very many
3003 nops then anyways. */
3004 if (reload_completed)
3005 delete_insn_and_edges (insn);
3006 if (note)
3007 need_cfg_cleanup = true;
3008 }
3009 else
3010 {
3011 if (split_insn (insn))
3012 {
3013 bitmap_set_bit (blocks, bb->index);
3014 changed = true;
3015 if (note)
3016 need_cfg_cleanup = true;
3017 }
3018 }
3019 }
3020 }
3021 }
3022
3023 default_rtl_profile ();
3024 if (changed)
3025 {
3026 find_many_sub_basic_blocks (blocks);
3027
3028 /* Splitting could drop an REG_EH_REGION if it potentially
3029 trapped in its original form, but does not in its split
3030 form. Consider a FLOAT_TRUNCATE which splits into a memory
3031 store/load pair and -fnon-call-exceptions. */
3032 if (need_cfg_cleanup)
3033 cleanup_cfg (0);
3034 }
3035
3036 checking_verify_flow_info ();
3037 }
3038
3039 /* Same as split_all_insns, but do not expect CFG to be available.
3040 Used by machine dependent reorg passes. */
3041
3042 unsigned int
3043 split_all_insns_noflow (void)
3044 {
3045 rtx_insn *next, *insn;
3046
3047 for (insn = get_insns (); insn; insn = next)
3048 {
3049 next = NEXT_INSN (insn);
3050 if (INSN_P (insn))
3051 {
3052 /* Don't split no-op move insns. These should silently
3053 disappear later in final. Splitting such insns would
3054 break the code that handles LIBCALL blocks. */
3055 rtx set = single_set (insn);
3056 if (set && set_noop_p (set))
3057 {
3058 /* Nops get in the way while scheduling, so delete them
3059 now if register allocation has already been done. It
3060 is too risky to try to do this before register
3061 allocation, and there are unlikely to be very many
3062 nops then anyways.
3063
3064 ??? Should we use delete_insn when the CFG isn't valid? */
3065 if (reload_completed)
3066 delete_insn_and_edges (insn);
3067 }
3068 else
3069 split_insn (insn);
3070 }
3071 }
3072 return 0;
3073 }
3074 \f
3075 struct peep2_insn_data
3076 {
3077 rtx_insn *insn;
3078 regset live_before;
3079 };
3080
3081 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3082 static int peep2_current;
3083
3084 static bool peep2_do_rebuild_jump_labels;
3085 static bool peep2_do_cleanup_cfg;
3086
3087 /* The number of instructions available to match a peep2. */
3088 int peep2_current_count;
3089
3090 /* A marker indicating the last insn of the block. The live_before regset
3091 for this element is correct, indicating DF_LIVE_OUT for the block. */
3092 #define PEEP2_EOB invalid_insn_rtx
3093
3094 /* Wrap N to fit into the peep2_insn_data buffer. */
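/* The buffer is circular, with MAX_INSNS_PER_PEEP2 + 1 slots (the extra slot
   holds the end-of-block marker).  Callers only pass values smaller than
   twice the buffer size, so a single subtraction suffices instead of a
   modulo operation.  */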
3095
3096 static int
3097 peep2_buf_position (int n)
3098 {
3099 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3100 n -= MAX_INSNS_PER_PEEP2 + 1;
3101 return n;
3102 }
3103
3104 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3105 does not exist. Used by the recognizer to find the next insn to match
3106 in a multi-insn pattern. */
3107
3108 rtx_insn *
3109 peep2_next_insn (int n)
3110 {
3111 gcc_assert (n <= peep2_current_count);
3112
3113 n = peep2_buf_position (peep2_current + n);
3114
3115 return peep2_insn_data[n].insn;
3116 }
3117
3118 /* Return true if REGNO is dead before the Nth non-note insn
3119 after `current'. */
3120
3121 int
3122 peep2_regno_dead_p (int ofs, int regno)
3123 {
3124 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3125
3126 ofs = peep2_buf_position (peep2_current + ofs);
3127
3128 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3129
3130 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3131 }
3132
3133 /* Similarly for a REG. */
3134
3135 int
3136 peep2_reg_dead_p (int ofs, rtx reg)
3137 {
3138 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3139
3140 ofs = peep2_buf_position (peep2_current + ofs);
3141
3142 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3143
3144 unsigned int end_regno = END_REGNO (reg);
3145 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3146 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3147 return 0;
3148 return 1;
3149 }
3150
3151 /* Regno offset to be used in the register search. */
3152 static int search_ofs;
3153
3154 /* Try to find a hard register of mode MODE, matching the register class in
3155    CLASS_STR, which is available at the start of the insn at buffer position
3156    FROM and remains available until the start of the insn at buffer position
3157    TO, where FROM and TO are offsets from the current peephole position
3158    (as for peep2_next_insn).
3159 Registers that already have bits set in REG_SET will not be considered.
3160
3161 If an appropriate register is available, it will be returned and the
3162 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3163 returned. */
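/* For example, preparation code in a target's define_peephole2 might do
   something like

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &live);
     if (scratch == NULL_RTX)
       FAIL;

   to grab a general register that is free across the first two matched
   insns.  (This is only a sketch; the exact arguments depend on the
   peephole.)  */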
3164
3165 rtx
3166 peep2_find_free_register (int from, int to, const char *class_str,
3167 machine_mode mode, HARD_REG_SET *reg_set)
3168 {
3169 enum reg_class cl;
3170 HARD_REG_SET live;
3171 df_ref def;
3172 int i;
3173
3174 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3175 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3176
3177 from = peep2_buf_position (peep2_current + from);
3178 to = peep2_buf_position (peep2_current + to);
3179
3180 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3181 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3182
3183 while (from != to)
3184 {
3185 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3186
3187 /* Don't use registers set or clobbered by the insn. */
3188 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3189 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3190
3191 from = peep2_buf_position (from + 1);
3192 }
3193
3194 cl = reg_class_for_constraint (lookup_constraint (class_str));
3195
3196 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3197 {
3198 int raw_regno, regno, success, j;
3199
3200 /* Distribute the free registers as much as possible. */
3201 raw_regno = search_ofs + i;
3202 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3203 raw_regno -= FIRST_PSEUDO_REGISTER;
3204 #ifdef REG_ALLOC_ORDER
3205 regno = reg_alloc_order[raw_regno];
3206 #else
3207 regno = raw_regno;
3208 #endif
3209
3210 /* Can it support the mode we need? */
3211 if (!targetm.hard_regno_mode_ok (regno, mode))
3212 continue;
3213
3214 success = 1;
3215 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3216 {
3217 /* Don't allocate fixed registers. */
3218 if (fixed_regs[regno + j])
3219 {
3220 success = 0;
3221 break;
3222 }
3223 /* Don't allocate global registers. */
3224 if (global_regs[regno + j])
3225 {
3226 success = 0;
3227 break;
3228 }
3229 /* Make sure the register is of the right class. */
3230 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3231 {
3232 success = 0;
3233 break;
3234 }
3235 /* And that we don't create an extra save/restore. */
3236 if (! crtl->abi->clobbers_full_reg_p (regno + j)
3237 && ! df_regs_ever_live_p (regno + j))
3238 {
3239 success = 0;
3240 break;
3241 }
3242
3243 if (! targetm.hard_regno_scratch_ok (regno + j))
3244 {
3245 success = 0;
3246 break;
3247 }
3248
3249 /* And we don't clobber traceback for noreturn functions. */
3250 if ((regno + j == FRAME_POINTER_REGNUM
3251 || regno + j == HARD_FRAME_POINTER_REGNUM)
3252 && (! reload_completed || frame_pointer_needed))
3253 {
3254 success = 0;
3255 break;
3256 }
3257
3258 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3259 || TEST_HARD_REG_BIT (live, regno + j))
3260 {
3261 success = 0;
3262 break;
3263 }
3264 }
3265
3266 if (success)
3267 {
3268 add_to_hard_reg_set (reg_set, mode, regno);
3269
3270 /* Start the next search with the next register. */
3271 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3272 raw_regno = 0;
3273 search_ofs = raw_regno;
3274
3275 return gen_rtx_REG (mode, regno);
3276 }
3277 }
3278
3279 search_ofs = 0;
3280 return NULL_RTX;
3281 }
3282
3283 /* Forget all currently tracked instructions, only remember current
3284 LIVE regset. */
3285
3286 static void
3287 peep2_reinit_state (regset live)
3288 {
3289 int i;
3290
3291 /* Indicate that all slots except the last holds invalid data. */
3292 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3293 peep2_insn_data[i].insn = NULL;
3294 peep2_current_count = 0;
3295
3296 /* Indicate that the last slot contains live_after data. */
3297 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3298 peep2_current = MAX_INSNS_PER_PEEP2;
3299
3300 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3301 }
3302
3303 /* Copy the frame-related info of an insn (OLD_INSN) to the single
3304 insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3305
3306 void
3307 copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3308 {
3309 bool any_note = false;
3310 rtx note;
3311
3312 if (!RTX_FRAME_RELATED_P (old_insn))
3313 return;
3314
3315 RTX_FRAME_RELATED_P (new_insn) = 1;
3316
3317 /* Allow the backend to fill in a note during the split. */
3318 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3319 switch (REG_NOTE_KIND (note))
3320 {
3321 case REG_FRAME_RELATED_EXPR:
3322 case REG_CFA_DEF_CFA:
3323 case REG_CFA_ADJUST_CFA:
3324 case REG_CFA_OFFSET:
3325 case REG_CFA_REGISTER:
3326 case REG_CFA_EXPRESSION:
3327 case REG_CFA_RESTORE:
3328 case REG_CFA_SET_VDRAP:
3329 any_note = true;
3330 break;
3331 default:
3332 break;
3333 }
3334
3335 /* If the backend didn't supply a note, copy one over. */
3336 if (!any_note)
3337 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3338 switch (REG_NOTE_KIND (note))
3339 {
3340 case REG_FRAME_RELATED_EXPR:
3341 case REG_CFA_DEF_CFA:
3342 case REG_CFA_ADJUST_CFA:
3343 case REG_CFA_OFFSET:
3344 case REG_CFA_REGISTER:
3345 case REG_CFA_EXPRESSION:
3346 case REG_CFA_RESTORE:
3347 case REG_CFA_SET_VDRAP:
3348 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3349 any_note = true;
3350 break;
3351 default:
3352 break;
3353 }
3354
3355 /* If there still isn't a note, make sure the unwind info sees the
3356 same expression as before the split. */
3357 if (!any_note)
3358 {
3359 rtx old_set, new_set;
3360
3361 /* The old insn had better have been simple, or annotated. */
3362 old_set = single_set (old_insn);
3363 gcc_assert (old_set != NULL);
3364
3365 new_set = single_set (new_insn);
3366 if (!new_set || !rtx_equal_p (new_set, old_set))
3367 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3368 }
3369
3370 /* Copy prologue/epilogue status. This is required in order to keep
3371 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3372 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3373 }
3374
3375 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3376 starting at INSN. Perform the replacement, removing the old insns and
3377 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3378 if the replacement is rejected. */
3379
3380 static rtx_insn *
3381 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3382 {
3383 int i;
3384 rtx_insn *last, *before_try, *x;
3385 rtx eh_note, as_note;
3386 rtx_insn *old_insn;
3387 rtx_insn *new_insn;
3388 bool was_call = false;
3389
3390 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3391 match more than one insn, or to be split into more than one insn. */
3392 old_insn = peep2_insn_data[peep2_current].insn;
3393 if (RTX_FRAME_RELATED_P (old_insn))
3394 {
3395 if (match_len != 0)
3396 return NULL;
3397
3398 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3399 may be in the stream for the purpose of register allocation. */
3400 if (active_insn_p (attempt))
3401 new_insn = attempt;
3402 else
3403 new_insn = next_active_insn (attempt);
3404 if (next_active_insn (new_insn))
3405 return NULL;
3406
3407 /* We have a 1-1 replacement. Copy over any frame-related info. */
3408 copy_frame_info_to_split_insn (old_insn, new_insn);
3409 }
3410
3411 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3412 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3413 cfg-related call notes. */
3414 for (i = 0; i <= match_len; ++i)
3415 {
3416 int j;
3417 rtx note;
3418
3419 j = peep2_buf_position (peep2_current + i);
3420 old_insn = peep2_insn_data[j].insn;
3421 if (!CALL_P (old_insn))
3422 continue;
3423 was_call = true;
3424
3425 new_insn = attempt;
3426 while (new_insn != NULL_RTX)
3427 {
3428 if (CALL_P (new_insn))
3429 break;
3430 new_insn = NEXT_INSN (new_insn);
3431 }
3432
3433 gcc_assert (new_insn != NULL_RTX);
3434
3435 CALL_INSN_FUNCTION_USAGE (new_insn)
3436 = CALL_INSN_FUNCTION_USAGE (old_insn);
3437 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3438
3439 for (note = REG_NOTES (old_insn);
3440 note;
3441 note = XEXP (note, 1))
3442 switch (REG_NOTE_KIND (note))
3443 {
3444 case REG_NORETURN:
3445 case REG_SETJMP:
3446 case REG_TM:
3447 case REG_CALL_NOCF_CHECK:
3448 add_reg_note (new_insn, REG_NOTE_KIND (note),
3449 XEXP (note, 0));
3450 break;
3451 default:
3452 /* Discard all other reg notes. */
3453 break;
3454 }
3455
3456 /* Croak if there is another call in the sequence. */
3457 while (++i <= match_len)
3458 {
3459 j = peep2_buf_position (peep2_current + i);
3460 old_insn = peep2_insn_data[j].insn;
3461 gcc_assert (!CALL_P (old_insn));
3462 }
3463 break;
3464 }
3465
3466 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3467 move those notes over to the new sequence. */
3468 as_note = NULL;
3469 for (i = match_len; i >= 0; --i)
3470 {
3471 int j = peep2_buf_position (peep2_current + i);
3472 old_insn = peep2_insn_data[j].insn;
3473
3474 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3475 if (as_note)
3476 break;
3477 }
3478
3479 i = peep2_buf_position (peep2_current + match_len);
3480 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3481
3482 /* Replace the old sequence with the new. */
3483 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3484 last = emit_insn_after_setloc (attempt,
3485 peep2_insn_data[i].insn,
3486 INSN_LOCATION (peepinsn));
3487 if (JUMP_P (peepinsn) && JUMP_P (last))
3488 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3489 before_try = PREV_INSN (insn);
3490 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3491
3492 /* Re-insert the EH_REGION notes. */
3493 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3494 {
3495 edge eh_edge;
3496 edge_iterator ei;
3497
3498 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3499 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3500 break;
3501
3502 if (eh_note)
3503 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3504
3505 if (eh_edge)
3506 for (x = last; x != before_try; x = PREV_INSN (x))
3507 if (x != BB_END (bb)
3508 && (can_throw_internal (x)
3509 || can_nonlocal_goto (x)))
3510 {
3511 edge nfte, nehe;
3512 int flags;
3513
3514 nfte = split_block (bb, x);
3515 flags = (eh_edge->flags
3516 & (EDGE_EH | EDGE_ABNORMAL));
3517 if (CALL_P (x))
3518 flags |= EDGE_ABNORMAL_CALL;
3519 nehe = make_edge (nfte->src, eh_edge->dest,
3520 flags);
3521
3522 nehe->probability = eh_edge->probability;
3523 nfte->probability = nehe->probability.invert ();
3524
3525 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3526 bb = nfte->src;
3527 eh_edge = nehe;
3528 }
3529
3530 /* Converting possibly trapping insn to non-trapping is
3531 possible. Zap dummy outgoing edges. */
3532 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3533 }
3534
3535 /* Re-insert the ARGS_SIZE notes. */
3536 if (as_note)
3537 fixup_args_size_notes (before_try, last, get_args_size (as_note));
3538
3539 /* Scan the new insns for embedded side effects and add appropriate
3540 REG_INC notes. */
3541 if (AUTO_INC_DEC)
3542 for (x = last; x != before_try; x = PREV_INSN (x))
3543 if (NONDEBUG_INSN_P (x))
3544 add_auto_inc_notes (x, PATTERN (x));
3545
3546 /* If we generated a jump instruction, it won't have
3547 JUMP_LABEL set. Recompute after we're done. */
3548 for (x = last; x != before_try; x = PREV_INSN (x))
3549 if (JUMP_P (x))
3550 {
3551 peep2_do_rebuild_jump_labels = true;
3552 break;
3553 }
3554
3555 return last;
3556 }
3557
3558 /* After performing a replacement in basic block BB, fix up the life
3559 information in our buffer. LAST is the last of the insns that we
3560 emitted as a replacement. PREV is the insn before the start of
3561 the replacement. MATCH_LEN is the number of instructions that were
3562 matched, and which now need to be replaced in the buffer. */
3563
3564 static void
3565 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3566 rtx_insn *prev)
3567 {
3568 int i = peep2_buf_position (peep2_current + match_len + 1);
3569 rtx_insn *x;
3570 regset_head live;
3571
3572 INIT_REG_SET (&live);
3573 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3574
3575 gcc_assert (peep2_current_count >= match_len + 1);
3576 peep2_current_count -= match_len + 1;
3577
3578 x = last;
3579 do
3580 {
3581 if (INSN_P (x))
3582 {
3583 df_insn_rescan (x);
3584 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3585 {
3586 peep2_current_count++;
3587 if (--i < 0)
3588 i = MAX_INSNS_PER_PEEP2;
3589 peep2_insn_data[i].insn = x;
3590 df_simulate_one_insn_backwards (bb, x, &live);
3591 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3592 }
3593 }
3594 x = PREV_INSN (x);
3595 }
3596 while (x != prev);
3597 CLEAR_REG_SET (&live);
3598
3599 peep2_current = i;
3600 }
3601
3602 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3603 Return true if we added it, false otherwise. The caller will try to match
3604 peepholes against the buffer if we return false; otherwise it will try to
3605 add more instructions to the buffer. */
3606
3607 static bool
3608 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3609 {
3610 int pos;
3611
3612 /* Once we have filled the maximum number of insns the buffer can hold,
3613 allow the caller to match the insns against peepholes. We wait until
3614 the buffer is full in case the target has similar peepholes of different
3615 length; we always want to match the longest if possible. */
3616 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3617 return false;
3618
3619 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3620 any other pattern, lest it change the semantics of the frame info. */
3621 if (RTX_FRAME_RELATED_P (insn))
3622 {
3623 /* Let the buffer drain first. */
3624 if (peep2_current_count > 0)
3625 return false;
3626 /* Now the insn will be the only thing in the buffer. */
3627 }
3628
3629 pos = peep2_buf_position (peep2_current + peep2_current_count);
3630 peep2_insn_data[pos].insn = insn;
3631 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3632 peep2_current_count++;
3633
3634 df_simulate_one_insn_forwards (bb, insn, live);
3635 return true;
3636 }
3637
3638 /* Perform the peephole2 optimization pass. */
3639
3640 static void
3641 peephole2_optimize (void)
3642 {
3643 rtx_insn *insn;
3644 bitmap live;
3645 int i;
3646 basic_block bb;
3647
3648 peep2_do_cleanup_cfg = false;
3649 peep2_do_rebuild_jump_labels = false;
3650
3651 df_set_flags (DF_LR_RUN_DCE);
3652 df_note_add_problem ();
3653 df_analyze ();
3654
3655 /* Initialize the regsets we're going to use. */
3656 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3657 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3658 search_ofs = 0;
3659 live = BITMAP_ALLOC (&reg_obstack);
3660
3661 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3662 {
3663 bool past_end = false;
3664 int pos;
3665
3666 rtl_profile_for_bb (bb);
3667
3668 /* Start up propagation. */
3669 bitmap_copy (live, DF_LR_IN (bb));
3670 df_simulate_initialize_forwards (bb, live);
3671 peep2_reinit_state (live);
3672
3673 insn = BB_HEAD (bb);
3674 for (;;)
3675 {
3676 rtx_insn *attempt, *head;
3677 int match_len;
3678
3679 if (!past_end && !NONDEBUG_INSN_P (insn))
3680 {
3681 next_insn:
3682 insn = NEXT_INSN (insn);
3683 if (insn == NEXT_INSN (BB_END (bb)))
3684 past_end = true;
3685 continue;
3686 }
3687 if (!past_end && peep2_fill_buffer (bb, insn, live))
3688 goto next_insn;
3689
3690 /* If we did not fill an empty buffer, it signals the end of the
3691 block. */
3692 if (peep2_current_count == 0)
3693 break;
3694
3695 /* The buffer filled to the current maximum, so try to match. */
3696
3697 pos = peep2_buf_position (peep2_current + peep2_current_count);
3698 peep2_insn_data[pos].insn = PEEP2_EOB;
3699 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3700
3701 /* Match the peephole. */
3702 head = peep2_insn_data[peep2_current].insn;
3703 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3704 if (attempt != NULL)
3705 {
3706 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3707 if (last)
3708 {
3709 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3710 continue;
3711 }
3712 }
3713
3714 /* No match: advance the buffer by one insn. */
3715 peep2_current = peep2_buf_position (peep2_current + 1);
3716 peep2_current_count--;
3717 }
3718 }
3719
3720 default_rtl_profile ();
3721 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3722 BITMAP_FREE (peep2_insn_data[i].live_before);
3723 BITMAP_FREE (live);
3724 if (peep2_do_rebuild_jump_labels)
3725 rebuild_jump_labels (get_insns ());
3726 if (peep2_do_cleanup_cfg)
3727 cleanup_cfg (CLEANUP_CFG_CHANGED);
3728 }
3729
3730 /* Common predicates for use with define_bypass. */
3731
3732 /* Helper function for store_data_bypass_p, handle just a single SET
3733 IN_SET. */
3734
3735 static bool
3736 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
3737 {
3738 if (!MEM_P (SET_DEST (in_set)))
3739 return false;
3740
3741 rtx out_set = single_set (out_insn);
3742 if (out_set)
3743 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
3744
3745 rtx out_pat = PATTERN (out_insn);
3746 if (GET_CODE (out_pat) != PARALLEL)
3747 return false;
3748
3749 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
3750 {
3751 rtx out_exp = XVECEXP (out_pat, 0, i);
3752
3753 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
3754 continue;
3755
3756 gcc_assert (GET_CODE (out_exp) == SET);
3757
3758 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3759 return false;
3760 }
3761
3762 return true;
3763 }
3764
3765 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3766    data, not the address operand(s), of the store.  IN_INSN and OUT_INSN
3767 must be either a single_set or a PARALLEL with SETs inside. */
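/* For instance, if OUT_INSN is (set (reg 100) (plus ...)) and IN_INSN is
   (set (mem (reg 101)) (reg 100)), the dependency is through the stored
   data, so this returns true; if OUT_INSN instead set reg 101 (the address
   register), it would return false.  */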
3768
3769 int
3770 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3771 {
3772 rtx in_set = single_set (in_insn);
3773 if (in_set)
3774 return store_data_bypass_p_1 (out_insn, in_set);
3775
3776 rtx in_pat = PATTERN (in_insn);
3777 if (GET_CODE (in_pat) != PARALLEL)
3778 return false;
3779
3780 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
3781 {
3782 rtx in_exp = XVECEXP (in_pat, 0, i);
3783
3784 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
3785 continue;
3786
3787 gcc_assert (GET_CODE (in_exp) == SET);
3788
3789 if (!store_data_bypass_p_1 (out_insn, in_exp))
3790 return false;
3791 }
3792
3793 return true;
3794 }
3795
3796 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3797 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3798    or multiple set; IN_INSN should be a single_set for a meaningful result,
3799    but for convenience of insn categorization may be any JUMP or CALL insn.  */
3800
3801 int
3802 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3803 {
3804 rtx out_set, in_set;
3805
3806 in_set = single_set (in_insn);
3807 if (! in_set)
3808 {
3809 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3810 return false;
3811 }
3812
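  /* IN_INSN must be a conditional operation: a single SET whose source is
     an IF_THEN_ELSE.  Below, IN_SET is narrowed to that IF_THEN_ELSE.  */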
3813 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3814 return false;
3815 in_set = SET_SRC (in_set);
3816
3817 out_set = single_set (out_insn);
3818 if (out_set)
3819 {
3820 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3821 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3822 return false;
3823 }
3824 else
3825 {
3826 rtx out_pat;
3827 int i;
3828
3829 out_pat = PATTERN (out_insn);
3830 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3831
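      /* Check every SET in the PARALLEL: the bypass applies only if none of
         their destinations is used in the THEN or ELSE arm of the
         IF_THEN_ELSE.  */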
3832 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3833 {
3834 rtx exp = XVECEXP (out_pat, 0, i);
3835
3836 if (GET_CODE (exp) == CLOBBER)
3837 continue;
3838
3839 gcc_assert (GET_CODE (exp) == SET);
3840
3841 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3842 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3843 return false;
3844 }
3845 }
3846
3847 return true;
3848 }
3849 \f
3850 static unsigned int
3851 rest_of_handle_peephole2 (void)
3852 {
3853 if (HAVE_peephole2)
3854 peephole2_optimize ();
3855
3856 return 0;
3857 }
3858
3859 namespace {
3860
3861 const pass_data pass_data_peephole2 =
3862 {
3863 RTL_PASS, /* type */
3864 "peephole2", /* name */
3865 OPTGROUP_NONE, /* optinfo_flags */
3866 TV_PEEPHOLE2, /* tv_id */
3867 0, /* properties_required */
3868 0, /* properties_provided */
3869 0, /* properties_destroyed */
3870 0, /* todo_flags_start */
3871 TODO_df_finish, /* todo_flags_finish */
3872 };
3873
3874 class pass_peephole2 : public rtl_opt_pass
3875 {
3876 public:
3877 pass_peephole2 (gcc::context *ctxt)
3878 : rtl_opt_pass (pass_data_peephole2, ctxt)
3879 {}
3880
3881 /* opt_pass methods: */
3882 /* The epiphany backend creates a second instance of this pass, so we need
3883 a clone method. */
3884 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3885 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3886 virtual unsigned int execute (function *)
3887 {
3888 return rest_of_handle_peephole2 ();
3889 }
3890
3891 }; // class pass_peephole2
3892
3893 } // anon namespace
3894
3895 rtl_opt_pass *
3896 make_pass_peephole2 (gcc::context *ctxt)
3897 {
3898 return new pass_peephole2 (ctxt);
3899 }
3900
3901 namespace {
3902
3903 const pass_data pass_data_split_all_insns =
3904 {
3905 RTL_PASS, /* type */
3906 "split1", /* name */
3907 OPTGROUP_NONE, /* optinfo_flags */
3908 TV_NONE, /* tv_id */
3909 0, /* properties_required */
3910 PROP_rtl_split_insns, /* properties_provided */
3911 0, /* properties_destroyed */
3912 0, /* todo_flags_start */
3913 0, /* todo_flags_finish */
3914 };
3915
3916 class pass_split_all_insns : public rtl_opt_pass
3917 {
3918 public:
3919 pass_split_all_insns (gcc::context *ctxt)
3920 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3921 {}
3922
3923 /* opt_pass methods: */
3924 /* The epiphany backend creates a second instance of this pass, so
3925 we need a clone method. */
3926 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3927 virtual unsigned int execute (function *)
3928 {
3929 split_all_insns ();
3930 return 0;
3931 }
3932
3933 }; // class pass_split_all_insns
3934
3935 } // anon namespace
3936
3937 rtl_opt_pass *
3938 make_pass_split_all_insns (gcc::context *ctxt)
3939 {
3940 return new pass_split_all_insns (ctxt);
3941 }
3942
3943 namespace {
3944
3945 const pass_data pass_data_split_after_reload =
3946 {
3947 RTL_PASS, /* type */
3948 "split2", /* name */
3949 OPTGROUP_NONE, /* optinfo_flags */
3950 TV_NONE, /* tv_id */
3951 0, /* properties_required */
3952 0, /* properties_provided */
3953 0, /* properties_destroyed */
3954 0, /* todo_flags_start */
3955 0, /* todo_flags_finish */
3956 };
3957
3958 class pass_split_after_reload : public rtl_opt_pass
3959 {
3960 public:
3961 pass_split_after_reload (gcc::context *ctxt)
3962 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3963 {}
3964
3965 /* opt_pass methods: */
3966 virtual bool gate (function *)
3967 {
3968 /* If optimizing, then go ahead and split insns now. */
3969 return optimize > 0;
3970 }
3971
3972 virtual unsigned int execute (function *)
3973 {
3974 split_all_insns ();
3975 return 0;
3976 }
3977
3978 }; // class pass_split_after_reload
3979
3980 } // anon namespace
3981
3982 rtl_opt_pass *
3983 make_pass_split_after_reload (gcc::context *ctxt)
3984 {
3985 return new pass_split_after_reload (ctxt);
3986 }
3987
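/* Splitting before the second scheduling pass lets the scheduler work on
   the final form of the insns; it is only worthwhile when that pass will
   actually run.  */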
3988 static bool
3989 enable_split_before_sched2 (void)
3990 {
3991 #ifdef INSN_SCHEDULING
3992 return optimize > 0 && flag_schedule_insns_after_reload;
3993 #else
3994 return false;
3995 #endif
3996 }
3997
3998 namespace {
3999
4000 const pass_data pass_data_split_before_sched2 =
4001 {
4002 RTL_PASS, /* type */
4003 "split3", /* name */
4004 OPTGROUP_NONE, /* optinfo_flags */
4005 TV_NONE, /* tv_id */
4006 0, /* properties_required */
4007 0, /* properties_provided */
4008 0, /* properties_destroyed */
4009 0, /* todo_flags_start */
4010 0, /* todo_flags_finish */
4011 };
4012
4013 class pass_split_before_sched2 : public rtl_opt_pass
4014 {
4015 public:
4016 pass_split_before_sched2 (gcc::context *ctxt)
4017 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4018 {}
4019
4020 /* opt_pass methods: */
4021 virtual bool gate (function *)
4022 {
4023 return enable_split_before_sched2 ();
4024 }
4025
4026 virtual unsigned int execute (function *)
4027 {
4028 split_all_insns ();
4029 return 0;
4030 }
4031
4032 }; // class pass_split_before_sched2
4033
4034 } // anon namespace
4035
4036 rtl_opt_pass *
4037 make_pass_split_before_sched2 (gcc::context *ctxt)
4038 {
4039 return new pass_split_before_sched2 (ctxt);
4040 }
4041
4042 namespace {
4043
4044 const pass_data pass_data_split_before_regstack =
4045 {
4046 RTL_PASS, /* type */
4047 "split4", /* name */
4048 OPTGROUP_NONE, /* optinfo_flags */
4049 TV_NONE, /* tv_id */
4050 0, /* properties_required */
4051 0, /* properties_provided */
4052 0, /* properties_destroyed */
4053 0, /* todo_flags_start */
4054 0, /* todo_flags_finish */
4055 };
4056
4057 class pass_split_before_regstack : public rtl_opt_pass
4058 {
4059 public:
4060 pass_split_before_regstack (gcc::context *ctxt)
4061 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4062 {}
4063
4064 /* opt_pass methods: */
4065 virtual bool gate (function *);
4066 virtual unsigned int execute (function *)
4067 {
4068 split_all_insns ();
4069 return 0;
4070 }
4071
4072 }; // class pass_split_before_regstack
4073
4074 bool
4075 pass_split_before_regstack::gate (function *)
4076 {
4077 #if HAVE_ATTR_length && defined (STACK_REGS)
4078 /* If flow2 creates new instructions which need splitting, and
4079    scheduling after reload is not done, they might not be split
4080    until final, which does not allow splitting when
4081    HAVE_ATTR_length is defined.  */
4082 return !enable_split_before_sched2 ();
4083 #else
4084 return false;
4085 #endif
4086 }
4087
4088 } // anon namespace
4089
4090 rtl_opt_pass *
4091 make_pass_split_before_regstack (gcc::context *ctxt)
4092 {
4093 return new pass_split_before_regstack (ctxt);
4094 }
4095
4096 namespace {
4097
4098 const pass_data pass_data_split_for_shorten_branches =
4099 {
4100 RTL_PASS, /* type */
4101 "split5", /* name */
4102 OPTGROUP_NONE, /* optinfo_flags */
4103 TV_NONE, /* tv_id */
4104 0, /* properties_required */
4105 0, /* properties_provided */
4106 0, /* properties_destroyed */
4107 0, /* todo_flags_start */
4108 0, /* todo_flags_finish */
4109 };
4110
4111 class pass_split_for_shorten_branches : public rtl_opt_pass
4112 {
4113 public:
4114 pass_split_for_shorten_branches (gcc::context *ctxt)
4115 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4116 {}
4117
4118 /* opt_pass methods: */
4119 virtual bool gate (function *)
4120 {
4121 /* The placement of the splitting that we do for shorten_branches
4122 depends on whether regstack is used by the target or not. */
4123 #if HAVE_ATTR_length && !defined (STACK_REGS)
4124 return true;
4125 #else
4126 return false;
4127 #endif
4128 }
4129
4130 virtual unsigned int execute (function *)
4131 {
4132 return split_all_insns_noflow ();
4133 }
4134
4135 }; // class pass_split_for_shorten_branches
4136
4137 } // anon namespace
4138
4139 rtl_opt_pass *
4140 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4141 {
4142 return new pass_split_for_shorten_branches (ctxt);
4143 }
4144
4145 /* (Re)initialize the target information after a change in target. */
4146
4147 void
4148 recog_init ()
4149 {
4150 /* The information is zero-initialized, so we don't need to do anything
4151 first time round. */
4152 if (!this_target_recog->x_initialized)
4153 {
4154 this_target_recog->x_initialized = true;
4155 return;
4156 }
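  /* A target switch invalidates the cached information, so clear the bool
     attribute masks and free the per-insn operand alternative data; both
     are recomputed lazily when next needed.  */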
4157 memset (this_target_recog->x_bool_attr_masks, 0,
4158 sizeof (this_target_recog->x_bool_attr_masks));
4159 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4160 if (this_target_recog->x_op_alt[i])
4161 {
4162 free (this_target_recog->x_op_alt[i]);
4163 this_target_recog->x_op_alt[i] = 0;
4164 }
4165 }