;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004-2016 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.
;; Return true if OP is either a i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; Return true if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REGNO_P (REGNO (op))")))
;; True if the operand is a nonimmediate operand with GENERAL class register.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "STACK_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))
;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 new register.
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 mask register.
(define_predicate "mask_reg_operand"
  (and (match_code "reg")
       (match_test "MASK_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register.
(define_predicate "any_QIreg_operand"
  (and (match_code "reg")
       (match_test "ANY_QI_REGNO_P (REGNO (op))")))
;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (and (match_code "reg")
       (match_test "QI_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register operand other than %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_test "TARGET_64BIT")
       ;; Restored: the (match_code "reg") gate was dropped in the garbled
       ;; copy; without it REGNO would be applied to arbitrary rtx codes.
       (match_code "reg")
       (not (match_test "QI_REGNO_P (REGNO (op))"))))
;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))
;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  /* Only DImode (on 64-bit), SImode and HImode registers have a
     directly addressable high byte.  */
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return false;
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return (REG_P (op)
	  && (REGNO (op) > LAST_VIRTUAL_REGISTER || QI_REGNO_P (REGNO (op))));
})
;; Match nonimmediate operands, but exclude memory operands on 64bit targets.
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands on 64bit targets.
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))

;; Match register operands, include memory operand for TARGET_MIX_SSE_I387.
(define_predicate "register_mixssei387nonimm_operand"
  (if_then_else (match_test "TARGET_MIX_SSE_I387")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Match register operands, include memory operand for TARGET_SSE4_1.
(define_predicate "register_sse4nonimm_operand"
  (if_then_else (match_test "TARGET_SSE4_1")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))
;; Return true if VALUE is a symbol reference.
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))
;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      {
	HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
	return trunc_int_for_mode (val, SImode) == val;
      }
    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);
	  HOST_WIDE_INT offset;

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;
	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      /* For CM_KERNEL we know that all object resist in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return true;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  if (trunc_int_for_mode (offset, SImode) == offset)
		    return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;
	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && CONST_INT_P (op2)
		  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
		  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
;; Return true if size of VALUE can be stored in a sign
;; extended immediate field.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
	    (match_test "ix86_cmodel == CM_SMALL")
	    (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))
;; Return true if OP is non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))
;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))
;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})
;; Return true if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL an invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))
;; Test for various thread-local symbols.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})
;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})
;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})
;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  if (reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))
;; Test for a valid operand for indirect branch.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))
;; Return true if OP is a memory operands that can be used in sibcalls.
;; Since sibcall never returns, we can only use call-clobbered register
;; as GOT base.  Allow GOT slot here only with pseudo register as GOT
;; base.  Properly handle sibcall over GOT slot with *sibcall_GOT_32
;; and *sibcall_value_GOT_32 patterns.
(define_predicate "sibcall_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  if (CONSTANT_P (op))
    return true;
  if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
    {
      int regno = REGNO (XEXP (op, 0));
      if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
	{
	  op = XEXP (op, 1);
	  if (GOT32_symbol_operand (op, VOIDmode))
	    return true;
	}
    }
  return false;
})
;; Return true if OP is a GOT memory operand.
(define_predicate "GOT_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  return (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC
	  && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
})
;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))
;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))
;; Return true if OP is a 32-bit GOT symbol operand.
(define_predicate "GOT32_symbol_operand"
  (match_test "GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == UNSPEC
	       && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly -1.
(define_predicate "constm1_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONSTM1_RTX (mode);
})

;; Match one or vector filled with ones.
(define_predicate "const1_operand"
  (match_code "const_int,const_wide_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})
;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))
;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 2, 3, 6, or 7.
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 3 || i == 6 || i == 7;
})

;; Match 1, 2, 4, or 8.
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 1 || i == 2 || i == 4 || i == 8;
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 3 || i == 5 || i == 9;
})
;; Match 4 or 8 to 11.  Used for embeded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || (i >= 8 && i <= 11);
})

;; Match 4 or 8.  Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 4 || i == 8;
})
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})
;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 or 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 or 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 or 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 or 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations causes extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
;; Return true if operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))
;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return true;
  if (standard_sse_constant_p (op) > 0)
    return true;
  return false;
})
;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
;; Return true if op if a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == ADDR_SPACE_GENERIC;
})
;; Return true if op if a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
;; Return true if op is valid MPX address operand without base.
(define_predicate "address_mpx_no_base_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.index && parts.base)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp))
    {
      if (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
	return false;
    }

  return true;
})
;; Return true if op is valid MPX address operand without index.
(define_predicate "address_mpx_no_index_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.index)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp)
      && (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
    return false;

  return true;
})
;; NOTE(review): predicate body elided in this excerpt; presumably matches
;; memory references with VSIB-style addresses — confirm against full source.
(define_predicate "vsib_mem_operator"
;; NOTE(review): predicate body elided in this excerpt; presumably matches
;; memory references used by MPX bnd instructions — confirm against full source.
(define_predicate "bnd_mem_operator"
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
  struct ix86_address parts;
  /* Registers and immediate operands are always "aligned".  */
  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
  /* Trust the MEM's recorded alignment when it is 32 bits or more.  */
  if (MEM_ALIGN (op) >= 32)
  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  /* Strip SUBREGs so the register alignment queries below see the
     underlying hard/pseudo registers.  */
  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);
  /* Look for some component that isn't known to be aligned.  */
  if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
  if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
  /* A constant displacement must be a multiple of 4 bytes.  */
  if (!CONST_INT_P (parts.disp)
      || (INTVAL (parts.disp) & 3))
  /* Didn't find one -- this must be an aligned address.  */
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
  struct ix86_address parts;
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  /* Any decomposed address that produced a displacement qualifies.  */
  return parts.disp != NULL_RTX;
;; Return true if OP is memory operand with a displacement only
;; (no base or index register).
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
  struct ix86_address parts;
  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  /* Reject addresses that also use a base or index register.  */
  if (parts.base || parts.index)
  return parts.disp != NULL_RTX;
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array, i.e. memory_address_length reports extra
;; address bytes are required.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);
  if (inmode == CCFPmode || inmode == CCFPUmode)
      /* An FP comparison must first reduce to a single flag test.  */
      if (!ix86_trivial_fp_comparison_operator (op, mode))
      /* Map the FP comparison onto its integer flag-test equivalent.  */
      code = ix86_fp_compare_code_to_integer (code);
  /* i387 supports just limited amount of conditional codes.  */
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
    case ORDERED: case UNORDERED:
;; Return true if OP is a comparison usable by the CMPSS/CMPPS insns.
;; The first group here is allowed only on AVX, since it can't be done
;; with full IEEE support, i.e. NaNs; the second group is supported
;; directly everywhere.
(define_predicate "sse_comparison_operator"
  (ior (and (match_code "ge,gt,uneq,unle,unlt,ltgt")
	    (match_test "TARGET_AVX"))
       (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")))
;; Return true if OP is a signed integer (or equality) comparison code.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Return true if OP is an unsigned integer (or equality) comparison code.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Return true if OP is an equality or inequality comparison.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);
  /* Floating-point compares are accepted only in their trivial forms.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
    /* Unsigned comparisons need the carry flag, so fewer CC modes apply.  */
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);
  if (inmode == CCFPmode || inmode == CCFPUmode)
      /* Reduce an FP comparison to its integer flag-test equivalent.  */
      if (!ix86_trivial_fp_comparison_operator (op, mode))
      code = ix86_fp_compare_code_to_integer (code);
  /* Only LTU/GTU are acceptable in CCCmode.  */
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
;; Return true if this comparison only requires testing one flag bit,
;; i.e. the FP comparison codes that map onto a single condition test.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code lower those.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
			     == IX86_FPCMP_ARITH")
    (match_operand 0 "comparison_operator")
    (match_operand 0 "ix86_trivial_fp_comparison_operator")))
;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
  enum rtx_code code = GET_CODE (op);
  /* Temporarily swap the condition code in place, run the unswapped
     predicate, then restore the original code before returning.  */
  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into general registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))
;; Return true if this is a float extend operation.
;; (In RTL, FLOAT converts a fixed-point value to floating point.)
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiplies are included only when the tuning flag says HImode imul
;; should be promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; Return true if OP is a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Return true if OP is an absolute-value or negation operation.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))
;; Return true if OP is a memory operand, aligned to
;; less than its natural (mode-size) alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
;; Return true if OP is an emms operation, known to be a PARALLEL.
(define_predicate "emms_operation"
  (match_code "parallel")
  /* The parallel must have exactly 17 elements: one leading element
     plus clobbers of the 8 x87 stack regs and the 8 MMX regs.  */
  if (XVECLEN (op, 0) != 17)
  for (i = 0; i < 8; i++)
      rtx elt = XVECEXP (op, 0, i+1);
      /* Elements 1..8: clobber of st(i) in XFmode.  */
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
      /* Elements 9..16: clobber of mm(i) in DImode.  */
      elt = XVECEXP (op, 0, i+9);
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
  /* 16 SSE registers in 64-bit mode, 8 otherwise.  */
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;
  /* One leading element plus one set per SSE register.  */
  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
  for (i = 0; i < nregs; i++)
      rtx elt = XVECEXP (op, 0, i+1);
      /* Each element must set xmm(i), viewed as V8SImode, to zero.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
;; Return true if OP is a vzeroupper operation.
(define_predicate "vzeroupper_operation"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
;; Return true if OP is an addsub vec_merge operation
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
  /* The merged operands must be a minus/plus pair, in either order.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);
  for (elt = 0; elt < nunits; elt++)
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));
      /* Even lanes must come from one arm and odd lanes from the other,
	 with SWAPPED recording which order the minus/plus pair had.  */
      if (bit != ((elt & 1) ^ swapped))
;; Return true if OP is an addsub vec_select/vec_concat operation
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);
  /* The concatenated halves must be a minus/plus pair, in either order.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
  nunits = GET_MODE_NUNITS (mode);
  /* The selection parallel must cover every lane of the result.  */
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));
  return elt == (swapped ? nunits : 0);
;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
  int nelt = XVECLEN (op, 0);
  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  /* First element 0: even lanes from the first half, odd from the second.  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
  /* First element NELT: the swapped variant of the same permutation.  */
  else if (elt == nelt)
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
;; Return true if OP is a parallel for a vbroadcast permute.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);
  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    /* CONST_INTs are shared, so pointer comparison suffices here.  */
    if (XVECEXP (op, 0, i) != elt)
;; Return true if OP is a parallel for a palignr permute.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);
  /* Check that an order in the permutation is suitable for palignr,
     i.e. a rotation by ELT positions.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm".  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
;; Return true if OP is a proper third operand to vpblendw256.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  /* The 16-bit mask must have identical low and high bytes, since
     vpblendw256 replicates its 8-bit immediate over both 128-bit lanes.  */
  return val == ((low << 8) | low);
;; Return true if OP is either a CONST_VECTOR or any nonimmediate operand.
(define_predicate "general_vector_operand"
  (ior (match_code "const_vector")
       (match_operand 0 "nonimmediate_operand")))
;; Return true if OP is the constant -1 or is held in a register.
(define_predicate "register_or_constm1_operand"
  (ior (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))
       (match_operand 0 "register_operand")))