;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004-2016 Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.
;; Return true if OP is either an i387 or an SSE floating-point register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 floating-point (stack) register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REGNO_P (REGNO (op))")))
;; True if the operand is a nonimmediate operand with GENERAL class register.
;; A REG is checked directly against GENERAL_REGNO_P; any other operand
;; falls through to nonimmediate_operand.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))

;; True if the operand is a general operand with GENERAL class register.
;; Same as above, but non-REG operands fall through to general_operand.
(define_predicate "general_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "general_operand")))
;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is an AVX-512 new register (%xmm16-%xmm31).
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))
;; Return true if op is a QImode register.
(define_predicate "any_QIreg_operand"
  (and (match_code "reg")
       (match_test "ANY_QI_REGNO_P (REGNO (op))")))

;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (and (match_code "reg")
       (match_test "QI_REGNO_P (REGNO (op))")))
72 ;; Return true if op is a QImode register operand other than %[abcd][hl].
73 (define_predicate "ext_QIreg_operand"
74 (and (match_test "TARGET_64BIT")
76 (not (match_test "QI_REGNO_P (REGNO (op))"))))
;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))
88 ;; Match an SI or HImode register for a zero_extract.
89 (define_special_predicate "ext_register_operand"
90 (match_operand 0 "register_operand")
92 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
93 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
98 /* Be careful to accept only registers having upper parts. */
100 && (REGNO (op) > LAST_VIRTUAL_REGISTER || QI_REGNO_P (REGNO (op))));
;; Match nonimmediate operands, but exclude memory operands on 64bit targets.
(define_predicate "nonimmediate_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match general operands, but exclude memory operands on 64bit targets.
(define_predicate "general_x64nomem_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonmemory_operand")
    (match_operand 0 "general_operand")))
115 ;; Match register operands, but include memory operands for TARGET_SSE_MATH.
116 (define_predicate "register_ssemem_operand"
118 (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
119 (match_operand 0 "nonimmediate_operand")
120 (match_operand 0 "register_operand")))
122 ;; Match nonimmediate operands, but exclude memory operands
123 ;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
124 (define_predicate "nonimm_ssenomem_operand"
126 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
127 (not (match_test "TARGET_MIX_SSE_I387")))
128 (match_operand 0 "register_operand")
129 (match_operand 0 "nonimmediate_operand")))
131 ;; The above predicate, suitable for x87 arithmetic operators.
132 (define_predicate "x87nonimm_ssenomem_operand"
134 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
135 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
136 (match_operand 0 "register_operand")
137 (match_operand 0 "nonimmediate_operand")))
;; Match register operands, but include memory operands for TARGET_SSE4_1.
(define_predicate "register_sse4nonimm_operand"
  (if_then_else (match_test "TARGET_SSE4_1")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Return true if VALUE is a symbol reference.
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))
149 ;; Return true if VALUE can be stored in a sign extended immediate field.
150 (define_predicate "x86_64_immediate_operand"
151 (match_code "const_int,symbol_ref,label_ref,const")
154 return immediate_operand (op, mode);
156 switch (GET_CODE (op))
160 HOST_WIDE_INT val = INTVAL (op);
161 return trunc_int_for_mode (val, SImode) == val;
164 /* TLS symbols are not constant. */
165 if (SYMBOL_REF_TLS_MODEL (op))
168 /* Load the external function address via the GOT slot. */
169 if (ix86_force_load_from_GOT_p (op))
	 /* For certain code models, the symbolic references are known to fit;
	    in CM_SMALL_PIC model we know it fits if it is local to the shared
	    library.  Don't count TLS SYMBOL_REFs here, since they should fit
	    only if inside of UNSPEC handled below.  */
176 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
177 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
180 /* For certain code models, the code is near as well. */
181 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
182 || ix86_cmodel == CM_KERNEL);
185 /* We also may accept the offsetted memory references in certain
187 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
188 switch (XINT (XEXP (op, 0), 1))
190 case UNSPEC_GOTPCREL:
192 case UNSPEC_GOTNTPOFF:
199 if (GET_CODE (XEXP (op, 0)) == PLUS)
201 rtx op1 = XEXP (XEXP (op, 0), 0);
202 rtx op2 = XEXP (XEXP (op, 0), 1);
204 if (ix86_cmodel == CM_LARGE)
206 if (!CONST_INT_P (op2))
209 HOST_WIDE_INT offset = INTVAL (op2);
210 if (trunc_int_for_mode (offset, SImode) != offset)
213 switch (GET_CODE (op1))
216 /* TLS symbols are not constant. */
217 if (SYMBOL_REF_TLS_MODEL (op1))
220 /* Load the external function address via the GOT slot. */
221 if (ix86_force_load_from_GOT_p (op1))
224 /* For CM_SMALL assume that latest object is 16MB before
225 end of 31bits boundary. We may also accept pretty
226 large negative constants knowing that all objects are
227 in the positive half of address space. */
228 if ((ix86_cmodel == CM_SMALL
229 || (ix86_cmodel == CM_MEDIUM
230 && !SYMBOL_REF_FAR_ADDR_P (op1)))
231 && offset < 16*1024*1024)
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of the 32-bit address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
237 if (ix86_cmodel == CM_KERNEL
243 /* These conditions are similar to SYMBOL_REF ones, just the
244 constraints for code models differ. */
245 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
246 && offset < 16*1024*1024)
248 if (ix86_cmodel == CM_KERNEL
254 switch (XINT (op1, 1))
275 ;; Return true if VALUE can be stored in the zero extended immediate field.
276 (define_predicate "x86_64_zext_immediate_operand"
277 (match_code "const_int,symbol_ref,label_ref,const")
279 switch (GET_CODE (op))
282 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
285 /* TLS symbols are not constant. */
286 if (SYMBOL_REF_TLS_MODEL (op))
289 /* Load the external function address via the GOT slot. */
290 if (ix86_force_load_from_GOT_p (op))
293 /* For certain code models, the symbolic references are known to fit. */
294 return (ix86_cmodel == CM_SMALL
295 || (ix86_cmodel == CM_MEDIUM
296 && !SYMBOL_REF_FAR_ADDR_P (op)));
299 /* For certain code models, the code is near as well. */
300 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
303 /* We also may accept the offsetted memory references in certain
305 if (GET_CODE (XEXP (op, 0)) == PLUS)
307 rtx op1 = XEXP (XEXP (op, 0), 0);
308 rtx op2 = XEXP (XEXP (op, 0), 1);
310 if (ix86_cmodel == CM_LARGE)
312 if (!CONST_INT_P (op2))
315 HOST_WIDE_INT offset = INTVAL (op2);
316 if (trunc_int_for_mode (offset, SImode) != offset)
319 switch (GET_CODE (op1))
322 /* TLS symbols are not constant. */
323 if (SYMBOL_REF_TLS_MODEL (op1))
326 /* Load the external function address via the GOT slot. */
327 if (ix86_force_load_from_GOT_p (op1))
330 /* For small code model we may accept pretty large positive
331 offsets, since one bit is available for free. Negative
332 offsets are limited by the size of NULL pointer area
333 specified by the ABI. */
334 if ((ix86_cmodel == CM_SMALL
335 || (ix86_cmodel == CM_MEDIUM
336 && !SYMBOL_REF_FAR_ADDR_P (op1)))
337 && offset > -0x10000)
339 /* ??? For the kernel, we may accept adjustment of
340 -0x10000000, since we know that it will just convert
341 negative address space to positive, but perhaps this
342 is not worthwhile. */
346 /* These conditions are similar to SYMBOL_REF ones, just the
347 constraints for code models differ. */
348 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
349 && offset > -0x10000)
365 ;; Return true if VALUE is a constant integer whose low and high words satisfy
366 ;; x86_64_immediate_operand.
367 (define_predicate "x86_64_hilo_int_operand"
368 (match_code "const_int,const_wide_int")
370 switch (GET_CODE (op))
373 return x86_64_immediate_operand (op, mode);
376 gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
377 return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
379 && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
;; Return true if the size of VALUE can be stored in a sign
;; extended immediate field.  Always true for 32-bit targets;
;; on 64-bit targets only for the small and kernel code models.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
	    (match_test "ix86_cmodel == CM_SMALL")
	    (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))
;; Return true if both of OP's words are general operands representable
;; on x86_64.
(define_predicate "x86_64_hilo_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_hilo_int_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))
;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (and (match_operand 0 "x86_64_zext_immediate_operand")
	      (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand")
	 (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
459 ;; Return true when operand is PIC expression that can be computed by lea
461 (define_predicate "pic_32bit_operand"
462 (match_code "const,symbol_ref,label_ref")
467 /* Rule out relocations that translate into 64bit constants. */
468 if (TARGET_64BIT && GET_CODE (op) == CONST)
471 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
473 if (GET_CODE (op) == UNSPEC
474 && (XINT (op, 1) == UNSPEC_GOTOFF
475 || XINT (op, 1) == UNSPEC_GOT))
479 return symbolic_operand (op, mode);
;; Return true if OP is nonmemory operand acceptable by movabs patterns.
;; PIC expressions are rejected since they are loaded differently.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
487 ;; Return true if OP is either a symbol reference or a sum of a symbol
488 ;; reference and a constant.
489 (define_predicate "symbolic_operand"
490 (match_code "symbol_ref,label_ref,const")
492 switch (GET_CODE (op))
500 if (GET_CODE (op) == SYMBOL_REF
501 || GET_CODE (op) == LABEL_REF
502 || (GET_CODE (op) == UNSPEC
503 && (XINT (op, 1) == UNSPEC_GOT
504 || XINT (op, 1) == UNSPEC_GOTOFF
505 || XINT (op, 1) == UNSPEC_PCREL
506 || XINT (op, 1) == UNSPEC_GOTPCREL)))
508 if (GET_CODE (op) != PLUS
509 || !CONST_INT_P (XEXP (op, 1)))
513 if (GET_CODE (op) == SYMBOL_REF
514 || GET_CODE (op) == LABEL_REF)
516 /* Only @GOTOFF gets offsets. */
517 if (GET_CODE (op) != UNSPEC
518 || XINT (op, 1) != UNSPEC_GOTOFF)
521 op = XVECEXP (op, 0, 0);
522 if (GET_CODE (op) == SYMBOL_REF
523 || GET_CODE (op) == LABEL_REF)
532 ;; Return true if OP is a symbolic operand that resolves locally.
533 (define_predicate "local_symbolic_operand"
534 (match_code "const,label_ref,symbol_ref")
536 if (GET_CODE (op) == CONST
537 && GET_CODE (XEXP (op, 0)) == PLUS
538 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
539 op = XEXP (XEXP (op, 0), 0);
541 if (GET_CODE (op) == LABEL_REF)
544 if (GET_CODE (op) != SYMBOL_REF)
547 if (SYMBOL_REF_TLS_MODEL (op))
550 /* Dll-imported symbols are always external. */
551 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
553 if (SYMBOL_REF_LOCAL_P (op))
556 /* There is, however, a not insubstantial body of code in the rest of
557 the compiler that assumes it can just stick the results of
558 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
559 /* ??? This is a hack. Should update the body of the compiler to
560 always create a DECL an invoke targetm.encode_section_info. */
561 if (strncmp (XSTR (op, 0), internal_label_prefix,
562 internal_label_prefix_len) == 0)
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for a thread-local symbol: a SYMBOL_REF with a non-zero TLS model.
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

;; Test for the TLS module base symbol.
(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))
589 ;; Test for a pc-relative call operand
590 (define_predicate "constant_call_address_operand"
591 (match_code "symbol_ref")
593 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
595 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
600 ;; P6 processors will jump to the address after the decrement when %esp
601 ;; is used as a call operand, so they will execute return address as a code.
602 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
604 (define_predicate "call_register_no_elim_operand"
605 (match_operand 0 "register_operand")
608 op = SUBREG_REG (op);
610 if (!TARGET_64BIT && op == stack_pointer_rtx)
613 return register_no_elim_operand (op, mode);
616 ;; True for any non-virtual or eliminable register. Used in places where
617 ;; instantiation of such a register may cause the pattern to not be recognized.
618 (define_predicate "register_no_elim_operand"
619 (match_operand 0 "register_operand")
622 op = SUBREG_REG (op);
623 return !(op == arg_pointer_rtx
624 || op == frame_pointer_rtx
625 || IN_RANGE (REGNO (op),
626 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
629 ;; Similarly, but include the stack pointer. This is used to prevent esp
630 ;; from being used as an index reg.
631 (define_predicate "index_register_operand"
632 (match_operand 0 "register_operand")
635 op = SUBREG_REG (op);
636 if (reload_completed)
637 return REG_OK_FOR_INDEX_STRICT_P (op);
639 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.
;; Memory operands are not allowed on x32.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
	    (match_operand 0 "memory_operand"))))
;; Return true if OP is a memory operand that can be used in sibcalls.
661 ;; Since sibcall never returns, we can only use call-clobbered register
662 ;; as GOT base. Allow GOT slot here only with pseudo register as GOT
663 ;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
664 ;; and *sibcall_value_GOT_32 patterns.
665 (define_predicate "sibcall_memory_operand"
666 (match_operand 0 "memory_operand")
671 if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
673 int regno = REGNO (XEXP (op, 0));
674 if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
677 if (GOT32_symbol_operand (op, VOIDmode))
684 ;; Return true if OP is a GOT memory operand.
685 (define_predicate "GOT_memory_operand"
686 (match_operand 0 "memory_operand")
689 return (GET_CODE (op) == CONST
690 && GET_CODE (XEXP (op, 0)) == UNSPEC
691 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.  Memory operands
;; are disallowed on x32 except for a GOT slot with 64-bit Pmode.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))
;; Return true if OP is a 32-bit GOT symbol operand, i.e. a CONST
;; wrapping an UNSPEC_GOT.
(define_predicate "GOT32_symbol_operand"
  (match_test "GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == UNSPEC
	       && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
721 ;; Match exactly zero.
722 (define_predicate "const0_operand"
723 (match_code "const_int,const_double,const_vector")
725 if (mode == VOIDmode)
726 mode = GET_MODE (op);
727 return op == CONST0_RTX (mode);
730 ;; Match one or a vector with all elements equal to one.
731 (define_predicate "const1_operand"
732 (match_code "const_int,const_double,const_vector")
734 if (mode == VOIDmode)
735 mode = GET_MODE (op);
736 return op == CONST1_RTX (mode);
;; Match exactly -1.
(define_predicate "constm1_operand"
  (and (match_code "const_int")
       (match_test "op == constm1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0xffffffff in anddi as a zero-extension operation.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))
760 ;; Match 2, 4, or 8. Used for leal multiplicands.
761 (define_predicate "const248_operand"
762 (match_code "const_int")
764 HOST_WIDE_INT i = INTVAL (op);
765 return i == 2 || i == 4 || i == 8;
768 ;; Match 2, 3, 6, or 7
769 (define_predicate "const2367_operand"
770 (match_code "const_int")
772 HOST_WIDE_INT i = INTVAL (op);
773 return i == 2 || i == 3 || i == 6 || i == 7;
776 ;; Match 1, 2, 4, or 8
777 (define_predicate "const1248_operand"
778 (match_code "const_int")
780 HOST_WIDE_INT i = INTVAL (op);
781 return i == 1 || i == 2 || i == 4 || i == 8;
784 ;; Match 3, 5, or 9. Used for leal multiplicands.
785 (define_predicate "const359_operand"
786 (match_code "const_int")
788 HOST_WIDE_INT i = INTVAL (op);
789 return i == 3 || i == 5 || i == 9;
;; Match 4 or 8 to 11.  Used for embedded rounding.
793 (define_predicate "const_4_or_8_to_11_operand"
794 (match_code "const_int")
796 HOST_WIDE_INT i = INTVAL (op);
797 return i == 4 || (i >= 8 && i <= 11);
800 ;; Match 4 or 8. Used for SAE.
801 (define_predicate "const48_operand"
802 (match_code "const_int")
804 HOST_WIDE_INT i = INTVAL (op);
805 return i == 4 || i == 8;
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
	    (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
854 ;; Match (0 to 255) * 8
855 (define_predicate "const_0_to_255_mul_8_operand"
856 (match_code "const_int")
858 unsigned HOST_WIDE_INT val = INTVAL (op);
859 return val <= 255*8 && val % 8 == 0;
;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 or 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 or 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 or 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 or 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
954 ;; True if this is a constant appropriate for an increment or decrement.
955 (define_predicate "incdec_operand"
956 (match_code "const_int")
958 /* On Pentium4, the inc and dec operations causes extra dependency on flag
959 registers, since carry flag is not set. */
960 if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
962 return op == const1_rtx || op == constm1_rtx;
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as the shifted operand of a DImode ashift
;; expander: any nonimmediate on 64-bit, else a register or +/-1.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
983 ;; Return true if OP is a vector load from the constant pool with just
984 ;; the first element nonzero.
985 (define_predicate "zero_extended_scalar_load_operand"
989 op = maybe_get_pool_constant (op);
991 if (!(op && GET_CODE (op) == CONST_VECTOR))
994 n_elts = CONST_VECTOR_NUNITS (op);
996 for (n_elts--; n_elts > 0; n_elts--)
998 rtx elt = CONST_VECTOR_ELT (op, n_elts);
999 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
;; Return true if operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

;; Return true when OP is operand acceptable for vector memory operand.
;; Only AVX can have misaligned memory operand.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))

;; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))

;; Return true when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "standard_sse_constant_p (op, mode)")))

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
1048 ;; Return true if op if a valid address for LEA, and does not contain
1049 ;; a segment override. Defined as a special predicate to allow
1050 ;; mode-less const_int operands pass to address_operand.
1051 (define_special_predicate "address_no_seg_operand"
1052 (match_test "address_operand (op, VOIDmode)")
1054 struct ix86_address parts;
1057 if (!CONST_INT_P (op)
1059 && GET_MODE (op) != mode)
1062 ok = ix86_decompose_address (op, &parts);
1064 return parts.seg == ADDR_SPACE_GENERIC;
1067 ;; Return true if op is a valid base register, displacement or
1068 ;; sum of base register and displacement for VSIB addressing.
1069 (define_predicate "vsib_address_operand"
1070 (match_test "address_operand (op, VOIDmode)")
1072 struct ix86_address parts;
1076 ok = ix86_decompose_address (op, &parts);
/* VSIB supplies its own vector index register, so neither an explicit
   index nor a segment override can be encoded here.  */
1078 if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
1081 /* VSIB addressing doesn't support (%rip). */
/* Strip a CONST/PLUS wrapper to inspect the underlying displacement.  */
1085 if (GET_CODE (disp) == CONST)
1087 disp = XEXP (disp, 0);
1088 if (GET_CODE (disp) == PLUS)
1089 disp = XEXP (disp, 0);
/* Reject GOT/TLS UNSPEC displacements and, presumably in 64-bit PIC
   code, bare symbolic displacements — both would need %rip-relative
   addressing (NOTE(review): surrounding lines are truncated here).  */
1090 if (GET_CODE (disp) == UNSPEC)
1091 switch (XINT (disp, 1))
1093 case UNSPEC_GOTPCREL:
1095 case UNSPEC_GOTNTPOFF:
1101 && (GET_CODE (disp) == SYMBOL_REF
1102 || GET_CODE (disp) == LABEL_REF))
1109 ;; Return true if op is valid MPX address operand without base
1110 (define_predicate "address_mpx_no_base_operand"
1111 (match_test "address_operand (op, VOIDmode)")
1113 struct ix86_address parts;
1116 ok = ix86_decompose_address (op, &parts);
/* Reject addresses that use both an index and a base register.  */
1119 if (parts.index && parts.base)
/* Segment overrides are not acceptable.  */
1122 if (parts.seg != ADDR_SPACE_GENERIC)
1125 /* Do not support (%rip). */
1126 if (parts.disp && flag_pic && TARGET_64BIT
1127 && SYMBOLIC_CONST (parts.disp))
/* In 64-bit PIC code the only symbolic displacements allowed are
   TLS offsets of the form (const (plus (unspec DTPOFF/NTPOFF) N)).  */
1129 if (GET_CODE (parts.disp) != CONST
1130 || GET_CODE (XEXP (parts.disp, 0)) != PLUS
1131 || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
1132 || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
1133 || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
1134 && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
1141 ;; Return true if op is valid MPX address operand without index
1142 (define_predicate "address_mpx_no_index_operand"
1143 (match_test "address_operand (op, VOIDmode)")
1145 struct ix86_address parts;
1146 
1148 ok = ix86_decompose_address (op, &parts);
/* Segment overrides are not acceptable.  */
1154 if (parts.seg != ADDR_SPACE_GENERIC)
1157 /* Do not support (%rip). */
/* Same symbolic-displacement restriction as above, expressed as one
   combined condition.  */
1158 if (parts.disp && flag_pic && TARGET_64BIT
1159 && SYMBOLIC_CONST (parts.disp)
1160 && (GET_CODE (parts.disp) != CONST
1161 || GET_CODE (XEXP (parts.disp, 0)) != PLUS
1162 || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
1163 || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
1164 || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
1165 && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
;; NOTE(review): the bodies of the next two operators are not visible in
;; this excerpt.  By name, vsib_mem_operator matches a MEM whose address
;; is suitable for VSIB addressing, and bnd_mem_operator matches a MEM
;; used by MPX bound instructions — confirm against the full file.
1171 (define_predicate "vsib_mem_operator"
1174 (define_predicate "bnd_mem_operator"
1177 ;; Return true if the rtx is known to be at least 32 bits aligned.
1178 (define_predicate "aligned_operand"
1179 (match_operand 0 "general_operand")
1181 struct ix86_address parts;
1184 /* Registers and immediate operands are always "aligned". */
1188 /* All patterns using aligned_operand on memory operands end up
1189 promoting the memory operand to 64bit and thus causing a memory
1189 mismatch stall. */
1190 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
1193 /* Don't even try to do any aligned optimizations with volatiles. */
1194 if (MEM_VOLATILE_P (op))
/* Trust recorded alignment info when it already proves >= 32 bits.  */
1197 if (MEM_ALIGN (op) >= 32)
1202 /* Pushes and pops are only valid on the stack pointer. */
1203 if (GET_CODE (op) == PRE_DEC
1204 || GET_CODE (op) == POST_INC)
1207 /* Decode the address. */
1208 ok = ix86_decompose_address (op, &parts);
/* Look through paradoxical SUBREGs of base/index registers so the
   REGNO_POINTER_ALIGN queries below see the underlying hard reg.  */
1211 if (parts.base && SUBREG_P (parts.base))
1212 parts.base = SUBREG_REG (parts.base);
1213 if (parts.index && SUBREG_P (parts.index))
1214 parts.index = SUBREG_REG (parts.index);
1216 /* Look for some component that isn't known to be aligned. */
1219 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
1224 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
/* A displacement must be a multiple of 4 to preserve alignment.  */
1229 if (!CONST_INT_P (parts.disp)
1230 || (INTVAL (parts.disp) & 3))
1234 /* Didn't find one -- this must be an aligned address. */
1238 ;; Return true if OP is memory operand with a displacement.
1239 (define_predicate "memory_displacement_operand"
1240 (match_operand 0 "memory_operand")
1242 struct ix86_address parts;
1245 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1247 return parts.disp != NULL_RTX;
1250 ;; Return true if OP is memory operand with a displacement only
;; (i.e. with neither a base nor an index register).
1251 (define_predicate "memory_displacement_only_operand"
1252 (match_operand 0 "memory_operand")
1254 struct ix86_address parts;
1260 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1263 if (parts.base || parts.index)
1266 return parts.disp != NULL_RTX;
1269 ;; Return true if OP is memory operand that cannot be represented
1270 ;; by the modRM array.
1271 (define_predicate "long_memory_operand"
1272 (and (match_operand 0 "memory_operand")
1273 (match_test "memory_address_length (op, false)")))
1275 ;; Return true if OP is a comparison operator that can be issued by fcmov.
1276 (define_predicate "fcmov_comparison_operator"
1277 (match_operand 0 "comparison_operator")
1279 machine_mode inmode = GET_MODE (XEXP (op, 0));
1280 enum rtx_code code = GET_CODE (op);
/* For FP flag modes, first map the FP comparison onto the integer
   condition code that is actually tested after the compare.  */
1282 if (inmode == CCFPmode || inmode == CCFPUmode)
1284 if (!ix86_trivial_fp_comparison_operator (op, mode))
1286 code = ix86_fp_compare_code_to_integer (code);
1288 /* i387 supports just limited amount of conditional codes. */
1291 case LTU: case GTU: case LEU: case GEU:
1292 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
1293 || inmode == CCCmode)
1296 case ORDERED: case UNORDERED:
1304 ;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
1305 ;; The first set are supported directly; the second set can't be done with
1306 ;; full IEEE support, i.e. NaNs.
1308 (define_predicate "sse_comparison_operator"
1309 (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
1310 (and (match_test "TARGET_AVX")
1311 (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
;; Signed (and equality) integer comparisons.
1313 (define_predicate "ix86_comparison_int_operator"
1314 (match_code "ne,eq,ge,gt,le,lt"))
;; Unsigned (and equality) integer comparisons.
1316 (define_predicate "ix86_comparison_uns_operator"
1317 (match_code "ne,eq,geu,gtu,leu,ltu"))
;; Equality comparisons only, e.g. for bit-test patterns.
1319 (define_predicate "bt_comparison_operator"
1320 (match_code "ne,eq"))
1322 ;; Return true if OP is a valid comparison operator in valid mode.
1323 (define_predicate "ix86_comparison_operator"
1324 (match_operand 0 "comparison_operator")
1326 machine_mode inmode = GET_MODE (XEXP (op, 0));
1327 enum rtx_code code = GET_CODE (op);
1329 if (inmode == CCFPmode || inmode == CCFPUmode)
1330 return ix86_trivial_fp_comparison_operator (op, mode);
/* Each integer CC mode exposes only a subset of the flags, so which
   comparison codes are acceptable depends on the flags-register mode.  */
1337 if (inmode == CCmode || inmode == CCGCmode
1338 || inmode == CCGOCmode || inmode == CCNOmode)
1341 case LTU: case GTU: case LEU: case GEU:
1342 if (inmode == CCmode || inmode == CCCmode)
1345 case ORDERED: case UNORDERED:
1346 if (inmode == CCmode)
1350 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1358 ;; Return true if OP is a valid comparison operator
1359 ;; testing carry flag to be set.
1360 (define_predicate "ix86_carry_flag_operator"
1361 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
1363 machine_mode inmode = GET_MODE (XEXP (op, 0));
1364 enum rtx_code code = GET_CODE (op);
/* Map FP comparisons to the integer condition tested afterwards.  */
1366 if (inmode == CCFPmode || inmode == CCFPUmode)
1368 if (!ix86_trivial_fp_comparison_operator (op, mode))
1370 code = ix86_fp_compare_code_to_integer (code);
/* In CCCmode only the carry flag is meaningful, so only LTU/GTU
   forms are accepted.  */
1372 else if (inmode == CCCmode)
1373 return code == LTU || code == GTU;
1374 else if (inmode != CCmode)
1380 ;; Return true if this comparison only requires testing one flag bit.
1381 (define_predicate "ix86_trivial_fp_comparison_operator"
1382 (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1384 ;; Return true if we know how to do this comparison. Others require
1385 ;; testing more than one flag bit, and we let the generic middle-end
;; code lower those instead.
1387 (define_predicate "ix86_fp_comparison_operator"
1388 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1389 == IX86_FPCMP_ARITH")
1390 (match_operand 0 "comparison_operator")
1391 (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1393 ;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
1394 (define_predicate "ix86_swapped_fp_comparison_operator"
1395 (match_operand 0 "comparison_operator")
1397 enum rtx_code code = GET_CODE (op);
/* Temporarily swap the comparison code in place, query the normal
   predicate, then restore the original code before returning.  */
1400 PUT_CODE (op, swap_condition (code));
1401 ret = ix86_fp_comparison_operator (op, mode);
1402 PUT_CODE (op, code);
1406 ;; Nearly general operand, but accept any const_double, since we wish
1407 ;; to be able to drop them into memory rather than have them get pulled
;; into a register first.
1409 (define_predicate "cmp_fp_expander_operand"
1410 (ior (match_code "const_double")
1411 (match_operand 0 "general_operand")))
1413 ;; Return true if this is a valid binary floating-point operation.
1414 (define_predicate "binary_fp_operator"
1415 (match_code "plus,minus,mult,div"))
1417 ;; Return true if this is a multiply operation.
1418 (define_predicate "mult_operator"
1419 (match_code "mult"))
1421 ;; Return true if this is a division operation.
;; NOTE(review): the (match_code "div") body of div_operator is not
;; visible in this excerpt — confirm against the full file.
1422 (define_predicate "div_operator"
1425 ;; Return true if this is a plus, minus, and, ior or xor operation.
1426 (define_predicate "plusminuslogic_operator"
1427 (match_code "plus,minus,and,ior,xor"))
1429 ;; Return true if this is a float extend operation.
1430 (define_predicate "float_operator"
1431 (match_code "float"))
1433 ;; Return true for ARITHMETIC_P.
1434 (define_predicate "arith_or_logical_operator"
1435 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1436 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1438 ;; Return true for COMMUTATIVE_P.
1439 (define_predicate "commutative_operator"
1440 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1442 ;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiplies qualify only when the target tuning says HImode imul
;; should be promoted.
1443 (define_predicate "promotable_binary_operator"
1444 (ior (match_code "plus,minus,and,ior,xor,ashift")
1445 (and (match_code "mult")
1446 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
;; Match a compare rtx.
1448 (define_predicate "compare_operator"
1449 (match_code "compare"))
;; Match an absolute-value or negation rtx.
1451 (define_predicate "absneg_operator"
1452 (match_code "abs,neg"))
1454 ;; Return true if OP is a memory operand, aligned to
1455 ;; less than its natural alignment.
1456 (define_predicate "misaligned_operand"
1457 (and (match_code "mem")
1458 (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
1460 ;; Return true if OP is a emms operation, known to be a PARALLEL.
;; The PARALLEL must have 17 elements: elements 1-8 clobber the eight
;; x87 stack registers (XFmode) and elements 9-16 clobber the eight
;; MMX registers (DImode).
1461 (define_predicate "emms_operation"
1462 (match_code "parallel")
1466 if (XVECLEN (op, 0) != 17)
1469 for (i = 0; i < 8; i++)
1471 rtx elt = XVECEXP (op, 0, i+1);
1473 if (GET_CODE (elt) != CLOBBER
1474 || GET_CODE (SET_DEST (elt)) != REG
1475 || GET_MODE (SET_DEST (elt)) != XFmode
1476 || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
/* Element i+9 must clobber the matching MMX register.  */
1479 elt = XVECEXP (op, 0, i+9);
1481 if (GET_CODE (elt) != CLOBBER
1482 || GET_CODE (SET_DEST (elt)) != REG
1483 || GET_MODE (SET_DEST (elt)) != DImode
1484 || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
1490 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
1491 (define_predicate "vzeroall_operation"
1492 (match_code "parallel")
/* vzeroall clears all 16 SSE registers in 64-bit mode, 8 otherwise.  */
1494 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
1496 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
/* Elements 1..nregs must each set an SSE register to V8SImode zero.  */
1499 for (i = 0; i < nregs; i++)
1501 rtx elt = XVECEXP (op, 0, i+1);
1503 if (GET_CODE (elt) != SET
1504 || GET_CODE (SET_DEST (elt)) != REG
1505 || GET_MODE (SET_DEST (elt)) != V8SImode
1506 || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
1507 || SET_SRC (elt) != CONST0_RTX (V8SImode))
1513 ;; Return true if OP is a vzeroupper operation.
1514 (define_predicate "vzeroupper_operation"
1515 (and (match_code "unspec_volatile")
1516 (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1518 ;; Return true if OP is an addsub vec_merge operation
;; (a vec_merge of a PLUS arm and a MINUS arm whose mask alternates
;; between the two, as generated for SSE3 addsubps/addsubpd).
1519 (define_predicate "addsub_vm_operator"
1520 (match_code "vec_merge")
/* One arm must be PLUS and the other MINUS; "swapped" records which
   order they appear in.  */
1531 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1533 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1538 mask = INTVAL (XEXP (op, 2));
1539 nunits = GET_MODE_NUNITS (mode);
/* Every merge-mask bit must alternate between the two arms.  */
1541 for (elt = 0; elt < nunits; elt++)
1543 /* bit clear: take from op0, set: take from op1 */
1544 int bit = !(mask & (HOST_WIDE_INT_1U << elt));
1546 if (bit != ((elt & 1) ^ swapped))
1553 ;; Return true if OP is an addsub vec_select/vec_concat operation
;; (the vec_select permutation form of the same addsub pattern).
1554 (define_predicate "addsub_vs_operator"
1555 (and (match_code "vec_select")
1556 (match_code "vec_concat" "0"))
1562 op0 = XEXP (XEXP (op, 0), 0);
1563 op1 = XEXP (XEXP (op, 0), 1);
/* As above: one arm PLUS, the other MINUS.  */
1566 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1568 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1573 nunits = GET_MODE_NUNITS (mode);
1574 if (XVECLEN (XEXP (op, 1), 0) != nunits)
1577 /* We already checked that permutation is suitable for addsub,
1578 so only look at the first element of the parallel. */
1579 elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));
1581 return elt == (swapped ? nunits : 0);
1584 ;; Return true if OP is a parallel for an addsub vec_select.
1585 (define_predicate "addsub_vs_parallel"
1586 (and (match_code "parallel")
1587 (match_code "const_int" "a"))
1589 int nelt = XVECLEN (op, 0);
1595 /* Check that the permutation is suitable for addsub.
1596 For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }. */
1597 elt = INTVAL (XVECEXP (op, 0, 0));
/* First element 0: even lanes from the first vector, odd from the
   second (offset by nelt).  */
1600 for (i = 1; i < nelt; ++i)
1601 if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
/* First element nelt: the mirrored permutation.  */
1604 else if (elt == nelt)
1606 for (i = 1; i < nelt; ++i)
1607 if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
1616 ;; Return true if OP is a parallel for a vbroadcast permute.
1617 (define_predicate "avx_vbroadcast_operand"
1618 (and (match_code "parallel")
1619 (match_code "const_int" "a"))
1621 rtx elt = XVECEXP (op, 0, 0);
1622 int i, nelt = XVECLEN (op, 0);
1624 /* Don't bother checking there are the right number of operands,
1625 merely that they're all identical. */
/* CONST_INT rtxes are shared, so pointer comparison suffices here.  */
1626 for (i = 1; i < nelt; ++i)
1627 if (XVECEXP (op, 0, i) != elt)
1632 ;; Return true if OP is a parallel for a palignr permute.
1633 (define_predicate "palignr_operand"
1634 (and (match_code "parallel")
1635 (match_code "const_int" "a"))
1637 int elt = INTVAL (XVECEXP (op, 0, 0));
1638 int i, nelt = XVECLEN (op, 0);
1640 /* Check that an order in the permutation is suitable for palignr.
1641 For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm". */
/* Each element must be the first element rotated by its index.  */
1642 for (i = 1; i < nelt; ++i)
1643 if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
1648 ;; Return true if OP is a proper third operand to vpblendw256.
1649 (define_predicate "avx2_pblendw_operand"
1650 (match_code "const_int")
1652 HOST_WIDE_INT val = INTVAL (op);
1653 HOST_WIDE_INT low = val & 0xff;
/* vpblendw256 applies the same 8-bit immediate to both 128-bit lanes,
   so the low and high bytes of the 16-bit mask must be equal.  */
1654 return val == ((low << 8) | low);
1657 ;; Return true if OP is vector_operand or CONST_VECTOR.
1658 (define_predicate "general_vector_operand"
1659 (ior (match_operand 0 "vector_operand")
1660 (match_code "const_vector")))
1662 ;; Return true if OP is either -1 constant or stored in register.
1663 (define_predicate "register_or_constm1_operand"
1664 (ior (match_operand 0 "register_operand")
1665 (and (match_code "const_int")
1666 (match_test "op == constm1_rtx"))))