i386.md (UNSPEC_VSIBADDR): New.
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 ;; Free Software Foundation, Inc.
4 ;;
5 ;; This file is part of GCC.
6 ;;
7 ;; GCC is free software; you can redistribute it and/or modify
8 ;; it under the terms of the GNU General Public License as published by
9 ;; the Free Software Foundation; either version 3, or (at your option)
10 ;; any later version.
11 ;;
12 ;; GCC is distributed in the hope that it will be useful,
13 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 ;; GNU General Public License for more details.
16 ;;
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
20
21 ;; Return true if OP is either an i387 or SSE fp register.
22 (define_predicate "any_fp_register_operand"
23 (and (match_code "reg")
24 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26 ;; Return true if OP is an i387 fp register.
27 (define_predicate "fp_register_operand"
28 (and (match_code "reg")
29 (match_test "FP_REGNO_P (REGNO (op))")))
30
31 ;; Return true if OP is a non-fp register_operand.
32 (define_predicate "register_and_not_any_fp_reg_operand"
33 (and (match_code "reg")
34 (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36 ;; Return true if OP is a register operand other than an i387 fp register.
37 (define_predicate "register_and_not_fp_reg_operand"
38 (and (match_code "reg")
39 (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41 ;; True if the operand is an MMX register.
42 (define_predicate "mmx_reg_operand"
43 (and (match_code "reg")
44 (match_test "MMX_REGNO_P (REGNO (op))")))
45
46 ;; True if the operand is an SSE register.
47 (define_predicate "sse_reg_operand"
48 (and (match_code "reg")
49 (match_test "SSE_REGNO_P (REGNO (op))")))
50
51 ;; True if the operand is a Q_REGS class register.
52 (define_predicate "q_regs_operand"
53 (match_operand 0 "register_operand")
54 {
55 if (GET_CODE (op) == SUBREG)
56 op = SUBREG_REG (op);
57 return ANY_QI_REG_P (op);
58 })
59
60 ;; Match an SI or HImode register for a zero_extract.
61 (define_special_predicate "ext_register_operand"
62 (match_operand 0 "register_operand")
63 {
64 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
65 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
66 return false;
67 if (GET_CODE (op) == SUBREG)
68 op = SUBREG_REG (op);
69
70 /* Be careful to accept only registers having upper parts; only %eax, %ebx, %ecx and %edx have them, hence the BX_REG bound. */
71 return (REG_P (op)
72 && (REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) <= BX_REG));
73 })
74
75 ;; Return true if op is the AX register.
76 (define_predicate "ax_reg_operand"
77 (and (match_code "reg")
78 (match_test "REGNO (op) == AX_REG")))
79
80 ;; Return true if op is the flags register.
81 (define_predicate "flags_reg_operand"
82 (and (match_code "reg")
83 (match_test "REGNO (op) == FLAGS_REG")))
84
85 ;; Return true if op is one of QImode registers: %[abcd][hl].
86 (define_predicate "QIreg_operand"
87 (match_test "QI_REG_P (op)"))
88
89 ;; Return true if op is a QImode register operand other than
90 ;; %[abcd][hl].
91 (define_predicate "ext_QIreg_operand"
92 (and (match_code "reg")
93 (match_test "TARGET_64BIT")
94 (match_test "REGNO (op) > BX_REG")))
95
96 ;; Return true if op is not the xmm0 register.
97 (define_predicate "reg_not_xmm0_operand"
98 (match_operand 0 "register_operand")
99 {
100 if (GET_CODE (op) == SUBREG)
101 op = SUBREG_REG (op);
102
103 return !REG_P (op) || REGNO (op) != FIRST_SSE_REG;
104 })
105
106 ;; As above, but also allow memory operands.
107 (define_predicate "nonimm_not_xmm0_operand"
108 (ior (match_operand 0 "memory_operand")
109 (match_operand 0 "reg_not_xmm0_operand")))
110
111 ;; Return true if op is not the xmm0 register, but only for non-AVX targets.
112 (define_predicate "reg_not_xmm0_operand_maybe_avx"
113 (if_then_else (match_test "TARGET_AVX")
114 (match_operand 0 "register_operand")
115 (match_operand 0 "reg_not_xmm0_operand")))
116
117 ;; As above, but also allow memory operands.
118 (define_predicate "nonimm_not_xmm0_operand_maybe_avx"
119 (if_then_else (match_test "TARGET_AVX")
120 (match_operand 0 "nonimmediate_operand")
121 (match_operand 0 "nonimm_not_xmm0_operand")))
122
123 ;; Return true if VALUE can be stored in a sign extended immediate field.
124 (define_predicate "x86_64_immediate_operand"
125 (match_code "const_int,symbol_ref,label_ref,const")
126 {
127 if (!TARGET_64BIT)
128 return immediate_operand (op, mode);
129
130 switch (GET_CODE (op))
131 {
132 case CONST_INT:
133 /* CONST_DOUBLES never match, since HOST_BITS_PER_WIDE_INT is known
134 to be at least 32 and thus all acceptable constants are
135 represented as CONST_INT. */
136 if (HOST_BITS_PER_WIDE_INT == 32)
137 return true;
138 else
139 {
140 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
141 return trunc_int_for_mode (val, SImode) == val;
142 }
143 break;
144
145 case SYMBOL_REF:
146 /* For certain code models, the symbolic references are known to fit;
147 in CM_SMALL_PIC model we know it fits if it is local to the shared
148 library. Don't count TLS SYMBOL_REFs here, since they should fit
149 only if inside of UNSPEC handled below. */
150 /* TLS symbols are not constant. */
151 if (SYMBOL_REF_TLS_MODEL (op))
152 return false;
153 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
154 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
155
156 case LABEL_REF:
157 /* For certain code models, the code is near as well. */
158 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
159 || ix86_cmodel == CM_KERNEL);
160
161 case CONST:
162 /* We may also accept offsetted memory references in certain
163 special cases. */
164 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
165 switch (XINT (XEXP (op, 0), 1))
166 {
167 case UNSPEC_GOTPCREL:
168 case UNSPEC_DTPOFF:
169 case UNSPEC_GOTNTPOFF:
170 case UNSPEC_NTPOFF:
171 return true;
172 default:
173 break;
174 }
175
176 if (GET_CODE (XEXP (op, 0)) == PLUS)
177 {
178 rtx op1 = XEXP (XEXP (op, 0), 0);
179 rtx op2 = XEXP (XEXP (op, 0), 1);
180 HOST_WIDE_INT offset;
181
182 if (ix86_cmodel == CM_LARGE)
183 return false;
184 if (!CONST_INT_P (op2))
185 return false;
186 offset = trunc_int_for_mode (INTVAL (op2), DImode);
187 switch (GET_CODE (op1))
188 {
189 case SYMBOL_REF:
190 /* TLS symbols are not constant. */
191 if (SYMBOL_REF_TLS_MODEL (op1))
192 return false;
193 /* For CM_SMALL assume that latest object is 16MB before
194 end of 31bits boundary. We may also accept pretty
195 large negative constants knowing that all objects are
196 in the positive half of address space. */
197 if ((ix86_cmodel == CM_SMALL
198 || (ix86_cmodel == CM_MEDIUM
199 && !SYMBOL_REF_FAR_ADDR_P (op1)))
200 && offset < 16*1024*1024
201 && trunc_int_for_mode (offset, SImode) == offset)
202 return true;
203 /* For CM_KERNEL we know that all objects reside in the
204 negative half of 32bits address space. We may not
205 accept negative offsets, since they may be just off
206 and we may accept pretty large positive ones. */
207 if (ix86_cmodel == CM_KERNEL
208 && offset > 0
209 && trunc_int_for_mode (offset, SImode) == offset)
210 return true;
211 break;
212
213 case LABEL_REF:
214 /* These conditions are similar to SYMBOL_REF ones, just the
215 constraints for code models differ. */
216 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
217 && offset < 16*1024*1024
218 && trunc_int_for_mode (offset, SImode) == offset)
219 return true;
220 if (ix86_cmodel == CM_KERNEL
221 && offset > 0
222 && trunc_int_for_mode (offset, SImode) == offset)
223 return true;
224 break;
225
226 case UNSPEC:
227 switch (XINT (op1, 1))
228 {
229 case UNSPEC_DTPOFF:
230 case UNSPEC_NTPOFF:
231 if (offset > 0
232 && trunc_int_for_mode (offset, SImode) == offset)
233 return true;
234 }
235 break;
236
237 default:
238 break;
239 }
240 }
241 break;
242
243 default:
244 gcc_unreachable ();
245 }
246
247 return false;
248 })
249
250 ;; Return true if VALUE can be stored in the zero extended immediate field.
251 (define_predicate "x86_64_zext_immediate_operand"
252 (match_code "const_double,const_int,symbol_ref,label_ref,const")
253 {
254 switch (GET_CODE (op))
255 {
256 case CONST_DOUBLE:
257 if (HOST_BITS_PER_WIDE_INT == 32)
258 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
259 else
260 return false;
261
262 case CONST_INT:
263 if (HOST_BITS_PER_WIDE_INT == 32)
264 return INTVAL (op) >= 0;
265 else
266 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
267
268 case SYMBOL_REF:
269 /* For certain code models, the symbolic references are known to fit. */
270 /* TLS symbols are not constant. */
271 if (SYMBOL_REF_TLS_MODEL (op))
272 return false;
273 return (ix86_cmodel == CM_SMALL
274 || (ix86_cmodel == CM_MEDIUM
275 && !SYMBOL_REF_FAR_ADDR_P (op)));
276
277 case LABEL_REF:
278 /* For certain code models, the code is near as well. */
279 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
280
281 case CONST:
282 /* We may also accept offsetted memory references in certain
283 special cases. */
284 if (GET_CODE (XEXP (op, 0)) == PLUS)
285 {
286 rtx op1 = XEXP (XEXP (op, 0), 0);
287 rtx op2 = XEXP (XEXP (op, 0), 1);
288
289 if (ix86_cmodel == CM_LARGE)
290 return false;
291 switch (GET_CODE (op1))
292 {
293 case SYMBOL_REF:
294 /* TLS symbols are not constant. */
295 if (SYMBOL_REF_TLS_MODEL (op1))
296 return false;
297 /* For small code model we may accept pretty large positive
298 offsets, since one bit is available for free. Negative
299 offsets are limited by the size of NULL pointer area
300 specified by the ABI. */
301 if ((ix86_cmodel == CM_SMALL
302 || (ix86_cmodel == CM_MEDIUM
303 && !SYMBOL_REF_FAR_ADDR_P (op1)))
304 && CONST_INT_P (op2)
305 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
306 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
307 return true;
308 /* ??? For the kernel, we may accept adjustment of
309 -0x10000000, since we know that it will just convert
310 negative address space to positive, but perhaps this
311 is not worthwhile. */
312 break;
313
314 case LABEL_REF:
315 /* These conditions are similar to SYMBOL_REF ones, just the
316 constraints for code models differ. */
317 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
318 && CONST_INT_P (op2)
319 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
320 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
321 return true;
322 break;
323
324 default:
325 return false;
326 }
327 }
328 break;
329
330 default:
331 gcc_unreachable ();
332 }
333 return false;
334 })
335
336 ;; Return true if OP is general operand representable on x86_64.
337 (define_predicate "x86_64_general_operand"
338 (if_then_else (match_test "TARGET_64BIT")
339 (ior (match_operand 0 "nonimmediate_operand")
340 (match_operand 0 "x86_64_immediate_operand"))
341 (match_operand 0 "general_operand")))
342
343 ;; Return true if OP is general operand representable on x86_64
344 ;; as either sign extended or zero extended constant.
345 (define_predicate "x86_64_szext_general_operand"
346 (if_then_else (match_test "TARGET_64BIT")
347 (ior (match_operand 0 "nonimmediate_operand")
348 (match_operand 0 "x86_64_immediate_operand")
349 (match_operand 0 "x86_64_zext_immediate_operand"))
350 (match_operand 0 "general_operand")))
351
352 ;; Return true if OP is nonmemory operand representable on x86_64.
353 (define_predicate "x86_64_nonmemory_operand"
354 (if_then_else (match_test "TARGET_64BIT")
355 (ior (match_operand 0 "register_operand")
356 (match_operand 0 "x86_64_immediate_operand"))
357 (match_operand 0 "nonmemory_operand")))
358
359 ;; Return true if OP is nonmemory operand representable on x86_64 as either sign extended or zero extended constant.
360 (define_predicate "x86_64_szext_nonmemory_operand"
361 (if_then_else (match_test "TARGET_64BIT")
362 (ior (match_operand 0 "register_operand")
363 (match_operand 0 "x86_64_immediate_operand")
364 (match_operand 0 "x86_64_zext_immediate_operand"))
365 (match_operand 0 "nonmemory_operand")))
366
367 ;; Return true when operand is PIC expression that can be computed by lea
368 ;; operation.
369 (define_predicate "pic_32bit_operand"
370 (match_code "const,symbol_ref,label_ref")
371 {
372 if (!flag_pic)
373 return false;
374
375 /* Rule out relocations that translate into 64bit constants. */
376 if (TARGET_64BIT && GET_CODE (op) == CONST)
377 {
378 op = XEXP (op, 0);
379 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
380 op = XEXP (op, 0);
381 if (GET_CODE (op) == UNSPEC
382 && (XINT (op, 1) == UNSPEC_GOTOFF
383 || XINT (op, 1) == UNSPEC_GOT))
384 return false;
385 }
386
387 return symbolic_operand (op, mode);
388 })
389
390 ;; Return true if OP is nonmemory operand acceptable by movabs patterns.
391 (define_predicate "x86_64_movabs_operand"
392 (and (match_operand 0 "nonmemory_operand")
393 (not (match_operand 0 "pic_32bit_operand"))))
394
395 ;; Return true if OP is either a symbol reference or a sum of a symbol
396 ;; reference and a constant.
397 (define_predicate "symbolic_operand"
398 (match_code "symbol_ref,label_ref,const")
399 {
400 switch (GET_CODE (op))
401 {
402 case SYMBOL_REF:
403 case LABEL_REF:
404 return true;
405
406 case CONST:
407 op = XEXP (op, 0);
408 if (GET_CODE (op) == SYMBOL_REF
409 || GET_CODE (op) == LABEL_REF
410 || (GET_CODE (op) == UNSPEC
411 && (XINT (op, 1) == UNSPEC_GOT
412 || XINT (op, 1) == UNSPEC_GOTOFF
413 || XINT (op, 1) == UNSPEC_GOTPCREL)))
414 return true;
415 if (GET_CODE (op) != PLUS
416 || !CONST_INT_P (XEXP (op, 1)))
417 return false;
418
419 op = XEXP (op, 0);
420 if (GET_CODE (op) == SYMBOL_REF
421 || GET_CODE (op) == LABEL_REF)
422 return true;
423 /* Among the UNSPECs, only @GOTOFF may carry an offset. */
424 if (GET_CODE (op) != UNSPEC
425 || XINT (op, 1) != UNSPEC_GOTOFF)
426 return false;
427
428 op = XVECEXP (op, 0, 0);
429 if (GET_CODE (op) == SYMBOL_REF
430 || GET_CODE (op) == LABEL_REF)
431 return true;
432 return false;
433
434 default:
435 gcc_unreachable ();
436 }
437 })
438
439 ;; Return true if OP is a symbolic operand that resolves locally.
440 (define_predicate "local_symbolic_operand"
441 (match_code "const,label_ref,symbol_ref")
442 {
443 if (GET_CODE (op) == CONST
444 && GET_CODE (XEXP (op, 0)) == PLUS
445 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
446 op = XEXP (XEXP (op, 0), 0);
447
448 if (GET_CODE (op) == LABEL_REF)
449 return true;
450
451 if (GET_CODE (op) != SYMBOL_REF)
452 return false;
453
454 if (SYMBOL_REF_TLS_MODEL (op))
455 return false;
456
457 if (SYMBOL_REF_LOCAL_P (op))
458 return true;
459
460 /* There is, however, a not insubstantial body of code in the rest of
461 the compiler that assumes it can just stick the results of
462 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
463 /* ??? This is a hack. Should update the body of the compiler to
464 always create a DECL and invoke targetm.encode_section_info. */
465 if (strncmp (XSTR (op, 0), internal_label_prefix,
466 internal_label_prefix_len) == 0)
467 return true;
468
469 return false;
470 })
471
472 ;; Test for a legitimate @GOTOFF operand.
473 ;;
474 ;; VxWorks does not impose a fixed gap between segments; the run-time
475 ;; gap can be different from the object-file gap. We therefore can't
476 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
477 ;; same segment as the GOT. Unfortunately, the flexibility of linker
478 ;; scripts means that we can't be sure of that in general, so assume
479 ;; that @GOTOFF is never valid on VxWorks.
480 (define_predicate "gotoff_operand"
481 (and (not (match_test "TARGET_VXWORKS_RTP"))
482 (match_operand 0 "local_symbolic_operand")))
483
484 ;; Test for various thread-local symbols.
485 (define_predicate "tls_symbolic_operand"
486 (and (match_code "symbol_ref")
487 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
488
489 (define_predicate "tls_modbase_operand"
490 (and (match_code "symbol_ref")
491 (match_test "op == ix86_tls_module_base ()")))
492
493 ;; Test for a pc-relative call operand.
494 (define_predicate "constant_call_address_operand"
495 (match_code "symbol_ref")
496 {
497 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
498 return false;
499 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
500 return false;
501 return true;
502 })
503
504 ;; P6 processors will jump to the address after the decrement when %esp
505 ;; is used as a call operand, so they will execute return address as code.
506 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
507
508 (define_predicate "call_register_no_elim_operand"
509 (match_operand 0 "register_operand")
510 {
511 if (GET_CODE (op) == SUBREG)
512 op = SUBREG_REG (op);
513
514 if (!TARGET_64BIT && op == stack_pointer_rtx)
515 return false;
516
517 return register_no_elim_operand (op, mode);
518 })
519
520 ;; True for any non-virtual and non-eliminable register. Used in places where
521 ;; instantiation of such a register may cause the pattern to not be recognized.
522 (define_predicate "register_no_elim_operand"
523 (match_operand 0 "register_operand")
524 {
525 if (GET_CODE (op) == SUBREG)
526 op = SUBREG_REG (op);
527 return !(op == arg_pointer_rtx
528 || op == frame_pointer_rtx
529 || IN_RANGE (REGNO (op),
530 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
531 })
532
533 ;; Similarly, but include the stack pointer. This is used to prevent esp
534 ;; from being used as an index reg.
535 (define_predicate "index_register_operand"
536 (match_operand 0 "register_operand")
537 {
538 if (GET_CODE (op) == SUBREG)
539 op = SUBREG_REG (op);
540 if (reload_in_progress || reload_completed)
541 return REG_OK_FOR_INDEX_STRICT_P (op);
542 else
543 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
544 })
545
546 ;; Return false if this is any eliminable register. Otherwise general_operand.
547 (define_predicate "general_no_elim_operand"
548 (if_then_else (match_code "reg,subreg")
549 (match_operand 0 "register_no_elim_operand")
550 (match_operand 0 "general_operand")))
551
552 ;; Return false if this is any eliminable register. Otherwise
553 ;; register_operand or a constant.
554 (define_predicate "nonmemory_no_elim_operand"
555 (ior (match_operand 0 "register_no_elim_operand")
556 (match_operand 0 "immediate_operand")))
557
558 ;; Test for a valid operand for indirect branch.
559 (define_predicate "indirect_branch_operand"
560 (if_then_else (match_test "TARGET_X32")
561 (match_operand 0 "register_operand")
562 (match_operand 0 "nonimmediate_operand")))
563
564 ;; Test for a valid operand for a call instruction.
565 (define_predicate "call_insn_operand"
566 (ior (match_operand 0 "constant_call_address_operand")
567 (match_operand 0 "call_register_no_elim_operand")
568 (and (not (match_test "TARGET_X32"))
569 (match_operand 0 "memory_operand"))))
570
571 ;; Similarly, but for tail calls, in which we cannot allow memory references.
572 (define_predicate "sibcall_insn_operand"
573 (ior (match_operand 0 "constant_call_address_operand")
574 (match_operand 0 "register_no_elim_operand")))
575
576 ;; Match exactly zero.
577 (define_predicate "const0_operand"
578 (match_code "const_int,const_double,const_vector")
579 {
580 if (mode == VOIDmode)
581 mode = GET_MODE (op);
582 return op == CONST0_RTX (mode);
583 })
584
585 ;; Match exactly one.
586 (define_predicate "const1_operand"
587 (and (match_code "const_int")
588 (match_test "op == const1_rtx")))
589
590 ;; Match exactly eight.
591 (define_predicate "const8_operand"
592 (and (match_code "const_int")
593 (match_test "INTVAL (op) == 8")))
594
595 ;; Match exactly 128.
596 (define_predicate "const128_operand"
597 (and (match_code "const_int")
598 (match_test "INTVAL (op) == 128")))
599
600 ;; Match exactly 0xffffffff in anddi as a zero-extension operation.
601 (define_predicate "const_32bit_mask"
602 (and (match_code "const_int")
603 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
604 == (HOST_WIDE_INT) 0xffffffff")))
605
606 ;; Match 2, 4, or 8. Used for leal multiplicands.
607 (define_predicate "const248_operand"
608 (match_code "const_int")
609 {
610 HOST_WIDE_INT i = INTVAL (op);
611 return i == 2 || i == 4 || i == 8;
612 })
613
614 ;; Match 1, 2, 4, or 8.
615 (define_predicate "const1248_operand"
616 (match_code "const_int")
617 {
618 HOST_WIDE_INT i = INTVAL (op);
619 return i == 1 || i == 2 || i == 4 || i == 8;
620 })
621
622 ;; Match 3, 5, or 9. Used for leal multiplicands.
623 (define_predicate "const359_operand"
624 (match_code "const_int")
625 {
626 HOST_WIDE_INT i = INTVAL (op);
627 return i == 3 || i == 5 || i == 9;
628 })
629
630 ;; Match 0 or 1.
631 (define_predicate "const_0_to_1_operand"
632 (and (match_code "const_int")
633 (ior (match_test "op == const0_rtx")
634 (match_test "op == const1_rtx"))))
635
636 ;; Match 0 to 3.
637 (define_predicate "const_0_to_3_operand"
638 (and (match_code "const_int")
639 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
640
641 ;; Match 0 to 7.
642 (define_predicate "const_0_to_7_operand"
643 (and (match_code "const_int")
644 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
645
646 ;; Match 0 to 15.
647 (define_predicate "const_0_to_15_operand"
648 (and (match_code "const_int")
649 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
650
651 ;; Match 0 to 31.
652 (define_predicate "const_0_to_31_operand"
653 (and (match_code "const_int")
654 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
655
656 ;; Match 0 to 63.
657 (define_predicate "const_0_to_63_operand"
658 (and (match_code "const_int")
659 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
660
661 ;; Match 0 to 255.
662 (define_predicate "const_0_to_255_operand"
663 (and (match_code "const_int")
664 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
665
666 ;; Match (0 to 255) * 8.
667 (define_predicate "const_0_to_255_mul_8_operand"
668 (match_code "const_int")
669 {
670 unsigned HOST_WIDE_INT val = INTVAL (op);
671 return val <= 255*8 && val % 8 == 0;
672 })
673
674 ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
675 ;; for shift & compare patterns, as shifting by 0 does not change flags).
676 (define_predicate "const_1_to_31_operand"
677 (and (match_code "const_int")
678 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
679
680 ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
681 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
682 (define_predicate "const_1_to_63_operand"
683 (and (match_code "const_int")
684 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
685
686 ;; Match 2 or 3.
687 (define_predicate "const_2_to_3_operand"
688 (and (match_code "const_int")
689 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
690
691 ;; Match 4 or 5.
692 (define_predicate "const_4_to_5_operand"
693 (and (match_code "const_int")
694 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
695
696 ;; Match 4 to 7.
697 (define_predicate "const_4_to_7_operand"
698 (and (match_code "const_int")
699 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
700
701 ;; Match 6 or 7.
702 (define_predicate "const_6_to_7_operand"
703 (and (match_code "const_int")
704 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
705
706 ;; Match 8 to 11.
707 (define_predicate "const_8_to_11_operand"
708 (and (match_code "const_int")
709 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
710
711 ;; Match 12 to 15.
712 (define_predicate "const_12_to_15_operand"
713 (and (match_code "const_int")
714 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
715
716 ;; True if this is a constant appropriate for an increment or decrement.
717 (define_predicate "incdec_operand"
718 (match_code "const_int")
719 {
720 /* On Pentium4, the inc and dec operations cause an extra dependency on
721 the flags register, since the carry flag is not set. */
722 if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
723 return false;
724 return op == const1_rtx || op == constm1_rtx;
725 })
726
727 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
728 (define_predicate "reg_or_pm1_operand"
729 (ior (match_operand 0 "register_operand")
730 (and (match_code "const_int")
731 (ior (match_test "op == const1_rtx")
732 (match_test "op == constm1_rtx")))))
733
734 ;; True if OP is acceptable as operand of DImode shift expander.
735 (define_predicate "shiftdi_operand"
736 (if_then_else (match_test "TARGET_64BIT")
737 (match_operand 0 "nonimmediate_operand")
738 (match_operand 0 "register_operand")))
739
740 (define_predicate "ashldi_input_operand"
741 (if_then_else (match_test "TARGET_64BIT")
742 (match_operand 0 "nonimmediate_operand")
743 (match_operand 0 "reg_or_pm1_operand")))
744
745 ;; Return true if OP is a vector load from the constant pool with just
746 ;; the first element nonzero.
747 (define_predicate "zero_extended_scalar_load_operand"
748 (match_code "mem")
749 {
750 unsigned n_elts;
751 op = maybe_get_pool_constant (op);
752
753 if (!(op && GET_CODE (op) == CONST_VECTOR))
754 return false;
755
756 n_elts = CONST_VECTOR_NUNITS (op);
757
758 for (n_elts--; n_elts > 0; n_elts--)
759 {
760 rtx elt = CONST_VECTOR_ELT (op, n_elts);
761 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
762 return false;
763 }
764 return true;
765 })
766
767 ;; Return true if operand is a vector constant that is all ones.
768 (define_predicate "vector_all_ones_operand"
769 (match_code "const_vector")
770 {
771 int nunits = GET_MODE_NUNITS (mode);
772
773 if (GET_CODE (op) == CONST_VECTOR
774 && CONST_VECTOR_NUNITS (op) == nunits)
775 {
776 int i;
777 for (i = 0; i < nunits; ++i)
778 {
779 rtx x = CONST_VECTOR_ELT (op, i);
780 if (x != constm1_rtx)
781 return false;
782 }
783 return true;
784 }
785
786 return false;
787 })
788
789 ;; Return true when OP is operand acceptable for standard SSE move.
790 (define_predicate "vector_move_operand"
791 (ior (match_operand 0 "nonimmediate_operand")
792 (match_operand 0 "const0_operand")))
793
794 ;; Return true when OP is nonimmediate or standard SSE constant.
795 (define_predicate "nonimmediate_or_sse_const_operand"
796 (match_operand 0 "general_operand")
797 {
798 if (nonimmediate_operand (op, mode))
799 return true;
800 if (standard_sse_constant_p (op) > 0)
801 return true;
802 return false;
803 })
804
805 ;; Return true if OP is a register or a zero.
806 (define_predicate "reg_or_0_operand"
807 (ior (match_operand 0 "register_operand")
808 (match_operand 0 "const0_operand")))
809
810 ;; Return true if OP is a valid address for LEA, and does not contain
811 ;; a segment override.
812 (define_predicate "lea_address_operand"
813 (match_operand 0 "address_operand")
814 {
815 struct ix86_address parts;
816 int ok;
817
818 /* LEA handles zero-extend by itself. */
819 if (GET_CODE (op) == ZERO_EXTEND
820 || GET_CODE (op) == AND)
821 return false;
822
823 ok = ix86_decompose_address (op, &parts);
824 gcc_assert (ok);
825 return parts.seg == SEG_DEFAULT;
826 })
827
828 ;; Return true if OP is a valid base register, displacement or
829 ;; sum of base register and displacement for VSIB addressing.
830 (define_predicate "vsib_address_operand"
831 (match_operand 0 "address_operand")
832 {
833 struct ix86_address parts;
834 int ok;
835 rtx disp;
836
837 ok = ix86_decompose_address (op, &parts);
838 gcc_assert (ok);
839 if (parts.index || parts.seg != SEG_DEFAULT)
840 return false;
841
842 /* VSIB addressing doesn't support (%rip). */
843 if (parts.disp && GET_CODE (parts.disp) == CONST)
844 {
845 disp = XEXP (parts.disp, 0);
846 if (GET_CODE (disp) == PLUS)
847 disp = XEXP (disp, 0);
848 if (GET_CODE (disp) == UNSPEC)
849 switch (XINT (disp, 1))
850 {
851 case UNSPEC_GOTPCREL:
852 case UNSPEC_PCREL:
853 case UNSPEC_GOTNTPOFF:
854 return false;
855 }
856 }
857
858 return true;
859 })
860
861 (define_predicate "vsib_mem_operator"
862 (match_code "mem"))
863
864 ;; Return true if the rtx is known to be at least 32 bits aligned.
865 (define_predicate "aligned_operand"
866 (match_operand 0 "general_operand")
867 {
868 struct ix86_address parts;
869 int ok;
870
871 /* Registers and immediate operands are always "aligned". */
872 if (!MEM_P (op))
873 return true;
874
875 /* All patterns using aligned_operand on memory operands end up
876 promoting the memory operand to 64bit and thus causing a memory mismatch. */
877 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
878 return false;
879
880 /* Don't even try to do any aligned optimizations with volatiles. */
881 if (MEM_VOLATILE_P (op))
882 return false;
883
884 if (MEM_ALIGN (op) >= 32)
885 return true;
886
887 op = XEXP (op, 0);
888
889 /* Pushes and pops are only valid on the stack pointer. */
890 if (GET_CODE (op) == PRE_DEC
891 || GET_CODE (op) == POST_INC)
892 return true;
893
894 /* Decode the address. */
895 ok = ix86_decompose_address (op, &parts);
896 gcc_assert (ok);
897
898 if (parts.base && GET_CODE (parts.base) == SUBREG)
899 parts.base = SUBREG_REG (parts.base);
900 if (parts.index && GET_CODE (parts.index) == SUBREG)
901 parts.index = SUBREG_REG (parts.index);
902
903 /* Look for some component that isn't known to be aligned. */
904 if (parts.index)
905 {
906 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
907 return false;
908 }
909 if (parts.base)
910 {
911 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
912 return false;
913 }
914 if (parts.disp)
915 {
916 if (!CONST_INT_P (parts.disp)
917 || (INTVAL (parts.disp) & 3))
918 return false;
919 }
920
921 /* Didn't find one -- this must be an aligned address. */
922 return true;
923 })
924
925 ;; Return true if OP is a memory operand with a displacement.
926 (define_predicate "memory_displacement_operand"
927 (match_operand 0 "memory_operand")
928 {
929 struct ix86_address parts;
930 int ok;
931
932 ok = ix86_decompose_address (XEXP (op, 0), &parts);
933 gcc_assert (ok);
934 return parts.disp != NULL_RTX;
935 })
936
937 ;; Return true if OP is a memory operand with a displacement only.
938 (define_predicate "memory_displacement_only_operand"
939 (match_operand 0 "memory_operand")
940 {
941 struct ix86_address parts;
942 int ok;
943
944 if (TARGET_64BIT)
945 return false;
946
947 ok = ix86_decompose_address (XEXP (op, 0), &parts);
948 gcc_assert (ok);
949
950 if (parts.base || parts.index)
951 return false;
952
953 return parts.disp != NULL_RTX;
954 })
955
956 ;; Return true if OP is a memory operand which will need at most one
957 ;; extra register, not counting the stack pointer or frame pointer.
958 (define_predicate "cmpxchg8b_pic_memory_operand"
959 (match_operand 0 "memory_operand")
960 {
961 struct ix86_address parts;
962 int ok;
963
964 ok = ix86_decompose_address (XEXP (op, 0), &parts);
965 gcc_assert (ok);
966
967 if (parts.base && GET_CODE (parts.base) == SUBREG)
968 parts.base = SUBREG_REG (parts.base);
969 if (parts.index && GET_CODE (parts.index) == SUBREG)
970 parts.index = SUBREG_REG (parts.index);
971
972 if (parts.base == NULL_RTX
973 || parts.base == arg_pointer_rtx
974 || parts.base == frame_pointer_rtx
975 || parts.base == hard_frame_pointer_rtx
976 || parts.base == stack_pointer_rtx)
977 return true;
978
979 if (parts.index == NULL_RTX
980 || parts.index == arg_pointer_rtx
981 || parts.index == frame_pointer_rtx
982 || parts.index == hard_frame_pointer_rtx
983 || parts.index == stack_pointer_rtx)
984 return true;
985
986 return false;
987 })
988
989
990 ;; Return true if OP is a memory operand that cannot be represented
991 ;; by the modRM array.
992 (define_predicate "long_memory_operand"
993 (and (match_operand 0 "memory_operand")
994 (match_test "memory_address_length (op)")))
995
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* For FP flag modes, first reduce the FP comparison to the integer
     condition code the FP compare sequence actually produces; only
     single-flag-bit FP comparisons are acceptable.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      /* Unsigned codes are accepted only in modes listed here.  */
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      /* Equality and (un)ordered tests are accepted in any mode.  */
      return true;
    default:
      return false;
    }
})
1024
;; Return true if OP is a comparison usable in the CMPSS/CMPPS insns.
;; The base ISA supports only the first code set directly; AVX adds
;; encodings for the second set, which can't be done with full IEEE
;; support, i.e. NaNs.
(define_predicate "sse_comparison_operator"
  (ior (and (match_test "TARGET_AVX")
	    (match_code "gt,ge,unlt,unle,uneq,ltgt"))
       (match_code "ne,eq,le,lt,ungt,unge,ordered,unordered")))
1033
;; Signed comparison codes plus equality.
(define_predicate "ix86_comparison_int_operator"
  (match_code "eq,ne,lt,le,gt,ge"))
1036
;; Unsigned comparison codes plus equality.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "eq,ne,ltu,leu,gtu,geu"))
1039
;; Equality comparisons, as used after the BT instruction.
(define_predicate "bt_comparison_operator"
  (match_code "eq,ne"))
1042
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP flag modes admit only comparisons testable with one flag bit.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  /* Each remaining code is valid only with CC modes that set the
     flags it reads.  */
  switch (code)
    {
    case EQ: case NE:
      /* Equality is representable in every CC mode.  */
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      /* Unsigned comparisons need a valid carry flag.  */
      if (inmode == CCmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1078
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* For FP flag modes, reduce the FP comparison to the integer
     condition the FP compare sequence leaves in the flags.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* CCCmode accepts both LTU and GTU as carry tests.  */
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  /* In plain CCmode only LTU tests the carry flag directly.  */
  return code == LTU;
})
1100
;; Return true if this comparison can be resolved by testing a single
;; flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "ordered,unordered,gt,ge,unlt,unle,uneq,ltgt"))
1104
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
			     != IX86_FPCMP_ARITH")
		(match_operand 0 "ix86_trivial_fp_comparison_operator")
		(match_operand 0 "comparison_operator")))
1113
;; Same as above, but for swapped comparison used in fp_jcc_4_387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum rtx_code code = GET_CODE (op);
  bool ret;

  /* Temporarily rewrite OP's code to its swapped form, run the
     ordinary predicate, then restore it.  Note this mutates the rtx
     in place for the duration of the check.  */
  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
  return ret;
})
1126
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_operand 0 "general_operand")
       (match_code "const_double")))
1133
;; Return true for the four basic binary floating-point operations.
(define_predicate "binary_fp_operator"
  (match_code "mult,div,plus,minus"))
1137
;; Return true if this is a multiply operation (a MULT rtx).
(define_predicate "mult_operator"
  (match_code "mult"))
1141
;; Return true if this is a division operation (a DIV rtx).
(define_predicate "div_operator"
  (match_code "div"))
1145
;; Return true for addition, subtraction and the bitwise logic codes
;; (and, ior, xor).
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,ior,xor,and"))
1149
;; Return true if this is a FLOAT operation, i.e. a signed
;; integer-to-floating-point conversion rtx.
(define_predicate "float_operator"
  (match_code "float"))
1153
;; Return true for any rtx code satisfying ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,minus,mult,div,mod,udiv,umod,and,ior,xor,compare,
	       smin,smax,umin,umax,ashift,ashiftrt,lshiftrt,rotate,rotatert"))
1158
;; Return true for any rtx code satisfying COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "mult,plus,and,xor,ior,smin,smax,umin,umax"))
1162
;; Return true if OP is a binary operator that can be promoted to wider
;; mode.  Multiplication qualifies only when the tuning flag asks for
;; HImode imul promotion.
(define_predicate "promotable_binary_operator"
  (ior (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))
       (match_code "plus,and,ior,xor,ashift")))
1168
;; Return true if OP is a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))
1171
;; Return true if OP is an absolute-value or negation rtx.
(define_predicate "absneg_operator"
  (match_code "neg,abs"))
1174
;; Return true if OP is misaligned memory operand, i.e. a MEM whose
;; known alignment is smaller than the alignment its mode requires.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1179
;; Return true if OP is a emms operation, known to be a PARALLEL.
;; The expected shape is 17 elements: element 0 (presumably the emms
;; unspec itself; not examined here), clobbers of the eight x87 stack
;; registers in XFmode (elements 1..8), and clobbers of the eight MMX
;; registers in DImode (elements 9..16).
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      /* Element i+1 must be (clobber (reg:XF st(i))).  */
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
	return false;

      elt = XVECEXP (op, 0, i+9);

      /* Element i+9 must be (clobber (reg:DI mm(i))).  */
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
	return false;
    }
  return true;
})
1209
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
;; The expected shape is 1 + nregs elements: element 0 (presumably the
;; vzeroall unspec; not examined here) followed by one
;; (set (reg:V8SI xmmN) (const_vector:V8SI [0 ...])) per SSE register.
;; nregs is 16 in 64-bit mode and 8 otherwise.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      /* Element i+1 must set SSE register i to a V8SImode zero.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})
1232
;; Return true if OP is a parallel for a vbroadcast permute: every
;; element is a CONST_INT and all elements are identical.

(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  Pointer comparison suffices
     here since equal CONST_INT rtxes are shared.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})
1249
;; Return true if OP is a proper third operand to vpblendw256: a
;; CONST_INT whose value is a 16-bit mask consisting of the same byte
;; repeated twice (presumably because the instruction's 8-bit
;; immediate is applied to each 128-bit lane).
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  /* Also rejects values with bits set above bit 15.  */
  return val == ((low << 8) | low);
})