predicates.md (general_gr_operand): New predicate.
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
19
20 ;; Return true if OP is either an i387 or SSE fp register.
21 (define_predicate "any_fp_register_operand"
22 (and (match_code "reg")
23 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
24
25 ;; Return true if OP is an i387 fp register.
26 (define_predicate "fp_register_operand"
27 (and (match_code "reg")
28 (match_test "STACK_REGNO_P (REGNO (op))")))
29
30 ;; True if the operand is a GENERAL class register.
31 (define_predicate "general_reg_operand"
32 (and (match_code "reg")
33 (match_test "GENERAL_REGNO_P (REGNO (op))")))
34
35 ;; True if the operand is a nonimmediate operand with GENERAL class register.
36 (define_predicate "nonimmediate_gr_operand"
37 (if_then_else (match_code "reg")
38 (match_test "GENERAL_REGNO_P (REGNO (op))")
39 (match_operand 0 "nonimmediate_operand")))
40
41 ;; True if the operand is a general operand with GENERAL class register.
42 (define_predicate "general_gr_operand"
43 (if_then_else (match_code "reg")
44 (match_test "GENERAL_REGNO_P (REGNO (op))")
45 (match_operand 0 "general_operand")))
46
47 ;; True if the operand is an MMX register.
48 (define_predicate "mmx_reg_operand"
49 (and (match_code "reg")
50 (match_test "MMX_REGNO_P (REGNO (op))")))
51
52 ;; True if the operand is an SSE register.
53 (define_predicate "sse_reg_operand"
54 (and (match_code "reg")
55 (match_test "SSE_REGNO_P (REGNO (op))")))
56
57 ;; True if the operand is an AVX-512 new register.
58 (define_predicate "ext_sse_reg_operand"
59 (and (match_code "reg")
60 (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))
61
62 ;; Return true if op is a QImode register.
63 (define_predicate "any_QIreg_operand"
64 (and (match_code "reg")
65 (match_test "ANY_QI_REGNO_P (REGNO (op))")))
66
67 ;; Return true if op is one of QImode registers: %[abcd][hl].
68 (define_predicate "QIreg_operand"
69 (and (match_code "reg")
70 (match_test "QI_REGNO_P (REGNO (op))")))
71
72 ;; Return true if op is a QImode register operand other than %[abcd][hl].
73 (define_predicate "ext_QIreg_operand"
74 (and (match_test "TARGET_64BIT")
75 (match_code "reg")
76 (not (match_test "QI_REGNO_P (REGNO (op))"))))
77
78 ;; Return true if op is the AX register.
79 (define_predicate "ax_reg_operand"
80 (and (match_code "reg")
81 (match_test "REGNO (op) == AX_REG")))
82
83 ;; Return true if op is the flags register.
84 (define_predicate "flags_reg_operand"
85 (and (match_code "reg")
86 (match_test "REGNO (op) == FLAGS_REG")))
87
88 ;; Match an SI, HImode, or (on 64-bit targets only) DImode register for a zero_extract.
89 (define_special_predicate "ext_register_operand"
90 (match_operand 0 "register_operand")
91 {
92 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
93 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
94 return false;
95 if (SUBREG_P (op))
96 op = SUBREG_REG (op);
97
98 /* Be careful to accept only registers having upper parts. */
99 return (REG_P (op)
100 && (REGNO (op) > LAST_VIRTUAL_REGISTER || QI_REGNO_P (REGNO (op))));
101 })
102
103 ;; Match nonimmediate operands, but exclude memory operands on 64bit targets.
104 (define_predicate "nonimmediate_x64nomem_operand"
105 (if_then_else (match_test "TARGET_64BIT")
106 (match_operand 0 "register_operand")
107 (match_operand 0 "nonimmediate_operand")))
108
109 ;; Match general operands, but exclude memory operands on 64bit targets.
110 (define_predicate "general_x64nomem_operand"
111 (if_then_else (match_test "TARGET_64BIT")
112 (match_operand 0 "nonmemory_operand")
113 (match_operand 0 "general_operand")))
114
115 ;; Match register operands, but include memory operands for TARGET_SSE_MATH.
116 (define_predicate "register_ssemem_operand"
117 (if_then_else
118 (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
119 (match_operand 0 "nonimmediate_operand")
120 (match_operand 0 "register_operand")))
121
122 ;; Match nonimmediate operands, but exclude memory operands
123 ;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
124 (define_predicate "nonimm_ssenomem_operand"
125 (if_then_else
126 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
127 (not (match_test "TARGET_MIX_SSE_I387")))
128 (match_operand 0 "register_operand")
129 (match_operand 0 "nonimmediate_operand")))
130
131 ;; The above predicate, suitable for x87 arithmetic operators.
132 (define_predicate "x87nonimm_ssenomem_operand"
133 (if_then_else
134 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
135 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
136 (match_operand 0 "register_operand")
137 (match_operand 0 "nonimmediate_operand")))
138
139 ;; Match register operands, but include memory operands for TARGET_SSE4_1.
140 (define_predicate "register_sse4nonimm_operand"
141 (if_then_else (match_test "TARGET_SSE4_1")
142 (match_operand 0 "nonimmediate_operand")
143 (match_operand 0 "register_operand")))
144
145 ;; Return true if VALUE is a SYMBOL_REF.
146 (define_predicate "symbol_operand"
147 (match_code "symbol_ref"))
148
149 ;; Return true if VALUE can be stored in a sign extended immediate field.
150 (define_predicate "x86_64_immediate_operand"
151 (match_code "const_int,symbol_ref,label_ref,const")
152 {
153 if (!TARGET_64BIT)
154 return immediate_operand (op, mode);
155
156 switch (GET_CODE (op))
157 {
158 case CONST_INT:
159 {
160 HOST_WIDE_INT val = INTVAL (op);
161 return trunc_int_for_mode (val, SImode) == val;
162 }
163 case SYMBOL_REF:
164 /* TLS symbols are not constant. */
165 if (SYMBOL_REF_TLS_MODEL (op))
166 return false;
167
168 /* Load the external function address via the GOT slot. */
169 if (ix86_force_load_from_GOT_p (op))
170 return false;
171
172 /* For certain code models, the symbolic references are known to fit.
173 In CM_SMALL_PIC model we know it fits if it is local to the shared
174 library. Don't count TLS SYMBOL_REFs here, since they should fit
175 only if inside of UNSPEC handled below. */
176 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
177 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
178
179 case LABEL_REF:
180 /* For certain code models, the code is near as well. */
181 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
182 || ix86_cmodel == CM_KERNEL);
183
184 case CONST:
185 /* We also may accept the offsetted memory references in certain
186 special cases. */
187 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
188 switch (XINT (XEXP (op, 0), 1))
189 {
190 case UNSPEC_GOTPCREL:
191 case UNSPEC_DTPOFF:
192 case UNSPEC_GOTNTPOFF:
193 case UNSPEC_NTPOFF:
194 return true;
195 default:
196 break;
197 }
198
199 if (GET_CODE (XEXP (op, 0)) == PLUS)
200 {
201 rtx op1 = XEXP (XEXP (op, 0), 0);
202 rtx op2 = XEXP (XEXP (op, 0), 1);
203
204 if (ix86_cmodel == CM_LARGE)
205 return false;
206 if (!CONST_INT_P (op2))
207 return false;
208
209 HOST_WIDE_INT offset = INTVAL (op2);
210 if (trunc_int_for_mode (offset, SImode) != offset)
211 return false;
212
213 switch (GET_CODE (op1))
214 {
215 case SYMBOL_REF:
216 /* TLS symbols are not constant. */
217 if (SYMBOL_REF_TLS_MODEL (op1))
218 return false;
219
220 /* Load the external function address via the GOT slot. */
221 if (ix86_force_load_from_GOT_p (op1))
222 return false;
223
224 /* For CM_SMALL assume that latest object is 16MB before
225 end of 31bits boundary. We may also accept pretty
226 large negative constants knowing that all objects are
227 in the positive half of address space. */
228 if ((ix86_cmodel == CM_SMALL
229 || (ix86_cmodel == CM_MEDIUM
230 && !SYMBOL_REF_FAR_ADDR_P (op1)))
231 && offset < 16*1024*1024)
232 return true;
233 /* For CM_KERNEL we know that all objects reside in the
234 negative half of 32bits address space. We may not
235 accept negative offsets, since they may be just off
236 and we may accept pretty large positive ones. */
237 if (ix86_cmodel == CM_KERNEL
238 && offset > 0)
239 return true;
240 break;
241
242 case LABEL_REF:
243 /* These conditions are similar to SYMBOL_REF ones, just the
244 constraints for code models differ. */
245 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
246 && offset < 16*1024*1024)
247 return true;
248 if (ix86_cmodel == CM_KERNEL
249 && offset > 0)
250 return true;
251 break;
252
253 case UNSPEC:
254 switch (XINT (op1, 1))
255 {
256 case UNSPEC_DTPOFF:
257 case UNSPEC_NTPOFF:
258 return true;
259 }
260 break;
261
262 default:
263 break;
264 }
265 }
266 break;
267
268 default:
269 gcc_unreachable ();
270 }
271
272 return false;
273 })
274
275 ;; Return true if VALUE can be stored in the zero-extended immediate field.
276 (define_predicate "x86_64_zext_immediate_operand"
277 (match_code "const_int,symbol_ref,label_ref,const")
278 {
279 switch (GET_CODE (op))
280 {
281 case CONST_INT:
282 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
283
284 case SYMBOL_REF:
285 /* TLS symbols are not constant. */
286 if (SYMBOL_REF_TLS_MODEL (op))
287 return false;
288
289 /* Load the external function address via the GOT slot. */
290 if (ix86_force_load_from_GOT_p (op))
291 return false;
292
293 /* For certain code models, the symbolic references are known to fit. */
294 return (ix86_cmodel == CM_SMALL
295 || (ix86_cmodel == CM_MEDIUM
296 && !SYMBOL_REF_FAR_ADDR_P (op)));
297
298 case LABEL_REF:
299 /* For certain code models, the code is near as well. */
300 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
301
302 case CONST:
303 /* We also may accept the offsetted memory references in certain
304 special cases. */
305 if (GET_CODE (XEXP (op, 0)) == PLUS)
306 {
307 rtx op1 = XEXP (XEXP (op, 0), 0);
308 rtx op2 = XEXP (XEXP (op, 0), 1);
309
310 if (ix86_cmodel == CM_LARGE)
311 return false;
312 if (!CONST_INT_P (op2))
313 return false;
314
315 HOST_WIDE_INT offset = INTVAL (op2);
316 if (trunc_int_for_mode (offset, SImode) != offset)
317 return false;
318
319 switch (GET_CODE (op1))
320 {
321 case SYMBOL_REF:
322 /* TLS symbols are not constant. */
323 if (SYMBOL_REF_TLS_MODEL (op1))
324 return false;
325
326 /* Load the external function address via the GOT slot. */
327 if (ix86_force_load_from_GOT_p (op1))
328 return false;
329
330 /* For small code model we may accept pretty large positive
331 offsets, since one bit is available for free. Negative
332 offsets are limited by the size of NULL pointer area
333 specified by the ABI. */
334 if ((ix86_cmodel == CM_SMALL
335 || (ix86_cmodel == CM_MEDIUM
336 && !SYMBOL_REF_FAR_ADDR_P (op1)))
337 && offset > -0x10000)
338 return true;
339 /* ??? For the kernel, we may accept adjustment of
340 -0x10000000, since we know that it will just convert
341 negative address space to positive, but perhaps this
342 is not worthwhile. */
343 break;
344
345 case LABEL_REF:
346 /* These conditions are similar to SYMBOL_REF ones, just the
347 constraints for code models differ. */
348 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
349 && offset > -0x10000)
350 return true;
351 break;
352
353 default:
354 return false;
355 }
356 }
357 break;
358
359 default:
360 gcc_unreachable ();
361 }
362 return false;
363 })
364
365 ;; Return true if VALUE is a constant integer whose low and high words satisfy
366 ;; x86_64_immediate_operand.
367 (define_predicate "x86_64_hilo_int_operand"
368 (match_code "const_int,const_wide_int")
369 {
370 switch (GET_CODE (op))
371 {
372 case CONST_INT:
373 return x86_64_immediate_operand (op, mode);
374
375 case CONST_WIDE_INT:
376 gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
377 return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
378 DImode)
379 && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
380 1)),
381 DImode));
382
383 default:
384 gcc_unreachable ();
385 }
386 })
387
388 ;; Return true if the size of VALUE can be stored in a sign
389 ;; extended immediate field.
390 (define_predicate "x86_64_immediate_size_operand"
391 (and (match_code "symbol_ref")
392 (ior (not (match_test "TARGET_64BIT"))
393 (match_test "ix86_cmodel == CM_SMALL")
394 (match_test "ix86_cmodel == CM_KERNEL"))))
395
396 ;; Return true if OP is general operand representable on x86_64.
397 (define_predicate "x86_64_general_operand"
398 (if_then_else (match_test "TARGET_64BIT")
399 (ior (match_operand 0 "nonimmediate_operand")
400 (match_operand 0 "x86_64_immediate_operand"))
401 (match_operand 0 "general_operand")))
402
403 ;; Return true if OP's both words are general operands representable
404 ;; on x86_64.
405 (define_predicate "x86_64_hilo_general_operand"
406 (if_then_else (match_test "TARGET_64BIT")
407 (ior (match_operand 0 "nonimmediate_operand")
408 (match_operand 0 "x86_64_hilo_int_operand"))
409 (match_operand 0 "general_operand")))
410
411 ;; Return true if OP is non-VOIDmode general operand representable
412 ;; on x86_64. This predicate is used in sign-extending conversion
413 ;; operations that require non-VOIDmode immediate operands.
414 (define_predicate "x86_64_sext_operand"
415 (and (match_test "GET_MODE (op) != VOIDmode")
416 (match_operand 0 "x86_64_general_operand")))
417
418 ;; Return true if OP is non-VOIDmode general operand. This predicate
419 ;; is used in sign-extending conversion operations that require
420 ;; non-VOIDmode immediate operands.
421 (define_predicate "sext_operand"
422 (and (match_test "GET_MODE (op) != VOIDmode")
423 (match_operand 0 "general_operand")))
424
425 ;; Return true if OP is representable on x86_64 as zero-extended operand.
426 ;; This predicate is used in zero-extending conversion operations that
427 ;; require non-VOIDmode immediate operands.
428 (define_predicate "x86_64_zext_operand"
429 (if_then_else (match_test "TARGET_64BIT")
430 (ior (match_operand 0 "nonimmediate_operand")
431 (and (match_operand 0 "x86_64_zext_immediate_operand")
432 (match_test "GET_MODE (op) != VOIDmode")))
433 (match_operand 0 "nonimmediate_operand")))
434
435 ;; Return true if OP is general operand representable on x86_64
436 ;; as either sign extended or zero extended constant.
437 (define_predicate "x86_64_szext_general_operand"
438 (if_then_else (match_test "TARGET_64BIT")
439 (ior (match_operand 0 "nonimmediate_operand")
440 (match_operand 0 "x86_64_immediate_operand")
441 (match_operand 0 "x86_64_zext_immediate_operand"))
442 (match_operand 0 "general_operand")))
443
444 ;; Return true if OP is nonmemory operand representable on x86_64.
445 (define_predicate "x86_64_nonmemory_operand"
446 (if_then_else (match_test "TARGET_64BIT")
447 (ior (match_operand 0 "register_operand")
448 (match_operand 0 "x86_64_immediate_operand"))
449 (match_operand 0 "nonmemory_operand")))
450
451 ;; Return true if OP is nonmemory operand representable on x86_64
452 ;; as either sign extended or zero extended constant.
453 (define_predicate "x86_64_szext_nonmemory_operand"
454 (if_then_else (match_test "TARGET_64BIT")
455 (ior (match_operand 0 "register_operand")
456 (match_operand 0 "x86_64_immediate_operand")
457 (match_operand 0 "x86_64_zext_immediate_operand"))
458 (match_operand 0 "nonmemory_operand")))
458
459 ;; Return true when the operand is a PIC expression that can be computed
460 ;; by the lea operation.
461 (define_predicate "pic_32bit_operand"
462 (match_code "const,symbol_ref,label_ref")
463 {
464 if (!flag_pic)
465 return false;
466
467 /* Rule out relocations that translate into 64bit constants. */
468 if (TARGET_64BIT && GET_CODE (op) == CONST)
469 {
470 op = XEXP (op, 0);
471 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
472 op = XEXP (op, 0);
473 if (GET_CODE (op) == UNSPEC
474 && (XINT (op, 1) == UNSPEC_GOTOFF
475 || XINT (op, 1) == UNSPEC_GOT))
476 return false;
477 }
478
479 return symbolic_operand (op, mode);
480 })
481
482 ;; Return true if OP is nonmemory operand acceptable by movabs patterns.
483 (define_predicate "x86_64_movabs_operand"
484 (and (match_operand 0 "nonmemory_operand")
485 (not (match_operand 0 "pic_32bit_operand"))))
486
487 ;; Return true if OP is either a symbol reference or a sum of a symbol
488 ;; reference and a constant.
489 (define_predicate "symbolic_operand"
490 (match_code "symbol_ref,label_ref,const")
491 {
492 switch (GET_CODE (op))
493 {
494 case SYMBOL_REF:
495 case LABEL_REF:
496 return true;
497
498 case CONST:
499 op = XEXP (op, 0);
500 if (GET_CODE (op) == SYMBOL_REF
501 || GET_CODE (op) == LABEL_REF
502 || (GET_CODE (op) == UNSPEC
503 && (XINT (op, 1) == UNSPEC_GOT
504 || XINT (op, 1) == UNSPEC_GOTOFF
505 || XINT (op, 1) == UNSPEC_PCREL
506 || XINT (op, 1) == UNSPEC_GOTPCREL)))
507 return true;
508 if (GET_CODE (op) != PLUS
509 || !CONST_INT_P (XEXP (op, 1)))
510 return false;
511
512 op = XEXP (op, 0);
513 if (GET_CODE (op) == SYMBOL_REF
514 || GET_CODE (op) == LABEL_REF)
515 return true;
516 /* Only @GOTOFF gets offsets. */
517 if (GET_CODE (op) != UNSPEC
518 || XINT (op, 1) != UNSPEC_GOTOFF)
519 return false;
520
521 op = XVECEXP (op, 0, 0);
522 if (GET_CODE (op) == SYMBOL_REF
523 || GET_CODE (op) == LABEL_REF)
524 return true;
525 return false;
526
527 default:
528 gcc_unreachable ();
529 }
530 })
531
532 ;; Return true if OP is a symbolic operand that resolves locally.
533 (define_predicate "local_symbolic_operand"
534 (match_code "const,label_ref,symbol_ref")
535 {
536 if (GET_CODE (op) == CONST
537 && GET_CODE (XEXP (op, 0)) == PLUS
538 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
539 op = XEXP (XEXP (op, 0), 0);
540
541 if (GET_CODE (op) == LABEL_REF)
542 return true;
543
544 if (GET_CODE (op) != SYMBOL_REF)
545 return false;
546
547 if (SYMBOL_REF_TLS_MODEL (op))
548 return false;
549
550 /* Dll-imported symbols are always external. */
551 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
552 return false;
553 if (SYMBOL_REF_LOCAL_P (op))
554 return true;
555
556 /* There is, however, a not insubstantial body of code in the rest of
557 the compiler that assumes it can just stick the results of
558 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
559 /* ??? This is a hack. Should update the body of the compiler to
560 always create a DECL and invoke targetm.encode_section_info. */
561 if (strncmp (XSTR (op, 0), internal_label_prefix,
562 internal_label_prefix_len) == 0)
563 return true;
564
565 return false;
566 })
567
568 ;; Test for a legitimate @GOTOFF operand.
569 ;;
570 ;; VxWorks does not impose a fixed gap between segments; the run-time
571 ;; gap can be different from the object-file gap. We therefore can't
572 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
573 ;; same segment as the GOT. Unfortunately, the flexibility of linker
574 ;; scripts means that we can't be sure of that in general, so assume
575 ;; that @GOTOFF is never valid on VxWorks.
576 (define_predicate "gotoff_operand"
577 (and (not (match_test "TARGET_VXWORKS_RTP"))
578 (match_operand 0 "local_symbolic_operand")))
579
580 ;; Test for various thread-local symbols.
581 (define_special_predicate "tls_symbolic_operand"
582 (and (match_code "symbol_ref")
583 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
584
585 (define_special_predicate "tls_modbase_operand"
586 (and (match_code "symbol_ref")
587 (match_test "op == ix86_tls_module_base ()")))
588
589 ;; Test for a pc-relative call operand.
590 (define_predicate "constant_call_address_operand"
591 (match_code "symbol_ref")
592 {
593 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
594 return false;
595 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
596 return false;
597 return true;
598 })
599
600 ;; P6 processors will jump to the address after the decrement when %esp
601 ;; is used as a call operand, so they will execute the return address as code.
602 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
603
604 (define_predicate "call_register_no_elim_operand"
605 (match_operand 0 "register_operand")
606 {
607 if (SUBREG_P (op))
608 op = SUBREG_REG (op);
609
610 if (!TARGET_64BIT && op == stack_pointer_rtx)
611 return false;
612
613 return register_no_elim_operand (op, mode);
614 })
615
616 ;; True for any register that is neither virtual nor eliminable. Used where
617 ;; instantiation of such a register may cause the pattern to not be recognized.
618 (define_predicate "register_no_elim_operand"
619 (match_operand 0 "register_operand")
620 {
621 if (SUBREG_P (op))
622 op = SUBREG_REG (op);
623 return !(op == arg_pointer_rtx
624 || op == frame_pointer_rtx
625 || IN_RANGE (REGNO (op),
626 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
627 })
628
629 ;; Similarly, but include the stack pointer. This is used to prevent esp
630 ;; from being used as an index reg.
631 (define_predicate "index_register_operand"
632 (match_operand 0 "register_operand")
633 {
634 if (SUBREG_P (op))
635 op = SUBREG_REG (op);
636 if (reload_completed)
637 return REG_OK_FOR_INDEX_STRICT_P (op);
638 else
639 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
640 })
641
642 ;; Return false if this is any eliminable register. Otherwise general_operand.
643 (define_predicate "general_no_elim_operand"
644 (if_then_else (match_code "reg,subreg")
645 (match_operand 0 "register_no_elim_operand")
646 (match_operand 0 "general_operand")))
647
648 ;; Return false if this is any eliminable register. Otherwise
649 ;; register_operand or a constant.
650 (define_predicate "nonmemory_no_elim_operand"
651 (ior (match_operand 0 "register_no_elim_operand")
652 (match_operand 0 "immediate_operand")))
653
654 ;; Test for a valid operand for indirect branch.
655 (define_predicate "indirect_branch_operand"
656 (ior (match_operand 0 "register_operand")
657 (and (not (match_test "TARGET_X32"))
658 (match_operand 0 "memory_operand"))))
659
660 ;; Return true if OP is a memory operand that can be used in sibcalls.
661 ;; Since sibcall never returns, we can only use call-clobbered register
662 ;; as GOT base. Allow GOT slot here only with pseudo register as GOT
663 ;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
664 ;; and *sibcall_value_GOT_32 patterns.
665 (define_predicate "sibcall_memory_operand"
666 (match_operand 0 "memory_operand")
667 {
668 op = XEXP (op, 0);
669 if (CONSTANT_P (op))
670 return true;
671 if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
672 {
673 int regno = REGNO (XEXP (op, 0));
674 if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
675 {
676 op = XEXP (op, 1);
677 if (GOT32_symbol_operand (op, VOIDmode))
678 return true;
679 }
680 }
681 return false;
682 })
683
684 ;; Return true if OP is a GOT memory operand.
685 (define_predicate "GOT_memory_operand"
686 (match_operand 0 "memory_operand")
687 {
688 op = XEXP (op, 0);
689 return (GET_CODE (op) == CONST
690 && GET_CODE (XEXP (op, 0)) == UNSPEC
691 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
692 })
693
694 ;; Test for a valid operand for a call instruction.
695 ;; Allow constant call address operands in Pmode only.
696 (define_special_predicate "call_insn_operand"
697 (ior (match_test "constant_call_address_operand
698 (op, mode == VOIDmode ? mode : Pmode)")
699 (match_operand 0 "call_register_no_elim_operand")
700 (ior (and (not (match_test "TARGET_X32"))
701 (match_operand 0 "memory_operand"))
702 (and (match_test "TARGET_X32 && Pmode == DImode")
703 (match_operand 0 "GOT_memory_operand")))))
704
705 ;; Similarly, but for tail calls, in which we cannot allow memory references.
706 (define_special_predicate "sibcall_insn_operand"
707 (ior (match_test "constant_call_address_operand
708 (op, mode == VOIDmode ? mode : Pmode)")
709 (match_operand 0 "register_no_elim_operand")
710 (ior (and (not (match_test "TARGET_X32"))
711 (match_operand 0 "sibcall_memory_operand"))
712 (and (match_test "TARGET_X32 && Pmode == DImode")
713 (match_operand 0 "GOT_memory_operand")))))
714
715 ;; Return true if OP is a 32-bit GOT symbol operand.
716 (define_predicate "GOT32_symbol_operand"
717 (match_test "GET_CODE (op) == CONST
718 && GET_CODE (XEXP (op, 0)) == UNSPEC
719 && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
720
721 ;; Match exactly zero.
722 (define_predicate "const0_operand"
723 (match_code "const_int,const_double,const_vector")
724 {
725 if (mode == VOIDmode)
726 mode = GET_MODE (op);
727 return op == CONST0_RTX (mode);
728 })
729
730 ;; Match one or a vector with all elements equal to one.
731 (define_predicate "const1_operand"
732 (match_code "const_int,const_double,const_vector")
733 {
734 if (mode == VOIDmode)
735 mode = GET_MODE (op);
736 return op == CONST1_RTX (mode);
737 })
738
739 ;; Match exactly -1.
740 (define_predicate "constm1_operand"
741 (and (match_code "const_int")
742 (match_test "op == constm1_rtx")))
743
744 ;; Match exactly eight.
745 (define_predicate "const8_operand"
746 (and (match_code "const_int")
747 (match_test "INTVAL (op) == 8")))
748
749 ;; Match exactly 128.
750 (define_predicate "const128_operand"
751 (and (match_code "const_int")
752 (match_test "INTVAL (op) == 128")))
753
754 ;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
755 (define_predicate "const_32bit_mask"
756 (and (match_code "const_int")
757 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
758 == (HOST_WIDE_INT) 0xffffffff")))
759
760 ;; Match 2, 4, or 8. Used for leal multiplicands.
761 (define_predicate "const248_operand"
762 (match_code "const_int")
763 {
764 HOST_WIDE_INT i = INTVAL (op);
765 return i == 2 || i == 4 || i == 8;
766 })
767
768 ;; Match 2, 3, 6, or 7.
769 (define_predicate "const2367_operand"
770 (match_code "const_int")
771 {
772 HOST_WIDE_INT i = INTVAL (op);
773 return i == 2 || i == 3 || i == 6 || i == 7;
774 })
775
776 ;; Match 1, 2, 4, or 8.
777 (define_predicate "const1248_operand"
778 (match_code "const_int")
779 {
780 HOST_WIDE_INT i = INTVAL (op);
781 return i == 1 || i == 2 || i == 4 || i == 8;
782 })
783
784 ;; Match 3, 5, or 9. Used for leal multiplicands.
785 (define_predicate "const359_operand"
786 (match_code "const_int")
787 {
788 HOST_WIDE_INT i = INTVAL (op);
789 return i == 3 || i == 5 || i == 9;
790 })
791
792 ;; Match 4 or 8 to 11. Used for embedded rounding.
793 (define_predicate "const_4_or_8_to_11_operand"
794 (match_code "const_int")
795 {
796 HOST_WIDE_INT i = INTVAL (op);
797 return i == 4 || (i >= 8 && i <= 11);
798 })
799
800 ;; Match 4 or 8. Used for SAE.
801 (define_predicate "const48_operand"
802 (match_code "const_int")
803 {
804 HOST_WIDE_INT i = INTVAL (op);
805 return i == 4 || i == 8;
806 })
807
808 ;; Match 0 or 1.
809 (define_predicate "const_0_to_1_operand"
810 (and (match_code "const_int")
811 (ior (match_test "op == const0_rtx")
812 (match_test "op == const1_rtx"))))
813
814 ;; Match 0 to 3.
815 (define_predicate "const_0_to_3_operand"
816 (and (match_code "const_int")
817 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
818
819 ;; Match 0 to 4.
820 (define_predicate "const_0_to_4_operand"
821 (and (match_code "const_int")
822 (match_test "IN_RANGE (INTVAL (op), 0, 4)")))
823
824 ;; Match 0 to 5.
825 (define_predicate "const_0_to_5_operand"
826 (and (match_code "const_int")
827 (match_test "IN_RANGE (INTVAL (op), 0, 5)")))
828
829 ;; Match 0 to 7.
830 (define_predicate "const_0_to_7_operand"
831 (and (match_code "const_int")
832 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
833
834 ;; Match 0 to 15.
835 (define_predicate "const_0_to_15_operand"
836 (and (match_code "const_int")
837 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
838
839 ;; Match 0 to 31.
840 (define_predicate "const_0_to_31_operand"
841 (and (match_code "const_int")
842 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
843
844 ;; Match 0 to 63.
845 (define_predicate "const_0_to_63_operand"
846 (and (match_code "const_int")
847 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
848
849 ;; Match 0 to 255.
850 (define_predicate "const_0_to_255_operand"
851 (and (match_code "const_int")
852 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
853
854 ;; Match (0 to 255) * 8.
855 (define_predicate "const_0_to_255_mul_8_operand"
856 (match_code "const_int")
857 {
858 unsigned HOST_WIDE_INT val = INTVAL (op);
859 return val <= 255*8 && val % 8 == 0;
860 })
861
862 ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
863 ;; for shift & compare patterns, as shifting by 0 does not change flags).
864 (define_predicate "const_1_to_31_operand"
865 (and (match_code "const_int")
866 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
867
868 ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
869 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
870 (define_predicate "const_1_to_63_operand"
871 (and (match_code "const_int")
872 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
873
874 ;; Match 2 or 3.
875 (define_predicate "const_2_to_3_operand"
876 (and (match_code "const_int")
877 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
878
879 ;; Match 4 to 5.
880 (define_predicate "const_4_to_5_operand"
881 (and (match_code "const_int")
882 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
883
884 ;; Match 4 to 7.
885 (define_predicate "const_4_to_7_operand"
886 (and (match_code "const_int")
887 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
888
889 ;; Match 6 to 7.
890 (define_predicate "const_6_to_7_operand"
891 (and (match_code "const_int")
892 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
893
894 ;; Match 8 to 9.
895 (define_predicate "const_8_to_9_operand"
896 (and (match_code "const_int")
897 (match_test "IN_RANGE (INTVAL (op), 8, 9)")))
898
899 ;; Match 8 to 11.
900 (define_predicate "const_8_to_11_operand"
901 (and (match_code "const_int")
902 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
903
904 ;; Match 8 to 15.
905 (define_predicate "const_8_to_15_operand"
906 (and (match_code "const_int")
907 (match_test "IN_RANGE (INTVAL (op), 8, 15)")))
908
909 ;; Match 10 to 11.
910 (define_predicate "const_10_to_11_operand"
911 (and (match_code "const_int")
912 (match_test "IN_RANGE (INTVAL (op), 10, 11)")))
913
914 ;; Match 12 to 13.
915 (define_predicate "const_12_to_13_operand"
916 (and (match_code "const_int")
917 (match_test "IN_RANGE (INTVAL (op), 12, 13)")))
918
919 ;; Match 12 to 15.
920 (define_predicate "const_12_to_15_operand"
921 (and (match_code "const_int")
922 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
923
924 ;; Match 14 to 15.
925 (define_predicate "const_14_to_15_operand"
926 (and (match_code "const_int")
927 (match_test "IN_RANGE (INTVAL (op), 14, 15)")))
928
929 ;; Match 16 to 19.
930 (define_predicate "const_16_to_19_operand"
931 (and (match_code "const_int")
932 (match_test "IN_RANGE (INTVAL (op), 16, 19)")))
933
934 ;; Match 16 to 31.
935 (define_predicate "const_16_to_31_operand"
936 (and (match_code "const_int")
937 (match_test "IN_RANGE (INTVAL (op), 16, 31)")))
938
939 ;; Match 20 to 23.
940 (define_predicate "const_20_to_23_operand"
941 (and (match_code "const_int")
942 (match_test "IN_RANGE (INTVAL (op), 20, 23)")))
943
944 ;; Match 24 to 27.
945 (define_predicate "const_24_to_27_operand"
946 (and (match_code "const_int")
947 (match_test "IN_RANGE (INTVAL (op), 24, 27)")))
948
949 ;; Match 28 to 31.
950 (define_predicate "const_28_to_31_operand"
951 (and (match_code "const_int")
952 (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
953
;; True if this is a constant appropriate for an increment or decrement,
;; i.e. exactly the constants 1 and -1 (and only when inc/dec is wanted
;; for the current tuning).
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause extra dependency on flag
     registers, since carry flag is not set.  Avoid them unless optimizing
     for size.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  return op == const1_rtx || op == constm1_rtx;
})
964
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
;; On 64-bit targets any nonimmediate operand is fine; on 32-bit targets
;; the operand must already be in a register.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as input operand of the DImode ashift
;; expander; on 32-bit targets registers and the constants 1/-1 are
;; allowed, matching reg_or_pm1_operand above.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
982
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  /* Replace the MEM by the CONST_VECTOR it loads, if any.  */
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Check that every element except element 0 is zero; element 0
     itself is deliberately left unchecked.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
1004
;; Return true if operand is an integral-mode vector constant that is
;; all ones (i.e. CONSTM1_RTX of its mode).
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))
1010
;; Return true when OP is operand acceptable for vector memory operand.
;; Only AVX can have misaligned memory operand; otherwise the MEM must
;; be aligned to at least the mode's alignment.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))

;; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))

;; Return true when OP is operand acceptable for standard SSE move:
;; any nonimmediate operand, or the zero constant.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))
1027
;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "standard_sse_constant_p (op, mode)")))

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
1043
;; Return true for RTX codes that force SImode address.
;; NOTE(review): matches only on the outer code (SUBREG, ZERO_EXTEND,
;; AND); callers are expected to have validated the address inside.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))
1047
;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  /* A CONST_INT is mode-less; everything else must match the requested
     mode exactly, when one was given.  */
  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* Only the generic address space means no segment override.  */
  return parts.seg == ADDR_SPACE_GENERIC;
})
1066
;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* No scalar index register (VSIB has a vector index instead) and no
     segment override.  */
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  /* Strip (const (plus (unspec ...) ...)) wrappers to look at
	     the underlying UNSPEC, if any.  */
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      /* These relocations are PC-relative, hence unusable.  */
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      /* Symbolic displacements become RIP-relative under 64-bit PIC.  */
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
1108
;; Return true if op is valid MPX address operand without base
(define_predicate "address_mpx_no_base_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Despite the predicate name, an index together with a base is what
     is rejected here; either one alone is fine.  */
  if (parts.index && parts.base)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  Under 64-bit PIC, a symbolic displacement
     is only acceptable as a DTPOFF/NTPOFF TLS unspec plus offset.  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp))
    {
      if (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
	return false;
    }

  return true;
})
1140
;; Return true if op is valid MPX address operand without index
(define_predicate "address_mpx_no_index_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.index)
    return false;

  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  Under 64-bit PIC, a symbolic displacement
     is only acceptable as a DTPOFF/NTPOFF TLS unspec plus offset
     (same check as address_mpx_no_base_operand above).  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp)
      && (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
    return false;

  return true;
})
1170
;; Match any MEM; presumably used with match_operator to wrap VSIB
;; memory references in insn patterns -- TODO confirm against sse.md.
(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Match any MEM; presumably used with match_operator in MPX (bnd)
;; patterns -- TODO confirm against callers.
(define_predicate "bnd_mem_operator"
  (match_code "mem"))
1176
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64bit and thus causing memory
     mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  /* The recorded alignment already proves it.  */
  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      /* The displacement must be a multiple of 4.  */
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
1237
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand with a displacement only
;; (i.e. no base and no index register); 32-bit only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})
1268
;; Return true if OP is memory operand that cannot be represented
;; by the modRM array, i.e. its memory_address_length is nonzero.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
1274
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      /* Translate the FP condition into the integer condition actually
	 tested after the FP compare.  */
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})
1303
;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs, and require AVX.

(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))

;; Signed integer comparisons plus EQ/NE.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Unsigned integer comparisons plus EQ/NE.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Comparisons usable with the bt (bit test) instruction patterns.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1321
;; Return true if OP is a valid comparison operator in valid mode.
;; Each condition code is only available in certain CC modes, depending
;; on which flag bits the producing instruction sets.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      return true;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return true;
      return false;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1357
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      /* Translate the FP condition into the integer condition actually
	 tested after the FP compare.  */
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  return code == LTU;
})

;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1383
;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
			     == IX86_FPCMP_ARITH")
		(match_operand 0 "comparison_operator")
		(match_operand 0 "ix86_trivial_fp_comparison_operator")))

;; Same as above, but for swapped comparison used in *jcc<fp>_<int>_i387.
(define_predicate "ix86_swapped_fp_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum rtx_code code = GET_CODE (op);
  bool ret;

  /* Temporarily swap the condition in place, test the swapped form,
     then restore the original code before returning.  */
  PUT_CODE (op, swap_condition (code));
  ret = ix86_fp_comparison_operator (op, mode);
  PUT_CODE (op, code);
  return ret;
})
1405
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiplication is included only when the tuning says HImode imul should
;; be promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; Match a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Match an ABS or NEG rtx.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))
1453
;; Return true if OP is a memory operand, aligned to
;; less than its natural alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
1459
;; Return true if OP is an EMMS operation, known to be a PARALLEL.
;; The PARALLEL must have 17 elements: the insn itself plus a clobber
;; of each of the eight x87 stack registers and eight MMX registers.
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      /* Elements 1..8: clobbers of the x87 stack registers (XFmode).  */
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
	return false;

      /* Elements 9..16: clobbers of the MMX registers (DImode).  */
      elt = XVECEXP (op, 0, i+9);

      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
	return false;
    }
  return true;
})
1489
;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
;; Elements 1..nregs must each set an SSE register (V8SImode) to zero.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  /* 16 SSE registers in 64-bit mode, 8 otherwise.  */
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})

;; Return true if OP is a vzeroupper operation.
(define_predicate "vzeroupper_operation"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1517
;; Return true if OP is an addsub vec_merge operation: a vec_merge of a
;; PLUS and a MINUS (in either order) whose mask selects the MINUS result
;; in the even lanes and the PLUS result in the odd lanes.
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
{
  rtx op0, op1;
  int swapped;
  HOST_WIDE_INT mask;
  int nunits, elt;

  op0 = XEXP (op, 0);
  op1 = XEXP (op, 1);

  /* Sanity check.  Callers are expected to pass only PLUS/MINUS
     combinations here; anything else is an internal error.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = 0;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = 1;
  else
    gcc_unreachable ();

  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);

  for (elt = 0; elt < nunits; elt++)
    {
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));

      /* Even lanes must come from the MINUS, odd lanes from the PLUS.  */
      if (bit != ((elt & 1) ^ swapped))
	return false;
    }

  return true;
})
1552
;; Return true if OP is an addsub vec_select/vec_concat operation:
;; a vec_select over a vec_concat of a PLUS and a MINUS (in either
;; order) whose selection starts at the MINUS half.
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
{
  rtx op0, op1;
  bool swapped;
  int nunits, elt;

  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);

  /* Sanity check.  Callers are expected to pass only PLUS/MINUS
     combinations here; anything else is an internal error.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = false;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = true;
  else
    gcc_unreachable ();

  nunits = GET_MODE_NUNITS (mode);
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
    return false;

  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));

  /* The selection must begin in whichever half of the concat holds
     the MINUS.  */
  return elt == (swapped ? nunits : 0);
})
1583
;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      /* Even indices from the first vector, odd from the second.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
	  return false;
    }
  else if (elt == nelt)
    {
      /* Even indices from the second vector, odd from the first.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
	  return false;
    }
  else
    return false;

  return true;
})
1615
;; Return true if OP is a parallel for a vbroadcast permute.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})

;; Return true if OP is a parallel for a palignr permute.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);

  /* Check that an order in the permutation is suitable for palignr.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm".  Every
     index must follow the first one cyclically.  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
      return false;
  return true;
})
1647
;; Return true if OP is a proper third operand to vpblendw256:
;; a const_int whose high byte duplicates its low byte, since the
;; 8-bit immediate is applied to both 128-bit lanes.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  return val == ((low << 8) | low);
})

;; Return true if OP is vector_operand or CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_code "const_vector")))

;; Return true if OP is either -1 constant or stored in register.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))