c3f442eb8ac556ee44ede54bb4ef2e5dceeb4eaf
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
19
;; Return true if OP is either an i387 or an SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return true if OP is an i387 fp register (an x87 stack register).
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "STACK_REGNO_P (REGNO (op))")))

;; True if the operand is a GENERAL class register.
(define_predicate "general_reg_operand"
  (and (match_code "reg")
       (match_test "GENERAL_REGNO_P (REGNO (op))")))

;; True if the operand is a nonimmediate operand with GENERAL class register.
;; A bare REG must belong to the GENERAL class; any other operand falls
;; back to the generic nonimmediate_operand check.
(define_predicate "nonimmediate_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "nonimmediate_operand")))

;; True if the operand is a general operand with GENERAL class register.
;; Like nonimmediate_gr_operand, but non-REG operands are checked with
;; general_operand (so immediates are allowed).
(define_predicate "general_gr_operand"
  (if_then_else (match_code "reg")
    (match_test "GENERAL_REGNO_P (REGNO (op))")
    (match_operand 0 "general_operand")))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is an SSE register.
(define_predicate "sse_reg_operand"
  (and (match_code "reg")
       (match_test "SSE_REGNO_P (REGNO (op))")))

;; True if the operand is one of the "new" extended (REX) SSE registers
;; introduced with AVX-512.
(define_predicate "ext_sse_reg_operand"
  (and (match_code "reg")
       (match_test "EXT_REX_SSE_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register.
(define_predicate "any_QIreg_operand"
  (and (match_code "reg")
       (match_test "ANY_QI_REGNO_P (REGNO (op))")))

;; Return true if op is one of QImode registers: %[abcd][hl].
(define_predicate "QIreg_operand"
  (and (match_code "reg")
       (match_test "QI_REGNO_P (REGNO (op))")))

;; Return true if op is a QImode register operand other than %[abcd][hl].
;; Such registers exist only on 64-bit targets, hence the TARGET_64BIT test.
(define_predicate "ext_QIreg_operand"
  (and (match_test "TARGET_64BIT")
       (match_code "reg")
       (not (match_test "QI_REGNO_P (REGNO (op))"))))

;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Match a DI, SI or HImode register for a zero_extract.
;; DImode is accepted only on 64-bit targets.
(define_special_predicate "ext_register_operand"
  (and (match_operand 0 "register_operand")
       (ior (and (match_test "TARGET_64BIT")
                 (match_test "GET_MODE (op) == DImode"))
            (match_test "GET_MODE (op) == SImode")
            (match_test "GET_MODE (op) == HImode"))))
95
;; Match register operands, but include memory operands for TARGET_SSE_MATH.
(define_predicate "register_ssemem_operand"
  (if_then_else
    (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Match nonimmediate operands, but exclude memory operands
;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
(define_predicate "nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
         (not (match_test "TARGET_MIX_SSE_I387")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; The above predicate, suitable for x87 arithmetic operators: memory
;; operands are also excluded unless x87 arithmetic is enabled for MODE.
(define_predicate "x87nonimm_ssenomem_operand"
  (if_then_else
    (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
         (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
    (match_operand 0 "register_operand")
    (match_operand 0 "nonimmediate_operand")))

;; Match register operands, but include memory operands for TARGET_SSE4_1.
(define_predicate "register_sse4nonimm_operand"
  (if_then_else (match_test "TARGET_SSE4_1")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; Return true if VALUE is a symbol reference.
(define_predicate "symbol_operand"
  (match_code "symbol_ref"))
129
;; Return true if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  /* On 32-bit targets every immediate fits, so defer to the generic
     predicate.  */
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      {
	/* The constant must survive sign-extending truncation to SImode,
	   i.e. fit in a 32-bit sign-extended immediate.  */
	HOST_WIDE_INT val = INTVAL (op);
	return trunc_int_for_mode (val, SImode) == val;
      }
    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit:
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return true;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  /* The displacement itself must fit in 32 bits.  */
	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024)
		return true;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024)
		return true;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0)
		return true;
	      break;

	    case UNSPEC:
	      /* Offsets are accepted for the TLS-related unspecs only.  */
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  return true;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return false;
})
255
;; Return true if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* The value must fit in 32 bits when zero extended.  */
      return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;

      /* Load the external function address via the GOT slot.  */
      if (ix86_force_load_from_GOT_p (op))
	return false;

      /* For certain code models, the symbolic references are known to fit.  */
      return (ix86_cmodel == CM_SMALL
	      || (ix86_cmodel == CM_MEDIUM
		  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);

	  if (ix86_cmodel == CM_LARGE)
	    return false;
	  if (!CONST_INT_P (op2))
	    return false;

	  /* The displacement itself must fit in 32 bits.  */
	  HOST_WIDE_INT offset = INTVAL (op2);
	  if (trunc_int_for_mode (offset, SImode) != offset)
	    return false;

	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return false;

	      /* Load the external function address via the GOT slot.  */
	      if (ix86_force_load_from_GOT_p (op1))
		return false;

	      /* For small code model we may accept pretty large positive
		 offsets, since one bit is available for free.  Negative
		 offsets are limited by the size of NULL pointer area
		 specified by the ABI.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset > -0x10000)
		return true;
	      /* ??? For the kernel, we may accept adjustment of
		 -0x10000000, since we know that it will just convert
		 negative address space to positive, but perhaps this
		 is not worthwhile.  */
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset > -0x10000)
		return true;
	      break;

	    default:
	      return false;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }
  return false;
})
345
;; Return true if VALUE is a constant integer whose low and high words satisfy
;; x86_64_immediate_operand.
(define_predicate "x86_64_hilo_int_operand"
  (match_code "const_int,const_wide_int")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* A CONST_INT fits in a single HOST_WIDE_INT; check it as a whole.  */
      return x86_64_immediate_operand (op, mode);

    case CONST_WIDE_INT:
      /* Exactly two elements are expected; check each word separately as
	 a DImode immediate.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
      return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
					DImode)
	      && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
									1)),
					   DImode));

    default:
      gcc_unreachable ();
    }
})
368
;; Return true if VALUE is a constant integer whose value is
;; x86_64_immediate_operand value zero extended from word mode to mode.
(define_predicate "x86_64_dwzext_immediate_operand"
  (match_code "const_int,const_wide_int")
{
  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* 32-bit targets: the value must fit in 32 bits unsigned.
	 64-bit targets: the value must be a non-negative 32-bit
	 sign-extendable immediate.  */
      if (!TARGET_64BIT)
	return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
      return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);

    case CONST_WIDE_INT:
      /* Only possible for 64-bit targets: low word must be a valid
	 SImode sign-extended immediate, high word must be zero.  */
      if (!TARGET_64BIT)
	return false;
      return (CONST_WIDE_INT_NUNITS (op) == 2
	      && CONST_WIDE_INT_ELT (op, 1) == 0
	      && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
		  == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));

    default:
      gcc_unreachable ();
    }
})
393
;; Return true if size of VALUE can be stored in a sign
;; extended immediate field.
(define_predicate "x86_64_immediate_size_operand"
  (and (match_code "symbol_ref")
       (ior (not (match_test "TARGET_64BIT"))
            (match_test "ix86_cmodel == CM_SMALL")
            (match_test "ix86_cmodel == CM_KERNEL"))))

;; Return true if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP's both words are general operands representable
;; on x86_64.
(define_predicate "x86_64_hilo_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_hilo_int_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is non-VOIDmode general operand representable
;; on x86_64.  This predicate is used in sign-extending conversion
;; operations that require non-VOIDmode immediate operands.
(define_predicate "x86_64_sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "x86_64_general_operand")))

;; Return true if OP is non-VOIDmode general operand.  This predicate
;; is used in sign-extending conversion operations that require
;; non-VOIDmode immediate operands.
(define_predicate "sext_operand"
  (and (match_test "GET_MODE (op) != VOIDmode")
       (match_operand 0 "general_operand")))
430
;; Return true if OP is representable on x86_64 as zero-extended operand.
;; This predicate is used in zero-extending conversion operations that
;; require non-VOIDmode immediate operands.
(define_predicate "x86_64_zext_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (and (match_operand 0 "x86_64_zext_immediate_operand")
              (match_test "GET_MODE (op) != VOIDmode")))
    (match_operand 0 "nonimmediate_operand")))

;; Return true if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand")
         (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return true if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return true if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand")
         (match_operand 0 "x86_64_zext_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))
464
;; Return true when operand is a PIC expression that can be computed by
;; a lea operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return false;

  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      /* Strip an optional integer offset.  */
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
	op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
	  && (XINT (op, 1) == UNSPEC_GOTOFF
	      || XINT (op, 1) == UNSPEC_GOT))
	return false;
    }

  return symbolic_operand (op, mode);
})
487
;; Return true if OP is a nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (and (match_operand 0 "nonmemory_operand")
       (not (match_operand 0 "pic_32bit_operand"))))
492
;; Return true if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case CONST:
      op = XEXP (op, 0);
      /* A bare symbol, label, or one of the recognized GOT/PC-relative
	 unspecs wrapped in CONST is symbolic.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF
	  || (GET_CODE (op) == UNSPEC
	      && (XINT (op, 1) == UNSPEC_GOT
		  || XINT (op, 1) == UNSPEC_GOTOFF
		  || XINT (op, 1) == UNSPEC_PCREL
		  || XINT (op, 1) == UNSPEC_GOTPCREL)))
	return true;
      /* Otherwise only a PLUS of something symbolic and an integer
	 constant is acceptable.  */
      if (GET_CODE (op) != PLUS
	  || !CONST_INT_P (XEXP (op, 1)))
	return false;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
	  || XINT (op, 1) != UNSPEC_GOTOFF)
	return false;

      /* The @GOTOFF unspec must wrap a symbol or label.  */
      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return true;
      return false;

    default:
      gcc_unreachable ();
    }
})
537
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  /* Strip a constant integer offset, if any.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  /* Labels are always local.  */
  if (GET_CODE (op) == LABEL_REF)
    return true;

  if (GET_CODE (op) != SYMBOL_REF)
    return false;

  /* TLS symbols need special handling; never treat them as plain local
     symbolic operands.  */
  if (SYMBOL_REF_TLS_MODEL (op))
    return false;

  /* Dll-imported symbols are always external.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  if (SYMBOL_REF_LOCAL_P (op))
    return true;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return true;

  return false;
})
573
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (not (match_test "TARGET_VXWORKS_RTP"))
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols (any nonzero TLS model).
(define_special_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op)")))

;; True if OP is exactly the TLS module base symbol.
(define_special_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

;; True if OP is an insn pattern that ix86_tls_address_pattern_p
;; recognizes as containing a TLS address.
(define_predicate "tls_address_pattern"
  (and (match_code "set,parallel,unspec,unspec_volatile")
       (match_test "ix86_tls_address_pattern_p (op)")))
598
;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  /* Large code models cannot reach the target with a 32-bit pc-relative
     displacement.  */
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  /* Dll-imported symbols are always external and must be reached
     indirectly.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})
609
;; P6 processors will jump to the address after the decrement when %esp
;; is used as a call operand, so they will execute return address as a code.
;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.

;; Like register_no_elim_operand, but additionally reject the 32-bit
;; stack pointer as a call operand (see the errata note above).
(define_predicate "call_register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);

  if (!TARGET_64BIT && op == stack_pointer_rtx)
    return false;

  return register_no_elim_operand (op, mode);
})

;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
	   || op == frame_pointer_rtx
	   || IN_RANGE (REGNO (op),
			FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (SUBREG_P (op))
    op = SUBREG_REG (op);
  /* Use the strict index-register check after reload, the nonstrict one
     before.  */
  if (reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})
651
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for indirect branch.  Memory operands are
;; not allowed for TARGET_X32.
(define_predicate "indirect_branch_operand"
  (ior (match_operand 0 "register_operand")
       (and (not (match_test "TARGET_X32"))
            (match_operand 0 "memory_operand"))))
669
;; Return true if OP is a memory operand that can be used in sibcalls.
;; Since sibcall never returns, we can only use call-clobbered register
;; as GOT base.  Allow GOT slot here only with pseudo register as GOT
;; base.  Properly handle sibcall over GOT slot with *sibcall_GOT_32
;; and *sibcall_value_GOT_32 patterns.
(define_predicate "sibcall_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  /* A constant address is always acceptable.  */
  if (CONSTANT_P (op))
    return true;
  /* Otherwise allow base register plus a GOT-slot constant, where the
     base is a pseudo or a call-clobbered hard register.  */
  if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
    {
      int regno = REGNO (XEXP (op, 0));
      if (!HARD_REGISTER_NUM_P (regno) || call_used_regs[regno])
	{
	  op = XEXP (op, 1);
	  if (GOT32_symbol_operand (op, VOIDmode))
	    return true;
	}
    }
  return false;
})

;; Return true if OP is a GOT memory operand, i.e. a memory reference
;; whose address is a GOTPCREL unspec wrapped in a CONST.
(define_predicate "GOT_memory_operand"
  (match_operand 0 "memory_operand")
{
  op = XEXP (op, 0);
  return (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC
	  && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL);
})
703
;; Test for a valid operand for a call instruction.
;; Allow constant call address operands in Pmode only.
(define_special_predicate "call_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "call_register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_special_predicate "sibcall_insn_operand"
  (ior (match_test "constant_call_address_operand
		     (op, mode == VOIDmode ? mode : Pmode)")
       (match_operand 0 "register_no_elim_operand")
       (ior (and (not (match_test "TARGET_X32"))
		 (match_operand 0 "sibcall_memory_operand"))
	    (and (match_test "TARGET_X32 && Pmode == DImode")
		 (match_operand 0 "GOT_memory_operand")))))

;; Return true if OP is a 32-bit GOT symbol operand, i.e. a CONST
;; wrapping an UNSPEC_GOT.
(define_predicate "GOT32_symbol_operand"
  (match_test "GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == UNSPEC
	       && XINT (XEXP (op, 0), 1) == UNSPEC_GOT"))
730
;; Match exactly zero (or an all-zero vector/double constant).
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  /* With VOIDmode, compare against the constant's own mode.  */
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match one or a vector with all elements equal to one.
(define_predicate "const1_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST1_RTX (mode);
})

;; Match exactly -1.
(define_predicate "constm1_operand"
  (and (match_code "const_int")
       (match_test "op == constm1_rtx")))
753
;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation.
;; Compare after DImode truncation so the test is host-width independent.
(define_predicate "const_32bit_mask"
  (and (match_code "const_int")
       (match_test "trunc_int_for_mode (INTVAL (op), DImode)
		    == (HOST_WIDE_INT) 0xffffffff")))
769
;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 2:
    case 4:
    case 8:
      return true;
    default:
      return false;
    }
})

;; Match 1, 2, or 3.  Used for lea shift amounts.
(define_predicate "const123_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 1:
    case 2:
    case 3:
      return true;
    default:
      return false;
    }
})

;; Match 2, 3, 6, or 7.
(define_predicate "const2367_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 2:
    case 3:
    case 6:
    case 7:
      return true;
    default:
      return false;
    }
})

;; Match 1, 2, 4, or 8.
(define_predicate "const1248_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 1:
    case 2:
    case 4:
    case 8:
      return true;
    default:
      return false;
    }
})

;; Match 3, 5, or 9.  Used for leal multiplicands.
(define_predicate "const359_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 3:
    case 5:
    case 9:
      return true;
    default:
      return false;
    }
})

;; Match 4 or 8 to 11.  Used for embedded rounding.
(define_predicate "const_4_or_8_to_11_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  return val == 4 || IN_RANGE (val, 8, 11);
})

;; Match 4 or 8.  Used for SAE.
(define_predicate "const48_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  return val == 4 || val == 8;
})
825
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (ior (match_test "op == const0_rtx")
            (match_test "op == const1_rtx"))))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 4.
(define_predicate "const_0_to_4_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 4)")))

;; Match 0 to 5.
(define_predicate "const_0_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 5)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  /* Accept any multiple of 8 in [0, 255*8].  */
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 to 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 9.
(define_predicate "const_8_to_9_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 9)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 8 to 15.
(define_predicate "const_8_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 15)")))

;; Match 10 to 11.
(define_predicate "const_10_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 10, 11)")))

;; Match 12 to 13.
(define_predicate "const_12_to_13_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 13)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match 14 to 15.
(define_predicate "const_14_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 14, 15)")))

;; Match 16 to 19.
(define_predicate "const_16_to_19_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 19)")))

;; Match 16 to 31.
(define_predicate "const_16_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 16, 31)")))

;; Match 20 to 23.
(define_predicate "const_20_to_23_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 20, 23)")))

;; Match 24 to 27.
(define_predicate "const_24_to_27_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 24, 27)")))

;; Match 28 to 31.
(define_predicate "const_28_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
971
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause extra dependency on flag
     registers, since carry flag is not set.  Reject the constants in that
     case, unless we are optimizing this insn for size.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return false;
  /* Only +1 and -1 can be implemented by inc/dec.  */
  return op == const1_rtx || op == constm1_rtx;
})
982
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (ior (match_test "op == const1_rtx")
		 (match_test "op == constm1_rtx")))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as input of the DImode ashift expander:
;; any nonimmediate operand on 64-bit targets, otherwise a register
;; or the constants 1 / -1 (see reg_or_pm1_operand above).
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
1000
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  /* Look through the constant-pool reference to the CONST_VECTOR.  */
  op = avoid_constant_pool_reference (op);

  if (GET_CODE (op) != CONST_VECTOR)
    return false;

  n_elts = CONST_VECTOR_NUNITS (op);

  /* Require every element except element 0 to be zero.  Element 0
     itself is never inspected, so it may hold any value.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
	return false;
    }
  return true;
})
1022
;; Return true if operand is an integral-mode vector constant that is
;; all ones.
(define_predicate "vector_all_ones_operand"
  (and (match_code "const_vector")
       (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
       (match_test "op == CONSTM1_RTX (GET_MODE (op))")))

;; Return true when OP is operand acceptable for vector memory operand.
;; Only AVX can have misaligned memory operand; otherwise the MEM must
;; be aligned to at least the mode's natural alignment.
(define_predicate "vector_memory_operand"
  (and (match_operand 0 "memory_operand")
       (ior (match_test "TARGET_AVX")
	    (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))

;; Return true when OP is register_operand or vector_memory_operand.
(define_predicate "vector_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "vector_memory_operand")))

;; Return true when OP is operand acceptable for standard SSE move:
;; any nonimmediate operand, or the zero constant.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return true when OP is either nonimmediate operand, or any
;; CONST_VECTOR.
(define_predicate "nonimmediate_or_const_vector_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_code "const_vector")))

;; Return true when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_test "standard_sse_constant_p (op, mode)")))

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true for a memory operand that x86_extended_reg_mentioned_p
;; reports as not mentioning any REX-extended register.
(define_predicate "norex_memory_operand"
  (and (match_operand 0 "memory_operand")
       (not (match_test "x86_extended_reg_mentioned_p (op)"))))
1065
;; Return true for RTX codes that force SImode address.
(define_predicate "SImode_address_operand"
  (match_code "subreg,zero_extend,and"))

;; Return true if op is a valid address for LEA, and does not contain
;; a segment override.  Defined as a special predicate to allow
;; mode-less const_int operands pass to address_operand.
(define_special_predicate "address_no_seg_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  /* A CONST_INT is mode-less and always acceptable; anything else
     must carry the requested mode, when one was requested.  */
  if (!CONST_INT_P (op)
      && mode != VOIDmode
      && GET_MODE (op) != mode)
    return false;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* Accept only addresses in the generic address space, i.e. with no
     segment override.  */
  return parts.seg == ADDR_SPACE_GENERIC;
})
1088
;; Return true if op is a valid base register, displacement or
;; sum of base register and displacement for VSIB addressing.
(define_predicate "vsib_address_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;
  rtx disp;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  /* VSIB supplies its own vector index register, so no index register
     and no segment override may appear here.  */
  if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* VSIB addressing doesn't support (%rip).  */
  if (parts.disp)
    {
      disp = parts.disp;
      if (GET_CODE (disp) == CONST)
	{
	  disp = XEXP (disp, 0);
	  if (GET_CODE (disp) == PLUS)
	    disp = XEXP (disp, 0);
	  /* Reject displacement forms that are emitted rip-relative.  */
	  if (GET_CODE (disp) == UNSPEC)
	    switch (XINT (disp, 1))
	      {
	      case UNSPEC_GOTPCREL:
	      case UNSPEC_PCREL:
	      case UNSPEC_GOTNTPOFF:
		return false;
	      }
	}
      /* Under 64-bit PIC, plain symbols and labels are also
	 rip-relative.  */
      if (TARGET_64BIT
	  && flag_pic
	  && (GET_CODE (disp) == SYMBOL_REF
	      || GET_CODE (disp) == LABEL_REF))
	return false;
    }

  return true;
})
1130
;; Return true if op is valid MPX address operand without base.
(define_predicate "address_mpx_no_base_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Reject addresses that use both an index and a base register.  */
  if (parts.index && parts.base)
    return false;

  /* No segment overrides.  */
  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  Under 64-bit PIC, of the symbolic
     displacements only the TLS-related UNSPEC_DTPOFF / UNSPEC_NTPOFF
     forms are acceptable.  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp))
    {
      if (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF))
	return false;
    }

  return true;
})

;; Return true if op is valid MPX address operand without index.
(define_predicate "address_mpx_no_index_operand"
  (match_test "address_operand (op, VOIDmode)")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* No index register allowed at all.  */
  if (parts.index)
    return false;

  /* No segment overrides.  */
  if (parts.seg != ADDR_SPACE_GENERIC)
    return false;

  /* Do not support (%rip).  Same displacement restrictions as in
     address_mpx_no_base_operand above.  */
  if (parts.disp && flag_pic && TARGET_64BIT
      && SYMBOLIC_CONST (parts.disp)
      && (GET_CODE (parts.disp) != CONST
	  || GET_CODE (XEXP (parts.disp, 0)) != PLUS
	  || GET_CODE (XEXP (XEXP (parts.disp, 0), 0)) != UNSPEC
	  || !CONST_INT_P (XEXP (XEXP (parts.disp, 0), 1))
	  || (XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_DTPOFF
	      && XINT (XEXP (XEXP (parts.disp, 0), 0), 1) != UNSPEC_NTPOFF)))
    return false;

  return true;
})

;; Operator predicate matching any MEM, used for VSIB memory references.
(define_predicate "vsib_mem_operator"
  (match_code "mem"))

;; Operator predicate matching any MEM, used for MPX bound (bnd) insns.
(define_predicate "bnd_mem_operator"
  (match_code "mem"))
1198
;; Return true if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return true;

  /* All patterns using aligned_operand on memory operands end up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return false;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return false;

  /* The MEM itself may already record sufficient alignment.  */
  if (MEM_ALIGN (op) >= 32)
    return true;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return true;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  if (parts.base && SUBREG_P (parts.base))
    parts.base = SUBREG_REG (parts.base);
  if (parts.index && SUBREG_P (parts.index))
    parts.index = SUBREG_REG (parts.index);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return false;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return false;
    }
  if (parts.disp)
    {
      /* The displacement must be a multiple of 4 to preserve alignment.  */
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3))
	return false;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return true;
})
1259
;; Return true if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand with a displacement only
;; (no base and no index register).  Never matches on 64-bit targets.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return false;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return false;

  return parts.disp != NULL_RTX;
})

;; Return true if OP is memory operand that cannot be represented
;; by the modRM array, i.e. needs extra address bytes.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op, false)")))
1296
;; Return true if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      /* Translate the FP condition into the integer condition that the
	 flag register actually encodes.  */
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCCmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return true;
    default:
      return false;
    }
})

;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.

(define_predicate "sse_comparison_operator"
  (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
       (and (match_test "TARGET_AVX")
	    (match_code "ge,gt,uneq,unle,unlt,ltgt"))))

;; Return true for signed (or equality) integer comparison codes.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Return true for unsigned (or equality) integer comparison codes.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Return true for the comparison codes usable with bt-style sequences.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1342
;; Return true if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* FP compares have their own notion of trivially testable codes.  */
  if (inmode == CCFPmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  /* For integer compares, which codes are testable depends on the CC
     mode the comparison was emitted in.  */
  switch (code)
    {
    case EQ: case NE:
      if (inmode == CCGZmode)
	return false;
      return true;
    case GE: case LT:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
	return true;
      return false;
    case GEU: case LTU:
      if (inmode == CCGZmode)
	return true;
      /* FALLTHRU */
    case GTU: case LEU:
      if (inmode == CCmode || inmode == CCCmode || inmode == CCGZmode)
	return true;
      return false;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return true;
      return false;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return true;
      return false;
    default:
      return false;
    }
})
1384
;; Return true if OP is a valid comparison operator
;; testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return false;
      /* Translate the FP condition into the integer condition that the
	 flag register actually encodes.  */
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return false;

  /* In CCmode, only LTU tests the carry flag.  */
  return code == LTU;
})

;; Return true if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))

;; Return true if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
    (match_operand 0 "comparison_operator")
    (match_operand 0 "ix86_trivial_fp_comparison_operator")))

;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
1426
;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a plus, minus, and, ior or xor operation.
(define_predicate "plusminuslogic_operator"
  (match_code "plus,minus,and,ior,xor"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return true if OP is a binary operator that can be promoted to wider mode.
;; Multiplication is included only when the tuning says HImode imul
;; should be promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,minus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; Return true for a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Return true for an absolute value or negation operation.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return true if OP is a memory operand, aligned to
;; less than its natural alignment.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
1469
;; Return true if OP is a emms operation, known to be a PARALLEL.
(define_predicate "emms_operation"
  (match_code "parallel")
{
  unsigned i;

  /* Expect 17 elements: element 0 (not inspected here) followed by
     8 x87 register clobbers and 8 MMX register clobbers.  */
  if (XVECLEN (op, 0) != 17)
    return false;

  for (i = 0; i < 8; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      /* Elements 1..8: clobbers of st(0)..st(7) in XFmode.  */
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != XFmode
	  || REGNO (SET_DEST (elt)) != FIRST_STACK_REG + i)
	return false;

      elt = XVECEXP (op, 0, i+9);

      /* Elements 9..16: clobbers of mm0..mm7 in DImode.  */
      if (GET_CODE (elt) != CLOBBER
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != DImode
	  || REGNO (SET_DEST (elt)) != FIRST_MMX_REG + i)
	return false;
    }
  return true;
})

;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  unsigned i, nregs = TARGET_64BIT ? 16 : 8;

  /* One leading element (not inspected here) plus one zeroing SET per
     SSE register.  */
  if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
    return false;

  for (i = 0; i < nregs; i++)
    {
      rtx elt = XVECEXP (op, 0, i+1);

      /* Each element must set SSE register i to the V8SImode zero.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != V8SImode
	  || REGNO (SET_DEST (elt)) != SSE_REGNO (i)
	  || SET_SRC (elt) != CONST0_RTX (V8SImode))
	return false;
    }
  return true;
})

;; Return true if OP is a vzeroupper operation.
(define_predicate "vzeroupper_operation"
  (and (match_code "unspec_volatile")
       (match_test "XINT (op, 1) == UNSPECV_VZEROUPPER")))
1527
;; Return true if OP is an addsub vec_merge operation
(define_predicate "addsub_vm_operator"
  (match_code "vec_merge")
{
  rtx op0, op1;
  int swapped;
  HOST_WIDE_INT mask;
  int nunits, elt;

  op0 = XEXP (op, 0);
  op1 = XEXP (op, 1);

  /* Sanity check: one arm must be a MINUS and the other a PLUS.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = 0;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = 1;
  else
    gcc_unreachable ();

  mask = INTVAL (XEXP (op, 2));
  nunits = GET_MODE_NUNITS (mode);

  /* Check that the merge mask alternates between the two arms on
     even/odd lanes, as addsub requires.  */
  for (elt = 0; elt < nunits; elt++)
    {
      /* bit clear: take from op0, set: take from op1  */
      int bit = !(mask & (HOST_WIDE_INT_1U << elt));

      if (bit != ((elt & 1) ^ swapped))
	return false;
    }

  return true;
})
1562
;; Return true if OP is an addsub vec_select/vec_concat operation
(define_predicate "addsub_vs_operator"
  (and (match_code "vec_select")
       (match_code "vec_concat" "0"))
{
  rtx op0, op1;
  bool swapped;
  int nunits, elt;

  op0 = XEXP (XEXP (op, 0), 0);
  op1 = XEXP (XEXP (op, 0), 1);

  /* Sanity check: one concat arm must be a MINUS and the other a PLUS.  */
  if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
    swapped = false;
  else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
    swapped = true;
  else
    gcc_unreachable ();

  nunits = GET_MODE_NUNITS (mode);
  if (XVECLEN (XEXP (op, 1), 0) != nunits)
    return false;

  /* We already checked that permutation is suitable for addsub,
     so only look at the first element of the parallel.  */
  elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));

  return elt == (swapped ? nunits : 0);
})

;; Return true if OP is a parallel for an addsub vec_select.
(define_predicate "addsub_vs_parallel"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int nelt = XVECLEN (op, 0);
  int elt, i;

  if (nelt < 2)
    return false;

  /* Check that the permutation is suitable for addsub.
     For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }.  */
  elt = INTVAL (XVECEXP (op, 0, 0));
  if (elt == 0)
    {
      /* Even lanes from the first operand, odd lanes offset by nelt.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
	  return false;
    }
  else if (elt == nelt)
    {
      /* The mirrored form: even lanes offset by nelt.  */
      for (i = 1; i < nelt; ++i)
	if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
	  return false;
    }
  else
    return false;

  return true;
})
1625
;; Return true if OP is a parallel for a vbroadcast permute.
(define_predicate "avx_vbroadcast_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  rtx elt = XVECEXP (op, 0, 0);
  int i, nelt = XVECLEN (op, 0);

  /* Don't bother checking there are the right number of operands,
     merely that they're all identical.  */
  for (i = 1; i < nelt; ++i)
    if (XVECEXP (op, 0, i) != elt)
      return false;
  return true;
})

;; Return true if OP is a parallel for a palignr permute.
(define_predicate "palignr_operand"
  (and (match_code "parallel")
       (match_code "const_int" "a"))
{
  int elt = INTVAL (XVECEXP (op, 0, 0));
  int i, nelt = XVECLEN (op, 0);

  /* Check that an order in the permutation is suitable for palignr.
     For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm".  The
     indices must be consecutive modulo the vector length.  */
  for (i = 1; i < nelt; ++i)
    if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
      return false;
  return true;
})

;; Return true if OP is a proper third operand to vpblendw256,
;; i.e. an 8-bit blend mask duplicated into the high byte of the
;; 16-bit constant.
(define_predicate "avx2_pblendw_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT val = INTVAL (op);
  HOST_WIDE_INT low = val & 0xff;
  return val == ((low << 8) | low);
})

;; Return true if OP is vector_operand or CONST_VECTOR.
(define_predicate "general_vector_operand"
  (ior (match_operand 0 "vector_operand")
       (match_code "const_vector")))

;; Return true if OP is either -1 constant or stored in register.
(define_predicate "register_or_constm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == constm1_rtx"))))
1677
;; Return true if the vector ends with between 12 and 18 register saves using
;; RAX as the base address.
(define_predicate "save_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register saves.  The scan stops
     at the first element that is not a reg-to-RAX-relative-mem SET.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src  = SET_SRC (e);
      dest = SET_DEST (e);

      if (!REG_P (src) || !MEM_P (dest))
	break;

      addr = XEXP (dest, 0);

      /* Good if dest address is in RAX.  */
      if (REG_P (addr) && REGNO (addr) == AX_REG)
	continue;

      /* Good if dest address is offset of RAX.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == AX_REG)
	continue;

      break;
    }
  /* Accept only runs of 12 to 18 saves, inclusive.  */
  return (i >= 12 && i <= 18);
})

;; Return true if the vector ends with between 12 and 18 register loads using
;; RSI as the base address.
(define_predicate "restore_multiple"
  (match_code "parallel")
{
  const unsigned len = XVECLEN (op, 0);
  unsigned i;

  /* Starting from end of vector, count register restores.  The scan
     stops at the first element that is not a RSI-relative-mem-to-reg
     SET.  */
  for (i = 0; i < len; ++i)
    {
      rtx src, dest, addr;
      rtx e = XVECEXP (op, 0, len - 1 - i);

      if (GET_CODE (e) != SET)
	break;

      src  = SET_SRC (e);
      dest = SET_DEST (e);

      if (!MEM_P (src) || !REG_P (dest))
	break;

      addr = XEXP (src, 0);

      /* Good if src address is in RSI.  */
      if (REG_P (addr) && REGNO (addr) == SI_REG)
	continue;

      /* Good if src address is offset of RSI.  */
      if (GET_CODE (addr) == PLUS
	  && REG_P (XEXP (addr, 0))
	  && REGNO (XEXP (addr, 0)) == SI_REG)
	continue;

      break;
    }
  /* Accept only runs of 12 to 18 restores, inclusive.  */
  return (i >= 12 && i <= 18);
})