i386.h (enum ix86_fpcmp_strategy): New.
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 ;; Free Software Foundation, Inc.
4 ;;
5 ;; This file is part of GCC.
6 ;;
7 ;; GCC is free software; you can redistribute it and/or modify
8 ;; it under the terms of the GNU General Public License as published by
9 ;; the Free Software Foundation; either version 3, or (at your option)
10 ;; any later version.
11 ;;
12 ;; GCC is distributed in the hope that it will be useful,
13 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 ;; GNU General Public License for more details.
16 ;;
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
20
;; Return nonzero if OP is either an i387 or an SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26 ;; Return nonzero if OP is an i387 fp register.
27 (define_predicate "fp_register_operand"
28 (and (match_code "reg")
29 (match_test "FP_REGNO_P (REGNO (op))")))
30
31 ;; Return nonzero if OP is a non-fp register_operand.
32 (define_predicate "register_and_not_any_fp_reg_operand"
33 (and (match_code "reg")
34 (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36 ;; Return nonzero if OP is a register operand other than an i387 fp register.
37 (define_predicate "register_and_not_fp_reg_operand"
38 (and (match_code "reg")
39 (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41 ;; True if the operand is an MMX register.
42 (define_predicate "mmx_reg_operand"
43 (and (match_code "reg")
44 (match_test "MMX_REGNO_P (REGNO (op))")))
45
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
49 {
50 if (GET_CODE (op) == SUBREG)
51 op = SUBREG_REG (op);
52 return ANY_QI_REG_P (op);
53 })
54
55 ;; Match an SI or HImode register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
58 {
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
61 return 0;
62 if (GET_CODE (op) == SUBREG)
63 op = SUBREG_REG (op);
64
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
67 })
68
;; Return true if op is the AX register.  Use the AX_REG constant
;; rather than a bare 0, matching the BX_REG usage elsewhere in
;; this file (see ext_QIreg_operand).
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))
73
74 ;; Return true if op is the flags register.
75 (define_predicate "flags_reg_operand"
76 (and (match_code "reg")
77 (match_test "REGNO (op) == FLAGS_REG")))
78
79 ;; Return true if op is a QImode register operand other than
80 ;; %[abcd][hl].
81 (define_predicate "ext_QIreg_operand"
82 (and (match_code "reg")
83 (match_test "TARGET_64BIT
84 && GET_MODE (op) == QImode
85 && REGNO (op) > BX_REG")))
86
87 ;; Similarly, but don't check mode of the operand.
88 (define_predicate "ext_QIreg_nomode_operand"
89 (and (match_code "reg")
90 (match_test "TARGET_64BIT
91 && REGNO (op) > BX_REG")))
92
93 ;; Return true if op is not xmm0 register.
94 (define_predicate "reg_not_xmm0_operand"
95 (and (match_operand 0 "register_operand")
96 (match_test "!REG_P (op)
97 || REGNO (op) != FIRST_SSE_REG")))
98
99 ;; As above, but allow nonimmediate operands.
100 (define_predicate "nonimm_not_xmm0_operand"
101 (and (match_operand 0 "nonimmediate_operand")
102 (match_test "!REG_P (op)
103 || REGNO (op) != FIRST_SSE_REG")))
104
;; Return 1 if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
	 to be at least 32, and thus all acceptable constants are
	 represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
	return 1;
      else
	{
	  HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
	  return trunc_int_for_mode (val, SImode) == val;
	}
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit;
	 in CM_SMALL_PIC model we know it fits if it is local to the shared
	 library.  Don't count TLS SYMBOL_REFs here, since they should fit
	 only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
	return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
	      || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
	      || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
	 special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
	switch (XINT (XEXP (op, 0), 1))
	  {
	  case UNSPEC_GOTPCREL:
	  case UNSPEC_DTPOFF:
	  case UNSPEC_GOTNTPOFF:
	  case UNSPEC_NTPOFF:
	    return 1;
	  default:
	    break;
	  }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
	{
	  rtx op1 = XEXP (XEXP (op, 0), 0);
	  rtx op2 = XEXP (XEXP (op, 0), 1);
	  HOST_WIDE_INT offset;

	  if (ix86_cmodel == CM_LARGE)
	    return 0;
	  if (!CONST_INT_P (op2))
	    return 0;
	  offset = trunc_int_for_mode (INTVAL (op2), DImode);
	  switch (GET_CODE (op1))
	    {
	    case SYMBOL_REF:
	      /* TLS symbols are not constant.  */
	      if (SYMBOL_REF_TLS_MODEL (op1))
		return 0;
	      /* For CM_SMALL assume that latest object is 16MB before
		 end of 31bits boundary.  We may also accept pretty
		 large negative constants knowing that all objects are
		 in the positive half of address space.  */
	      if ((ix86_cmodel == CM_SMALL
		   || (ix86_cmodel == CM_MEDIUM
		       && !SYMBOL_REF_FAR_ADDR_P (op1)))
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return 1;
	      /* For CM_KERNEL we know that all objects reside in the
		 negative half of 32bits address space.  We may not
		 accept negative offsets, since they may be just off
		 and we may accept pretty large positive ones.  */
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return 1;
	      break;

	    case LABEL_REF:
	      /* These conditions are similar to SYMBOL_REF ones, just the
		 constraints for code models differ.  */
	      if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
		  && offset < 16*1024*1024
		  && trunc_int_for_mode (offset, SImode) == offset)
		return 1;
	      if (ix86_cmodel == CM_KERNEL
		  && offset > 0
		  && trunc_int_for_mode (offset, SImode) == offset)
		return 1;
	      break;

	    case UNSPEC:
	      switch (XINT (op1, 1))
		{
		case UNSPEC_DTPOFF:
		case UNSPEC_NTPOFF:
		  if (offset > 0
		      && trunc_int_for_mode (offset, SImode) == offset)
		    return 1;
		}
	      break;

	    default:
	      break;
	    }
	}
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
})
231
232 ;; Return 1 if VALUE can be stored in the zero extended immediate field.
233 (define_predicate "x86_64_zext_immediate_operand"
234 (match_code "const_double,const_int,symbol_ref,label_ref,const")
235 {
236 switch (GET_CODE (op))
237 {
238 case CONST_DOUBLE:
239 if (HOST_BITS_PER_WIDE_INT == 32)
240 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
241 else
242 return 0;
243
244 case CONST_INT:
245 if (HOST_BITS_PER_WIDE_INT == 32)
246 return INTVAL (op) >= 0;
247 else
248 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
249
250 case SYMBOL_REF:
251 /* For certain code models, the symbolic references are known to fit. */
252 /* TLS symbols are not constant. */
253 if (SYMBOL_REF_TLS_MODEL (op))
254 return false;
255 return (ix86_cmodel == CM_SMALL
256 || (ix86_cmodel == CM_MEDIUM
257 && !SYMBOL_REF_FAR_ADDR_P (op)));
258
259 case LABEL_REF:
260 /* For certain code models, the code is near as well. */
261 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
262
263 case CONST:
264 /* We also may accept the offsetted memory references in certain
265 special cases. */
266 if (GET_CODE (XEXP (op, 0)) == PLUS)
267 {
268 rtx op1 = XEXP (XEXP (op, 0), 0);
269 rtx op2 = XEXP (XEXP (op, 0), 1);
270
271 if (ix86_cmodel == CM_LARGE)
272 return 0;
273 switch (GET_CODE (op1))
274 {
275 case SYMBOL_REF:
276 /* TLS symbols are not constant. */
277 if (SYMBOL_REF_TLS_MODEL (op1))
278 return 0;
279 /* For small code model we may accept pretty large positive
280 offsets, since one bit is available for free. Negative
281 offsets are limited by the size of NULL pointer area
282 specified by the ABI. */
283 if ((ix86_cmodel == CM_SMALL
284 || (ix86_cmodel == CM_MEDIUM
285 && !SYMBOL_REF_FAR_ADDR_P (op1)))
286 && CONST_INT_P (op2)
287 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
288 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
289 return 1;
290 /* ??? For the kernel, we may accept adjustment of
291 -0x10000000, since we know that it will just convert
292 negative address space to positive, but perhaps this
293 is not worthwhile. */
294 break;
295
296 case LABEL_REF:
297 /* These conditions are similar to SYMBOL_REF ones, just the
298 constraints for code models differ. */
299 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
300 && CONST_INT_P (op2)
301 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
302 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
303 return 1;
304 break;
305
306 default:
307 return 0;
308 }
309 }
310 break;
311
312 default:
313 gcc_unreachable ();
314 }
315 return 0;
316 })
317
318 ;; Return nonzero if OP is general operand representable on x86_64.
319 (define_predicate "x86_64_general_operand"
320 (if_then_else (match_test "TARGET_64BIT")
321 (ior (match_operand 0 "nonimmediate_operand")
322 (match_operand 0 "x86_64_immediate_operand"))
323 (match_operand 0 "general_operand")))
324
325 ;; Return nonzero if OP is general operand representable on x86_64
326 ;; as either sign extended or zero extended constant.
327 (define_predicate "x86_64_szext_general_operand"
328 (if_then_else (match_test "TARGET_64BIT")
329 (ior (match_operand 0 "nonimmediate_operand")
330 (ior (match_operand 0 "x86_64_immediate_operand")
331 (match_operand 0 "x86_64_zext_immediate_operand")))
332 (match_operand 0 "general_operand")))
333
334 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
335 (define_predicate "x86_64_nonmemory_operand"
336 (if_then_else (match_test "TARGET_64BIT")
337 (ior (match_operand 0 "register_operand")
338 (match_operand 0 "x86_64_immediate_operand"))
339 (match_operand 0 "nonmemory_operand")))
340
341 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
342 (define_predicate "x86_64_szext_nonmemory_operand"
343 (if_then_else (match_test "TARGET_64BIT")
344 (ior (match_operand 0 "register_operand")
345 (ior (match_operand 0 "x86_64_immediate_operand")
346 (match_operand 0 "x86_64_zext_immediate_operand")))
347 (match_operand 0 "nonmemory_operand")))
348
349 ;; Return true when operand is PIC expression that can be computed by lea
350 ;; operation.
351 (define_predicate "pic_32bit_operand"
352 (match_code "const,symbol_ref,label_ref")
353 {
354 if (!flag_pic)
355 return 0;
356 /* Rule out relocations that translate into 64bit constants. */
357 if (TARGET_64BIT && GET_CODE (op) == CONST)
358 {
359 op = XEXP (op, 0);
360 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
361 op = XEXP (op, 0);
362 if (GET_CODE (op) == UNSPEC
363 && (XINT (op, 1) == UNSPEC_GOTOFF
364 || XINT (op, 1) == UNSPEC_GOT))
365 return 0;
366 }
367 return symbolic_operand (op, mode);
368 })
369
370
371 ;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
372 (define_predicate "x86_64_movabs_operand"
373 (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
374 (match_operand 0 "nonmemory_operand")
375 (ior (match_operand 0 "register_operand")
376 (and (match_operand 0 "const_double_operand")
377 (match_test "GET_MODE_SIZE (mode) <= 8")))))
378
379 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
380 ;; reference and a constant.
381 (define_predicate "symbolic_operand"
382 (match_code "symbol_ref,label_ref,const")
383 {
384 switch (GET_CODE (op))
385 {
386 case SYMBOL_REF:
387 case LABEL_REF:
388 return 1;
389
390 case CONST:
391 op = XEXP (op, 0);
392 if (GET_CODE (op) == SYMBOL_REF
393 || GET_CODE (op) == LABEL_REF
394 || (GET_CODE (op) == UNSPEC
395 && (XINT (op, 1) == UNSPEC_GOT
396 || XINT (op, 1) == UNSPEC_GOTOFF
397 || XINT (op, 1) == UNSPEC_GOTPCREL)))
398 return 1;
399 if (GET_CODE (op) != PLUS
400 || !CONST_INT_P (XEXP (op, 1)))
401 return 0;
402
403 op = XEXP (op, 0);
404 if (GET_CODE (op) == SYMBOL_REF
405 || GET_CODE (op) == LABEL_REF)
406 return 1;
407 /* Only @GOTOFF gets offsets. */
408 if (GET_CODE (op) != UNSPEC
409 || XINT (op, 1) != UNSPEC_GOTOFF)
410 return 0;
411
412 op = XVECEXP (op, 0, 0);
413 if (GET_CODE (op) == SYMBOL_REF
414 || GET_CODE (op) == LABEL_REF)
415 return 1;
416 return 0;
417
418 default:
419 gcc_unreachable ();
420 }
421 })
422
423 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
424 (define_predicate "pic_symbolic_operand"
425 (match_code "const")
426 {
427 op = XEXP (op, 0);
428 if (TARGET_64BIT)
429 {
430 if (GET_CODE (op) == UNSPEC
431 && XINT (op, 1) == UNSPEC_GOTPCREL)
432 return 1;
433 if (GET_CODE (op) == PLUS
434 && GET_CODE (XEXP (op, 0)) == UNSPEC
435 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
436 return 1;
437 }
438 else
439 {
440 if (GET_CODE (op) == UNSPEC)
441 return 1;
442 if (GET_CODE (op) != PLUS
443 || !CONST_INT_P (XEXP (op, 1)))
444 return 0;
445 op = XEXP (op, 0);
446 if (GET_CODE (op) == UNSPEC
447 && XINT (op, 1) != UNSPEC_MACHOPIC_OFFSET)
448 return 1;
449 }
450 return 0;
451 })
452
;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (SYMBOL_REF_TLS_MODEL (op) != 0)
    return 0;

  if (SYMBOL_REF_LOCAL_P (op))
    return 1;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
	       internal_label_prefix_len) == 0)
    return 1;

  return 0;
})
485
486 ;; Test for a legitimate @GOTOFF operand.
487 ;;
488 ;; VxWorks does not impose a fixed gap between segments; the run-time
489 ;; gap can be different from the object-file gap. We therefore can't
490 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
491 ;; same segment as the GOT. Unfortunately, the flexibility of linker
492 ;; scripts means that we can't be sure of that in general, so assume
493 ;; that @GOTOFF is never valid on VxWorks.
494 (define_predicate "gotoff_operand"
495 (and (match_test "!TARGET_VXWORKS_RTP")
496 (match_operand 0 "local_symbolic_operand")))
497
498 ;; Test for various thread-local symbols.
499 (define_predicate "tls_symbolic_operand"
500 (and (match_code "symbol_ref")
501 (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
502
503 (define_predicate "tls_modbase_operand"
504 (and (match_code "symbol_ref")
505 (match_test "op == ix86_tls_module_base ()")))
506
507 (define_predicate "tp_or_register_operand"
508 (ior (match_operand 0 "register_operand")
509 (and (match_code "unspec")
510 (match_test "XINT (op, 1) == UNSPEC_TP"))))
511
512 ;; Test for a pc-relative call operand
513 (define_predicate "constant_call_address_operand"
514 (match_code "symbol_ref")
515 {
516 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
517 return false;
518 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
519 return false;
520 return true;
521 })
522
523 ;; True for any non-virtual or eliminable register. Used in places where
524 ;; instantiation of such a register may cause the pattern to not be recognized.
525 (define_predicate "register_no_elim_operand"
526 (match_operand 0 "register_operand")
527 {
528 if (GET_CODE (op) == SUBREG)
529 op = SUBREG_REG (op);
530 return !(op == arg_pointer_rtx
531 || op == frame_pointer_rtx
532 || IN_RANGE (REGNO (op),
533 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
534 })
535
536 ;; Similarly, but include the stack pointer. This is used to prevent esp
537 ;; from being used as an index reg.
538 (define_predicate "index_register_operand"
539 (match_operand 0 "register_operand")
540 {
541 if (GET_CODE (op) == SUBREG)
542 op = SUBREG_REG (op);
543 if (reload_in_progress || reload_completed)
544 return REG_OK_FOR_INDEX_STRICT_P (op);
545 else
546 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
547 })
548
549 ;; Return false if this is any eliminable register. Otherwise general_operand.
550 (define_predicate "general_no_elim_operand"
551 (if_then_else (match_code "reg,subreg")
552 (match_operand 0 "register_no_elim_operand")
553 (match_operand 0 "general_operand")))
554
555 ;; Return false if this is any eliminable register. Otherwise
556 ;; register_operand or a constant.
557 (define_predicate "nonmemory_no_elim_operand"
558 (ior (match_operand 0 "register_no_elim_operand")
559 (match_operand 0 "immediate_operand")))
560
561 ;; Test for a valid operand for a call instruction.
562 (define_predicate "call_insn_operand"
563 (ior (match_operand 0 "constant_call_address_operand")
564 (ior (match_operand 0 "register_no_elim_operand")
565 (match_operand 0 "memory_operand"))))
566
567 ;; Similarly, but for tail calls, in which we cannot allow memory references.
568 (define_predicate "sibcall_insn_operand"
569 (ior (match_operand 0 "constant_call_address_operand")
570 (match_operand 0 "register_no_elim_operand")))
571
572 ;; Match exactly zero.
573 (define_predicate "const0_operand"
574 (match_code "const_int,const_double,const_vector")
575 {
576 if (mode == VOIDmode)
577 mode = GET_MODE (op);
578 return op == CONST0_RTX (mode);
579 })
580
581 ;; Match exactly one.
582 (define_predicate "const1_operand"
583 (and (match_code "const_int")
584 (match_test "op == const1_rtx")))
585
586 ;; Match exactly eight.
587 (define_predicate "const8_operand"
588 (and (match_code "const_int")
589 (match_test "INTVAL (op) == 8")))
590
591 ;; Match exactly 128.
592 (define_predicate "const128_operand"
593 (and (match_code "const_int")
594 (match_test "INTVAL (op) == 128")))
595
;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  switch (INTVAL (op))
    {
    case 2:
    case 4:
    case 8:
      return 1;
    default:
      return 0;
    }
})
603
604 ;; Match 0 or 1.
605 (define_predicate "const_0_to_1_operand"
606 (and (match_code "const_int")
607 (match_test "op == const0_rtx || op == const1_rtx")))
608
609 ;; Match 0 to 3.
610 (define_predicate "const_0_to_3_operand"
611 (and (match_code "const_int")
612 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
613
614 ;; Match 0 to 7.
615 (define_predicate "const_0_to_7_operand"
616 (and (match_code "const_int")
617 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
618
619 ;; Match 0 to 15.
620 (define_predicate "const_0_to_15_operand"
621 (and (match_code "const_int")
622 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
623
624 ;; Match 0 to 31.
625 (define_predicate "const_0_to_31_operand"
626 (and (match_code "const_int")
627 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
628
629 ;; Match 0 to 63.
630 (define_predicate "const_0_to_63_operand"
631 (and (match_code "const_int")
632 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
633
634 ;; Match 0 to 255.
635 (define_predicate "const_0_to_255_operand"
636 (and (match_code "const_int")
637 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
638
639 ;; Match (0 to 255) * 8
640 (define_predicate "const_0_to_255_mul_8_operand"
641 (match_code "const_int")
642 {
643 unsigned HOST_WIDE_INT val = INTVAL (op);
644 return val <= 255*8 && val % 8 == 0;
645 })
646
647 ;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
648 ;; for shift & compare patterns, as shifting by 0 does not change flags).
649 (define_predicate "const_1_to_31_operand"
650 (and (match_code "const_int")
651 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
652
653 ;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
654 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
655 (define_predicate "const_1_to_63_operand"
656 (and (match_code "const_int")
657 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
658
659 ;; Match 2 or 3.
660 (define_predicate "const_2_to_3_operand"
661 (and (match_code "const_int")
662 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
663
664 ;; Match 4 to 5.
665 (define_predicate "const_4_to_5_operand"
666 (and (match_code "const_int")
667 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
668
669 ;; Match 4 to 7.
670 (define_predicate "const_4_to_7_operand"
671 (and (match_code "const_int")
672 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
673
674 ;; Match 6 to 7.
675 (define_predicate "const_6_to_7_operand"
676 (and (match_code "const_int")
677 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
678
679 ;; Match 8 to 11.
680 (define_predicate "const_8_to_11_operand"
681 (and (match_code "const_int")
682 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
683
684 ;; Match 12 to 15.
685 (define_predicate "const_12_to_15_operand"
686 (and (match_code "const_int")
687 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
688
689 ;; Match exactly one bit in 2-bit mask.
690 (define_predicate "const_pow2_1_to_2_operand"
691 (and (match_code "const_int")
692 (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))
693
;; Match exactly one bit in 4-bit mask.
(define_predicate "const_pow2_1_to_8_operand"
  (match_code "const_int")
{
  /* exact_log2 returns -1 for values that are not a power of two;
     the unsigned assignment turns that into a huge value, which the
     <= test below then rejects.  */
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 3;
})
701
;; Match exactly one bit in 8-bit mask.
(define_predicate "const_pow2_1_to_128_operand"
  (match_code "const_int")
{
  /* exact_log2 returns -1 for non-powers-of-two; the unsigned
     assignment maps that to a huge value rejected by the test.  */
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 7;
})
709
;; Match exactly one bit in 16-bit mask.
(define_predicate "const_pow2_1_to_32768_operand"
  (match_code "const_int")
{
  /* exact_log2 returns -1 for non-powers-of-two; the unsigned
     assignment maps that to a huge value rejected by the test.  */
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 15;
})
717
;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency
     on the flags register, since the carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return 0;
  return op == const1_rtx || op == constm1_rtx;
})
728
729 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
730 (define_predicate "reg_or_pm1_operand"
731 (ior (match_operand 0 "register_operand")
732 (and (match_code "const_int")
733 (match_test "op == const1_rtx || op == constm1_rtx"))))
734
735 ;; True if OP is acceptable as operand of DImode shift expander.
736 (define_predicate "shiftdi_operand"
737 (if_then_else (match_test "TARGET_64BIT")
738 (match_operand 0 "nonimmediate_operand")
739 (match_operand 0 "register_operand")))
740
741 (define_predicate "ashldi_input_operand"
742 (if_then_else (match_test "TARGET_64BIT")
743 (match_operand 0 "nonimmediate_operand")
744 (match_operand 0 "reg_or_pm1_operand")))
745
746 ;; Return true if OP is a vector load from the constant pool with just
747 ;; the first element nonzero.
748 (define_predicate "zero_extended_scalar_load_operand"
749 (match_code "mem")
750 {
751 unsigned n_elts;
752 op = maybe_get_pool_constant (op);
753 if (!op)
754 return 0;
755 if (GET_CODE (op) != CONST_VECTOR)
756 return 0;
757 n_elts =
758 (GET_MODE_SIZE (GET_MODE (op)) /
759 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
760 for (n_elts--; n_elts > 0; n_elts--)
761 {
762 rtx elt = CONST_VECTOR_ELT (op, n_elts);
763 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
764 return 0;
765 }
766 return 1;
767 })
768
;; Return true if operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int i, nunits = GET_MODE_NUNITS (mode);

  /* The match_code test above already guarantees a CONST_VECTOR, so
     re-checking GET_CODE here is redundant; just verify the unit
     count and that every element is constant -1.  */
  if (CONST_VECTOR_NUNITS (op) != nunits)
    return 0;

  for (i = 0; i < nunits; ++i)
    if (CONST_VECTOR_ELT (op, i) != constm1_rtx)
      return 0;

  return 1;
})
790
791 ; Return 1 when OP is operand acceptable for standard SSE move.
792 (define_predicate "vector_move_operand"
793 (ior (match_operand 0 "nonimmediate_operand")
794 (match_operand 0 "const0_operand")))
795
796 ;; Return 1 when OP is nonimmediate or standard SSE constant.
797 (define_predicate "nonimmediate_or_sse_const_operand"
798 (match_operand 0 "general_operand")
799 {
800 if (nonimmediate_operand (op, mode))
801 return 1;
802 if (standard_sse_constant_p (op) > 0)
803 return 1;
804 return 0;
805 })
806
807 ;; Return true if OP is a register or a zero.
808 (define_predicate "reg_or_0_operand"
809 (ior (match_operand 0 "register_operand")
810 (match_operand 0 "const0_operand")))
811
812 ;; Return true if op if a valid address, and does not contain
813 ;; a segment override.
814 (define_special_predicate "no_seg_address_operand"
815 (match_operand 0 "address_operand")
816 {
817 struct ix86_address parts;
818 int ok;
819
820 ok = ix86_decompose_address (op, &parts);
821 gcc_assert (ok);
822 return parts.seg == SEG_DEFAULT;
823 })
824
;; Return nonzero if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return 1;

  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64bit and thus causing a memory
     mismatch stall.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return 0;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return 0;

  if (MEM_ALIGN (op) >= 32)
    return 1;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return 1;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
	return 0;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
	return 0;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
	  || (INTVAL (parts.disp) & 3) != 0)
	return 0;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return 1;
})
880
881 ;; Returns 1 if OP is memory operand with a displacement.
882 (define_predicate "memory_displacement_operand"
883 (match_operand 0 "memory_operand")
884 {
885 struct ix86_address parts;
886 int ok;
887
888 ok = ix86_decompose_address (XEXP (op, 0), &parts);
889 gcc_assert (ok);
890 return parts.disp != NULL_RTX;
891 })
892
893 ;; Returns 1 if OP is memory operand with a displacement only.
894 (define_predicate "memory_displacement_only_operand"
895 (match_operand 0 "memory_operand")
896 {
897 struct ix86_address parts;
898 int ok;
899
900 if (TARGET_64BIT)
901 return 0;
902
903 ok = ix86_decompose_address (XEXP (op, 0), &parts);
904 gcc_assert (ok);
905
906 if (parts.base || parts.index)
907 return 0;
908
909 return parts.disp != NULL_RTX;
910 })
911
912 ;; Returns 1 if OP is memory operand which will need zero or
913 ;; one register at most, not counting stack pointer or frame pointer.
914 (define_predicate "cmpxchg8b_pic_memory_operand"
915 (match_operand 0 "memory_operand")
916 {
917 struct ix86_address parts;
918 int ok;
919
920 ok = ix86_decompose_address (XEXP (op, 0), &parts);
921 gcc_assert (ok);
922 if (parts.base == NULL_RTX
923 || parts.base == arg_pointer_rtx
924 || parts.base == frame_pointer_rtx
925 || parts.base == hard_frame_pointer_rtx
926 || parts.base == stack_pointer_rtx)
927 return 1;
928
929 if (parts.index == NULL_RTX
930 || parts.index == arg_pointer_rtx
931 || parts.index == frame_pointer_rtx
932 || parts.index == hard_frame_pointer_rtx
933 || parts.index == stack_pointer_rtx)
934 return 1;
935
936 return 0;
937 })
938
939
940 ;; Returns 1 if OP is memory operand that cannot be represented
941 ;; by the modRM array.
942 (define_predicate "long_memory_operand"
943 (and (match_operand 0 "memory_operand")
944 (match_test "memory_address_length (op) != 0")))
945
;; Return 1 if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just a limited set of condition codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
	  || inmode == CCCmode)
	return 1;
      return 0;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return 1;
    default:
      return 0;
    }
})
974
975 ;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
976 ;; The first set are supported directly; the second set can't be done with
977 ;; full IEEE support, i.e. NaNs.
978 ;;
979 ;; ??? It would seem that we have a lot of uses of this predicate that pass
980 ;; it the wrong mode. We got away with this because the old function didn't
981 ;; check the mode at all. Mirror that for now by calling this a special
982 ;; predicate.
983
984 (define_special_predicate "sse_comparison_operator"
985 (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
986
987 ;; Return 1 if OP is a comparison operator that can be issued by
988 ;; avx predicate generation instructions
989 (define_predicate "avx_comparison_float_operator"
990 (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))
991
992 ;; Return 1 if OP is a comparison operator that can be issued by sse predicate
993 ;; generation instructions
994 (define_predicate "sse5_comparison_float_operator"
995 (and (match_test "TARGET_SSE5")
996 (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt")))
997
998 (define_predicate "ix86_comparison_int_operator"
999 (match_code "ne,eq,ge,gt,le,lt"))
1000
1001 (define_predicate "ix86_comparison_uns_operator"
1002 (match_code "ne,eq,geu,gtu,leu,ltu"))
1003
1004 (define_predicate "bt_comparison_operator"
1005 (match_code "ne,eq"))
1006
;; Return 1 if OP is a valid comparison operator in valid mode.
;; The CC mode of the flags-setting instruction (taken from OP's first
;; operand) determines which conditions can actually be tested; this
;; predicate encodes the valid CC-mode/condition pairs.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Floating-point compares are accepted only when they reduce to a
     single flag-bit test.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      /* Equality tests are representable in every CC mode.  */
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
	  || inmode == CCGOCmode || inmode == CCNOmode)
	return 1;
      return 0;
    case LTU: case GTU: case LEU: case GEU:
      /* Unsigned conditions are valid only in the full CCmode and in
	 CCCmode (presumably the carry-only mode — see
	 ix86_carry_flag_operator below).  */
      if (inmode == CCmode || inmode == CCCmode)
	return 1;
      return 0;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
	return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
	return 1;
      return 0;
    default:
      return 0;
    }
})
1042
;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Only a direct comparison of the flags register against zero
     qualifies.  */
  if (!REG_P (XEXP (op, 0))
      || REGNO (XEXP (op, 0)) != FLAGS_REG
      || XEXP (op, 1) != const0_rtx)
    return 0;

  /* For FP compares, translate the FP condition into the integer
     condition that is tested once the compare result has been moved
     into the flags.  */
  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
	return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    /* NOTE(review): in CCCmode both LTU and GTU are accepted as carry
       tests — confirm against the flags-setting patterns that produce
       CCCmode.  */
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return 0;

  return code == LTU;
})
1068
;; Return 1 if this comparison only requires testing one flag bit
;; after a floating-point compare.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1072
;; Return 1 if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that.
;; When ix86_fp_comparison_strategy chooses the arithmetic strategy
;; (IX86_FPCMP_ARITH) any comparison code can be expanded; otherwise
;; only the trivial single-flag-bit comparisons are accepted.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
    (match_operand 0 "comparison_operator")
    (match_operand 0 "ix86_trivial_fp_comparison_operator")))
1081
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.  (Presumably used by the floating-point compare
;; expanders — verify against callers.)
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))
1088
;; Return true if this is a valid binary floating-point operation:
;; addition, subtraction, multiplication or division.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))
1092
;; Return true if this is a multiply operation (the MULT rtx code).
(define_predicate "mult_operator"
  (match_code "mult"))
1096
;; Return true if this is a division operation (the DIV rtx code).
(define_predicate "div_operator"
  (match_code "div"))
1100
;; Return true if this is an integer-to-float conversion (the FLOAT
;; rtx code).  Note: despite the old "float extend" wording, this does
;; not match FLOAT_EXTEND, which is a distinct rtx code.
(define_predicate "float_operator"
  (match_code "float"))
1104
;; Return true for any code for which ARITHMETIC_P would be true,
;; i.e. every binary arithmetic, logical, min/max, shift or rotate code.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1109
;; Return true for any code for which COMMUTATIVE_P would be true,
;; i.e. operations whose operands may be freely swapped.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1113
;; Return 1 if OP is a binary operator that can be promoted to wider mode.
;; Multiplication qualifies only when the tuning flag says HImode imul
;; should be promoted.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1119
;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
;;
;; ??? It seems likely that this will only work because cmpsi is an
;; expander, and no actual insns use this.

(define_predicate "cmpsi_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       ;; Match (and (zero_extract X (const_int 8) (const_int 8))
       ;;            (const_int N)) — a masked extraction of bits 8..15.
       ;; The string argument to match_code indexes a subexpression:
       ;; "0" is XEXP (op, 0), "1" is XEXP (op, 1), "01" is
       ;; XEXP (XEXP (op, 0), 1), and "02" is XEXP (XEXP (op, 0), 2).
       (and (match_code "and")
	    (match_code "zero_extract" "0")
	    (match_code "const_int" "1")
	    (match_code "const_int" "01")
	    (match_code "const_int" "02")
	    ;; Extraction width and bit position must both be 8.
	    (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
	    (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
	    )))
1136
;; Return true if OP is a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))
1139
;; Return true if OP is an absolute-value (ABS) or negation (NEG)
;; operation.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))
1142
;; Return 1 if OP is a misaligned memory operand, i.e. a MEM whose
;; known alignment is less than the natural alignment of its mode.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1147
;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
;; Only the vector length is verified here: one element per SSE
;; register (16 in 64-bit mode, 8 otherwise) plus one — presumably the
;; element marking the vzeroall itself.  The contents of the individual
;; elements are not checked.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  /* Number of SSE registers the operation must clear.  */
  int nregs = TARGET_64BIT ? 16 : 8;

  if (XVECLEN (op, 0) != nregs + 1)
    return 0;

  return 1;
})