be2141c13f25f4b67ee12a2b63d52380188f603e
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 2, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING. If not, write to
18 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 ;; Boston, MA 02110-1301, USA.
20
21 ;; Return nonzero if OP is either an i387 or SSE fp register.
22 (define_predicate "any_fp_register_operand"
23 (and (match_code "reg")
24 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26 ;; Return nonzero if OP is an i387 fp register.
27 (define_predicate "fp_register_operand"
28 (and (match_code "reg")
29 (match_test "FP_REGNO_P (REGNO (op))")))
30
31 ;; Return nonzero if OP is a non-fp register_operand.
32 (define_predicate "register_and_not_any_fp_reg_operand"
33 (and (match_code "reg")
34 (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36 ;; Return nonzero if OP is a register operand other than an i387 fp register.
37 (define_predicate "register_and_not_fp_reg_operand"
38 (and (match_code "reg")
39 (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41 ;; True if the operand is an MMX register.
42 (define_predicate "mmx_reg_operand"
43 (and (match_code "reg")
44 (match_test "MMX_REGNO_P (REGNO (op))")))
45
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
49 {
50 if (GET_CODE (op) == SUBREG)
51 op = SUBREG_REG (op);
52 return ANY_QI_REG_P (op);
53 })
54
55 ;; Match an SI or HImode register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
58 {
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
61 return 0;
62 if (GET_CODE (op) == SUBREG)
63 op = SUBREG_REG (op);
64
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
67 })
68
69 ;; Return true if op is the AX register.
70 (define_predicate "ax_reg_operand"
71 (and (match_code "reg")
72 (match_test "REGNO (op) == 0")))
73
74 ;; Return true if op is the flags register.
75 (define_predicate "flags_reg_operand"
76 (and (match_code "reg")
77 (match_test "REGNO (op) == FLAGS_REG")))
78
79 ;; Return 1 if VALUE can be stored in a sign extended immediate field.
80 (define_predicate "x86_64_immediate_operand"
81 (match_code "const_int,symbol_ref,label_ref,const")
82 {
83 if (!TARGET_64BIT)
84 return immediate_operand (op, mode);
85
86 switch (GET_CODE (op))
87 {
88 case CONST_INT:
89 /* CONST_DOUBLES never match, since HOST_BITS_PER_WIDE_INT is known
90 to be at least 32 and thus all acceptable constants are
91 represented as CONST_INT. */
92 if (HOST_BITS_PER_WIDE_INT == 32)
93 return 1;
94 else
95 {
96 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
97 return trunc_int_for_mode (val, SImode) == val;
98 }
99 break;
100
101 case SYMBOL_REF:
102 /* For certain code models, the symbolic references are known to fit:
103 in CM_SMALL_PIC model we know it fits if it is local to the shared
104 library. Don't count TLS SYMBOL_REFs here, since they should fit
105 only if inside of UNSPEC handled below. */
106 /* TLS symbols are not constant. */
107 if (SYMBOL_REF_TLS_MODEL (op))
108 return false;
109 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
110 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
111
112 case LABEL_REF:
113 /* For certain code models, the code is near as well. */
114 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
115 || ix86_cmodel == CM_KERNEL);
116
117 case CONST:
118 /* We also may accept the offsetted memory references in certain
119 special cases. */
120 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
121 switch (XINT (XEXP (op, 0), 1))
122 {
123 case UNSPEC_GOTPCREL:
124 case UNSPEC_DTPOFF:
125 case UNSPEC_GOTNTPOFF:
126 case UNSPEC_NTPOFF:
127 return 1;
128 default:
129 break;
130 }
131
132 if (GET_CODE (XEXP (op, 0)) == PLUS)
133 {
134 rtx op1 = XEXP (XEXP (op, 0), 0);
135 rtx op2 = XEXP (XEXP (op, 0), 1);
136 HOST_WIDE_INT offset;
137
138 if (ix86_cmodel == CM_LARGE)
139 return 0;
140 if (!CONST_INT_P (op2))
141 return 0;
142 offset = trunc_int_for_mode (INTVAL (op2), DImode);
143 switch (GET_CODE (op1))
144 {
145 case SYMBOL_REF:
146 /* TLS symbols are not constant. */
147 if (SYMBOL_REF_TLS_MODEL (op1))
148 return 0;
149 /* For CM_SMALL assume that latest object is 16MB before
150 end of 31bits boundary. We may also accept pretty
151 large negative constants knowing that all objects are
152 in the positive half of address space. */
153 if ((ix86_cmodel == CM_SMALL
154 || (ix86_cmodel == CM_MEDIUM
155 && !SYMBOL_REF_FAR_ADDR_P (op1)))
156 && offset < 16*1024*1024
157 && trunc_int_for_mode (offset, SImode) == offset)
158 return 1;
159 /* For CM_KERNEL we know that all objects reside in the
160 negative half of 32bits address space. We may not
161 accept negative offsets, since they may be just off
162 and we may accept pretty large positive ones. */
163 if (ix86_cmodel == CM_KERNEL
164 && offset > 0
165 && trunc_int_for_mode (offset, SImode) == offset)
166 return 1;
167 break;
168
169 case LABEL_REF:
170 /* These conditions are similar to SYMBOL_REF ones, just the
171 constraints for code models differ. */
172 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
173 && offset < 16*1024*1024
174 && trunc_int_for_mode (offset, SImode) == offset)
175 return 1;
176 if (ix86_cmodel == CM_KERNEL
177 && offset > 0
178 && trunc_int_for_mode (offset, SImode) == offset)
179 return 1;
180 break;
181
182 case UNSPEC:
183 switch (XINT (op1, 1))
184 {
185 case UNSPEC_DTPOFF:
186 case UNSPEC_NTPOFF:
187 if (offset > 0
188 && trunc_int_for_mode (offset, SImode) == offset)
189 return 1;
190 }
191 break;
192
193 default:
194 break;
195 }
196 }
197 break;
198
199 default:
200 gcc_unreachable ();
201 }
202
203 return 0;
204 })
205
206 ;; Return 1 if VALUE can be stored in the zero extended immediate field.
207 (define_predicate "x86_64_zext_immediate_operand"
208 (match_code "const_double,const_int,symbol_ref,label_ref,const")
209 {
210 switch (GET_CODE (op))
211 {
212 case CONST_DOUBLE:
213 if (HOST_BITS_PER_WIDE_INT == 32)
214 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
215 else
216 return 0;
217
218 case CONST_INT:
219 if (HOST_BITS_PER_WIDE_INT == 32)
220 return INTVAL (op) >= 0;
221 else
222 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
223
224 case SYMBOL_REF:
225 /* For certain code models, the symbolic references are known to fit. */
226 /* TLS symbols are not constant. */
227 if (SYMBOL_REF_TLS_MODEL (op))
228 return false;
229 return (ix86_cmodel == CM_SMALL
230 || (ix86_cmodel == CM_MEDIUM
231 && !SYMBOL_REF_FAR_ADDR_P (op)));
232
233 case LABEL_REF:
234 /* For certain code models, the code is near as well. */
235 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
236
237 case CONST:
238 /* We also may accept the offsetted memory references in certain
239 special cases. */
240 if (GET_CODE (XEXP (op, 0)) == PLUS)
241 {
242 rtx op1 = XEXP (XEXP (op, 0), 0);
243 rtx op2 = XEXP (XEXP (op, 0), 1);
244
245 if (ix86_cmodel == CM_LARGE)
246 return 0;
247 switch (GET_CODE (op1))
248 {
249 case SYMBOL_REF:
250 /* TLS symbols are not constant. */
251 if (SYMBOL_REF_TLS_MODEL (op1))
252 return 0;
253 /* For small code model we may accept pretty large positive
254 offsets, since one bit is available for free. Negative
255 offsets are limited by the size of NULL pointer area
256 specified by the ABI. */
257 if ((ix86_cmodel == CM_SMALL
258 || (ix86_cmodel == CM_MEDIUM
259 && !SYMBOL_REF_FAR_ADDR_P (op1)))
260 && CONST_INT_P (op2)
261 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
262 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
263 return 1;
264 /* ??? For the kernel, we may accept adjustment of
265 -0x10000000, since we know that it will just convert
266 negative address space to positive, but perhaps this
267 is not worthwhile. */
268 break;
269
270 case LABEL_REF:
271 /* These conditions are similar to SYMBOL_REF ones, just the
272 constraints for code models differ. */
273 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
274 && CONST_INT_P (op2)
275 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
276 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
277 return 1;
278 break;
279
280 default:
281 return 0;
282 }
283 }
284 break;
285
286 default:
287 gcc_unreachable ();
288 }
289 return 0;
290 })
291
292 ;; Return nonzero if OP is general operand representable on x86_64.
293 (define_predicate "x86_64_general_operand"
294 (if_then_else (match_test "TARGET_64BIT")
295 (ior (match_operand 0 "nonimmediate_operand")
296 (match_operand 0 "x86_64_immediate_operand"))
297 (match_operand 0 "general_operand")))
298
299 ;; Return nonzero if OP is general operand representable on x86_64
300 ;; as either sign extended or zero extended constant.
301 (define_predicate "x86_64_szext_general_operand"
302 (if_then_else (match_test "TARGET_64BIT")
303 (ior (match_operand 0 "nonimmediate_operand")
304 (ior (match_operand 0 "x86_64_immediate_operand")
305 (match_operand 0 "x86_64_zext_immediate_operand")))
306 (match_operand 0 "general_operand")))
307
308 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
309 (define_predicate "x86_64_nonmemory_operand"
310 (if_then_else (match_test "TARGET_64BIT")
311 (ior (match_operand 0 "register_operand")
312 (match_operand 0 "x86_64_immediate_operand"))
313 (match_operand 0 "nonmemory_operand")))
314
315 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
316 (define_predicate "x86_64_szext_nonmemory_operand"
317 (if_then_else (match_test "TARGET_64BIT")
318 (ior (match_operand 0 "register_operand")
319 (ior (match_operand 0 "x86_64_immediate_operand")
320 (match_operand 0 "x86_64_zext_immediate_operand")))
321 (match_operand 0 "nonmemory_operand")))
322
323 ;; Return true when operand is PIC expression that can be computed by lea
324 ;; operation.
325 (define_predicate "pic_32bit_operand"
326 (match_code "const,symbol_ref,label_ref")
327 {
328 if (!flag_pic)
329 return 0;
330 /* Rule out relocations that translate into 64bit constants. */
331 if (TARGET_64BIT && GET_CODE (op) == CONST)
332 {
333 op = XEXP (op, 0);
334 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
335 op = XEXP (op, 0);
336 if (GET_CODE (op) == UNSPEC
337 && (XINT (op, 1) == UNSPEC_GOTOFF
338 || XINT (op, 1) == UNSPEC_GOT))
339 return 0;
340 }
341 return symbolic_operand (op, mode);
342 })
343
344
345 ;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
346 (define_predicate "x86_64_movabs_operand"
347 (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
348 (match_operand 0 "nonmemory_operand")
349 (ior (match_operand 0 "register_operand")
350 (and (match_operand 0 "const_double_operand")
351 (match_test "GET_MODE_SIZE (mode) <= 8")))))
352
353 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
354 ;; reference and a constant.
355 (define_predicate "symbolic_operand"
356 (match_code "symbol_ref,label_ref,const")
357 {
358 switch (GET_CODE (op))
359 {
360 case SYMBOL_REF:
361 case LABEL_REF:
362 return 1;
363
364 case CONST:
365 op = XEXP (op, 0);
366 if (GET_CODE (op) == SYMBOL_REF
367 || GET_CODE (op) == LABEL_REF
368 || (GET_CODE (op) == UNSPEC
369 && (XINT (op, 1) == UNSPEC_GOT
370 || XINT (op, 1) == UNSPEC_GOTOFF
371 || XINT (op, 1) == UNSPEC_GOTPCREL)))
372 return 1;
373 if (GET_CODE (op) != PLUS
374 || !CONST_INT_P (XEXP (op, 1)))
375 return 0;
376
377 op = XEXP (op, 0);
378 if (GET_CODE (op) == SYMBOL_REF
379 || GET_CODE (op) == LABEL_REF)
380 return 1;
381 /* Only @GOTOFF gets offsets. */
382 if (GET_CODE (op) != UNSPEC
383 || XINT (op, 1) != UNSPEC_GOTOFF)
384 return 0;
385
386 op = XVECEXP (op, 0, 0);
387 if (GET_CODE (op) == SYMBOL_REF
388 || GET_CODE (op) == LABEL_REF)
389 return 1;
390 return 0;
391
392 default:
393 gcc_unreachable ();
394 }
395 })
396
397 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
398 (define_predicate "pic_symbolic_operand"
399 (match_code "const")
400 {
401 op = XEXP (op, 0);
402 if (TARGET_64BIT)
403 {
404 if (GET_CODE (op) == UNSPEC
405 && XINT (op, 1) == UNSPEC_GOTPCREL)
406 return 1;
407 if (GET_CODE (op) == PLUS
408 && GET_CODE (XEXP (op, 0)) == UNSPEC
409 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
410 return 1;
411 }
412 else
413 {
414 if (GET_CODE (op) == UNSPEC)
415 return 1;
416 if (GET_CODE (op) != PLUS
417 || !CONST_INT_P (XEXP (op, 1)))
418 return 0;
419 op = XEXP (op, 0);
420 if (GET_CODE (op) == UNSPEC)
421 return 1;
422 }
423 return 0;
424 })
425
426 ;; Return true if OP is a symbolic operand that resolves locally.
427 (define_predicate "local_symbolic_operand"
428 (match_code "const,label_ref,symbol_ref")
429 {
430 if (GET_CODE (op) == CONST
431 && GET_CODE (XEXP (op, 0)) == PLUS
432 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
433 op = XEXP (XEXP (op, 0), 0);
434
435 if (GET_CODE (op) == LABEL_REF)
436 return 1;
437
438 if (GET_CODE (op) != SYMBOL_REF)
439 return 0;
440
441 if (SYMBOL_REF_TLS_MODEL (op) != 0)
442 return 0;
443
444 if (SYMBOL_REF_LOCAL_P (op))
445 return 1;
446
447 /* There is, however, a not insubstantial body of code in the rest of
448 the compiler that assumes it can just stick the results of
449 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
450 /* ??? This is a hack. Should update the body of the compiler to
451 always create a DECL and invoke targetm.encode_section_info. */
452 if (strncmp (XSTR (op, 0), internal_label_prefix,
453 internal_label_prefix_len) == 0)
454 return 1;
455
456 return 0;
457 })
458
459 ;; Test for a legitimate @GOTOFF operand.
460 ;;
461 ;; VxWorks does not impose a fixed gap between segments; the run-time
462 ;; gap can be different from the object-file gap. We therefore can't
463 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
464 ;; same segment as the GOT. Unfortunately, the flexibility of linker
465 ;; scripts means that we can't be sure of that in general, so assume
466 ;; that @GOTOFF is never valid on VxWorks.
467 (define_predicate "gotoff_operand"
468 (and (match_test "!TARGET_VXWORKS_RTP")
469 (match_operand 0 "local_symbolic_operand")))
470
471 ;; Test for various thread-local symbols.
472 (define_predicate "tls_symbolic_operand"
473 (and (match_code "symbol_ref")
474 (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
475
476 (define_predicate "tls_modbase_operand"
477 (and (match_code "symbol_ref")
478 (match_test "op == ix86_tls_module_base ()")))
479
480 (define_predicate "tp_or_register_operand"
481 (ior (match_operand 0 "register_operand")
482 (and (match_code "unspec")
483 (match_test "XINT (op, 1) == UNSPEC_TP"))))
484
485 ;; Test for a pc-relative call operand
486 (define_predicate "constant_call_address_operand"
487 (and (ior (match_code "symbol_ref")
488 (match_operand 0 "local_symbolic_operand"))
489 (match_test "ix86_cmodel != CM_LARGE && ix86_cmodel != CM_LARGE_PIC")))
490
491 ;; True for any non-virtual or eliminable register. Used in places where
492 ;; instantiation of such a register may cause the pattern to not be recognized.
493 (define_predicate "register_no_elim_operand"
494 (match_operand 0 "register_operand")
495 {
496 if (GET_CODE (op) == SUBREG)
497 op = SUBREG_REG (op);
498 return !(op == arg_pointer_rtx
499 || op == frame_pointer_rtx
500 || (REGNO (op) >= FIRST_PSEUDO_REGISTER
501 && REGNO (op) <= LAST_VIRTUAL_REGISTER));
502 })
503
504 ;; Similarly, but include the stack pointer. This is used to prevent esp
505 ;; from being used as an index reg.
506 (define_predicate "index_register_operand"
507 (match_operand 0 "register_operand")
508 {
509 if (GET_CODE (op) == SUBREG)
510 op = SUBREG_REG (op);
511 if (reload_in_progress || reload_completed)
512 return REG_OK_FOR_INDEX_STRICT_P (op);
513 else
514 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
515 })
516
517 ;; Return false if this is any eliminable register. Otherwise general_operand.
518 (define_predicate "general_no_elim_operand"
519 (if_then_else (match_code "reg,subreg")
520 (match_operand 0 "register_no_elim_operand")
521 (match_operand 0 "general_operand")))
522
523 ;; Return false if this is any eliminable register. Otherwise
524 ;; register_operand or a constant.
525 (define_predicate "nonmemory_no_elim_operand"
526 (ior (match_operand 0 "register_no_elim_operand")
527 (match_operand 0 "immediate_operand")))
528
529 ;; Test for a valid operand for a call instruction.
530 (define_predicate "call_insn_operand"
531 (ior (match_operand 0 "constant_call_address_operand")
532 (ior (match_operand 0 "register_no_elim_operand")
533 (match_operand 0 "memory_operand"))))
534
535 ;; Similarly, but for tail calls, in which we cannot allow memory references.
536 (define_predicate "sibcall_insn_operand"
537 (ior (match_operand 0 "constant_call_address_operand")
538 (match_operand 0 "register_no_elim_operand")))
539
540 ;; Match exactly zero.
541 (define_predicate "const0_operand"
542 (match_code "const_int,const_double,const_vector")
543 {
544 if (mode == VOIDmode)
545 mode = GET_MODE (op);
546 return op == CONST0_RTX (mode);
547 })
548
549 ;; Match exactly one.
550 (define_predicate "const1_operand"
551 (and (match_code "const_int")
552 (match_test "op == const1_rtx")))
553
554 ;; Match exactly eight.
555 (define_predicate "const8_operand"
556 (and (match_code "const_int")
557 (match_test "INTVAL (op) == 8")))
558
559 ;; Match 2, 4, or 8. Used for leal multiplicands.
560 (define_predicate "const248_operand"
561 (match_code "const_int")
562 {
563 HOST_WIDE_INT i = INTVAL (op);
564 return i == 2 || i == 4 || i == 8;
565 })
566
567 ;; Match 0 or 1.
568 (define_predicate "const_0_to_1_operand"
569 (and (match_code "const_int")
570 (match_test "op == const0_rtx || op == const1_rtx")))
571
572 ;; Match 0 to 3.
573 (define_predicate "const_0_to_3_operand"
574 (and (match_code "const_int")
575 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 3")))
576
577 ;; Match 0 to 7.
578 (define_predicate "const_0_to_7_operand"
579 (and (match_code "const_int")
580 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 7")))
581
582 ;; Match 0 to 15.
583 (define_predicate "const_0_to_15_operand"
584 (and (match_code "const_int")
585 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 15")))
586
587 ;; Match 0 to 63.
588 (define_predicate "const_0_to_63_operand"
589 (and (match_code "const_int")
590 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 63")))
591
592 ;; Match 0 to 255.
593 (define_predicate "const_0_to_255_operand"
594 (and (match_code "const_int")
595 (match_test "INTVAL (op) >= 0 && INTVAL (op) <= 255")))
596
597 ;; Match (0 to 255) * 8
598 (define_predicate "const_0_to_255_mul_8_operand"
599 (match_code "const_int")
600 {
601 unsigned HOST_WIDE_INT val = INTVAL (op);
602 return val <= 255*8 && val % 8 == 0;
603 })
604
605 ;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
606 ;; for shift & compare patterns, as shifting by 0 does not change flags).
607 (define_predicate "const_1_to_31_operand"
608 (and (match_code "const_int")
609 (match_test "INTVAL (op) >= 1 && INTVAL (op) <= 31")))
610
611 ;; Match 2 or 3.
612 (define_predicate "const_2_to_3_operand"
613 (and (match_code "const_int")
614 (match_test "INTVAL (op) == 2 || INTVAL (op) == 3")))
615
616 ;; Match 4 to 7.
617 (define_predicate "const_4_to_7_operand"
618 (and (match_code "const_int")
619 (match_test "INTVAL (op) >= 4 && INTVAL (op) <= 7")))
620
621 ;; Match exactly one bit in 4-bit mask.
622 (define_predicate "const_pow2_1_to_8_operand"
623 (match_code "const_int")
624 {
625 unsigned int log = exact_log2 (INTVAL (op));
626 return log <= 3;
627 })
628
629 ;; Match exactly one bit in 8-bit mask.
630 (define_predicate "const_pow2_1_to_128_operand"
631 (match_code "const_int")
632 {
633 unsigned int log = exact_log2 (INTVAL (op));
634 return log <= 7;
635 })
636
637 ;; True if this is a constant appropriate for an increment or decrement.
638 (define_predicate "incdec_operand"
639 (match_code "const_int")
640 {
641 /* On Pentium 4, the inc and dec operations cause an extra dependency on
642 the flags register, since the carry flag is not set. */
643 if (!TARGET_USE_INCDEC && !optimize_size)
644 return 0;
645 return op == const1_rtx || op == constm1_rtx;
646 })
647
648 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
649 (define_predicate "reg_or_pm1_operand"
650 (ior (match_operand 0 "register_operand")
651 (and (match_code "const_int")
652 (match_test "op == const1_rtx || op == constm1_rtx"))))
653
654 ;; True if OP is acceptable as operand of DImode shift expander.
655 (define_predicate "shiftdi_operand"
656 (if_then_else (match_test "TARGET_64BIT")
657 (match_operand 0 "nonimmediate_operand")
658 (match_operand 0 "register_operand")))
659
660 (define_predicate "ashldi_input_operand"
661 (if_then_else (match_test "TARGET_64BIT")
662 (match_operand 0 "nonimmediate_operand")
663 (match_operand 0 "reg_or_pm1_operand")))
664
665 ;; Return true if OP is a vector load from the constant pool with just
666 ;; the first element nonzero.
667 (define_predicate "zero_extended_scalar_load_operand"
668 (match_code "mem")
669 {
670 unsigned n_elts;
671 op = maybe_get_pool_constant (op);
672 if (!op)
673 return 0;
674 if (GET_CODE (op) != CONST_VECTOR)
675 return 0;
676 n_elts =
677 (GET_MODE_SIZE (GET_MODE (op)) /
678 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
679 for (n_elts--; n_elts > 0; n_elts--)
680 {
681 rtx elt = CONST_VECTOR_ELT (op, n_elts);
682 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
683 return 0;
684 }
685 return 1;
686 })
687
688 ;; Return true if operand is a vector constant that is all ones.
689 (define_predicate "vector_all_ones_operand"
690 (match_code "const_vector")
691 {
692 int nunits = GET_MODE_NUNITS (mode);
693
694 if (GET_CODE (op) == CONST_VECTOR
695 && CONST_VECTOR_NUNITS (op) == nunits)
696 {
697 int i;
698 for (i = 0; i < nunits; ++i)
699 {
700 rtx x = CONST_VECTOR_ELT (op, i);
701 if (x != constm1_rtx)
702 return 0;
703 }
704 return 1;
705 }
706
707 return 0;
708 })
709
710 ;; Return 1 when OP is an operand acceptable for standard SSE move.
711 (define_predicate "vector_move_operand"
712 (ior (match_operand 0 "nonimmediate_operand")
713 (match_operand 0 "const0_operand")))
714
715 ;; Return 1 when OP is nonimmediate or standard SSE constant.
716 (define_predicate "nonimmediate_or_sse_const_operand"
717 (match_operand 0 "general_operand")
718 {
719 if (nonimmediate_operand (op, mode))
720 return 1;
721 if (standard_sse_constant_p (op) > 0)
722 return 1;
723 return 0;
724 })
725
726 ;; Return true if OP is a register or a zero.
727 (define_predicate "reg_or_0_operand"
728 (ior (match_operand 0 "register_operand")
729 (match_operand 0 "const0_operand")))
730
731 ;; Return true if op if a valid address, and does not contain
732 ;; a segment override.
733 (define_special_predicate "no_seg_address_operand"
734 (match_operand 0 "address_operand")
735 {
736 struct ix86_address parts;
737 int ok;
738
739 ok = ix86_decompose_address (op, &parts);
740 gcc_assert (ok);
741 return parts.seg == SEG_DEFAULT;
742 })
743
744 ;; Return nonzero if the rtx is known to be at least 32 bits aligned.
745 (define_predicate "aligned_operand"
746 (match_operand 0 "general_operand")
747 {
748 struct ix86_address parts;
749 int ok;
750
751 /* Registers and immediate operands are always "aligned". */
752 if (GET_CODE (op) != MEM)
753 return 1;
754
755 /* All patterns using aligned_operand on memory operands end up
756 in promoting memory operand to 64bit and thus causing memory mismatch. */
757 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
758 return 0;
759
760 /* Don't even try to do any aligned optimizations with volatiles. */
761 if (MEM_VOLATILE_P (op))
762 return 0;
763
764 if (MEM_ALIGN (op) >= 32)
765 return 1;
766
767 op = XEXP (op, 0);
768
769 /* Pushes and pops are only valid on the stack pointer. */
770 if (GET_CODE (op) == PRE_DEC
771 || GET_CODE (op) == POST_INC)
772 return 1;
773
774 /* Decode the address. */
775 ok = ix86_decompose_address (op, &parts);
776 gcc_assert (ok);
777
778 /* Look for some component that isn't known to be aligned. */
779 if (parts.index)
780 {
781 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
782 return 0;
783 }
784 if (parts.base)
785 {
786 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
787 return 0;
788 }
789 if (parts.disp)
790 {
791 if (!CONST_INT_P (parts.disp)
792 || (INTVAL (parts.disp) & 3) != 0)
793 return 0;
794 }
795
796 /* Didn't find one -- this must be an aligned address. */
797 return 1;
798 })
799
800 ;; Returns 1 if OP is memory operand with a displacement.
801 (define_predicate "memory_displacement_operand"
802 (match_operand 0 "memory_operand")
803 {
804 struct ix86_address parts;
805 int ok;
806
807 ok = ix86_decompose_address (XEXP (op, 0), &parts);
808 gcc_assert (ok);
809 return parts.disp != NULL_RTX;
810 })
811
812 ;; Returns 1 if OP is memory operand with a displacement only.
813 (define_predicate "memory_displacement_only_operand"
814 (match_operand 0 "memory_operand")
815 {
816 struct ix86_address parts;
817 int ok;
818
819 ok = ix86_decompose_address (XEXP (op, 0), &parts);
820 gcc_assert (ok);
821
822 if (parts.base || parts.index)
823 return 0;
824
825 return parts.disp != NULL_RTX;
826 })
827
828 ;; Returns 1 if OP is memory operand that cannot be represented
829 ;; by the modRM array.
830 (define_predicate "long_memory_operand"
831 (and (match_operand 0 "memory_operand")
832 (match_test "memory_address_length (op) != 0")))
833
834 ;; Return 1 if OP is a comparison operator that can be issued by fcmov.
835 (define_predicate "fcmov_comparison_operator"
836 (match_operand 0 "comparison_operator")
837 {
838 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
839 enum rtx_code code = GET_CODE (op);
840
841 if (inmode == CCFPmode || inmode == CCFPUmode)
842 {
843 enum rtx_code second_code, bypass_code;
844 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
845 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
846 return 0;
847 code = ix86_fp_compare_code_to_integer (code);
848 }
849 /* i387 supports just limited amount of conditional codes. */
850 switch (code)
851 {
852 case LTU: case GTU: case LEU: case GEU:
853 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode)
854 return 1;
855 return 0;
856 case ORDERED: case UNORDERED:
857 case EQ: case NE:
858 return 1;
859 default:
860 return 0;
861 }
862 })
863
864 ;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
865 ;; The first set are supported directly; the second set can't be done with
866 ;; full IEEE support, i.e. NaNs.
867 ;;
868 ;; ??? It would seem that we have a lot of uses of this predicate that pass
869 ;; it the wrong mode. We got away with this because the old function didn't
870 ;; check the mode at all. Mirror that for now by calling this a special
871 ;; predicate.
872
873 (define_special_predicate "sse_comparison_operator"
874 (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
875
876 ;; Return 1 if OP is a valid comparison operator in valid mode.
877 (define_predicate "ix86_comparison_operator"
878 (match_operand 0 "comparison_operator")
879 {
880 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
881 enum rtx_code code = GET_CODE (op);
882
883 if (inmode == CCFPmode || inmode == CCFPUmode)
884 {
885 enum rtx_code second_code, bypass_code;
886 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
887 return (bypass_code == UNKNOWN && second_code == UNKNOWN);
888 }
889 switch (code)
890 {
891 case EQ: case NE:
892 return 1;
893 case LT: case GE:
894 if (inmode == CCmode || inmode == CCGCmode
895 || inmode == CCGOCmode || inmode == CCNOmode)
896 return 1;
897 return 0;
898 case LTU: case GTU: case LEU: case ORDERED: case UNORDERED: case GEU:
899 if (inmode == CCmode)
900 return 1;
901 return 0;
902 case GT: case LE:
903 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
904 return 1;
905 return 0;
906 default:
907 return 0;
908 }
909 })
910
911 ;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
912 (define_predicate "ix86_carry_flag_operator"
913 (match_code "ltu,lt,unlt,gt,ungt,le,unle,ge,unge,ltgt,uneq")
914 {
915 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
916 enum rtx_code code = GET_CODE (op);
917
918 if (!REG_P (XEXP (op, 0))
919 || REGNO (XEXP (op, 0)) != FLAGS_REG
920 || XEXP (op, 1) != const0_rtx)
921 return 0;
922
923 if (inmode == CCFPmode || inmode == CCFPUmode)
924 {
925 enum rtx_code second_code, bypass_code;
926 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
927 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
928 return 0;
929 code = ix86_fp_compare_code_to_integer (code);
930 }
931 else if (inmode != CCmode)
932 return 0;
933
934 return code == LTU;
935 })
936
937 ;; Nearly general operand, but accept any const_double, since we wish
938 ;; to be able to drop them into memory rather than have them get pulled
939 ;; into registers.
940 (define_predicate "cmp_fp_expander_operand"
941 (ior (match_code "const_double")
942 (match_operand 0 "general_operand")))
943
944 ;; Return true if this is a valid binary floating-point operation.
945 (define_predicate "binary_fp_operator"
946 (match_code "plus,minus,mult,div"))
947
948 ;; Return true if this is a multiply operation.
949 (define_predicate "mult_operator"
950 (match_code "mult"))
951
952 ;; Return true if this is a division operation.
953 (define_predicate "div_operator"
954 (match_code "div"))
955
956 ;; Return true if this is a float extend operation.
957 (define_predicate "float_operator"
958 (match_code "float"))
959
960 ;; Return true for ARITHMETIC_P.
961 (define_predicate "arith_or_logical_operator"
962 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
963 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
964
965 ;; Return 1 if OP is a binary operator that can be promoted to wider mode.
966 ;; Modern CPUs have same latency for HImode and SImode multiply,
967 ;; but 386 and 486 do HImode multiply faster.
968 (define_predicate "promotable_binary_operator"
969 (ior (match_code "plus,and,ior,xor,ashift")
970 (and (match_code "mult")
971 (match_test "ix86_tune > PROCESSOR_I486"))))
972
973 ;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
974 ;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
975 ;;
976 ;; ??? It seems likely that this will only work because cmpsi is an
977 ;; expander, and no actual insns use this.
978
979 (define_predicate "cmpsi_operand"
980 (ior (match_operand 0 "nonimmediate_operand")
981 (and (match_code "and")
982 (match_code "zero_extract" "0")
983 (match_code "const_int" "1")
984 (match_code "const_int" "01")
985 (match_code "const_int" "02")
986 (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
987 (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
988 )))
989
990 (define_predicate "compare_operator"
991 (match_code "compare"))
992
993 (define_predicate "absneg_operator"
994 (match_code "abs,neg"))