efa5c98ee79fab36f2b40575e33d653e3acd8d90
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 2, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING. If not, write to
18 ;; the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 ;; Boston, MA 02110-1301, USA.
20
21 ;; Return nonzero if OP is either an i387 or an SSE fp register.
22 (define_predicate "any_fp_register_operand"
23 (and (match_code "reg")
24 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25
26 ;; Return nonzero if OP is an i387 fp register.
27 (define_predicate "fp_register_operand"
28 (and (match_code "reg")
29 (match_test "FP_REGNO_P (REGNO (op))")))
30
31 ;; Return nonzero if OP is a non-fp register_operand.
32 (define_predicate "register_and_not_any_fp_reg_operand"
33 (and (match_code "reg")
34 (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))
35
36 ;; Return nonzero if OP is a register operand other than an i387 fp register.
37 (define_predicate "register_and_not_fp_reg_operand"
38 (and (match_code "reg")
39 (not (match_test "FP_REGNO_P (REGNO (op))"))))
40
41 ;; True if the operand is an MMX register.
42 (define_predicate "mmx_reg_operand"
43 (and (match_code "reg")
44 (match_test "MMX_REGNO_P (REGNO (op))")))
45
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
49 {
50 if (GET_CODE (op) == SUBREG)
51 op = SUBREG_REG (op);
52 return ANY_QI_REG_P (op);
53 })
54
55 ;; Match an SI or HImode register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
58 {
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
61 return 0;
62 if (GET_CODE (op) == SUBREG)
63 op = SUBREG_REG (op);
64
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
67 })
68
69 ;; Return true if op is the AX register.
70 (define_predicate "ax_reg_operand"
71 (and (match_code "reg")
72 (match_test "REGNO (op) == 0")))
73
74 ;; Return true if op is the flags register.
75 (define_predicate "flags_reg_operand"
76 (and (match_code "reg")
77 (match_test "REGNO (op) == FLAGS_REG")))
78
79 ;; Return 1 if VALUE can be stored in a sign extended immediate field.
80 (define_predicate "x86_64_immediate_operand"
81 (match_code "const_int,symbol_ref,label_ref,const")
82 {
83 if (!TARGET_64BIT)
84 return immediate_operand (op, mode);
85
86 switch (GET_CODE (op))
87 {
88 case CONST_INT:
89 /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
90 to be at least 32 and thus all acceptable constants are
91 represented as CONST_INT. */
92 if (HOST_BITS_PER_WIDE_INT == 32)
93 return 1;
94 else
95 {
96 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
97 return trunc_int_for_mode (val, SImode) == val;
98 }
99 break;
100
101 case SYMBOL_REF:
102 /* For certain code models, the symbolic references are known to fit;
103 in CM_SMALL_PIC model we know it fits if it is local to the shared
104 library. Don't count TLS SYMBOL_REFs here, since they should fit
105 only if inside of UNSPEC handled below. */
106 /* TLS symbols are not constant. */
107 if (SYMBOL_REF_TLS_MODEL (op))
108 return false;
109 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
110 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
111
112 case LABEL_REF:
113 /* For certain code models, the code is near as well. */
114 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
115 || ix86_cmodel == CM_KERNEL);
116
117 case CONST:
118 /* We also may accept the offsetted memory references in certain
119 special cases. */
120 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
121 switch (XINT (XEXP (op, 0), 1))
122 {
123 case UNSPEC_GOTPCREL:
124 case UNSPEC_DTPOFF:
125 case UNSPEC_GOTNTPOFF:
126 case UNSPEC_NTPOFF:
127 return 1;
128 default:
129 break;
130 }
131
132 if (GET_CODE (XEXP (op, 0)) == PLUS)
133 {
134 rtx op1 = XEXP (XEXP (op, 0), 0);
135 rtx op2 = XEXP (XEXP (op, 0), 1);
136 HOST_WIDE_INT offset;
137
138 if (ix86_cmodel == CM_LARGE)
139 return 0;
140 if (!CONST_INT_P (op2))
141 return 0;
142 offset = trunc_int_for_mode (INTVAL (op2), DImode);
143 switch (GET_CODE (op1))
144 {
145 case SYMBOL_REF:
146 /* TLS symbols are not constant. */
147 if (SYMBOL_REF_TLS_MODEL (op1))
148 return 0;
149 /* For CM_SMALL assume that latest object is 16MB before
150 end of 31bits boundary. We may also accept pretty
151 large negative constants knowing that all objects are
152 in the positive half of address space. */
153 if ((ix86_cmodel == CM_SMALL
154 || (ix86_cmodel == CM_MEDIUM
155 && !SYMBOL_REF_FAR_ADDR_P (op1)))
156 && offset < 16*1024*1024
157 && trunc_int_for_mode (offset, SImode) == offset)
158 return 1;
159 /* For CM_KERNEL we know that all objects reside in the
160 negative half of the 32-bit address space. We may not
161 accept negative offsets, since they may be just off
162 and we may accept pretty large positive ones. */
163 if (ix86_cmodel == CM_KERNEL
164 && offset > 0
165 && trunc_int_for_mode (offset, SImode) == offset)
166 return 1;
167 break;
168
169 case LABEL_REF:
170 /* These conditions are similar to SYMBOL_REF ones, just the
171 constraints for code models differ. */
172 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
173 && offset < 16*1024*1024
174 && trunc_int_for_mode (offset, SImode) == offset)
175 return 1;
176 if (ix86_cmodel == CM_KERNEL
177 && offset > 0
178 && trunc_int_for_mode (offset, SImode) == offset)
179 return 1;
180 break;
181
182 case UNSPEC:
183 switch (XINT (op1, 1))
184 {
185 case UNSPEC_DTPOFF:
186 case UNSPEC_NTPOFF:
187 if (offset > 0
188 && trunc_int_for_mode (offset, SImode) == offset)
189 return 1;
190 }
191 break;
192
193 default:
194 break;
195 }
196 }
197 break;
198
199 default:
200 gcc_unreachable ();
201 }
202
203 return 0;
204 })
205
206 ;; Return 1 if VALUE can be stored in the zero extended immediate field.
207 (define_predicate "x86_64_zext_immediate_operand"
208 (match_code "const_double,const_int,symbol_ref,label_ref,const")
209 {
210 switch (GET_CODE (op))
211 {
212 case CONST_DOUBLE:
213 if (HOST_BITS_PER_WIDE_INT == 32)
214 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
215 else
216 return 0;
217
218 case CONST_INT:
219 if (HOST_BITS_PER_WIDE_INT == 32)
220 return INTVAL (op) >= 0;
221 else
222 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
223
224 case SYMBOL_REF:
225 /* For certain code models, the symbolic references are known to fit. */
226 /* TLS symbols are not constant. */
227 if (SYMBOL_REF_TLS_MODEL (op))
228 return false;
229 return (ix86_cmodel == CM_SMALL
230 || (ix86_cmodel == CM_MEDIUM
231 && !SYMBOL_REF_FAR_ADDR_P (op)));
232
233 case LABEL_REF:
234 /* For certain code models, the code is near as well. */
235 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
236
237 case CONST:
238 /* We also may accept the offsetted memory references in certain
239 special cases. */
240 if (GET_CODE (XEXP (op, 0)) == PLUS)
241 {
242 rtx op1 = XEXP (XEXP (op, 0), 0);
243 rtx op2 = XEXP (XEXP (op, 0), 1);
244
245 if (ix86_cmodel == CM_LARGE)
246 return 0;
247 switch (GET_CODE (op1))
248 {
249 case SYMBOL_REF:
250 /* TLS symbols are not constant. */
251 if (SYMBOL_REF_TLS_MODEL (op1))
252 return 0;
253 /* For small code model we may accept pretty large positive
254 offsets, since one bit is available for free. Negative
255 offsets are limited by the size of NULL pointer area
256 specified by the ABI. */
257 if ((ix86_cmodel == CM_SMALL
258 || (ix86_cmodel == CM_MEDIUM
259 && !SYMBOL_REF_FAR_ADDR_P (op1)))
260 && CONST_INT_P (op2)
261 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
262 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
263 return 1;
264 /* ??? For the kernel, we may accept adjustment of
265 -0x10000000, since we know that it will just convert
266 negative address space to positive, but perhaps this
267 is not worthwhile. */
268 break;
269
270 case LABEL_REF:
271 /* These conditions are similar to SYMBOL_REF ones, just the
272 constraints for code models differ. */
273 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
274 && CONST_INT_P (op2)
275 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
276 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
277 return 1;
278 break;
279
280 default:
281 return 0;
282 }
283 }
284 break;
285
286 default:
287 gcc_unreachable ();
288 }
289 return 0;
290 })
291
292 ;; Return nonzero if OP is general operand representable on x86_64.
293 (define_predicate "x86_64_general_operand"
294 (if_then_else (match_test "TARGET_64BIT")
295 (ior (match_operand 0 "nonimmediate_operand")
296 (match_operand 0 "x86_64_immediate_operand"))
297 (match_operand 0 "general_operand")))
298
299 ;; Return nonzero if OP is general operand representable on x86_64
300 ;; as either sign extended or zero extended constant.
301 (define_predicate "x86_64_szext_general_operand"
302 (if_then_else (match_test "TARGET_64BIT")
303 (ior (match_operand 0 "nonimmediate_operand")
304 (ior (match_operand 0 "x86_64_immediate_operand")
305 (match_operand 0 "x86_64_zext_immediate_operand")))
306 (match_operand 0 "general_operand")))
307
308 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
309 (define_predicate "x86_64_nonmemory_operand"
310 (if_then_else (match_test "TARGET_64BIT")
311 (ior (match_operand 0 "register_operand")
312 (match_operand 0 "x86_64_immediate_operand"))
313 (match_operand 0 "nonmemory_operand")))
314
315 ;; Return nonzero if OP is nonmemory operand representable on x86_64.
316 (define_predicate "x86_64_szext_nonmemory_operand"
317 (if_then_else (match_test "TARGET_64BIT")
318 (ior (match_operand 0 "register_operand")
319 (ior (match_operand 0 "x86_64_immediate_operand")
320 (match_operand 0 "x86_64_zext_immediate_operand")))
321 (match_operand 0 "nonmemory_operand")))
322
323 ;; Return true when operand is PIC expression that can be computed by lea
324 ;; operation.
325 (define_predicate "pic_32bit_operand"
326 (match_code "const,symbol_ref,label_ref")
327 {
328 if (!flag_pic)
329 return 0;
330 /* Rule out relocations that translate into 64bit constants. */
331 if (TARGET_64BIT && GET_CODE (op) == CONST)
332 {
333 op = XEXP (op, 0);
334 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
335 op = XEXP (op, 0);
336 if (GET_CODE (op) == UNSPEC
337 && (XINT (op, 1) == UNSPEC_GOTOFF
338 || XINT (op, 1) == UNSPEC_GOT))
339 return 0;
340 }
341 return symbolic_operand (op, mode);
342 })
343
344
345 ;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
346 (define_predicate "x86_64_movabs_operand"
347 (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
348 (match_operand 0 "nonmemory_operand")
349 (ior (match_operand 0 "register_operand")
350 (and (match_operand 0 "const_double_operand")
351 (match_test "GET_MODE_SIZE (mode) <= 8")))))
352
353 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
354 ;; reference and a constant.
355 (define_predicate "symbolic_operand"
356 (match_code "symbol_ref,label_ref,const")
357 {
358 switch (GET_CODE (op))
359 {
360 case SYMBOL_REF:
361 case LABEL_REF:
362 return 1;
363
364 case CONST:
365 op = XEXP (op, 0);
366 if (GET_CODE (op) == SYMBOL_REF
367 || GET_CODE (op) == LABEL_REF
368 || (GET_CODE (op) == UNSPEC
369 && (XINT (op, 1) == UNSPEC_GOT
370 || XINT (op, 1) == UNSPEC_GOTOFF
371 || XINT (op, 1) == UNSPEC_GOTPCREL)))
372 return 1;
373 if (GET_CODE (op) != PLUS
374 || !CONST_INT_P (XEXP (op, 1)))
375 return 0;
376
377 op = XEXP (op, 0);
378 if (GET_CODE (op) == SYMBOL_REF
379 || GET_CODE (op) == LABEL_REF)
380 return 1;
381 /* Only @GOTOFF gets offsets. */
382 if (GET_CODE (op) != UNSPEC
383 || XINT (op, 1) != UNSPEC_GOTOFF)
384 return 0;
385
386 op = XVECEXP (op, 0, 0);
387 if (GET_CODE (op) == SYMBOL_REF
388 || GET_CODE (op) == LABEL_REF)
389 return 1;
390 return 0;
391
392 default:
393 gcc_unreachable ();
394 }
395 })
396
397 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
398 (define_predicate "pic_symbolic_operand"
399 (match_code "const")
400 {
401 op = XEXP (op, 0);
402 if (TARGET_64BIT)
403 {
404 if (GET_CODE (op) == UNSPEC
405 && XINT (op, 1) == UNSPEC_GOTPCREL)
406 return 1;
407 if (GET_CODE (op) == PLUS
408 && GET_CODE (XEXP (op, 0)) == UNSPEC
409 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
410 return 1;
411 }
412 else
413 {
414 if (GET_CODE (op) == UNSPEC)
415 return 1;
416 if (GET_CODE (op) != PLUS
417 || !CONST_INT_P (XEXP (op, 1)))
418 return 0;
419 op = XEXP (op, 0);
420 if (GET_CODE (op) == UNSPEC)
421 return 1;
422 }
423 return 0;
424 })
425
426 ;; Return true if OP is a symbolic operand that resolves locally.
427 (define_predicate "local_symbolic_operand"
428 (match_code "const,label_ref,symbol_ref")
429 {
430 if (GET_CODE (op) == CONST
431 && GET_CODE (XEXP (op, 0)) == PLUS
432 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
433 op = XEXP (XEXP (op, 0), 0);
434
435 if (GET_CODE (op) == LABEL_REF)
436 return 1;
437
438 if (GET_CODE (op) != SYMBOL_REF)
439 return 0;
440
441 if (SYMBOL_REF_TLS_MODEL (op) != 0)
442 return 0;
443
444 if (SYMBOL_REF_LOCAL_P (op))
445 return 1;
446
447 /* There is, however, a not insubstantial body of code in the rest of
448 the compiler that assumes it can just stick the results of
449 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
450 /* ??? This is a hack. Should update the body of the compiler to
451 always create a DECL and invoke targetm.encode_section_info. */
452 if (strncmp (XSTR (op, 0), internal_label_prefix,
453 internal_label_prefix_len) == 0)
454 return 1;
455
456 return 0;
457 })
458
459 ;; Test for a legitimate @GOTOFF operand.
460 ;;
461 ;; VxWorks does not impose a fixed gap between segments; the run-time
462 ;; gap can be different from the object-file gap. We therefore can't
463 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
464 ;; same segment as the GOT. Unfortunately, the flexibility of linker
465 ;; scripts means that we can't be sure of that in general, so assume
466 ;; that @GOTOFF is never valid on VxWorks.
467 (define_predicate "gotoff_operand"
468 (and (match_test "!TARGET_VXWORKS_RTP")
469 (match_operand 0 "local_symbolic_operand")))
470
471 ;; Test for various thread-local symbols.
472 (define_predicate "tls_symbolic_operand"
473 (and (match_code "symbol_ref")
474 (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))
475
476 (define_predicate "tls_modbase_operand"
477 (and (match_code "symbol_ref")
478 (match_test "op == ix86_tls_module_base ()")))
479
480 (define_predicate "tp_or_register_operand"
481 (ior (match_operand 0 "register_operand")
482 (and (match_code "unspec")
483 (match_test "XINT (op, 1) == UNSPEC_TP"))))
484
485 ;; Test for a pc-relative call operand
486 (define_predicate "constant_call_address_operand"
487 (match_code "symbol_ref")
488 {
489 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
490 return false;
491 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
492 return false;
493 return true;
494 })
495
496 ;; True for any non-virtual or eliminable register. Used in places where
497 ;; instantiation of such a register may cause the pattern to not be recognized.
498 (define_predicate "register_no_elim_operand"
499 (match_operand 0 "register_operand")
500 {
501 if (GET_CODE (op) == SUBREG)
502 op = SUBREG_REG (op);
503 return !(op == arg_pointer_rtx
504 || op == frame_pointer_rtx
505 || IN_RANGE (REGNO (op),
506 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
507 })
508
509 ;; Similarly, but include the stack pointer. This is used to prevent esp
510 ;; from being used as an index reg.
511 (define_predicate "index_register_operand"
512 (match_operand 0 "register_operand")
513 {
514 if (GET_CODE (op) == SUBREG)
515 op = SUBREG_REG (op);
516 if (reload_in_progress || reload_completed)
517 return REG_OK_FOR_INDEX_STRICT_P (op);
518 else
519 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
520 })
521
522 ;; Return false if this is any eliminable register. Otherwise general_operand.
523 (define_predicate "general_no_elim_operand"
524 (if_then_else (match_code "reg,subreg")
525 (match_operand 0 "register_no_elim_operand")
526 (match_operand 0 "general_operand")))
527
528 ;; Return false if this is any eliminable register. Otherwise
529 ;; register_operand or a constant.
530 (define_predicate "nonmemory_no_elim_operand"
531 (ior (match_operand 0 "register_no_elim_operand")
532 (match_operand 0 "immediate_operand")))
533
534 ;; Test for a valid operand for a call instruction.
535 (define_predicate "call_insn_operand"
536 (ior (match_operand 0 "constant_call_address_operand")
537 (ior (match_operand 0 "register_no_elim_operand")
538 (match_operand 0 "memory_operand"))))
539
540 ;; Similarly, but for tail calls, in which we cannot allow memory references.
541 (define_predicate "sibcall_insn_operand"
542 (ior (match_operand 0 "constant_call_address_operand")
543 (match_operand 0 "register_no_elim_operand")))
544
545 ;; Match exactly zero.
546 (define_predicate "const0_operand"
547 (match_code "const_int,const_double,const_vector")
548 {
549 if (mode == VOIDmode)
550 mode = GET_MODE (op);
551 return op == CONST0_RTX (mode);
552 })
553
554 ;; Match exactly one.
555 (define_predicate "const1_operand"
556 (and (match_code "const_int")
557 (match_test "op == const1_rtx")))
558
559 ;; Match exactly eight.
560 (define_predicate "const8_operand"
561 (and (match_code "const_int")
562 (match_test "INTVAL (op) == 8")))
563
564 ;; Match 2, 4, or 8. Used for leal multiplicands.
565 (define_predicate "const248_operand"
566 (match_code "const_int")
567 {
568 HOST_WIDE_INT i = INTVAL (op);
569 return i == 2 || i == 4 || i == 8;
570 })
571
572 ;; Match 0 or 1.
573 (define_predicate "const_0_to_1_operand"
574 (and (match_code "const_int")
575 (match_test "op == const0_rtx || op == const1_rtx")))
576
577 ;; Match 0 to 3.
578 (define_predicate "const_0_to_3_operand"
579 (and (match_code "const_int")
580 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
581
582 ;; Match 0 to 7.
583 (define_predicate "const_0_to_7_operand"
584 (and (match_code "const_int")
585 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
586
587 ;; Match 0 to 15.
588 (define_predicate "const_0_to_15_operand"
589 (and (match_code "const_int")
590 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
591
592 ;; Match 0 to 63.
593 (define_predicate "const_0_to_63_operand"
594 (and (match_code "const_int")
595 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
596
597 ;; Match 0 to 255.
598 (define_predicate "const_0_to_255_operand"
599 (and (match_code "const_int")
600 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
601
602 ;; Match (0 to 255) * 8
603 (define_predicate "const_0_to_255_mul_8_operand"
604 (match_code "const_int")
605 {
606 unsigned HOST_WIDE_INT val = INTVAL (op);
607 return val <= 255*8 && val % 8 == 0;
608 })
609
610 ;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
611 ;; for shift & compare patterns, as shifting by 0 does not change flags).
612 (define_predicate "const_1_to_31_operand"
613 (and (match_code "const_int")
614 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
615
616 ;; Match 2 or 3.
617 (define_predicate "const_2_to_3_operand"
618 (and (match_code "const_int")
619 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
620
621 ;; Match 4 to 7.
622 (define_predicate "const_4_to_7_operand"
623 (and (match_code "const_int")
624 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
625
626 ;; Match exactly one bit in 4-bit mask, i.e. 1, 2, 4 or 8.
627 (define_predicate "const_pow2_1_to_8_operand"
628 (match_code "const_int")
629 {
/* exact_log2 returns -1 for values that are not a power of two;
as "unsigned int" that wraps to a huge value and fails log <= 3,
so such operands are rejected. */
630 unsigned int log = exact_log2 (INTVAL (op));
631 return log <= 3;
632 })
633
634 ;; Match exactly one bit in 8-bit mask, i.e. a power of two in 1..128.
635 (define_predicate "const_pow2_1_to_128_operand"
636 (match_code "const_int")
637 {
/* exact_log2 returns -1 for values that are not a power of two;
as "unsigned int" that wraps to a huge value and fails log <= 7. */
638 unsigned int log = exact_log2 (INTVAL (op));
639 return log <= 7;
640 })
641
642 ;; True if this is a constant appropriate for an increment or decrement.
643 (define_predicate "incdec_operand"
644 (match_code "const_int")
645 {
646 /* On Pentium4, the inc and dec operations cause an extra dependency on
647 flag registers, since the carry flag is not set. */
648 if (!TARGET_USE_INCDEC && !optimize_size)
649 return 0;
650 return op == const1_rtx || op == constm1_rtx;
651 })
652
653 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
654 (define_predicate "reg_or_pm1_operand"
655 (ior (match_operand 0 "register_operand")
656 (and (match_code "const_int")
657 (match_test "op == const1_rtx || op == constm1_rtx"))))
658
659 ;; True if OP is acceptable as operand of DImode shift expander.
660 (define_predicate "shiftdi_operand"
661 (if_then_else (match_test "TARGET_64BIT")
662 (match_operand 0 "nonimmediate_operand")
663 (match_operand 0 "register_operand")))
664
665 (define_predicate "ashldi_input_operand"
666 (if_then_else (match_test "TARGET_64BIT")
667 (match_operand 0 "nonimmediate_operand")
668 (match_operand 0 "reg_or_pm1_operand")))
669
670 ;; Return true if OP is a vector load from the constant pool with just
671 ;; the first element nonzero.
672 (define_predicate "zero_extended_scalar_load_operand"
673 (match_code "mem")
674 {
675 unsigned n_elts;
676 op = maybe_get_pool_constant (op);
677 if (!op)
678 return 0;
679 if (GET_CODE (op) != CONST_VECTOR)
680 return 0;
681 n_elts =
682 (GET_MODE_SIZE (GET_MODE (op)) /
683 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
684 for (n_elts--; n_elts > 0; n_elts--)
685 {
686 rtx elt = CONST_VECTOR_ELT (op, n_elts);
687 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
688 return 0;
689 }
690 return 1;
691 })
692
693 ;; Return true if operand is a vector constant that is all ones.
694 (define_predicate "vector_all_ones_operand"
695 (match_code "const_vector")
696 {
697 int nunits = GET_MODE_NUNITS (mode);
698
699 if (GET_CODE (op) == CONST_VECTOR
700 && CONST_VECTOR_NUNITS (op) == nunits)
701 {
702 int i;
703 for (i = 0; i < nunits; ++i)
704 {
705 rtx x = CONST_VECTOR_ELT (op, i);
706 if (x != constm1_rtx)
707 return 0;
708 }
709 return 1;
710 }
711
712 return 0;
713 })
714
715 ;; Return 1 when OP is an operand acceptable for a standard SSE move.
716 (define_predicate "vector_move_operand"
717 (ior (match_operand 0 "nonimmediate_operand")
718 (match_operand 0 "const0_operand")))
719
720 ;; Return 1 when OP is nonimmediate or standard SSE constant.
721 (define_predicate "nonimmediate_or_sse_const_operand"
722 (match_operand 0 "general_operand")
723 {
724 if (nonimmediate_operand (op, mode))
725 return 1;
726 if (standard_sse_constant_p (op) > 0)
727 return 1;
728 return 0;
729 })
730
731 ;; Return true if OP is a register or a zero.
732 (define_predicate "reg_or_0_operand"
733 (ior (match_operand 0 "register_operand")
734 (match_operand 0 "const0_operand")))
735
736 ;; Return true if op if a valid address, and does not contain
737 ;; a segment override.
738 (define_special_predicate "no_seg_address_operand"
739 (match_operand 0 "address_operand")
740 {
741 struct ix86_address parts;
742 int ok;
743
744 ok = ix86_decompose_address (op, &parts);
745 gcc_assert (ok);
746 return parts.seg == SEG_DEFAULT;
747 })
748
749 ;; Return nonzero if the rtx is known to be at least 32 bits aligned.
750 (define_predicate "aligned_operand"
751 (match_operand 0 "general_operand")
752 {
753 struct ix86_address parts;
754 int ok;
755
756 /* Registers and immediate operands are always "aligned". */
757 if (GET_CODE (op) != MEM)
758 return 1;
759
760 /* All patterns using aligned_operand on memory operands end up
761 promoting the memory operand to 64 bits and thus cause a memory mismatch. */
762 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
763 return 0;
764
765 /* Don't even try to do any aligned optimizations with volatiles. */
766 if (MEM_VOLATILE_P (op))
767 return 0;
768
769 if (MEM_ALIGN (op) >= 32)
770 return 1;
771
772 op = XEXP (op, 0);
773
774 /* Pushes and pops are only valid on the stack pointer. */
775 if (GET_CODE (op) == PRE_DEC
776 || GET_CODE (op) == POST_INC)
777 return 1;
778
779 /* Decode the address. */
780 ok = ix86_decompose_address (op, &parts);
781 gcc_assert (ok);
782
783 /* Look for some component that isn't known to be aligned. */
784 if (parts.index)
785 {
786 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
787 return 0;
788 }
789 if (parts.base)
790 {
791 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
792 return 0;
793 }
794 if (parts.disp)
795 {
796 if (!CONST_INT_P (parts.disp)
797 || (INTVAL (parts.disp) & 3) != 0)
798 return 0;
799 }
800
801 /* Didn't find one -- this must be an aligned address. */
802 return 1;
803 })
804
805 ;; Returns 1 if OP is memory operand with a displacement.
806 (define_predicate "memory_displacement_operand"
807 (match_operand 0 "memory_operand")
808 {
809 struct ix86_address parts;
810 int ok;
811
812 ok = ix86_decompose_address (XEXP (op, 0), &parts);
813 gcc_assert (ok);
814 return parts.disp != NULL_RTX;
815 })
816
817 ;; Returns 1 if OP is memory operand with a displacement only.
818 (define_predicate "memory_displacement_only_operand"
819 (match_operand 0 "memory_operand")
820 {
821 struct ix86_address parts;
822 int ok;
823
824 ok = ix86_decompose_address (XEXP (op, 0), &parts);
825 gcc_assert (ok);
826
827 if (parts.base || parts.index)
828 return 0;
829
830 return parts.disp != NULL_RTX;
831 })
832
833 ;; Returns 1 if OP is memory operand that cannot be represented
834 ;; by the modRM array.
835 (define_predicate "long_memory_operand"
836 (and (match_operand 0 "memory_operand")
837 (match_test "memory_address_length (op) != 0")))
838
839 ;; Return 1 if OP is a comparison operator that can be issued by fcmov.
840 (define_predicate "fcmov_comparison_operator"
841 (match_operand 0 "comparison_operator")
842 {
843 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
844 enum rtx_code code = GET_CODE (op);
845
846 if (inmode == CCFPmode || inmode == CCFPUmode)
847 {
848 enum rtx_code second_code, bypass_code;
849 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
850 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
851 return 0;
852 code = ix86_fp_compare_code_to_integer (code);
853 }
854 /* i387 supports just limited amount of conditional codes. */
855 switch (code)
856 {
857 case LTU: case GTU: case LEU: case GEU:
858 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode)
859 return 1;
860 return 0;
861 case ORDERED: case UNORDERED:
862 case EQ: case NE:
863 return 1;
864 default:
865 return 0;
866 }
867 })
868
869 ;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
870 ;; The first set are supported directly; the second set can't be done with
871 ;; full IEEE support, i.e. NaNs.
872 ;;
873 ;; ??? It would seem that we have a lot of uses of this predicate that pass
874 ;; it the wrong mode. We got away with this because the old function didn't
875 ;; check the mode at all. Mirror that for now by calling this a special
876 ;; predicate.
877
878 (define_special_predicate "sse_comparison_operator"
879 (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))
880
881 ;; Return 1 if OP is a valid comparison operator in valid mode.
882 (define_predicate "ix86_comparison_operator"
883 (match_operand 0 "comparison_operator")
884 {
885 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
886 enum rtx_code code = GET_CODE (op);
887
888 if (inmode == CCFPmode || inmode == CCFPUmode)
889 {
890 enum rtx_code second_code, bypass_code;
891 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
892 return (bypass_code == UNKNOWN && second_code == UNKNOWN);
893 }
894 switch (code)
895 {
896 case EQ: case NE:
897 return 1;
898 case LT: case GE:
899 if (inmode == CCmode || inmode == CCGCmode
900 || inmode == CCGOCmode || inmode == CCNOmode)
901 return 1;
902 return 0;
903 case LTU: case GTU: case LEU: case ORDERED: case UNORDERED: case GEU:
904 if (inmode == CCmode)
905 return 1;
906 return 0;
907 case GT: case LE:
908 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
909 return 1;
910 return 0;
911 default:
912 return 0;
913 }
914 })
915
916 ;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
917 (define_predicate "ix86_carry_flag_operator"
918 (match_code "ltu,lt,unlt,gt,ungt,le,unle,ge,unge,ltgt,uneq")
919 {
920 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
921 enum rtx_code code = GET_CODE (op);
922
923 if (!REG_P (XEXP (op, 0))
924 || REGNO (XEXP (op, 0)) != FLAGS_REG
925 || XEXP (op, 1) != const0_rtx)
926 return 0;
927
928 if (inmode == CCFPmode || inmode == CCFPUmode)
929 {
930 enum rtx_code second_code, bypass_code;
931 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
932 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
933 return 0;
934 code = ix86_fp_compare_code_to_integer (code);
935 }
936 else if (inmode != CCmode)
937 return 0;
938
939 return code == LTU;
940 })
941
942 ;; Nearly general operand, but accept any const_double, since we wish
943 ;; to be able to drop them into memory rather than have them get pulled
944 ;; into registers.
945 (define_predicate "cmp_fp_expander_operand"
946 (ior (match_code "const_double")
947 (match_operand 0 "general_operand")))
948
949 ;; Return true if this is a valid binary floating-point operation.
950 (define_predicate "binary_fp_operator"
951 (match_code "plus,minus,mult,div"))
952
953 ;; Return true if this is a multiply operation.
954 (define_predicate "mult_operator"
955 (match_code "mult"))
956
957 ;; Return true if this is a division operation.
958 (define_predicate "div_operator"
959 (match_code "div"))
960
961 ;; Return true if this is a float extend operation.
962 (define_predicate "float_operator"
963 (match_code "float"))
964
965 ;; Return true for ARITHMETIC_P.
966 (define_predicate "arith_or_logical_operator"
967 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
968 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
969
970 ;; Return 1 if OP is a binary operator that can be promoted to wider mode.
971 (define_predicate "promotable_binary_operator"
972 (ior (match_code "plus,and,ior,xor,ashift")
973 (and (match_code "mult")
974 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
975
976 ;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
977 ;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
978 ;;
979 ;; ??? It seems likely that this will only work because cmpsi is an
980 ;; expander, and no actual insns use this.
981
982 (define_predicate "cmpsi_operand"
983 (ior (match_operand 0 "nonimmediate_operand")
984 (and (match_code "and")
985 (match_code "zero_extract" "0")
986 (match_code "const_int" "1")
987 (match_code "const_int" "01")
988 (match_code "const_int" "02")
989 (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
990 (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
991 )))
992
993 (define_predicate "compare_operator"
994 (match_code "compare"))
995
996 (define_predicate "absneg_operator"
997 (match_code "abs,neg"))