re PR target/32000 (x86 backend uses aligned load on unaligned memory)
[gcc.git] / gcc / config / i386 / predicates.md
1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 ;;
4 ;; This file is part of GCC.
5 ;;
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
9 ;; any later version.
10 ;;
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
15 ;;
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
19
;; Return nonzero if OP is either a i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return nonzero if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  /* register_operand also accepts (subreg (reg)); look through the
     SUBREG to test the underlying hard or pseudo register.  */
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})

;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  /* DImode is only acceptable in 64-bit mode; otherwise restrict the
     operand to SImode or HImode.  */
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts
     (hard regs 0-3, i.e. a/b/c/d, have addressable high bytes;
     pseudos above LAST_VIRTUAL_REGISTER may still be allocated one).  */
  return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
})

;; Return true if op is the AX register (hard register number 0).
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == 0")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is not xmm0 register.
(define_predicate "reg_not_xmm0_operand"
  (and (match_operand 0 "register_operand")
       ;; The GET_CODE test lets SUBREGs (accepted by register_operand)
       ;; through without looking at REGNO.
       (match_test "GET_CODE (op) != REG
                    || REGNO (op) != FIRST_SSE_REG")))

;; As above, but allow nonimmediate operands.
(define_predicate "nonimm_not_xmm0_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (match_test "GET_CODE (op) != REG
                    || REGNO (op) != FIRST_SSE_REG")))
89
;; Return 1 if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32 and thus all acceptable constants are
         represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return 1;
      else
        {
          /* Accept only values that survive a DImode -> SImode -> DImode
             sign-extension round trip, i.e. fit in a signed 32-bit field.  */
          HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
          return trunc_int_for_mode (val, SImode) == val;
        }
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit;
         in CM_SMALL_PIC model we know it fits if it is local to the shared
         library.  Don't count TLS SYMBOL_REFs here, since they should fit
         only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
              || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
              || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
        switch (XINT (XEXP (op, 0), 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_DTPOFF:
          case UNSPEC_GOTNTPOFF:
          case UNSPEC_NTPOFF:
            return 1;
          default:
            break;
          }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);
          HOST_WIDE_INT offset;

          if (ix86_cmodel == CM_LARGE)
            return 0;
          if (!CONST_INT_P (op2))
            return 0;
          offset = trunc_int_for_mode (INTVAL (op2), DImode);
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For CM_SMALL assume that latest object is 16MB before
                 end of 31bits boundary.  We may also accept pretty
                 large negative constants knowing that all objects are
                 in the positive half of address space.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              /* For CM_KERNEL we know that all objects reside in the
                 negative half of 32bits address space.  We may not
                 accept negative offsets, since they may be just off
                 and we may accept pretty large positive ones.  */
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case UNSPEC:
              /* TLS offsets are known non-negative; accept positive
                 32-bit displacements from DTPOFF/NTPOFF bases.  */
              switch (XINT (op1, 1))
                {
                case UNSPEC_DTPOFF:
                case UNSPEC_NTPOFF:
                  if (offset > 0
                      && trunc_int_for_mode (offset, SImode) == offset)
                    return 1;
                }
              break;

            default:
              break;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
})
216
;; Return 1 if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      /* With 32-bit HOST_WIDE_INT, a VOIDmode CONST_DOUBLE holds a
         64-bit integer; it fits the zero-extended field iff the high
         word is zero.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
        return 0;

    case CONST_INT:
      /* Accept values whose bits above the low 32 are all zero.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return INTVAL (op) >= 0;
      else
        return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL
              || (ix86_cmodel == CM_MEDIUM
                  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);

          if (ix86_cmodel == CM_LARGE)
            return 0;
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For small code model we may accept pretty large positive
                 offsets, since one bit is available for free.  Negative
                 offsets are limited by the size of NULL pointer area
                 specified by the ABI.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              /* ??? For the kernel, we may accept adjustment of
                 -0x10000000, since we know that it will just convert
                 negative address space to positive, but perhaps this
                 is not worthwhile.  */
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              break;

            default:
              return 0;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
})
302
;; Return nonzero if OP is general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return nonzero if OP is nonmemory operand representable on x86_64,
;; accepting either sign extended or zero extended constants.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))

;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return 0;
  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      /* Strip an optional constant addend before inspecting the UNSPEC.  */
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
        op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && (XINT (op, 1) == UNSPEC_GOTOFF
              || XINT (op, 1) == UNSPEC_GOT))
        return 0;
    }
  return symbolic_operand (op, mode);
})


;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
         (and (match_operand 0 "const_double_operand")
              (match_test "GET_MODE_SIZE (mode) <= 8")))))
363
;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF
          || (GET_CODE (op) == UNSPEC
              && (XINT (op, 1) == UNSPEC_GOT
                  || XINT (op, 1) == UNSPEC_GOTOFF
                  || XINT (op, 1) == UNSPEC_GOTPCREL)))
        return 1;
      /* Anything else must be a symbol/label/GOTOFF plus an integer
         offset to qualify.  */
      if (GET_CODE (op) != PLUS
          || !CONST_INT_P (XEXP (op, 1)))
        return 0;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
          || XINT (op, 1) != UNSPEC_GOTOFF)
        return 0;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      return 0;

    default:
      gcc_unreachable ();
    }
})

;; Return true if the operand contains a @GOT or @GOTOFF reference.
(define_predicate "pic_symbolic_operand"
  (match_code "const")
{
  op = XEXP (op, 0);
  if (TARGET_64BIT)
    {
      /* In 64-bit mode only @GOTPCREL references count, either bare
         or with a constant addend.  */
      if (GET_CODE (op) == UNSPEC
          && XINT (op, 1) == UNSPEC_GOTPCREL)
        return 1;
      if (GET_CODE (op) == PLUS
          && GET_CODE (XEXP (op, 0)) == UNSPEC
          && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
        return 1;
    }
  else
    {
      /* In 32-bit mode any UNSPEC, bare or plus a constant, counts.  */
      if (GET_CODE (op) == UNSPEC)
        return 1;
      if (GET_CODE (op) != PLUS
          || !CONST_INT_P (XEXP (op, 1)))
        return 0;
      op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC)
        return 1;
    }
  return 0;
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  /* Strip a (const (plus SYM (const_int N))) wrapper.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (SYMBOL_REF_TLS_MODEL (op) != 0)
    return 0;

  if (SYMBOL_REF_LOCAL_P (op))
    return 1;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
               internal_label_prefix_len) == 0)
    return 1;

  return 0;
})
469
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (match_test "!TARGET_VXWORKS_RTP")
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))

;; True for the special symbol denoting the TLS module base.
(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

;; True for a register or the thread-pointer UNSPEC.
(define_predicate "tp_or_register_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "unspec")
            (match_test "XINT (op, 1) == UNSPEC_TP"))))

;; Test for a pc-relative call operand
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  /* Large code models cannot reach a callee with a 32-bit pc-relative
     displacement.  */
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  /* Calls through dllimported symbols must go via the import stub.  */
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})

;; True for any non-virtual or eliminable register.  Used in places where
;; instantiation of such a register may cause the pattern to not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
           || op == frame_pointer_rtx
           || IN_RANGE (REGNO (op),
                        FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but include the stack pointer.  This is used to prevent esp
;; from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  /* Use the strict check once register allocation has started.  */
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (match_operand 0 "register_no_elim_operand")
            (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))
555
;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  /* With VOIDmode, compare against the zero of the operand's own mode
     (covers const_vector and const_double zeros too).  */
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  /* Negative values wrap to huge unsigned numbers and fail the bound.  */
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match exactly one bit in 2-bit mask.
(define_predicate "const_pow2_1_to_2_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))

;; Match exactly one bit in 4-bit mask.
(define_predicate "const_pow2_1_to_8_operand"
  (match_code "const_int")
{
  /* exact_log2 returns -1 for non-powers-of-2; as an unsigned int that
     wraps far above the bound, so only 1,2,4,8 pass.  */
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 3;
})

;; Match exactly one bit in 8-bit mask.
(define_predicate "const_pow2_1_to_128_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 7;
})

;; Match exactly one bit in 16-bit mask.
(define_predicate "const_pow2_1_to_32768_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 15;
})

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations causes extra dependency on flag
     registers, since carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_size)
    return 0;
  return op == const1_rtx || op == constm1_rtx;
})

;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
            (match_test "op == const1_rtx || op == constm1_rtx"))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as the shifted input of the DImode ashift
;; expander (1/-1 are handled specially when not in 64-bit mode).
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
698
;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);
  if (!op)
    return 0;
  if (GET_CODE (op) != CONST_VECTOR)
    return 0;
  n_elts =
    (GET_MODE_SIZE (GET_MODE (op)) /
     GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
  /* Check that elements 1 .. n-1 are all zero; element 0 is
     deliberately left unconstrained.  */
  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return 0;
    }
  return 1;
})

;; Return true if operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
        {
          rtx x = CONST_VECTOR_ELT (op, i);
          if (x != constm1_rtx)
            return 0;
        }
      return 1;
    }

  return 0;
})

;; Return 1 when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return 1 when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return 1;
  /* standard_sse_constant_p > 0 means a constant SSE can materialize
     directly (e.g. all-zeros or all-ones).  */
  if (standard_sse_constant_p (op) > 0)
    return 1;
  return 0;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true if op if a valid address, and does not contain
;; a segment override.
(define_special_predicate "no_seg_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})
777
;; Return nonzero if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (GET_CODE (op) != MEM)
    return 1;

  /* All patterns using aligned_operand on memory operands ends up
     in promoting memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_size)
    return 0;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return 0;

  /* The recorded alignment already proves it.  */
  if (MEM_ALIGN (op) >= 32)
    return 1;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return 1;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
        return 0;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
        return 0;
    }
  if (parts.disp)
    {
      /* The displacement must be a multiple of 4 bytes.  */
      if (!CONST_INT_P (parts.disp)
          || (INTVAL (parts.disp) & 3) != 0)
        return 0;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return 1;
})

;; Returns 1 if OP is memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Returns 1 if OP is memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  /* Reject addresses with a base or index register.  */
  if (parts.base || parts.index)
    return 0;

  return parts.disp != NULL_RTX;
})

;; Returns 1 if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op) != 0")))
867
;; Return 1 if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      /* FP comparisons that need splitting into extra jumps cannot be
         expressed as a single fcmov.  */
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just limited amount of conditional codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
          || inmode == CCCmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return 1;
    default:
      return 0;
    }
})

;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.

(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))

;; Return 1 if OP is a comparison operator that can be issued by sse predicate
;; generation instructions
(define_predicate "sse5_comparison_float_operator"
  (and (match_test "TARGET_SSE5")
       (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt")))

;; Signed integer comparison codes.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Unsigned integer comparison codes.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Return 1 if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      return (bypass_code == UNKNOWN && second_code == UNKNOWN);
    }
  /* Each integer condition code is only meaningful for some of the
     CC modes that the comparison patterns produce.  */
  switch (code)
    {
    case EQ: case NE:
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
          || inmode == CCGOCmode || inmode == CCNOmode)
        return 1;
      return 0;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
        return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
        return 1;
      return 0;
    default:
      return 0;
    }
})

;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  /* Must be a comparison of the flags register against zero.  */
  if (!REG_P (XEXP (op, 0))
      || REGNO (XEXP (op, 0)) != FLAGS_REG
      || XEXP (op, 1) != const0_rtx)
    return 0;

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      enum rtx_code second_code, bypass_code;
      ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
      if (bypass_code != UNKNOWN || second_code != UNKNOWN)
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return 0;

  return code == LTU;
})
989
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
               mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return 1 if OP is a binary operator that can be promoted to wider mode.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
            (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
;;
;; ??? It seems likely that this will only work because cmpsi is an
;; expander, and no actual insns use this.

;; Matches either a nonimmediate operand or the specific shape
;; (and (zero_extract X (const_int 8) (const_int 8)) (const_int Y)).
(define_predicate "cmpsi_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (and (match_code "and")
            (match_code "zero_extract" "0")
            (match_code "const_int" "1")
            (match_code "const_int" "01")
            (match_code "const_int" "02")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
       )))

;; Matches a flags-setting compare rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; Matches an absolute-value or negation rtx.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return 1 if OP is misaligned memory operand
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))