s390: Constraints, predicates, and op letters for contiguous bitmasks
gcc/config/s390/predicates.md
;; Predicate definitions for S/390 and zSeries.
;; Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
;; Contributed by Hartmut Penner (hpenner@de.ibm.com) and
;;                Ulrich Weigand (uweigand@de.ibm.com).
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; OP is the current operation.
;; MODE is the current operation mode.

;; operands --------------------------------------------------------------

;; Return true if OP is a (const_int 0) operand.

(define_predicate "const0_operand"
  (and (match_code "const_int, const_double")
       (match_test "op == CONST0_RTX (mode)")))

;; Return true if OP is constant.

(define_special_predicate "consttable_operand"
  (and (match_code "symbol_ref, label_ref, const, const_int, const_double")
       (match_test "CONSTANT_P (op)")))

;; Return true if OP is a valid S-type operand.

(define_predicate "s_operand"
  (and (match_code "subreg, mem")
       (match_operand 0 "general_operand"))
{
  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  if (GET_CODE (op) != MEM)
    return false;
  if (!s390_legitimate_address_without_index_p (op))
    return false;

  return true;
})
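
;; Illustrative note (not part of the original file): an S-type operand is
;; a memory reference whose address uses no index register, for example
;; roughly
;;   (mem:QI (plus:SI (reg:SI 2) (const_int 42)))
;; whereas a base + index + displacement address would be rejected by
;; s390_legitimate_address_without_index_p.  The register number and mode
;; are hypothetical.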

;; Return true if OP is a valid operand for the BRAS instruction.
;; Allow SYMBOL_REFs and @PLT stubs.

(define_special_predicate "bras_sym_operand"
  (ior (and (match_code "symbol_ref")
            (match_test "!flag_pic || SYMBOL_REF_LOCAL_P (op)"))
       (and (match_code "const")
            (and (match_test "GET_CODE (XEXP (op, 0)) == UNSPEC")
                 (match_test "XINT (XEXP (op, 0), 1) == UNSPEC_PLT")))))
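
;; Illustrative examples (not from the original source): this predicate
;; accepts a plain local symbol such as
;;   (symbol_ref "foo")
;; as well as a PLT stub wrapped in a CONST, roughly
;;   (const (unspec [(symbol_ref "foo")] UNSPEC_PLT))
;; where "foo" is a hypothetical function name.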

;; Return true if OP is a PLUS that is not a legitimate
;; operand for the LA instruction.

(define_predicate "s390_plus_operand"
  (and (match_code "plus")
       (and (match_test "mode == Pmode")
            (match_test "!legitimate_la_operand_p (op)"))))

;; Return true if OP is a valid shift count or setmem operand.

(define_predicate "shift_count_or_setmem_operand"
  (match_code "reg, subreg, plus, const_int")
{
  HOST_WIDE_INT offset;
  rtx base;

  /* Extract base register and offset.  */
  if (!s390_decompose_shift_count (op, &base, &offset))
    return false;

  /* Don't allow any non-base hard registers.  Doing so without
     confusing reload and/or regrename would be tricky, and doesn't
     buy us much anyway.  */
  if (base && REGNO (base) < FIRST_PSEUDO_REGISTER && !ADDR_REG_P (base))
    return false;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return false;

  return true;
})
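
;; Illustrative sketch (not part of the original file): typical operands
;; accepted here are a pseudo register, a plain (const_int 3), or an
;; address-style form such as
;;   (plus:SI (reg:SI 60) (const_int 7))
;; provided the constant part fits the displacement range checked by
;; DISP_IN_RANGE and any hard register involved is an address register.
;; The register number is hypothetical.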

;; Return true if OP is a valid operand for the LARL instruction.

(define_predicate "larl_operand"
  (match_code "label_ref, symbol_ref, const, const_int, const_double")
{
  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return true;
  if (GET_CODE (op) == SYMBOL_REF)
    return (!SYMBOL_REF_ALIGN1_P (op)
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return false;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return false;
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 31
          || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 31))
        return false;
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return true;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return true;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return true;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return true;

  return false;
})
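
;; Illustrative examples (not from the original source): LARL can address
;; a local symbol directly, and also a symbol plus a small even offset,
;; e.g. roughly
;;   (symbol_ref "bar")
;;   (const (plus (symbol_ref "bar") (const_int 4)))
;; while an odd offset such as (const_int 3) would be rejected by the
;; even/in-range check above.  "bar" is a hypothetical symbol name.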

;; Return true if OP is a constant all of whose one bits form a single
;; contiguous block (within the given MODE).

(define_predicate "contiguous_bitmask_operand"
  (match_code "const_int")
{
  return s390_contiguous_bitmask_p (INTVAL (op),
                                    GET_MODE_BITSIZE (mode), NULL, NULL);
})
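
;; Worked example (illustrative, not from the original file): in SImode,
;; 0x0007f800 is a contiguous bitmask (a single run of one bits) and
;; would satisfy this predicate, while 0x00ff00ff contains two separate
;; runs of ones and would be rejected by s390_contiguous_bitmask_p.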

;; operators --------------------------------------------------------------

;; Return nonzero if OP is a valid comparison operator
;; for a branch condition.

(define_predicate "s390_comparison"
  (match_code "eq, ne, lt, gt, le, ge, ltu, gtu, leu, geu,
               uneq, unlt, ungt, unle, unge, ltgt,
               unordered, ordered")
{
  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return false;

  return (s390_branch_condition_mask (op) >= 0);
})
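
;; Illustrative sketch (not part of the original file): a branch condition
;; accepted here compares the condition-code register against zero, e.g.
;;   (eq (reg:CCZ CC_REGNUM) (const_int 0))
;; anything that does not have CC_REGNUM as its first operand and
;; const0_rtx as its second is rejected before the mask lookup.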

;; Return true if OP is the CC register.
(define_predicate "cc_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == CC_REGNUM")))

(define_predicate "s390_signed_integer_comparison"
  (match_code "eq, ne, lt, gt, le, ge")
{
  return (s390_compare_and_branch_condition_mask (op) >= 0);
})

(define_predicate "s390_unsigned_integer_comparison"
  (match_code "eq, ne, ltu, gtu, leu, geu")
{
  return (s390_compare_and_branch_condition_mask (op) >= 0);
})

;; Return nonzero if OP is a valid comparison operator for the
;; cstore expanders -- respectively cstorecc4 and integer cstore.
(define_predicate "s390_eqne_operator"
  (match_code "eq, ne"))

(define_predicate "s390_scond_operator"
  (match_code "ltu, gtu, leu, geu"))

(define_predicate "s390_brx_operator"
  (match_code "le, gt"))

;; Return nonzero if OP is a valid comparison operator
;; for an ALC condition.

(define_predicate "s390_alc_comparison"
  (match_code "zero_extend, sign_extend, ltu, gtu, leu, geu")
{
  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return false;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return false;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == LTU;

    case CCL2mode:
      return GET_CODE (op) == LEU;

    case CCL3mode:
      return GET_CODE (op) == GEU;

    case CCUmode:
      return GET_CODE (op) == GTU;

    case CCURmode:
      return GET_CODE (op) == LTU;

    case CCSmode:
      return GET_CODE (op) == UNGT;

    case CCSRmode:
      return GET_CODE (op) == UNLT;

    default:
      return false;
    }
})
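
;; Illustrative note (not from the original source): the accepted
;; comparison code depends on the CC mode of the register being tested.
;; With a CCUmode register, for instance, only
;;   (gtu (reg:CCU CC_REGNUM) (const_int 0))
;; passes, while the same GTU test on a CCL1mode register would be
;; rejected because that mode only admits LTU here.  A surrounding
;; zero_extend or sign_extend is stripped before the check.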

;; Return nonzero if OP is a valid comparison operator
;; for an SLB condition.

(define_predicate "s390_slb_comparison"
  (match_code "zero_extend, sign_extend, ltu, gtu, leu, geu")
{
  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return false;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return false;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == GEU;

    case CCL2mode:
      return GET_CODE (op) == GTU;

    case CCL3mode:
      return GET_CODE (op) == LTU;

    case CCUmode:
      return GET_CODE (op) == LEU;

    case CCURmode:
      return GET_CODE (op) == GEU;

    case CCSmode:
      return GET_CODE (op) == LE;

    case CCSRmode:
      return GET_CODE (op) == GE;

    default:
      return false;
    }
})

;; Return true if OP is a load multiple operation.  It is known to be a
;; PARALLEL; the first SET is checked here and then used as a reference
;; against which the remaining elements are verified.

(define_special_predicate "load_multiple_operation"
  (match_code "parallel")
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return false;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check whether the address is a plain base register or
     base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
           && GET_CODE (XEXP (src_addr, 0)) == REG
           && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return false;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return false;
    }

  return true;
})
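
;; Illustrative sketch (not part of the original file): a PARALLEL that
;; this predicate accepts for two SImode registers looks roughly like
;;   (parallel [(set (reg:SI 6) (mem:SI (reg:SI 15)))
;;              (set (reg:SI 7) (mem:SI (plus:SI (reg:SI 15)
;;                                               (const_int 4))))])
;; i.e. consecutive destination registers loaded from consecutive memory
;; slots off the same base address.  The register numbers are
;; hypothetical.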

;; For an execute pattern the target instruction is embedded into the
;; RTX but will not get checked for validity by recog automatically.
;; The execute_operation predicate extracts the target RTX and invokes
;; recog.
(define_special_predicate "execute_operation"
  (match_code "parallel")
{
  rtx pattern = op;
  rtx insn;
  int icode;

  /* This check is redundant, but since this predicate is evaluated
     first when recognizing the insn, it prevents the more expensive
     code below from being executed in many cases.  */
  if (GET_CODE (XVECEXP (pattern, 0, 0)) != UNSPEC
      || XINT (XVECEXP (pattern, 0, 0), 1) != UNSPEC_EXECUTE)
    return false;

  /* Keep in sync with s390_execute_target.  */
  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
        RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  /* Since we do not have the wrapping insn here we have to build one.  */
  insn = make_insn_raw (pattern);
  icode = recog_memoized (insn);
  if (icode < 0)
    return false;

  extract_insn (insn);
  constrain_operands (1);

  return which_alternative >= 0;
})
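
;; Illustrative note (not from the original source): the PARALLEL seen
;; here has the shape
;;   (parallel [(unspec [...] UNSPEC_EXECUTE)
;;              <embedded target pattern> ...])
;; Everything after the leading unspec is copied into a raw insn so that
;; recog_memoized and constrain_operands can validate it just as they
;; would an ordinary insn.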

;; Return true if OP is a store multiple operation.  It is known to be a
;; PARALLEL; the first SET is checked here and then used as a reference
;; against which the remaining elements are verified.

(define_special_predicate "store_multiple_operation"
  (match_code "parallel")
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return false;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

  /* Check whether the address is a plain base register or
     base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
           && GET_CODE (XEXP (dest_addr, 0)) == REG
           && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return false;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || REGNO (SET_SRC (elt)) != src_regno + i
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return false;
    }
  return true;
})
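
;; Illustrative sketch (not part of the original file): the store case is
;; the mirror image of load_multiple_operation above, e.g. roughly
;;   (parallel [(set (mem:SI (reg:SI 15)) (reg:SI 6))
;;              (set (mem:SI (plus:SI (reg:SI 15) (const_int 4)))
;;                   (reg:SI 7))])
;; with consecutive source registers stored to consecutive memory slots.
;; The register numbers are hypothetical.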