gcc/config/s390/s390.c
1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "recog.h"
41 #include "expr.h"
42 #include "reload.h"
43 #include "toplev.h"
44 #include "basic-block.h"
45 #include "integrate.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "debug.h"
50 #include "langhooks.h"
51 #include "optabs.h"
52 #include "tree-gimple.h"
53 #include "df.h"
54
55
56 /* Define the specific costs for a given cpu. */
57
58 struct processor_costs
59 {
60 /* multiplication */
61 const int m; /* cost of an M instruction. */
62 const int mghi; /* cost of an MGHI instruction. */
63 const int mh; /* cost of an MH instruction. */
64 const int mhi; /* cost of an MHI instruction. */
65 const int ml; /* cost of an ML instruction. */
66 const int mr; /* cost of an MR instruction. */
67 const int ms; /* cost of an MS instruction. */
68 const int msg; /* cost of an MSG instruction. */
69 const int msgf; /* cost of an MSGF instruction. */
70 const int msgfr; /* cost of an MSGFR instruction. */
71 const int msgr; /* cost of an MSGR instruction. */
72 const int msr; /* cost of an MSR instruction. */
73 const int mult_df; /* cost of multiplication in DFmode. */
 74 const int mxbr; /* cost of multiplication in TFmode. */
75 /* square root */
76 const int sqxbr; /* cost of square root in TFmode. */
77 const int sqdbr; /* cost of square root in DFmode. */
78 const int sqebr; /* cost of square root in SFmode. */
79 /* multiply and add */
80 const int madbr; /* cost of multiply and add in DFmode. */
81 const int maebr; /* cost of multiply and add in SFmode. */
82 /* division */
 83 const int dxbr; /* cost of division in TFmode. */
 84 const int ddbr; /* cost of division in DFmode. */
 85 const int debr; /* cost of division in SFmode. */
 86 const int dlgr; /* cost of a DLGR instruction. */
 87 const int dlr; /* cost of a DLR instruction. */
 88 const int dr; /* cost of a DR instruction. */
 89 const int dsgfr; /* cost of a DSGFR instruction. */
 90 const int dsgr; /* cost of a DSGR instruction. */
91 };
92
93 const struct processor_costs *s390_cost;
94
95 static const
96 struct processor_costs z900_cost =
97 {
98 COSTS_N_INSNS (5), /* M */
99 COSTS_N_INSNS (10), /* MGHI */
100 COSTS_N_INSNS (5), /* MH */
101 COSTS_N_INSNS (4), /* MHI */
102 COSTS_N_INSNS (5), /* ML */
103 COSTS_N_INSNS (5), /* MR */
104 COSTS_N_INSNS (4), /* MS */
105 COSTS_N_INSNS (15), /* MSG */
106 COSTS_N_INSNS (7), /* MSGF */
107 COSTS_N_INSNS (7), /* MSGFR */
108 COSTS_N_INSNS (10), /* MSGR */
109 COSTS_N_INSNS (4), /* MSR */
110 COSTS_N_INSNS (7), /* multiplication in DFmode */
111 COSTS_N_INSNS (13), /* MXBR */
112 COSTS_N_INSNS (136), /* SQXBR */
113 COSTS_N_INSNS (44), /* SQDBR */
114 COSTS_N_INSNS (35), /* SQEBR */
115 COSTS_N_INSNS (18), /* MADBR */
116 COSTS_N_INSNS (13), /* MAEBR */
117 COSTS_N_INSNS (134), /* DXBR */
118 COSTS_N_INSNS (30), /* DDBR */
119 COSTS_N_INSNS (27), /* DEBR */
120 COSTS_N_INSNS (220), /* DLGR */
121 COSTS_N_INSNS (34), /* DLR */
122 COSTS_N_INSNS (34), /* DR */
123 COSTS_N_INSNS (32), /* DSGFR */
124 COSTS_N_INSNS (32), /* DSGR */
125 };
126
127 static const
128 struct processor_costs z990_cost =
129 {
130 COSTS_N_INSNS (4), /* M */
131 COSTS_N_INSNS (2), /* MGHI */
132 COSTS_N_INSNS (2), /* MH */
133 COSTS_N_INSNS (2), /* MHI */
134 COSTS_N_INSNS (4), /* ML */
135 COSTS_N_INSNS (4), /* MR */
136 COSTS_N_INSNS (5), /* MS */
137 COSTS_N_INSNS (6), /* MSG */
138 COSTS_N_INSNS (4), /* MSGF */
139 COSTS_N_INSNS (4), /* MSGFR */
140 COSTS_N_INSNS (4), /* MSGR */
141 COSTS_N_INSNS (4), /* MSR */
142 COSTS_N_INSNS (1), /* multiplication in DFmode */
143 COSTS_N_INSNS (28), /* MXBR */
144 COSTS_N_INSNS (130), /* SQXBR */
145 COSTS_N_INSNS (66), /* SQDBR */
146 COSTS_N_INSNS (38), /* SQEBR */
147 COSTS_N_INSNS (1), /* MADBR */
148 COSTS_N_INSNS (1), /* MAEBR */
149 COSTS_N_INSNS (60), /* DXBR */
150 COSTS_N_INSNS (40), /* DDBR */
151 COSTS_N_INSNS (26), /* DEBR */
152 COSTS_N_INSNS (176), /* DLGR */
153 COSTS_N_INSNS (31), /* DLR */
154 COSTS_N_INSNS (31), /* DR */
155 COSTS_N_INSNS (31), /* DSGFR */
156 COSTS_N_INSNS (31), /* DSGR */
157 };
158
159 static const
160 struct processor_costs z9_109_cost =
161 {
162 COSTS_N_INSNS (4), /* M */
163 COSTS_N_INSNS (2), /* MGHI */
164 COSTS_N_INSNS (2), /* MH */
165 COSTS_N_INSNS (2), /* MHI */
166 COSTS_N_INSNS (4), /* ML */
167 COSTS_N_INSNS (4), /* MR */
168 COSTS_N_INSNS (5), /* MS */
169 COSTS_N_INSNS (6), /* MSG */
170 COSTS_N_INSNS (4), /* MSGF */
171 COSTS_N_INSNS (4), /* MSGFR */
172 COSTS_N_INSNS (4), /* MSGR */
173 COSTS_N_INSNS (4), /* MSR */
174 COSTS_N_INSNS (1), /* multiplication in DFmode */
175 COSTS_N_INSNS (28), /* MXBR */
176 COSTS_N_INSNS (130), /* SQXBR */
177 COSTS_N_INSNS (66), /* SQDBR */
178 COSTS_N_INSNS (38), /* SQEBR */
179 COSTS_N_INSNS (1), /* MADBR */
180 COSTS_N_INSNS (1), /* MAEBR */
181 COSTS_N_INSNS (60), /* DXBR */
182 COSTS_N_INSNS (40), /* DDBR */
183 COSTS_N_INSNS (26), /* DEBR */
184 COSTS_N_INSNS (30), /* DLGR */
185 COSTS_N_INSNS (23), /* DLR */
186 COSTS_N_INSNS (23), /* DR */
187 COSTS_N_INSNS (24), /* DSGFR */
188 COSTS_N_INSNS (24), /* DSGR */
189 };
190
191 extern int reload_completed;
192
193 /* Save information from a "cmpxx" operation until the branch or scc is
194 emitted. */
195 rtx s390_compare_op0, s390_compare_op1;
196
197 /* Save the result of a compare_and_swap until the branch or scc is
198 emitted. */
199 rtx s390_compare_emitted = NULL_RTX;
200
201 /* Structure used to hold the components of a S/390 memory
202 address. A legitimate address on S/390 is of the general
203 form
204 base + index + displacement
205 where any of the components is optional.
206
207 base and index are registers of the class ADDR_REGS,
208 displacement is an unsigned 12-bit immediate constant. */
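/* For example, the canonical RTL address
   (plus (plus (reg X) (reg B)) (const_int D))
   decomposes into index X, base B and displacement D, corresponding to
   the assembler operand D(X,B).  */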
209
210 struct s390_address
211 {
212 rtx base;
213 rtx indx;
214 rtx disp;
215 bool pointer;
216 bool literal_pool;
217 };
218
219 /* Which cpu are we tuning for. */
220 enum processor_type s390_tune = PROCESSOR_max;
221 enum processor_flags s390_tune_flags;
222 /* Which instruction set architecture to use. */
223 enum processor_type s390_arch;
224 enum processor_flags s390_arch_flags;
225
226 HOST_WIDE_INT s390_warn_framesize = 0;
227 HOST_WIDE_INT s390_stack_size = 0;
228 HOST_WIDE_INT s390_stack_guard = 0;
229
230 /* The following structure is embedded in the machine
231 specific part of struct function. */
232
233 struct s390_frame_layout GTY (())
234 {
235 /* Offset within stack frame. */
236 HOST_WIDE_INT gprs_offset;
237 HOST_WIDE_INT f0_offset;
238 HOST_WIDE_INT f4_offset;
239 HOST_WIDE_INT f8_offset;
240 HOST_WIDE_INT backchain_offset;
241
 242 /* Numbers of the first and last GPRs for which slots in the
 243 register save area are reserved.  */
244 int first_save_gpr_slot;
245 int last_save_gpr_slot;
246
 247 /* Numbers of the first and last GPRs to be saved and restored.  */
248 int first_save_gpr;
249 int first_restore_gpr;
250 int last_save_gpr;
251 int last_restore_gpr;
252
 253 /* Bits standing for floating point registers. Set if the
 254 respective register has to be saved. Starting with reg 16 (f0)
 255 at the rightmost bit.
 256 Bit 15 -  8  7  6  5  4  3  2  1  0
 257 fpr 15 -  8  7  5  3  1  6  4  2  0
 258 reg 31 - 24 23 22 21 20 19 18 17 16  */
259 unsigned int fpr_bitmap;
260
261 /* Number of floating point registers f8-f15 which must be saved. */
262 int high_fprs;
263
264 /* Set if return address needs to be saved.
265 This flag is set by s390_return_addr_rtx if it could not use
 266 the initial value of r14 and therefore depends on r14 being
 267 saved to the stack.  */
268 bool save_return_addr_p;
269
270 /* Size of stack frame. */
271 HOST_WIDE_INT frame_size;
272 };
273
274 /* Define the structure for the machine field in struct function. */
275
276 struct machine_function GTY(())
277 {
278 struct s390_frame_layout frame_layout;
279
280 /* Literal pool base register. */
281 rtx base_reg;
282
283 /* True if we may need to perform branch splitting. */
284 bool split_branches_pending_p;
285
286 /* True during final stage of literal pool processing. */
287 bool decomposed_literal_pool_addresses_ok_p;
288
289 /* Some local-dynamic TLS symbol name. */
290 const char *some_ld_name;
291
292 bool has_landing_pad_p;
293 };
294
 295 /* A few accessor macros for struct cfun->machine->s390_frame_layout.  */
296
297 #define cfun_frame_layout (cfun->machine->frame_layout)
298 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
299 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
300 cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
301 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
302 (1 << (BITNUM)))
303 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
304 (1 << (BITNUM))))
305
306 /* Number of GPRs and FPRs used for argument passing. */
307 #define GP_ARG_NUM_REG 5
 308 #define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)
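/* That is, arguments are passed in GPRs %r2 through %r6, and in FPRs
   %f0, %f2, %f4, %f6 in 64-bit mode or %f0, %f2 in 31-bit mode.  */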
309
310 /* A couple of shortcuts. */
311 #define CONST_OK_FOR_J(x) \
312 CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
313 #define CONST_OK_FOR_K(x) \
314 CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
315 #define CONST_OK_FOR_Os(x) \
316 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
317 #define CONST_OK_FOR_Op(x) \
318 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
319 #define CONST_OK_FOR_On(x) \
320 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
321
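/* A (REGNO, MODE) combination is acceptable if the value fits into a single
   hard register, or if REGNO is even so that an even/odd register pair can
   be used.  */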
322 #define REGNO_PAIR_OK(REGNO, MODE) \
323 (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
324
325 static enum machine_mode
326 s390_libgcc_cmp_return_mode (void)
327 {
328 return TARGET_64BIT ? DImode : SImode;
329 }
330
331 static enum machine_mode
332 s390_libgcc_shift_count_mode (void)
333 {
334 return TARGET_64BIT ? DImode : SImode;
335 }
336
337 /* Return true if the back end supports mode MODE. */
338 static bool
339 s390_scalar_mode_supported_p (enum machine_mode mode)
340 {
341 if (DECIMAL_FLOAT_MODE_P (mode))
342 return true;
343 else
344 return default_scalar_mode_supported_p (mode);
345 }
346
347 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
348
349 void
350 s390_set_has_landing_pad_p (bool value)
351 {
352 cfun->machine->has_landing_pad_p = value;
353 }
354
355 /* If two condition code modes are compatible, return a condition code
356 mode which is compatible with both. Otherwise, return
357 VOIDmode. */
358
359 static enum machine_mode
360 s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
361 {
362 if (m1 == m2)
363 return m1;
364
365 switch (m1)
366 {
367 case CCZmode:
368 if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
369 || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
370 return m2;
371 return VOIDmode;
372
373 case CCSmode:
374 case CCUmode:
375 case CCTmode:
376 case CCSRmode:
377 case CCURmode:
378 case CCZ1mode:
379 if (m2 == CCZmode)
380 return m1;
381
382 return VOIDmode;
383
384 default:
385 return VOIDmode;
386 }
387 return VOIDmode;
388 }
389
390 /* Return true if SET either doesn't set the CC register, or else
391 the source and destination have matching CC modes and that
392 CC mode is at least as constrained as REQ_MODE. */
393
394 static bool
395 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
396 {
397 enum machine_mode set_mode;
398
399 gcc_assert (GET_CODE (set) == SET);
400
401 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
402 return 1;
403
404 set_mode = GET_MODE (SET_DEST (set));
405 switch (set_mode)
406 {
407 case CCSmode:
408 case CCSRmode:
409 case CCUmode:
410 case CCURmode:
411 case CCLmode:
412 case CCL1mode:
413 case CCL2mode:
414 case CCL3mode:
415 case CCT1mode:
416 case CCT2mode:
417 case CCT3mode:
418 if (req_mode != set_mode)
419 return 0;
420 break;
421
422 case CCZmode:
423 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
424 && req_mode != CCSRmode && req_mode != CCURmode)
425 return 0;
426 break;
427
428 case CCAPmode:
429 case CCANmode:
430 if (req_mode != CCAmode)
431 return 0;
432 break;
433
434 default:
435 gcc_unreachable ();
436 }
437
438 return (GET_MODE (SET_SRC (set)) == set_mode);
439 }
440
441 /* Return true if every SET in INSN that sets the CC register
442 has source and destination with matching CC modes and that
443 CC mode is at least as constrained as REQ_MODE.
444 If REQ_MODE is VOIDmode, always return false. */
445
446 bool
447 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
448 {
449 int i;
450
451 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
452 if (req_mode == VOIDmode)
453 return false;
454
455 if (GET_CODE (PATTERN (insn)) == SET)
456 return s390_match_ccmode_set (PATTERN (insn), req_mode);
457
458 if (GET_CODE (PATTERN (insn)) == PARALLEL)
459 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
460 {
461 rtx set = XVECEXP (PATTERN (insn), 0, i);
462 if (GET_CODE (set) == SET)
463 if (!s390_match_ccmode_set (set, req_mode))
464 return false;
465 }
466
467 return true;
468 }
469
470 /* If a test-under-mask instruction can be used to implement
471 (compare (and ... OP1) OP2), return the CC mode required
472 to do that. Otherwise, return VOIDmode.
473 MIXED is true if the instruction can distinguish between
 474 CC1 and CC2 for mixed selected bits (TMxx); it is false
475 if the instruction cannot (TM). */
476
477 enum machine_mode
478 s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
479 {
480 int bit0, bit1;
481
482 /* ??? Fixme: should work on CONST_DOUBLE as well. */
483 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
484 return VOIDmode;
485
486 /* Selected bits all zero: CC0.
487 e.g.: int a; if ((a & (16 + 128)) == 0) */
488 if (INTVAL (op2) == 0)
489 return CCTmode;
490
491 /* Selected bits all one: CC3.
492 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
493 if (INTVAL (op2) == INTVAL (op1))
494 return CCT3mode;
495
496 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
497 int a;
498 if ((a & (16 + 128)) == 16) -> CCT1
499 if ((a & (16 + 128)) == 128) -> CCT2 */
500 if (mixed)
501 {
502 bit1 = exact_log2 (INTVAL (op2));
503 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
504 if (bit0 != -1 && bit1 != -1)
505 return bit0 > bit1 ? CCT1mode : CCT2mode;
506 }
507
508 return VOIDmode;
509 }
510
511 /* Given a comparison code OP (EQ, NE, etc.) and the operands
512 OP0 and OP1 of a COMPARE, return the mode to be used for the
513 comparison. */
514
515 enum machine_mode
516 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
517 {
518 switch (code)
519 {
520 case EQ:
521 case NE:
522 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
523 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
524 return CCAPmode;
525 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
526 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
527 return CCAPmode;
528 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
529 || GET_CODE (op1) == NEG)
530 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
531 return CCLmode;
532
533 if (GET_CODE (op0) == AND)
534 {
535 /* Check whether we can potentially do it via TM. */
536 enum machine_mode ccmode;
537 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
538 if (ccmode != VOIDmode)
539 {
540 /* Relax CCTmode to CCZmode to allow fall-back to AND
541 if that turns out to be beneficial. */
542 return ccmode == CCTmode ? CCZmode : ccmode;
543 }
544 }
545
546 if (register_operand (op0, HImode)
547 && GET_CODE (op1) == CONST_INT
548 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
549 return CCT3mode;
550 if (register_operand (op0, QImode)
551 && GET_CODE (op1) == CONST_INT
552 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
553 return CCT3mode;
554
555 return CCZmode;
556
557 case LE:
558 case LT:
559 case GE:
560 case GT:
561 /* The only overflow condition of NEG and ABS happens when
 562 INT_MIN is used as the operand; the result then stays negative. So
563 we have an overflow from a positive value to a negative.
564 Using CCAP mode the resulting cc can be used for comparisons. */
565 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
566 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
567 return CCAPmode;
568
569 /* If constants are involved in an add instruction it is possible to use
570 the resulting cc for comparisons with zero. Knowing the sign of the
571 constant the overflow behavior gets predictable. e.g.:
572 int a, b; if ((b = a + c) > 0)
573 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
574 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
575 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
576 {
577 if (INTVAL (XEXP((op0), 1)) < 0)
578 return CCANmode;
579 else
580 return CCAPmode;
581 }
582 /* Fall through. */
583 case UNORDERED:
584 case ORDERED:
585 case UNEQ:
586 case UNLE:
587 case UNLT:
588 case UNGE:
589 case UNGT:
590 case LTGT:
591 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
592 && GET_CODE (op1) != CONST_INT)
593 return CCSRmode;
594 return CCSmode;
595
596 case LTU:
597 case GEU:
598 if (GET_CODE (op0) == PLUS
599 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
600 return CCL1mode;
601
602 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
603 && GET_CODE (op1) != CONST_INT)
604 return CCURmode;
605 return CCUmode;
606
607 case LEU:
608 case GTU:
609 if (GET_CODE (op0) == MINUS
610 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
611 return CCL2mode;
612
613 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
614 && GET_CODE (op1) != CONST_INT)
615 return CCURmode;
616 return CCUmode;
617
618 default:
619 gcc_unreachable ();
620 }
621 }
622
623 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
624 that we can implement more efficiently. */
625
626 void
627 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
628 {
629 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
630 if ((*code == EQ || *code == NE)
631 && *op1 == const0_rtx
632 && GET_CODE (*op0) == ZERO_EXTRACT
633 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
634 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
635 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
636 {
637 rtx inner = XEXP (*op0, 0);
638 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
639 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
640 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
641
642 if (len > 0 && len < modesize
643 && pos >= 0 && pos + len <= modesize
644 && modesize <= HOST_BITS_PER_WIDE_INT)
645 {
646 unsigned HOST_WIDE_INT block;
647 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
648 block <<= modesize - pos - len;
649
650 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
651 gen_int_mode (block, GET_MODE (inner)));
652 }
653 }
654
655 /* Narrow AND of memory against immediate to enable TM. */
656 if ((*code == EQ || *code == NE)
657 && *op1 == const0_rtx
658 && GET_CODE (*op0) == AND
659 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
660 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
661 {
662 rtx inner = XEXP (*op0, 0);
663 rtx mask = XEXP (*op0, 1);
664
665 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
666 if (GET_CODE (inner) == SUBREG
667 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
668 && (GET_MODE_SIZE (GET_MODE (inner))
669 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
670 && ((INTVAL (mask)
671 & GET_MODE_MASK (GET_MODE (inner))
672 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
673 == 0))
674 inner = SUBREG_REG (inner);
675
676 /* Do not change volatile MEMs. */
677 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
678 {
679 int part = s390_single_part (XEXP (*op0, 1),
680 GET_MODE (inner), QImode, 0);
681 if (part >= 0)
682 {
683 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
684 inner = adjust_address_nv (inner, QImode, part);
685 *op0 = gen_rtx_AND (QImode, inner, mask);
686 }
687 }
688 }
689
690 /* Narrow comparisons against 0xffff to HImode if possible. */
691 if ((*code == EQ || *code == NE)
692 && GET_CODE (*op1) == CONST_INT
693 && INTVAL (*op1) == 0xffff
694 && SCALAR_INT_MODE_P (GET_MODE (*op0))
695 && (nonzero_bits (*op0, GET_MODE (*op0))
696 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
697 {
698 *op0 = gen_lowpart (HImode, *op0);
699 *op1 = constm1_rtx;
700 }
701
702 /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible. */
703 if (GET_CODE (*op0) == UNSPEC
704 && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
705 && XVECLEN (*op0, 0) == 1
706 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
707 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
708 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
709 && *op1 == const0_rtx)
710 {
711 enum rtx_code new_code = UNKNOWN;
712 switch (*code)
713 {
714 case EQ: new_code = EQ; break;
715 case NE: new_code = NE; break;
716 case LT: new_code = GTU; break;
717 case GT: new_code = LTU; break;
718 case LE: new_code = GEU; break;
719 case GE: new_code = LEU; break;
720 default: break;
721 }
722
723 if (new_code != UNKNOWN)
724 {
725 *op0 = XVECEXP (*op0, 0, 0);
726 *code = new_code;
727 }
728 }
729
730 /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible. */
731 if (GET_CODE (*op0) == UNSPEC
732 && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
733 && XVECLEN (*op0, 0) == 1
734 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
735 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
736 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
737 && *op1 == const0_rtx)
738 {
739 enum rtx_code new_code = UNKNOWN;
740 switch (*code)
741 {
742 case EQ: new_code = EQ; break;
743 case NE: new_code = NE; break;
744 default: break;
745 }
746
747 if (new_code != UNKNOWN)
748 {
749 *op0 = XVECEXP (*op0, 0, 0);
750 *code = new_code;
751 }
752 }
753
754 /* Simplify cascaded EQ, NE with const0_rtx. */
755 if ((*code == NE || *code == EQ)
756 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
757 && GET_MODE (*op0) == SImode
758 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
759 && REG_P (XEXP (*op0, 0))
760 && XEXP (*op0, 1) == const0_rtx
761 && *op1 == const0_rtx)
762 {
763 if ((*code == EQ && GET_CODE (*op0) == NE)
764 || (*code == NE && GET_CODE (*op0) == EQ))
765 *code = EQ;
766 else
767 *code = NE;
768 *op0 = XEXP (*op0, 0);
769 }
770
771 /* Prefer register over memory as first operand. */
772 if (MEM_P (*op0) && REG_P (*op1))
773 {
774 rtx tem = *op0; *op0 = *op1; *op1 = tem;
775 *code = swap_condition (*code);
776 }
777 }
778
779 /* Emit a compare instruction suitable to implement the comparison
780 OP0 CODE OP1. Return the correct condition RTL to be placed in
781 the IF_THEN_ELSE of the conditional branch testing the result. */
782
783 rtx
784 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
785 {
786 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
787 rtx ret = NULL_RTX;
788
789 /* Do not output a redundant compare instruction if a compare_and_swap
790 pattern already computed the result and the machine modes are compatible. */
791 if (s390_compare_emitted
792 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
793 == GET_MODE (s390_compare_emitted)))
794 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
795 else
796 {
797 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
798
799 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
800 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
801 }
802 s390_compare_emitted = NULL_RTX;
803 return ret;
804 }
805
 806 /* Emit a SImode compare and swap instruction that sets MEM to NEW if the
 807 current contents of MEM (returned in OLD) match CMP.
808 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
809 conditional branch testing the result. */
810
811 static rtx
812 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
813 {
814 rtx ret;
815
816 emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
817 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
818
819 s390_compare_emitted = NULL_RTX;
820
821 return ret;
822 }
823
824 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
825 unconditional jump, else a conditional jump under condition COND. */
826
827 void
828 s390_emit_jump (rtx target, rtx cond)
829 {
830 rtx insn;
831
832 target = gen_rtx_LABEL_REF (VOIDmode, target);
833 if (cond)
834 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
835
836 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
837 emit_jump_insn (insn);
838 }
839
840 /* Return branch condition mask to implement a branch
841 specified by CODE. Return -1 for invalid comparisons. */
842
843 int
844 s390_branch_condition_mask (rtx code)
845 {
846 const int CC0 = 1 << 3;
847 const int CC1 = 1 << 2;
848 const int CC2 = 1 << 1;
849 const int CC3 = 1 << 0;
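/* Condition code n is represented by mask bit 1 << (3 - n); CC 0 thus
   corresponds to the most significant of the four mask bits, matching the
   ordering of the branch mask operand.  */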
850
851 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
852 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
853 gcc_assert (XEXP (code, 1) == const0_rtx);
854
855 switch (GET_MODE (XEXP (code, 0)))
856 {
857 case CCZmode:
858 case CCZ1mode:
859 switch (GET_CODE (code))
860 {
861 case EQ: return CC0;
862 case NE: return CC1 | CC2 | CC3;
863 default: return -1;
864 }
865 break;
866
867 case CCT1mode:
868 switch (GET_CODE (code))
869 {
870 case EQ: return CC1;
871 case NE: return CC0 | CC2 | CC3;
872 default: return -1;
873 }
874 break;
875
876 case CCT2mode:
877 switch (GET_CODE (code))
878 {
879 case EQ: return CC2;
880 case NE: return CC0 | CC1 | CC3;
881 default: return -1;
882 }
883 break;
884
885 case CCT3mode:
886 switch (GET_CODE (code))
887 {
888 case EQ: return CC3;
889 case NE: return CC0 | CC1 | CC2;
890 default: return -1;
891 }
892 break;
893
894 case CCLmode:
895 switch (GET_CODE (code))
896 {
897 case EQ: return CC0 | CC2;
898 case NE: return CC1 | CC3;
899 default: return -1;
900 }
901 break;
902
903 case CCL1mode:
904 switch (GET_CODE (code))
905 {
906 case LTU: return CC2 | CC3; /* carry */
907 case GEU: return CC0 | CC1; /* no carry */
908 default: return -1;
909 }
910 break;
911
912 case CCL2mode:
913 switch (GET_CODE (code))
914 {
915 case GTU: return CC0 | CC1; /* borrow */
916 case LEU: return CC2 | CC3; /* no borrow */
917 default: return -1;
918 }
919 break;
920
921 case CCL3mode:
922 switch (GET_CODE (code))
923 {
924 case EQ: return CC0 | CC2;
925 case NE: return CC1 | CC3;
926 case LTU: return CC1;
927 case GTU: return CC3;
928 case LEU: return CC1 | CC2;
929 case GEU: return CC2 | CC3;
930 default: return -1;
931 }
932
933 case CCUmode:
934 switch (GET_CODE (code))
935 {
936 case EQ: return CC0;
937 case NE: return CC1 | CC2 | CC3;
938 case LTU: return CC1;
939 case GTU: return CC2;
940 case LEU: return CC0 | CC1;
941 case GEU: return CC0 | CC2;
942 default: return -1;
943 }
944 break;
945
946 case CCURmode:
947 switch (GET_CODE (code))
948 {
949 case EQ: return CC0;
950 case NE: return CC2 | CC1 | CC3;
951 case LTU: return CC2;
952 case GTU: return CC1;
953 case LEU: return CC0 | CC2;
954 case GEU: return CC0 | CC1;
955 default: return -1;
956 }
957 break;
958
959 case CCAPmode:
960 switch (GET_CODE (code))
961 {
962 case EQ: return CC0;
963 case NE: return CC1 | CC2 | CC3;
964 case LT: return CC1 | CC3;
965 case GT: return CC2;
966 case LE: return CC0 | CC1 | CC3;
967 case GE: return CC0 | CC2;
968 default: return -1;
969 }
970 break;
971
972 case CCANmode:
973 switch (GET_CODE (code))
974 {
975 case EQ: return CC0;
976 case NE: return CC1 | CC2 | CC3;
977 case LT: return CC1;
978 case GT: return CC2 | CC3;
979 case LE: return CC0 | CC1;
980 case GE: return CC0 | CC2 | CC3;
981 default: return -1;
982 }
983 break;
984
985 case CCSmode:
986 switch (GET_CODE (code))
987 {
988 case EQ: return CC0;
989 case NE: return CC1 | CC2 | CC3;
990 case LT: return CC1;
991 case GT: return CC2;
992 case LE: return CC0 | CC1;
993 case GE: return CC0 | CC2;
994 case UNORDERED: return CC3;
995 case ORDERED: return CC0 | CC1 | CC2;
996 case UNEQ: return CC0 | CC3;
997 case UNLT: return CC1 | CC3;
998 case UNGT: return CC2 | CC3;
999 case UNLE: return CC0 | CC1 | CC3;
1000 case UNGE: return CC0 | CC2 | CC3;
1001 case LTGT: return CC1 | CC2;
1002 default: return -1;
1003 }
1004 break;
1005
1006 case CCSRmode:
1007 switch (GET_CODE (code))
1008 {
1009 case EQ: return CC0;
1010 case NE: return CC2 | CC1 | CC3;
1011 case LT: return CC2;
1012 case GT: return CC1;
1013 case LE: return CC0 | CC2;
1014 case GE: return CC0 | CC1;
1015 case UNORDERED: return CC3;
1016 case ORDERED: return CC0 | CC2 | CC1;
1017 case UNEQ: return CC0 | CC3;
1018 case UNLT: return CC2 | CC3;
1019 case UNGT: return CC1 | CC3;
1020 case UNLE: return CC0 | CC2 | CC3;
1021 case UNGE: return CC0 | CC1 | CC3;
1022 case LTGT: return CC2 | CC1;
1023 default: return -1;
1024 }
1025 break;
1026
1027 default:
1028 return -1;
1029 }
1030 }
1031
1032 /* If INV is false, return assembler mnemonic string to implement
1033 a branch specified by CODE. If INV is true, return mnemonic
1034 for the corresponding inverted branch. */
1035
1036 static const char *
1037 s390_branch_condition_mnemonic (rtx code, int inv)
1038 {
1039 static const char *const mnemonic[16] =
1040 {
1041 NULL, "o", "h", "nle",
1042 "l", "nhe", "lh", "ne",
1043 "e", "nlh", "he", "nl",
1044 "le", "nh", "no", NULL
1045 };
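/* The table is indexed by the 4-bit condition code mask computed by
   s390_branch_condition_mask; entries 0 (branch never) and 15 (branch
   always) are not used here, hence NULL.  */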
1046
1047 int mask = s390_branch_condition_mask (code);
1048 gcc_assert (mask >= 0);
1049
1050 if (inv)
1051 mask ^= 15;
1052
1053 gcc_assert (mask >= 1 && mask <= 14);
1054
1055 return mnemonic[mask];
1056 }
1057
 1058 /* Return the part of OP which has a value different from DEF.
 1059 The size of the part is determined by MODE.
 1060 Use this function only if you already know that OP really
 1061 contains such a part.  */
1062
1063 unsigned HOST_WIDE_INT
1064 s390_extract_part (rtx op, enum machine_mode mode, int def)
1065 {
1066 unsigned HOST_WIDE_INT value = 0;
1067 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1068 int part_bits = GET_MODE_BITSIZE (mode);
1069 unsigned HOST_WIDE_INT part_mask
1070 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
1071 int i;
1072
1073 for (i = 0; i < max_parts; i++)
1074 {
1075 if (i == 0)
1076 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1077 else
1078 value >>= part_bits;
1079
1080 if ((value & part_mask) != (def & part_mask))
1081 return value & part_mask;
1082 }
1083
1084 gcc_unreachable ();
1085 }
1086
1087 /* If OP is an integer constant of mode MODE with exactly one
1088 part of mode PART_MODE unequal to DEF, return the number of that
1089 part. Otherwise, return -1. */
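/* For instance, s390_single_part (GEN_INT (0x00ff0000), SImode, QImode, 0)
   returns 1: exactly one QImode part differs from 0, and parts are numbered
   starting from the most significant one.  */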
1090
1091 int
1092 s390_single_part (rtx op,
1093 enum machine_mode mode,
1094 enum machine_mode part_mode,
1095 int def)
1096 {
1097 unsigned HOST_WIDE_INT value = 0;
1098 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1099 unsigned HOST_WIDE_INT part_mask
1100 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
1101 int i, part = -1;
1102
1103 if (GET_CODE (op) != CONST_INT)
1104 return -1;
1105
1106 for (i = 0; i < n_parts; i++)
1107 {
1108 if (i == 0)
1109 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1110 else
1111 value >>= GET_MODE_BITSIZE (part_mode);
1112
1113 if ((value & part_mask) != (def & part_mask))
1114 {
1115 if (part != -1)
1116 return -1;
1117 else
1118 part = i;
1119 }
1120 }
1121 return part == -1 ? -1 : n_parts - 1 - part;
1122 }
1123
1124 /* Check whether we can (and want to) split a double-word
1125 move in mode MODE from SRC to DST into two single-word
1126 moves, moving the subword FIRST_SUBWORD first. */
1127
1128 bool
1129 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1130 {
1131 /* Floating point registers cannot be split. */
1132 if (FP_REG_P (src) || FP_REG_P (dst))
1133 return false;
1134
1135 /* We don't need to split if operands are directly accessible. */
1136 if (s_operand (src, mode) || s_operand (dst, mode))
1137 return false;
1138
1139 /* Non-offsettable memory references cannot be split. */
1140 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1141 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1142 return false;
1143
1144 /* Moving the first subword must not clobber a register
1145 needed to move the second subword. */
1146 if (register_operand (dst, mode))
1147 {
1148 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1149 if (reg_overlap_mentioned_p (subreg, src))
1150 return false;
1151 }
1152
1153 return true;
1154 }
1155
1156 /* Return true if it can be proven that [MEM1, MEM1 + SIZE]
1157 and [MEM2, MEM2 + SIZE] do overlap and false
1158 otherwise. */
1159
1160 bool
1161 s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1162 {
1163 rtx addr1, addr2, addr_delta;
1164 HOST_WIDE_INT delta;
1165
1166 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1167 return true;
1168
1169 if (size == 0)
1170 return false;
1171
1172 addr1 = XEXP (mem1, 0);
1173 addr2 = XEXP (mem2, 0);
1174
1175 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1176
1177 /* This overlapping check is used by peepholes merging memory block operations.
1178 Overlapping operations would otherwise be recognized by the S/390 hardware
1179 and would fall back to a slower implementation. Allowing overlapping
1180 operations would lead to slow code but not to wrong code. Therefore we are
1181 somewhat optimistic if we cannot prove that the memory blocks are
1182 overlapping.
1183 That's why we return false here although this may accept operations on
1184 overlapping memory areas. */
1185 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1186 return false;
1187
1188 delta = INTVAL (addr_delta);
1189
1190 if (delta == 0
1191 || (delta > 0 && delta < size)
1192 || (delta < 0 && -delta < size))
1193 return true;
1194
1195 return false;
1196 }
1197
1198 /* Check whether the address of memory reference MEM2 equals exactly
1199 the address of memory reference MEM1 plus DELTA. Return true if
1200 we can prove this to be the case, false otherwise. */
1201
1202 bool
1203 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1204 {
1205 rtx addr1, addr2, addr_delta;
1206
1207 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1208 return false;
1209
1210 addr1 = XEXP (mem1, 0);
1211 addr2 = XEXP (mem2, 0);
1212
1213 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1214 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1215 return false;
1216
1217 return true;
1218 }
1219
1220 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1221
1222 void
1223 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1224 rtx *operands)
1225 {
1226 enum machine_mode wmode = mode;
1227 rtx dst = operands[0];
1228 rtx src1 = operands[1];
1229 rtx src2 = operands[2];
1230 rtx op, clob, tem;
1231
1232 /* If we cannot handle the operation directly, use a temp register. */
1233 if (!s390_logical_operator_ok_p (operands))
1234 dst = gen_reg_rtx (mode);
1235
1236 /* QImode and HImode patterns make sense only if we have a destination
1237 in memory. Otherwise perform the operation in SImode. */
1238 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1239 wmode = SImode;
1240
1241 /* Widen operands if required. */
1242 if (mode != wmode)
1243 {
1244 if (GET_CODE (dst) == SUBREG
1245 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1246 dst = tem;
1247 else if (REG_P (dst))
1248 dst = gen_rtx_SUBREG (wmode, dst, 0);
1249 else
1250 dst = gen_reg_rtx (wmode);
1251
1252 if (GET_CODE (src1) == SUBREG
1253 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1254 src1 = tem;
1255 else if (GET_MODE (src1) != VOIDmode)
1256 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1257
1258 if (GET_CODE (src2) == SUBREG
1259 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1260 src2 = tem;
1261 else if (GET_MODE (src2) != VOIDmode)
1262 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1263 }
1264
1265 /* Emit the instruction. */
1266 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1267 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1268 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1269
1270 /* Fix up the destination if needed. */
1271 if (dst != operands[0])
1272 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1273 }
1274
1275 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1276
1277 bool
1278 s390_logical_operator_ok_p (rtx *operands)
1279 {
1280 /* If the destination operand is in memory, it needs to coincide
1281 with one of the source operands. After reload, it has to be
1282 the first source operand. */
1283 if (GET_CODE (operands[0]) == MEM)
1284 return rtx_equal_p (operands[0], operands[1])
1285 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1286
1287 return true;
1288 }
1289
1290 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1291 operand IMMOP to switch from SS to SI type instructions. */
1292
1293 void
1294 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1295 {
1296 int def = code == AND ? -1 : 0;
1297 HOST_WIDE_INT mask;
1298 int part;
1299
1300 gcc_assert (GET_CODE (*memop) == MEM);
1301 gcc_assert (!MEM_VOLATILE_P (*memop));
1302
1303 mask = s390_extract_part (*immop, QImode, def);
1304 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1305 gcc_assert (part >= 0);
1306
1307 *memop = adjust_address (*memop, QImode, part);
1308 *immop = gen_int_mode (mask, QImode);
1309 }
1310
1311
1312 /* How to allocate a 'struct machine_function'. */
1313
1314 static struct machine_function *
1315 s390_init_machine_status (void)
1316 {
1317 return ggc_alloc_cleared (sizeof (struct machine_function));
1318 }
1319
1320 /* Change optimizations to be performed, depending on the
1321 optimization level.
1322
1323 LEVEL is the optimization level specified; 2 if `-O2' is
1324 specified, 1 if `-O' is specified, and 0 if neither is specified.
1325
1326 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1327
1328 void
1329 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1330 {
1331 /* ??? There are apparently still problems with -fcaller-saves. */
1332 flag_caller_saves = 0;
1333
1334 /* By default, always emit DWARF-2 unwind info. This allows debugging
1335 without maintaining a stack frame back-chain. */
1336 flag_asynchronous_unwind_tables = 1;
1337
1338 /* Use MVCLE instructions to decrease code size if requested. */
1339 if (size != 0)
1340 target_flags |= MASK_MVCLE;
1341 }
1342
1343 /* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1344 to the associated processor_type and processor_flags if so. */
1345
1346 static bool
1347 s390_handle_arch_option (const char *arg,
1348 enum processor_type *type,
1349 enum processor_flags *flags)
1350 {
1351 static struct pta
1352 {
1353 const char *const name; /* processor name or nickname. */
1354 const enum processor_type processor;
1355 const enum processor_flags flags;
1356 }
1357 const processor_alias_table[] =
1358 {
1359 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1360 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1361 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1362 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1363 | PF_LONG_DISPLACEMENT},
1364 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1365 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
1366 {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1367 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
1368 };
1369 size_t i;
1370
1371 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1372 if (strcmp (arg, processor_alias_table[i].name) == 0)
1373 {
1374 *type = processor_alias_table[i].processor;
1375 *flags = processor_alias_table[i].flags;
1376 return true;
1377 }
1378 return false;
1379 }
1380
1381 /* Implement TARGET_HANDLE_OPTION. */
1382
1383 static bool
1384 s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
1385 {
1386 switch (code)
1387 {
1388 case OPT_march_:
1389 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1390
1391 case OPT_mstack_guard_:
1392 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1393 return false;
1394 if (exact_log2 (s390_stack_guard) == -1)
1395 error ("stack guard value must be an exact power of 2");
1396 return true;
1397
1398 case OPT_mstack_size_:
1399 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1400 return false;
1401 if (exact_log2 (s390_stack_size) == -1)
1402 error ("stack size must be an exact power of 2");
1403 return true;
1404
1405 case OPT_mtune_:
1406 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1407
1408 case OPT_mwarn_framesize_:
1409 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
1410
1411 default:
1412 return true;
1413 }
1414 }
1415
1416 void
1417 override_options (void)
1418 {
1419 /* Set up function hooks. */
1420 init_machine_status = s390_init_machine_status;
1421
1422 /* Architecture mode defaults according to ABI. */
1423 if (!(target_flags_explicit & MASK_ZARCH))
1424 {
1425 if (TARGET_64BIT)
1426 target_flags |= MASK_ZARCH;
1427 else
1428 target_flags &= ~MASK_ZARCH;
1429 }
1430
1431 /* Determine processor architectural level. */
1432 if (!s390_arch_string)
1433 {
1434 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1435 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1436 }
1437
1438 /* Determine processor to tune for. */
1439 if (s390_tune == PROCESSOR_max)
1440 {
1441 s390_tune = s390_arch;
1442 s390_tune_flags = s390_arch_flags;
1443 }
1444
1445 /* Sanity checks. */
1446 if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
1447 error ("z/Architecture mode not supported on %s", s390_arch_string);
1448 if (TARGET_64BIT && !TARGET_ZARCH)
1449 error ("64-bit ABI not supported in ESA/390 mode");
1450
1451 if (TARGET_HARD_DFP && !TARGET_DFP)
1452 {
1453 if (target_flags_explicit & MASK_HARD_DFP)
1454 {
1455 if (!TARGET_CPU_DFP)
1456 error ("Hardware decimal floating point instructions"
1457 " not available on %s", s390_arch_string);
1458 if (!TARGET_ZARCH)
1459 error ("Hardware decimal floating point instructions"
1460 " not available in ESA/390 mode");
1461 }
1462 else
1463 target_flags &= ~MASK_HARD_DFP;
1464 }
1465
1466 if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
1467 {
1468 if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
1469 error ("-mhard-dfp can't be used in conjunction with -msoft-float");
1470
1471 target_flags &= ~MASK_HARD_DFP;
1472 }
1473
1474 /* Set processor cost function. */
1475 if (s390_tune == PROCESSOR_2094_Z9_109)
1476 s390_cost = &z9_109_cost;
1477 else if (s390_tune == PROCESSOR_2084_Z990)
1478 s390_cost = &z990_cost;
1479 else
1480 s390_cost = &z900_cost;
1481
1482 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1483 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
1484 "in combination");
1485
1486 if (s390_stack_size)
1487 {
1488 if (s390_stack_guard >= s390_stack_size)
1489 error ("stack size must be greater than the stack guard value");
1490 else if (s390_stack_size > 1 << 16)
1491 error ("stack size must not be greater than 64k");
1492 }
1493 else if (s390_stack_guard)
1494 error ("-mstack-guard implies use of -mstack-size");
1495
1496 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
1497 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
1498 target_flags |= MASK_LONG_DOUBLE_128;
1499 #endif
1500 }
1501
1502 /* Map for smallest class containing reg regno. */
1503
1504 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1505 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1506 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1507 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1508 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1509 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1510 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1511 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1512 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1513 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1514 ACCESS_REGS, ACCESS_REGS
1515 };
1516
1517 /* Return attribute type of insn. */
1518
1519 static enum attr_type
1520 s390_safe_attr_type (rtx insn)
1521 {
1522 if (recog_memoized (insn) >= 0)
1523 return get_attr_type (insn);
1524 else
1525 return TYPE_NONE;
1526 }
1527
1528 /* Return true if DISP is a valid short displacement. */
1529
1530 static bool
1531 s390_short_displacement (rtx disp)
1532 {
1533 /* No displacement is OK. */
1534 if (!disp)
1535 return true;
1536
1537 /* Integer displacement in range. */
1538 if (GET_CODE (disp) == CONST_INT)
1539 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1540
1541 /* GOT offset is not OK, the GOT can be large. */
1542 if (GET_CODE (disp) == CONST
1543 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1544 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1545 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
1546 return false;
1547
1548 /* All other symbolic constants are literal pool references,
1549 which are OK as the literal pool must be small. */
1550 if (GET_CODE (disp) == CONST)
1551 return true;
1552
1553 return false;
1554 }
1555
1556 /* Decompose a RTL expression ADDR for a memory address into
1557 its components, returned in OUT.
1558
1559 Returns false if ADDR is not a valid memory address, true
1560 otherwise. If OUT is NULL, don't return the components,
1561 but check for validity only.
1562
1563 Note: Only addresses in canonical form are recognized.
1564 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1565 canonical form so that they will be recognized. */
1566
1567 static int
1568 s390_decompose_address (rtx addr, struct s390_address *out)
1569 {
1570 HOST_WIDE_INT offset = 0;
1571 rtx base = NULL_RTX;
1572 rtx indx = NULL_RTX;
1573 rtx disp = NULL_RTX;
1574 rtx orig_disp;
1575 bool pointer = false;
1576 bool base_ptr = false;
1577 bool indx_ptr = false;
1578 bool literal_pool = false;
1579
1580 /* We may need to substitute the literal pool base register into the address
1581 below. However, at this point we do not know which register is going to
1582 be used as base, so we substitute the arg pointer register. This is going
1583 to be treated as holding a pointer below -- it shouldn't be used for any
1584 other purpose. */
1585 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
1586
1587 /* Decompose address into base + index + displacement. */
1588
1589 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1590 base = addr;
1591
1592 else if (GET_CODE (addr) == PLUS)
1593 {
1594 rtx op0 = XEXP (addr, 0);
1595 rtx op1 = XEXP (addr, 1);
1596 enum rtx_code code0 = GET_CODE (op0);
1597 enum rtx_code code1 = GET_CODE (op1);
1598
1599 if (code0 == REG || code0 == UNSPEC)
1600 {
1601 if (code1 == REG || code1 == UNSPEC)
1602 {
1603 indx = op0; /* index + base */
1604 base = op1;
1605 }
1606
1607 else
1608 {
1609 base = op0; /* base + displacement */
1610 disp = op1;
1611 }
1612 }
1613
1614 else if (code0 == PLUS)
1615 {
1616 indx = XEXP (op0, 0); /* index + base + disp */
1617 base = XEXP (op0, 1);
1618 disp = op1;
1619 }
1620
1621 else
1622 {
1623 return false;
1624 }
1625 }
1626
1627 else
1628 disp = addr; /* displacement */
1629
1630 /* Extract integer part of displacement. */
1631 orig_disp = disp;
1632 if (disp)
1633 {
1634 if (GET_CODE (disp) == CONST_INT)
1635 {
1636 offset = INTVAL (disp);
1637 disp = NULL_RTX;
1638 }
1639 else if (GET_CODE (disp) == CONST
1640 && GET_CODE (XEXP (disp, 0)) == PLUS
1641 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1642 {
1643 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1644 disp = XEXP (XEXP (disp, 0), 0);
1645 }
1646 }
1647
1648 /* Strip off CONST here to avoid special case tests later. */
1649 if (disp && GET_CODE (disp) == CONST)
1650 disp = XEXP (disp, 0);
1651
1652 /* We can convert literal pool addresses to
1653 displacements by basing them off the base register. */
1654 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1655 {
1656 /* Either base or index must be free to hold the base register. */
1657 if (!base)
1658 base = fake_pool_base, literal_pool = true;
1659 else if (!indx)
1660 indx = fake_pool_base, literal_pool = true;
1661 else
1662 return false;
1663
1664 /* Mark up the displacement. */
1665 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1666 UNSPEC_LTREL_OFFSET);
1667 }
1668
1669 /* Validate base register. */
1670 if (base)
1671 {
1672 if (GET_CODE (base) == UNSPEC)
1673 switch (XINT (base, 1))
1674 {
1675 case UNSPEC_LTREF:
1676 if (!disp)
1677 disp = gen_rtx_UNSPEC (Pmode,
1678 gen_rtvec (1, XVECEXP (base, 0, 0)),
1679 UNSPEC_LTREL_OFFSET);
1680 else
1681 return false;
1682
1683 base = XVECEXP (base, 0, 1);
1684 break;
1685
1686 case UNSPEC_LTREL_BASE:
1687 if (XVECLEN (base, 0) == 1)
1688 base = fake_pool_base, literal_pool = true;
1689 else
1690 base = XVECEXP (base, 0, 1);
1691 break;
1692
1693 default:
1694 return false;
1695 }
1696
1697 if (!REG_P (base)
1698 || (GET_MODE (base) != SImode
1699 && GET_MODE (base) != Pmode))
1700 return false;
1701
1702 if (REGNO (base) == STACK_POINTER_REGNUM
1703 || REGNO (base) == FRAME_POINTER_REGNUM
1704 || ((reload_completed || reload_in_progress)
1705 && frame_pointer_needed
1706 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1707 || REGNO (base) == ARG_POINTER_REGNUM
1708 || (flag_pic
1709 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1710 pointer = base_ptr = true;
1711
1712 if ((reload_completed || reload_in_progress)
1713 && base == cfun->machine->base_reg)
1714 pointer = base_ptr = literal_pool = true;
1715 }
1716
1717 /* Validate index register. */
1718 if (indx)
1719 {
1720 if (GET_CODE (indx) == UNSPEC)
1721 switch (XINT (indx, 1))
1722 {
1723 case UNSPEC_LTREF:
1724 if (!disp)
1725 disp = gen_rtx_UNSPEC (Pmode,
1726 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1727 UNSPEC_LTREL_OFFSET);
1728 else
1729 return false;
1730
1731 indx = XVECEXP (indx, 0, 1);
1732 break;
1733
1734 case UNSPEC_LTREL_BASE:
1735 if (XVECLEN (indx, 0) == 1)
1736 indx = fake_pool_base, literal_pool = true;
1737 else
1738 indx = XVECEXP (indx, 0, 1);
1739 break;
1740
1741 default:
1742 return false;
1743 }
1744
1745 if (!REG_P (indx)
1746 || (GET_MODE (indx) != SImode
1747 && GET_MODE (indx) != Pmode))
1748 return false;
1749
1750 if (REGNO (indx) == STACK_POINTER_REGNUM
1751 || REGNO (indx) == FRAME_POINTER_REGNUM
1752 || ((reload_completed || reload_in_progress)
1753 && frame_pointer_needed
1754 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1755 || REGNO (indx) == ARG_POINTER_REGNUM
1756 || (flag_pic
1757 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1758 pointer = indx_ptr = true;
1759
1760 if ((reload_completed || reload_in_progress)
1761 && indx == cfun->machine->base_reg)
1762 pointer = indx_ptr = literal_pool = true;
1763 }
1764
1765 /* Prefer to use pointer as base, not index. */
1766 if (base && indx && !base_ptr
1767 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1768 {
1769 rtx tmp = base;
1770 base = indx;
1771 indx = tmp;
1772 }
1773
1774 /* Validate displacement. */
1775 if (!disp)
1776 {
1777 /* If virtual registers are involved, the displacement will change later
1778 anyway as the virtual registers get eliminated. This could make a
1779 valid displacement invalid, but it is more likely to make an invalid
1780 displacement valid, because we sometimes access the register save area
1781 via negative offsets to one of those registers.
1782 Thus we don't check the displacement for validity here. If after
1783 elimination the displacement turns out to be invalid after all,
1784 this is fixed up by reload in any case. */
1785 if (base != arg_pointer_rtx
1786 && indx != arg_pointer_rtx
1787 && base != return_address_pointer_rtx
1788 && indx != return_address_pointer_rtx
1789 && base != frame_pointer_rtx
1790 && indx != frame_pointer_rtx
1791 && base != virtual_stack_vars_rtx
1792 && indx != virtual_stack_vars_rtx)
1793 if (!DISP_IN_RANGE (offset))
1794 return false;
1795 }
1796 else
1797 {
1798 /* All the special cases are pointers. */
1799 pointer = true;
1800
1801 /* In the small-PIC case, the linker converts @GOT
1802 and @GOTNTPOFF offsets to possible displacements. */
1803 if (GET_CODE (disp) == UNSPEC
1804 && (XINT (disp, 1) == UNSPEC_GOT
1805 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
1806 && flag_pic == 1)
1807 {
1808 ;
1809 }
1810
1811 /* Accept chunkified literal pool symbol references. */
1812 else if (cfun && cfun->machine
1813 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1814 && GET_CODE (disp) == MINUS
1815 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1816 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1817 {
1818 ;
1819 }
1820
1821 /* Accept literal pool references. */
1822 else if (GET_CODE (disp) == UNSPEC
1823 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1824 {
1825 orig_disp = gen_rtx_CONST (Pmode, disp);
1826 if (offset)
1827 {
1828 /* If we have an offset, make sure it does not
1829 exceed the size of the constant pool entry. */
1830 rtx sym = XVECEXP (disp, 0, 0);
1831 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
1832 return false;
1833
1834 orig_disp = plus_constant (orig_disp, offset);
1835 }
1836 }
1837
1838 else
1839 return false;
1840 }
1841
1842 if (!base && !indx)
1843 pointer = true;
1844
1845 if (out)
1846 {
1847 out->base = base;
1848 out->indx = indx;
1849 out->disp = orig_disp;
1850 out->pointer = pointer;
1851 out->literal_pool = literal_pool;
1852 }
1853
1854 return true;
1855 }
1856
1857 /* Decompose a RTL expression OP for a shift count into its components,
1858 and return the base register in BASE and the offset in OFFSET.
1859
1860 Return true if OP is a valid shift count, false if not. */
1861
1862 bool
1863 s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
1864 {
1865 HOST_WIDE_INT off = 0;
1866
1867 /* We can have an integer constant, an address register,
1868 or a sum of the two. */
1869 if (GET_CODE (op) == CONST_INT)
1870 {
1871 off = INTVAL (op);
1872 op = NULL_RTX;
1873 }
1874 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1875 {
1876 off = INTVAL (XEXP (op, 1));
1877 op = XEXP (op, 0);
1878 }
1879 while (op && GET_CODE (op) == SUBREG)
1880 op = SUBREG_REG (op);
1881
1882 if (op && GET_CODE (op) != REG)
1883 return false;
1884
1885 if (offset)
1886 *offset = off;
1887 if (base)
1888 *base = op;
1889
1890 return true;
1891 }
1892
1893
1894 /* Return true if CODE is a valid address without index. */
1895
1896 bool
1897 s390_legitimate_address_without_index_p (rtx op)
1898 {
1899 struct s390_address addr;
1900
1901 if (!s390_decompose_address (XEXP (op, 0), &addr))
1902 return false;
1903 if (addr.indx)
1904 return false;
1905
1906 return true;
1907 }
1908
1909
1910 /* Evaluates constraint strings described by the regular expression
 1911 ((A|B)?(Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for the
 1912 constraint given in STR, and 0 otherwise.  */
1913
1914 int
1915 s390_mem_constraint (const char *str, rtx op)
1916 {
1917 struct s390_address addr;
1918 char c = str[0];
1919
1920 /* Check for offsettable variants of memory constraints. */
1921 if (c == 'A')
1922 {
1923 /* Only accept non-volatile MEMs. */
1924 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1925 return 0;
1926
1927 if ((reload_completed || reload_in_progress)
1928 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
1929 return 0;
1930
1931 c = str[1];
1932 }
1933
1934 /* Check for non-literal-pool variants of memory constraints. */
1935 else if (c == 'B')
1936 {
1937 if (GET_CODE (op) != MEM)
1938 return 0;
1939 if (!s390_decompose_address (XEXP (op, 0), &addr))
1940 return 0;
1941 if (addr.literal_pool)
1942 return 0;
1943
1944 c = str[1];
1945 }
1946
1947 switch (c)
1948 {
1949 case 'Q':
1950 if (GET_CODE (op) != MEM)
1951 return 0;
1952 if (!s390_decompose_address (XEXP (op, 0), &addr))
1953 return 0;
1954 if (addr.indx)
1955 return 0;
1956
1957 if (TARGET_LONG_DISPLACEMENT)
1958 {
1959 if (!s390_short_displacement (addr.disp))
1960 return 0;
1961 }
1962 break;
1963
1964 case 'R':
1965 if (GET_CODE (op) != MEM)
1966 return 0;
1967
1968 if (TARGET_LONG_DISPLACEMENT)
1969 {
1970 if (!s390_decompose_address (XEXP (op, 0), &addr))
1971 return 0;
1972 if (!s390_short_displacement (addr.disp))
1973 return 0;
1974 }
1975 break;
1976
1977 case 'S':
1978 if (!TARGET_LONG_DISPLACEMENT)
1979 return 0;
1980 if (GET_CODE (op) != MEM)
1981 return 0;
1982 if (!s390_decompose_address (XEXP (op, 0), &addr))
1983 return 0;
1984 if (addr.indx)
1985 return 0;
1986 if (s390_short_displacement (addr.disp))
1987 return 0;
1988 break;
1989
1990 case 'T':
1991 if (!TARGET_LONG_DISPLACEMENT)
1992 return 0;
1993 if (GET_CODE (op) != MEM)
1994 return 0;
1995 /* Any invalid address here will be fixed up by reload,
1996 so accept it for the most generic constraint. */
1997 if (s390_decompose_address (XEXP (op, 0), &addr)
1998 && s390_short_displacement (addr.disp))
1999 return 0;
2000 break;
2001
2002 case 'U':
2003 if (TARGET_LONG_DISPLACEMENT)
2004 {
2005 if (!s390_decompose_address (op, &addr))
2006 return 0;
2007 if (!s390_short_displacement (addr.disp))
2008 return 0;
2009 }
2010 break;
2011
2012 case 'W':
2013 if (!TARGET_LONG_DISPLACEMENT)
2014 return 0;
2015 /* Any invalid address here will be fixed up by reload,
2016 so accept it for the most generic constraint. */
2017 if (s390_decompose_address (op, &addr)
2018 && s390_short_displacement (addr.disp))
2019 return 0;
2020 break;
2021
2022 case 'Y':
2023 /* Simply check for the basic form of a shift count. Reload will
2024 take care of making sure we have a proper base register. */
2025 if (!s390_decompose_shift_count (op, NULL, NULL))
2026 return 0;
2027 break;
2028
2029 default:
2030 return 0;
2031 }
2032
2033 return 1;
2034 }
2035
2036
2037
2038 /* Evaluates constraint strings starting with letter O. Input
2039 parameter C is the letter immediately following the "O" in the constraint
2040 string. Returns 1 if VALUE meets the respective constraint and 0
2041 otherwise. */
2042
2043 int
2044 s390_O_constraint_str (const char c, HOST_WIDE_INT value)
2045 {
2046 if (!TARGET_EXTIMM)
2047 return 0;
2048
2049 switch (c)
2050 {
2051 case 's':
2052 return trunc_int_for_mode (value, SImode) == value;
2053
2054 case 'p':
2055 return value == 0
2056 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2057
2058 case 'n':
2059 return value == -1
2060 || s390_single_part (GEN_INT (value), DImode, SImode, -1) == 1;
2061
2062 default:
2063 gcc_unreachable ();
2064 }
2065 }
2066
2067
2068 /* Evaluates constraint strings starting with letter N. Parameter STR
2069 contains the letters following letter "N" in the constraint string.
2070 Returns true if VALUE matches the constraint. */
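/* The string has the form <part><part_mode><mode><def>: <part> selects
   which <part_mode>-sized chunk of a <mode> value may differ from the
   filler <def> ('0' = all zero bits, 'F' = all one bits); 'x' accepts
   any single chunk.  All other chunks of VALUE must equal the filler. */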
2071
2072 int
2073 s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
2074 {
2075 enum machine_mode mode, part_mode;
2076 int def;
2077 int part, part_goal;
2078
2079
2080 if (str[0] == 'x')
2081 part_goal = -1;
2082 else
2083 part_goal = str[0] - '0';
2084
2085 switch (str[1])
2086 {
2087 case 'Q':
2088 part_mode = QImode;
2089 break;
2090 case 'H':
2091 part_mode = HImode;
2092 break;
2093 case 'S':
2094 part_mode = SImode;
2095 break;
2096 default:
2097 return 0;
2098 }
2099
2100 switch (str[2])
2101 {
2102 case 'H':
2103 mode = HImode;
2104 break;
2105 case 'S':
2106 mode = SImode;
2107 break;
2108 case 'D':
2109 mode = DImode;
2110 break;
2111 default:
2112 return 0;
2113 }
2114
2115 switch (str[3])
2116 {
2117 case '0':
2118 def = 0;
2119 break;
2120 case 'F':
2121 def = -1;
2122 break;
2123 default:
2124 return 0;
2125 }
2126
2127 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2128 return 0;
2129
2130 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2131 if (part < 0)
2132 return 0;
2133 if (part_goal != -1 && part_goal != part)
2134 return 0;
2135
2136 return 1;
2137 }
2138
2139
2140 /* Returns true if the input parameter VALUE is a float zero. */
2141
2142 int
2143 s390_float_const_zero_p (rtx value)
2144 {
2145 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2146 && value == CONST0_RTX (GET_MODE (value)));
2147 }
2148
2149
2150 /* Compute a (partial) cost for rtx X. Return true if the complete
2151 cost has been computed, and false if subexpressions should be
2152 scanned. In either case, *TOTAL contains the cost result.
2153 CODE contains GET_CODE (x), OUTER_CODE contains the code
2154 of the superexpression of x. */
2155
2156 static bool
2157 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
2158 {
2159 switch (code)
2160 {
2161 case CONST:
2162 case CONST_INT:
2163 case LABEL_REF:
2164 case SYMBOL_REF:
2165 case CONST_DOUBLE:
2166 case MEM:
2167 *total = 0;
2168 return true;
2169
2170 case ASHIFT:
2171 case ASHIFTRT:
2172 case LSHIFTRT:
2173 case ROTATE:
2174 case ROTATERT:
2175 case AND:
2176 case IOR:
2177 case XOR:
2178 case NEG:
2179 case NOT:
2180 *total = COSTS_N_INSNS (1);
2181 return false;
2182
2183 case PLUS:
2184 case MINUS:
2185 /* Check for multiply and add. */
2186 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2187 && GET_CODE (XEXP (x, 0)) == MULT
2188 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2189 {
2190 /* This is the multiply and add case. */
2191 if (GET_MODE (x) == DFmode)
2192 *total = s390_cost->madbr;
2193 else
2194 *total = s390_cost->maebr;
2195 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2196 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2197 + rtx_cost (XEXP (x, 1), code);
2198 return true; /* Do not do an additional recursive descent. */
2199 }
2200 *total = COSTS_N_INSNS (1);
2201 return false;
2202
2203 case MULT:
2204 switch (GET_MODE (x))
2205 {
2206 case SImode:
2207 {
2208 rtx left = XEXP (x, 0);
2209 rtx right = XEXP (x, 1);
2210 if (GET_CODE (right) == CONST_INT
2211 && CONST_OK_FOR_K (INTVAL (right)))
2212 *total = s390_cost->mhi;
2213 else if (GET_CODE (left) == SIGN_EXTEND)
2214 *total = s390_cost->mh;
2215 else
2216 *total = s390_cost->ms; /* msr, ms, msy */
2217 break;
2218 }
2219 case DImode:
2220 {
2221 rtx left = XEXP (x, 0);
2222 rtx right = XEXP (x, 1);
2223 if (TARGET_64BIT)
2224 {
2225 if (GET_CODE (right) == CONST_INT
2226 && CONST_OK_FOR_K (INTVAL (right)))
2227 *total = s390_cost->mghi;
2228 else if (GET_CODE (left) == SIGN_EXTEND)
2229 *total = s390_cost->msgf;
2230 else
2231 *total = s390_cost->msg; /* msgr, msg */
2232 }
2233 else /* TARGET_31BIT */
2234 {
2235 if (GET_CODE (left) == SIGN_EXTEND
2236 && GET_CODE (right) == SIGN_EXTEND)
2237 /* mulsidi case: mr, m */
2238 *total = s390_cost->m;
2239 else if (GET_CODE (left) == ZERO_EXTEND
2240 && GET_CODE (right) == ZERO_EXTEND
2241 && TARGET_CPU_ZARCH)
2242 /* umulsidi case: ml, mlr */
2243 *total = s390_cost->ml;
2244 else
2245 /* Complex calculation is required. */
2246 *total = COSTS_N_INSNS (40);
2247 }
2248 break;
2249 }
2250 case SFmode:
2251 case DFmode:
2252 *total = s390_cost->mult_df;
2253 break;
2254 case TFmode:
2255 *total = s390_cost->mxbr;
2256 break;
2257 default:
2258 return false;
2259 }
2260 return false;
2261
2262 case UDIV:
2263 case UMOD:
2264 if (GET_MODE (x) == TImode) /* 128 bit division */
2265 *total = s390_cost->dlgr;
2266 else if (GET_MODE (x) == DImode)
2267 {
2268 rtx right = XEXP (x, 1);
2269 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2270 *total = s390_cost->dlr;
2271 else /* 64 by 64 bit division */
2272 *total = s390_cost->dlgr;
2273 }
2274 else if (GET_MODE (x) == SImode) /* 32 bit division */
2275 *total = s390_cost->dlr;
2276 return false;
2277
2278 case DIV:
2279 case MOD:
2280 if (GET_MODE (x) == DImode)
2281 {
2282 rtx right = XEXP (x, 1);
2283 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2284 if (TARGET_64BIT)
2285 *total = s390_cost->dsgfr;
2286 else
2287 *total = s390_cost->dr;
2288 else /* 64 by 64 bit division */
2289 *total = s390_cost->dsgr;
2290 }
2291 else if (GET_MODE (x) == SImode) /* 32 bit division */
2292 *total = s390_cost->dlr;
2293 else if (GET_MODE (x) == SFmode)
2294 {
2295 *total = s390_cost->debr;
2296 }
2297 else if (GET_MODE (x) == DFmode)
2298 {
2299 *total = s390_cost->ddbr;
2300 }
2301 else if (GET_MODE (x) == TFmode)
2302 {
2303 *total = s390_cost->dxbr;
2304 }
2305 return false;
2306
2307 case SQRT:
2308 if (GET_MODE (x) == SFmode)
2309 *total = s390_cost->sqebr;
2310 else if (GET_MODE (x) == DFmode)
2311 *total = s390_cost->sqdbr;
2312 else /* TFmode */
2313 *total = s390_cost->sqxbr;
2314 return false;
2315
2316 case SIGN_EXTEND:
2317 case ZERO_EXTEND:
2318 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2319 || outer_code == PLUS || outer_code == MINUS
2320 || outer_code == COMPARE)
2321 *total = 0;
2322 return false;
2323
2324 case COMPARE:
2325 *total = COSTS_N_INSNS (1);
2326 if (GET_CODE (XEXP (x, 0)) == AND
2327 && GET_CODE (XEXP (x, 1)) == CONST_INT
2328 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2329 {
2330 rtx op0 = XEXP (XEXP (x, 0), 0);
2331 rtx op1 = XEXP (XEXP (x, 0), 1);
2332 rtx op2 = XEXP (x, 1);
2333
2334 if (memory_operand (op0, GET_MODE (op0))
2335 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2336 return true;
2337 if (register_operand (op0, GET_MODE (op0))
2338 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2339 return true;
2340 }
2341 return false;
2342
2343 default:
2344 return false;
2345 }
2346 }
2347
2348 /* Return the cost of an address rtx ADDR. */
2349
2350 static int
2351 s390_address_cost (rtx addr)
2352 {
2353 struct s390_address ad;
2354 if (!s390_decompose_address (addr, &ad))
2355 return 1000;
2356
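/* An address using an index register is rated one unit more expensive,
   so that base + displacement addresses are preferred on a tie. */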
2357 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2358 }
2359
2360 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2361 otherwise return 0. */
2362
2363 int
2364 tls_symbolic_operand (rtx op)
2365 {
2366 if (GET_CODE (op) != SYMBOL_REF)
2367 return 0;
2368 return SYMBOL_REF_TLS_MODEL (op);
2369 }
2370 \f
2371 /* Split DImode access register reference REG (on 64-bit) into its constituent
2372 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2373 gen_highpart cannot be used as they assume all registers are word-sized,
2374 while our access registers have only half that size. */
2375
2376 void
2377 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2378 {
2379 gcc_assert (TARGET_64BIT);
2380 gcc_assert (ACCESS_REG_P (reg));
2381 gcc_assert (GET_MODE (reg) == DImode);
2382 gcc_assert (!(REGNO (reg) & 1));
2383
2384 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2385 *hi = gen_rtx_REG (SImode, REGNO (reg));
2386 }
2387
2388 /* Return true if OP contains a symbol reference. */
2389
2390 bool
2391 symbolic_reference_mentioned_p (rtx op)
2392 {
2393 const char *fmt;
2394 int i;
2395
2396 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2397 return 1;
2398
2399 fmt = GET_RTX_FORMAT (GET_CODE (op));
2400 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2401 {
2402 if (fmt[i] == 'E')
2403 {
2404 int j;
2405
2406 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2407 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2408 return 1;
2409 }
2410
2411 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2412 return 1;
2413 }
2414
2415 return 0;
2416 }
2417
2418 /* Return true if OP contains a reference to a thread-local symbol. */
2419
2420 bool
2421 tls_symbolic_reference_mentioned_p (rtx op)
2422 {
2423 const char *fmt;
2424 int i;
2425
2426 if (GET_CODE (op) == SYMBOL_REF)
2427 return tls_symbolic_operand (op);
2428
2429 fmt = GET_RTX_FORMAT (GET_CODE (op));
2430 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2431 {
2432 if (fmt[i] == 'E')
2433 {
2434 int j;
2435
2436 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2437 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2438 return true;
2439 }
2440
2441 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2442 return true;
2443 }
2444
2445 return false;
2446 }
2447
2448
2449 /* Return true if OP is a legitimate general operand when
2450 generating PIC code. It is given that flag_pic is on
2451 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2452
2453 int
2454 legitimate_pic_operand_p (rtx op)
2455 {
2456 /* Accept all non-symbolic constants. */
2457 if (!SYMBOLIC_CONST (op))
2458 return 1;
2459
2460 /* Reject everything else; must be handled
2461 via emit_symbolic_move. */
2462 return 0;
2463 }
2464
2465 /* Returns true if the constant value OP is a legitimate general operand.
2466 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2467
2468 int
2469 legitimate_constant_p (rtx op)
2470 {
2471 /* Accept all non-symbolic constants. */
2472 if (!SYMBOLIC_CONST (op))
2473 return 1;
2474
2475 /* Accept immediate LARL operands. */
2476 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2477 return 1;
2478
2479 /* Thread-local symbols are never legal constants. This is
2480 so that emit_call knows that computing such addresses
2481 might require a function call. */
2482 if (TLS_SYMBOLIC_CONST (op))
2483 return 0;
2484
2485 /* In the PIC case, symbolic constants must *not* be
2486 forced into the literal pool. We accept them here,
2487 so that they will be handled by emit_symbolic_move. */
2488 if (flag_pic)
2489 return 1;
2490
2491 /* All remaining non-PIC symbolic constants are
2492 forced into the literal pool. */
2493 return 0;
2494 }
2495
2496 /* Determine if it's legal to put X into the constant pool. This
2497 is not possible if X contains the address of a symbol that is
2498 not constant (TLS) or not known at final link time (PIC). */
2499
2500 static bool
2501 s390_cannot_force_const_mem (rtx x)
2502 {
2503 switch (GET_CODE (x))
2504 {
2505 case CONST_INT:
2506 case CONST_DOUBLE:
2507 /* Accept all non-symbolic constants. */
2508 return false;
2509
2510 case LABEL_REF:
2511 /* Labels are OK iff we are non-PIC. */
2512 return flag_pic != 0;
2513
2514 case SYMBOL_REF:
2515 /* 'Naked' TLS symbol references are never OK,
2516 non-TLS symbols are OK iff we are non-PIC. */
2517 if (tls_symbolic_operand (x))
2518 return true;
2519 else
2520 return flag_pic != 0;
2521
2522 case CONST:
2523 return s390_cannot_force_const_mem (XEXP (x, 0));
2524 case PLUS:
2525 case MINUS:
2526 return s390_cannot_force_const_mem (XEXP (x, 0))
2527 || s390_cannot_force_const_mem (XEXP (x, 1));
2528
2529 case UNSPEC:
2530 switch (XINT (x, 1))
2531 {
2532 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2533 case UNSPEC_LTREL_OFFSET:
2534 case UNSPEC_GOT:
2535 case UNSPEC_GOTOFF:
2536 case UNSPEC_PLTOFF:
2537 case UNSPEC_TLSGD:
2538 case UNSPEC_TLSLDM:
2539 case UNSPEC_NTPOFF:
2540 case UNSPEC_DTPOFF:
2541 case UNSPEC_GOTNTPOFF:
2542 case UNSPEC_INDNTPOFF:
2543 return false;
2544
2545 /* If the literal pool shares the code section, we put
2546 execute template placeholders into the pool as well. */
2547 case UNSPEC_INSN:
2548 return TARGET_CPU_ZARCH;
2549
2550 default:
2551 return true;
2552 }
2553 break;
2554
2555 default:
2556 gcc_unreachable ();
2557 }
2558 }
2559
2560 /* Returns true if the constant value OP is a legitimate general
2561 operand during and after reload. The difference to
2562 legitimate_constant_p is that this function will not accept
2563 a constant that would need to be forced to the literal pool
2564 before it can be used as operand. */
2565
2566 bool
2567 legitimate_reload_constant_p (rtx op)
2568 {
2569 /* Accept la(y) operands. */
2570 if (GET_CODE (op) == CONST_INT
2571 && DISP_IN_RANGE (INTVAL (op)))
2572 return true;
2573
2574 /* Accept l(g)hi/l(g)fi operands. */
2575 if (GET_CODE (op) == CONST_INT
2576 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2577 return true;
2578
2579 /* Accept lliXX operands. */
2580 if (TARGET_ZARCH
2581 && GET_CODE (op) == CONST_INT
2582 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2583 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2584 return true;
2585
2586 if (TARGET_EXTIMM
2587 && GET_CODE (op) == CONST_INT
2588 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2589 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2590 return true;
2591
2592 /* Accept larl operands. */
2593 if (TARGET_CPU_ZARCH
2594 && larl_operand (op, VOIDmode))
2595 return true;
2596
2597 /* Accept lzXX operands. */
2598 if (GET_CODE (op) == CONST_DOUBLE
2599 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2600 return true;
2601
2602 /* Accept double-word operands that can be split. */
2603 if (GET_CODE (op) == CONST_INT
2604 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2605 {
2606 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2607 rtx hi = operand_subword (op, 0, 0, dword_mode);
2608 rtx lo = operand_subword (op, 1, 0, dword_mode);
2609 return legitimate_reload_constant_p (hi)
2610 && legitimate_reload_constant_p (lo);
2611 }
2612
2613 /* Everything else cannot be handled without reload. */
2614 return false;
2615 }
2616
2617 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2618 return the class of reg to actually use. */
2619
2620 enum reg_class
2621 s390_preferred_reload_class (rtx op, enum reg_class class)
2622 {
2623 switch (GET_CODE (op))
2624 {
2625 /* Constants we cannot reload must be forced into the
2626 literal pool. */
2627
2628 case CONST_DOUBLE:
2629 case CONST_INT:
2630 if (legitimate_reload_constant_p (op))
2631 return class;
2632 else
2633 return NO_REGS;
2634
2635 /* If a symbolic constant or a PLUS is reloaded,
2636 it is most likely being used as an address, so
2637 prefer ADDR_REGS. If 'class' is not a superset
2638 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2639 case PLUS:
2640 case LABEL_REF:
2641 case SYMBOL_REF:
2642 case CONST:
2643 if (reg_class_subset_p (ADDR_REGS, class))
2644 return ADDR_REGS;
2645 else
2646 return NO_REGS;
2647
2648 default:
2649 break;
2650 }
2651
2652 return class;
2653 }
2654
2655 /* Inform reload about cases where moving X with a mode MODE to a register in
2656 CLASS requires an extra scratch or immediate register. Return the class
2657 needed for the immediate register. */
2658
2659 static enum reg_class
2660 s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2661 enum machine_mode mode, secondary_reload_info *sri)
2662 {
2663 /* Intermediate register needed. */
2664 if (reg_classes_intersect_p (CC_REGS, class))
2665 return GENERAL_REGS;
2666
2667 /* We need a scratch register when loading a PLUS expression which
2668 is not a legitimate operand of the LOAD ADDRESS instruction. */
2669 if (in_p && s390_plus_operand (x, mode))
2670 sri->icode = (TARGET_64BIT ?
2671 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2672
2673 /* When performing a multiword move from or to memory, we have to make sure
2674 that the second chunk in memory is addressable without causing a displacement
2675 overflow. If that would be the case, we calculate the address in
2676 a scratch register. */
2677 if (MEM_P (x)
2678 && GET_CODE (XEXP (x, 0)) == PLUS
2679 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2680 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
2681 + GET_MODE_SIZE (mode) - 1))
2682 {
2683 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
2684 in an s_operand address, since we may fall back to lm/stm. So we only
2685 have to care about overflows in the b+i+d case. */
2686 if ((reg_classes_intersect_p (GENERAL_REGS, class)
2687 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
2688 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
2689 /* For FP_REGS no lm/stm is available so this check is triggered
2690 for displacement overflows in b+i+d and b+d like addresses. */
2691 || (reg_classes_intersect_p (FP_REGS, class)
2692 && s390_class_max_nregs (FP_REGS, mode) > 1))
2693 {
2694 if (in_p)
2695 sri->icode = (TARGET_64BIT ?
2696 CODE_FOR_reloaddi_nonoffmem_in :
2697 CODE_FOR_reloadsi_nonoffmem_in);
2698 else
2699 sri->icode = (TARGET_64BIT ?
2700 CODE_FOR_reloaddi_nonoffmem_out :
2701 CODE_FOR_reloadsi_nonoffmem_out);
2702 }
2703 }
2704
2705 /* Either scratch or no register needed. */
2706 return NO_REGS;
2707 }
2708
2709 /* Generate code to load SRC, which is a PLUS that is not a
2710 legitimate operand for the LA instruction, into TARGET.
2711 SCRATCH may be used as a scratch register. */
2712
2713 void
2714 s390_expand_plus_operand (rtx target, rtx src,
2715 rtx scratch)
2716 {
2717 rtx sum1, sum2;
2718 struct s390_address ad;
2719
2720 /* src must be a PLUS; get its two operands. */
2721 gcc_assert (GET_CODE (src) == PLUS);
2722 gcc_assert (GET_MODE (src) == Pmode);
2723
2724 /* Check if any of the two operands is already scheduled
2725 for replacement by reload. This can happen e.g. when
2726 float registers occur in an address. */
2727 sum1 = find_replacement (&XEXP (src, 0));
2728 sum2 = find_replacement (&XEXP (src, 1));
2729 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2730
2731 /* If the address is already strictly valid, there's nothing to do. */
2732 if (!s390_decompose_address (src, &ad)
2733 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2734 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
2735 {
2736 /* Otherwise, one of the operands cannot be an address register;
2737 we reload its value into the scratch register. */
2738 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2739 {
2740 emit_move_insn (scratch, sum1);
2741 sum1 = scratch;
2742 }
2743 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2744 {
2745 emit_move_insn (scratch, sum2);
2746 sum2 = scratch;
2747 }
2748
2749 /* According to the way these invalid addresses are generated
2750 in reload.c, it should never happen (at least on s390) that
2751 *neither* of the PLUS components, after find_replacements
2752 was applied, is an address register. */
2753 if (sum1 == scratch && sum2 == scratch)
2754 {
2755 debug_rtx (src);
2756 gcc_unreachable ();
2757 }
2758
2759 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2760 }
2761
2762 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2763 is only ever performed on addresses, so we can mark the
2764 sum as legitimate for LA in any case. */
2765 s390_load_address (target, src);
2766 }
2767
2768
2769 /* Return true if ADDR is a valid memory address.
2770 STRICT specifies whether strict register checking applies. */
2771
2772 bool
2773 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2774 rtx addr, int strict)
2775 {
2776 struct s390_address ad;
2777 if (!s390_decompose_address (addr, &ad))
2778 return false;
2779
2780 if (strict)
2781 {
2782 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2783 return false;
2784
2785 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
2786 return false;
2787 }
2788 else
2789 {
2790 if (ad.base
2791 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
2792 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
2793 return false;
2794
2795 if (ad.indx
2796 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
2797 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
2798 return false;
2799 }
2800 return true;
2801 }
2802
2803 /* Return true if OP is a valid operand for the LA instruction.
2804 In 31-bit, we need to prove that the result is used as an
2805 address, as LA performs only a 31-bit addition. */
2806
2807 bool
2808 legitimate_la_operand_p (rtx op)
2809 {
2810 struct s390_address addr;
2811 if (!s390_decompose_address (op, &addr))
2812 return false;
2813
2814 return (TARGET_64BIT || addr.pointer);
2815 }
2816
2817 /* Return true if it is valid *and* preferable to use LA to
2818 compute the sum of OP1 and OP2. */
2819
2820 bool
2821 preferred_la_operand_p (rtx op1, rtx op2)
2822 {
2823 struct s390_address addr;
2824
2825 if (op2 != const0_rtx)
2826 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2827
2828 if (!s390_decompose_address (op1, &addr))
2829 return false;
2830 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
2831 return false;
2832 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
2833 return false;
2834
2835 if (!TARGET_64BIT && !addr.pointer)
2836 return false;
2837
2838 if (addr.pointer)
2839 return true;
2840
2841 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2842 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2843 return true;
2844
2845 return false;
2846 }
2847
2848 /* Emit a forced load-address operation to load SRC into DST.
2849 This will use the LOAD ADDRESS instruction even in situations
2850 where legitimate_la_operand_p (SRC) returns false. */
2851
2852 void
2853 s390_load_address (rtx dst, rtx src)
2854 {
2855 if (TARGET_64BIT)
2856 emit_move_insn (dst, src);
2857 else
2858 emit_insn (gen_force_la_31 (dst, src));
2859 }
2860
2861 /* Return a legitimate reference for ORIG (an address) using the
2862 register REG. If REG is 0, a new pseudo is generated.
2863
2864 There are two types of references that must be handled:
2865
2866 1. Global data references must load the address from the GOT, via
2867 the PIC reg. An insn is emitted to do this load, and the reg is
2868 returned.
2869
2870 2. Static data references, constant pool addresses, and code labels
2871 compute the address as an offset from the GOT, whose base is in
2872 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2873 differentiate them from global data objects. The returned
2874 address is the PIC reg + an unspec constant.
2875
2876 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2877 reg also appears in the address. */
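/* For instance, a non-local symbol `foo' is loaded from its GOT slot:
   with -fpic this is a MEM of pic_reg + @GOT(foo), while on z/Architecture
   with -fPIC the slot address is computed PC-relative via LARL using
   @GOTENT(foo).  `foo' is just an illustrative name. */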
2878
2879 rtx
2880 legitimize_pic_address (rtx orig, rtx reg)
2881 {
2882 rtx addr = orig;
2883 rtx new = orig;
2884 rtx base;
2885
2886 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
2887
2888 if (GET_CODE (addr) == LABEL_REF
2889 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2890 {
2891 /* This is a local symbol. */
2892 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2893 {
2894 /* Access local symbols PC-relative via LARL.
2895 This is the same as in the non-PIC case, so it is
2896 handled automatically ... */
2897 }
2898 else
2899 {
2900 /* Access local symbols relative to the GOT. */
2901
2902 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2903
2904 if (reload_in_progress || reload_completed)
2905 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2906
2907 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2908 addr = gen_rtx_CONST (Pmode, addr);
2909 addr = force_const_mem (Pmode, addr);
2910 emit_move_insn (temp, addr);
2911
2912 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2913 if (reg != 0)
2914 {
2915 s390_load_address (reg, new);
2916 new = reg;
2917 }
2918 }
2919 }
2920 else if (GET_CODE (addr) == SYMBOL_REF)
2921 {
2922 if (reg == 0)
2923 reg = gen_reg_rtx (Pmode);
2924
2925 if (flag_pic == 1)
2926 {
2927 /* Assume GOT offset < 4k. This is handled the same way
2928 in both 31- and 64-bit code (@GOT). */
2929
2930 if (reload_in_progress || reload_completed)
2931 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2932
2933 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2934 new = gen_rtx_CONST (Pmode, new);
2935 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2936 new = gen_const_mem (Pmode, new);
2937 emit_move_insn (reg, new);
2938 new = reg;
2939 }
2940 else if (TARGET_CPU_ZARCH)
2941 {
2942 /* If the GOT offset might be >= 4k, we determine the position
2943 of the GOT entry via a PC-relative LARL (@GOTENT). */
2944
2945 rtx temp = gen_reg_rtx (Pmode);
2946
2947 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2948 new = gen_rtx_CONST (Pmode, new);
2949 emit_move_insn (temp, new);
2950
2951 new = gen_const_mem (Pmode, temp);
2952 emit_move_insn (reg, new);
2953 new = reg;
2954 }
2955 else
2956 {
2957 /* If the GOT offset might be >= 4k, we have to load it
2958 from the literal pool (@GOT). */
2959
2960 rtx temp = gen_reg_rtx (Pmode);
2961
2962 if (reload_in_progress || reload_completed)
2963 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2964
2965 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2966 addr = gen_rtx_CONST (Pmode, addr);
2967 addr = force_const_mem (Pmode, addr);
2968 emit_move_insn (temp, addr);
2969
2970 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2971 new = gen_const_mem (Pmode, new);
2972 emit_move_insn (reg, new);
2973 new = reg;
2974 }
2975 }
2976 else
2977 {
2978 if (GET_CODE (addr) == CONST)
2979 {
2980 addr = XEXP (addr, 0);
2981 if (GET_CODE (addr) == UNSPEC)
2982 {
2983 gcc_assert (XVECLEN (addr, 0) == 1);
2984 switch (XINT (addr, 1))
2985 {
2986 /* If someone moved a GOT-relative UNSPEC
2987 out of the literal pool, force them back in. */
2988 case UNSPEC_GOTOFF:
2989 case UNSPEC_PLTOFF:
2990 new = force_const_mem (Pmode, orig);
2991 break;
2992
2993 /* @GOT is OK as is if small. */
2994 case UNSPEC_GOT:
2995 if (flag_pic == 2)
2996 new = force_const_mem (Pmode, orig);
2997 break;
2998
2999 /* @GOTENT is OK as is. */
3000 case UNSPEC_GOTENT:
3001 break;
3002
3003 /* @PLT is OK as is on 64-bit, must be converted to
3004 GOT-relative @PLTOFF on 31-bit. */
3005 case UNSPEC_PLT:
3006 if (!TARGET_CPU_ZARCH)
3007 {
3008 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3009
3010 if (reload_in_progress || reload_completed)
3011 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3012
3013 addr = XVECEXP (addr, 0, 0);
3014 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
3015 UNSPEC_PLTOFF);
3016 addr = gen_rtx_CONST (Pmode, addr);
3017 addr = force_const_mem (Pmode, addr);
3018 emit_move_insn (temp, addr);
3019
3020 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3021 if (reg != 0)
3022 {
3023 s390_load_address (reg, new);
3024 new = reg;
3025 }
3026 }
3027 break;
3028
3029 /* Everything else cannot happen. */
3030 default:
3031 gcc_unreachable ();
3032 }
3033 }
3034 else
3035 gcc_assert (GET_CODE (addr) == PLUS);
3036 }
3037 if (GET_CODE (addr) == PLUS)
3038 {
3039 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3040
3041 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3042 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3043
3044 /* Check first to see if this is a constant offset
3045 from a local symbol reference. */
3046 if ((GET_CODE (op0) == LABEL_REF
3047 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3048 && GET_CODE (op1) == CONST_INT)
3049 {
3050 if (TARGET_CPU_ZARCH
3051 && larl_operand (op0, VOIDmode)
3052 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3053 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3054 {
3055 if (INTVAL (op1) & 1)
3056 {
3057 /* LARL can't handle odd offsets, so emit a
3058 pair of LARL and LA. */
3059 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3060
3061 if (!DISP_IN_RANGE (INTVAL (op1)))
3062 {
3063 HOST_WIDE_INT even = INTVAL (op1) - 1;
3064 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3065 op0 = gen_rtx_CONST (Pmode, op0);
3066 op1 = const1_rtx;
3067 }
3068
3069 emit_move_insn (temp, op0);
3070 new = gen_rtx_PLUS (Pmode, temp, op1);
3071
3072 if (reg != 0)
3073 {
3074 s390_load_address (reg, new);
3075 new = reg;
3076 }
3077 }
3078 else
3079 {
3080 /* If the offset is even, we can just use LARL.
3081 This will happen automatically. */
3082 }
3083 }
3084 else
3085 {
3086 /* Access local symbols relative to the GOT. */
3087
3088 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3089
3090 if (reload_in_progress || reload_completed)
3091 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3092
3093 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3094 UNSPEC_GOTOFF);
3095 addr = gen_rtx_PLUS (Pmode, addr, op1);
3096 addr = gen_rtx_CONST (Pmode, addr);
3097 addr = force_const_mem (Pmode, addr);
3098 emit_move_insn (temp, addr);
3099
3100 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3101 if (reg != 0)
3102 {
3103 s390_load_address (reg, new);
3104 new = reg;
3105 }
3106 }
3107 }
3108
3109 /* Now, check whether it is a GOT relative symbol plus offset
3110 that was pulled out of the literal pool. Force it back in. */
3111
3112 else if (GET_CODE (op0) == UNSPEC
3113 && GET_CODE (op1) == CONST_INT
3114 && XINT (op0, 1) == UNSPEC_GOTOFF)
3115 {
3116 gcc_assert (XVECLEN (op0, 0) == 1);
3117
3118 new = force_const_mem (Pmode, orig);
3119 }
3120
3121 /* Otherwise, compute the sum. */
3122 else
3123 {
3124 base = legitimize_pic_address (XEXP (addr, 0), reg);
3125 new = legitimize_pic_address (XEXP (addr, 1),
3126 base == reg ? NULL_RTX : reg);
3127 if (GET_CODE (new) == CONST_INT)
3128 new = plus_constant (base, INTVAL (new));
3129 else
3130 {
3131 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3132 {
3133 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3134 new = XEXP (new, 1);
3135 }
3136 new = gen_rtx_PLUS (Pmode, base, new);
3137 }
3138
3139 if (GET_CODE (new) == CONST)
3140 new = XEXP (new, 0);
3141 new = force_operand (new, 0);
3142 }
3143 }
3144 }
3145 return new;
3146 }
3147
3148 /* Load the thread pointer into a register. */
3149
3150 rtx
3151 s390_get_thread_pointer (void)
3152 {
3153 rtx tp = gen_reg_rtx (Pmode);
3154
3155 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3156 mark_reg_pointer (tp, BITS_PER_WORD);
3157
3158 return tp;
3159 }
3160
3161 /* Emit a TLS call insn. The call target is the SYMBOL_REF stored
3162 in s390_tls_symbol, which always refers to __tls_get_offset.
3163 The returned offset is written to RESULT_REG and a USE rtx is
3164 generated for TLS_CALL. */
3165
3166 static GTY(()) rtx s390_tls_symbol;
3167
3168 static void
3169 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3170 {
3171 rtx insn;
3172
3173 gcc_assert (flag_pic);
3174
3175 if (!s390_tls_symbol)
3176 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3177
3178 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3179 gen_rtx_REG (Pmode, RETURN_REGNUM));
3180
3181 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3182 CONST_OR_PURE_CALL_P (insn) = 1;
3183 }
3184
3185 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3186 this (thread-local) address. REG may be used as temporary. */
3187
3188 static rtx
3189 legitimize_tls_address (rtx addr, rtx reg)
3190 {
3191 rtx new, tls_call, temp, base, r2, insn;
3192
3193 if (GET_CODE (addr) == SYMBOL_REF)
3194 switch (tls_symbolic_operand (addr))
3195 {
3196 case TLS_MODEL_GLOBAL_DYNAMIC:
3197 start_sequence ();
3198 r2 = gen_rtx_REG (Pmode, 2);
3199 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3200 new = gen_rtx_CONST (Pmode, tls_call);
3201 new = force_const_mem (Pmode, new);
3202 emit_move_insn (r2, new);
3203 s390_emit_tls_call_insn (r2, tls_call);
3204 insn = get_insns ();
3205 end_sequence ();
3206
3207 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3208 temp = gen_reg_rtx (Pmode);
3209 emit_libcall_block (insn, temp, r2, new);
3210
3211 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3212 if (reg != 0)
3213 {
3214 s390_load_address (reg, new);
3215 new = reg;
3216 }
3217 break;
3218
3219 case TLS_MODEL_LOCAL_DYNAMIC:
3220 start_sequence ();
3221 r2 = gen_rtx_REG (Pmode, 2);
3222 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3223 new = gen_rtx_CONST (Pmode, tls_call);
3224 new = force_const_mem (Pmode, new);
3225 emit_move_insn (r2, new);
3226 s390_emit_tls_call_insn (r2, tls_call);
3227 insn = get_insns ();
3228 end_sequence ();
3229
3230 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3231 temp = gen_reg_rtx (Pmode);
3232 emit_libcall_block (insn, temp, r2, new);
3233
3234 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3235 base = gen_reg_rtx (Pmode);
3236 s390_load_address (base, new);
3237
3238 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3239 new = gen_rtx_CONST (Pmode, new);
3240 new = force_const_mem (Pmode, new);
3241 temp = gen_reg_rtx (Pmode);
3242 emit_move_insn (temp, new);
3243
3244 new = gen_rtx_PLUS (Pmode, base, temp);
3245 if (reg != 0)
3246 {
3247 s390_load_address (reg, new);
3248 new = reg;
3249 }
3250 break;
3251
3252 case TLS_MODEL_INITIAL_EXEC:
3253 if (flag_pic == 1)
3254 {
3255 /* Assume GOT offset < 4k. This is handled the same way
3256 in both 31- and 64-bit code. */
3257
3258 if (reload_in_progress || reload_completed)
3259 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3260
3261 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3262 new = gen_rtx_CONST (Pmode, new);
3263 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3264 new = gen_const_mem (Pmode, new);
3265 temp = gen_reg_rtx (Pmode);
3266 emit_move_insn (temp, new);
3267 }
3268 else if (TARGET_CPU_ZARCH)
3269 {
3270 /* If the GOT offset might be >= 4k, we determine the position
3271 of the GOT entry via a PC-relative LARL. */
3272
3273 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3274 new = gen_rtx_CONST (Pmode, new);
3275 temp = gen_reg_rtx (Pmode);
3276 emit_move_insn (temp, new);
3277
3278 new = gen_const_mem (Pmode, temp);
3279 temp = gen_reg_rtx (Pmode);
3280 emit_move_insn (temp, new);
3281 }
3282 else if (flag_pic)
3283 {
3284 /* If the GOT offset might be >= 4k, we have to load it
3285 from the literal pool. */
3286
3287 if (reload_in_progress || reload_completed)
3288 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3289
3290 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3291 new = gen_rtx_CONST (Pmode, new);
3292 new = force_const_mem (Pmode, new);
3293 temp = gen_reg_rtx (Pmode);
3294 emit_move_insn (temp, new);
3295
3296 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3297 new = gen_const_mem (Pmode, new);
3298
3299 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3300 temp = gen_reg_rtx (Pmode);
3301 emit_insn (gen_rtx_SET (Pmode, temp, new));
3302 }
3303 else
3304 {
3305 /* In position-dependent code, load the absolute address of
3306 the GOT entry from the literal pool. */
3307
3308 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3309 new = gen_rtx_CONST (Pmode, new);
3310 new = force_const_mem (Pmode, new);
3311 temp = gen_reg_rtx (Pmode);
3312 emit_move_insn (temp, new);
3313
3314 new = temp;
3315 new = gen_const_mem (Pmode, new);
3316 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3317 temp = gen_reg_rtx (Pmode);
3318 emit_insn (gen_rtx_SET (Pmode, temp, new));
3319 }
3320
3321 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3322 if (reg != 0)
3323 {
3324 s390_load_address (reg, new);
3325 new = reg;
3326 }
3327 break;
3328
3329 case TLS_MODEL_LOCAL_EXEC:
3330 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3331 new = gen_rtx_CONST (Pmode, new);
3332 new = force_const_mem (Pmode, new);
3333 temp = gen_reg_rtx (Pmode);
3334 emit_move_insn (temp, new);
3335
3336 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3337 if (reg != 0)
3338 {
3339 s390_load_address (reg, new);
3340 new = reg;
3341 }
3342 break;
3343
3344 default:
3345 gcc_unreachable ();
3346 }
3347
3348 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3349 {
3350 switch (XINT (XEXP (addr, 0), 1))
3351 {
3352 case UNSPEC_INDNTPOFF:
3353 gcc_assert (TARGET_CPU_ZARCH);
3354 new = addr;
3355 break;
3356
3357 default:
3358 gcc_unreachable ();
3359 }
3360 }
3361
3362 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3363 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3364 {
3365 new = XEXP (XEXP (addr, 0), 0);
3366 if (GET_CODE (new) != SYMBOL_REF)
3367 new = gen_rtx_CONST (Pmode, new);
3368
3369 new = legitimize_tls_address (new, reg);
3370 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3371 new = force_operand (new, 0);
3372 }
3373
3374 else
3375 gcc_unreachable (); /* for now ... */
3376
3377 return new;
3378 }
3379
3380 /* Emit insns to move operands[1] into operands[0]. */
3381
3382 void
3383 emit_symbolic_move (rtx *operands)
3384 {
3385 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3386
3387 if (GET_CODE (operands[0]) == MEM)
3388 operands[1] = force_reg (Pmode, operands[1]);
3389 else if (TLS_SYMBOLIC_CONST (operands[1]))
3390 operands[1] = legitimize_tls_address (operands[1], temp);
3391 else if (flag_pic)
3392 operands[1] = legitimize_pic_address (operands[1], temp);
3393 }
3394
3395 /* Try machine-dependent ways of modifying an illegitimate address X
3396 to be legitimate. If we find one, return the new, valid address.
3397
3398 OLDX is the address as it was before break_out_memory_refs was called.
3399 In some cases it is useful to look at this to decide what needs to be done.
3400
3401 MODE is the mode of the operand pointed to by X.
3402
3403 When -fpic is used, special handling is needed for symbolic references.
3404 See comments by legitimize_pic_address for details. */
3405
3406 rtx
3407 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3408 enum machine_mode mode ATTRIBUTE_UNUSED)
3409 {
3410 rtx constant_term = const0_rtx;
3411
3412 if (TLS_SYMBOLIC_CONST (x))
3413 {
3414 x = legitimize_tls_address (x, 0);
3415
3416 if (legitimate_address_p (mode, x, FALSE))
3417 return x;
3418 }
3419 else if (GET_CODE (x) == PLUS
3420 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3421 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3422 {
3423 return x;
3424 }
3425 else if (flag_pic)
3426 {
3427 if (SYMBOLIC_CONST (x)
3428 || (GET_CODE (x) == PLUS
3429 && (SYMBOLIC_CONST (XEXP (x, 0))
3430 || SYMBOLIC_CONST (XEXP (x, 1)))))
3431 x = legitimize_pic_address (x, 0);
3432
3433 if (legitimate_address_p (mode, x, FALSE))
3434 return x;
3435 }
3436
3437 x = eliminate_constant_term (x, &constant_term);
3438
3439 /* Optimize loading of large displacements by splitting them
3440 into the multiple of 4K and the rest; this allows the
3441 former to be CSE'd if possible.
3442
3443 Don't do this if the displacement is added to a register
3444 pointing into the stack frame, as the offsets will
3445 change later anyway. */
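/* For instance, a displacement of 0x12345 is split into 0x12000,
   which is loaded into a register (and can be CSE'd), plus 0x345,
   which still fits into the 12-bit displacement field. */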
3446
3447 if (GET_CODE (constant_term) == CONST_INT
3448 && !TARGET_LONG_DISPLACEMENT
3449 && !DISP_IN_RANGE (INTVAL (constant_term))
3450 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3451 {
3452 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3453 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3454
3455 rtx temp = gen_reg_rtx (Pmode);
3456 rtx val = force_operand (GEN_INT (upper), temp);
3457 if (val != temp)
3458 emit_move_insn (temp, val);
3459
3460 x = gen_rtx_PLUS (Pmode, x, temp);
3461 constant_term = GEN_INT (lower);
3462 }
3463
3464 if (GET_CODE (x) == PLUS)
3465 {
3466 if (GET_CODE (XEXP (x, 0)) == REG)
3467 {
3468 rtx temp = gen_reg_rtx (Pmode);
3469 rtx val = force_operand (XEXP (x, 1), temp);
3470 if (val != temp)
3471 emit_move_insn (temp, val);
3472
3473 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3474 }
3475
3476 else if (GET_CODE (XEXP (x, 1)) == REG)
3477 {
3478 rtx temp = gen_reg_rtx (Pmode);
3479 rtx val = force_operand (XEXP (x, 0), temp);
3480 if (val != temp)
3481 emit_move_insn (temp, val);
3482
3483 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3484 }
3485 }
3486
3487 if (constant_term != const0_rtx)
3488 x = gen_rtx_PLUS (Pmode, x, constant_term);
3489
3490 return x;
3491 }
3492
3493 /* Try a machine-dependent way of reloading an illegitimate address AD
3494 operand. If we find one, push the reload and return the new address.
3495
3496 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3497 and TYPE is the reload type of the current reload. */
3498
3499 rtx
3500 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3501 int opnum, int type)
3502 {
3503 if (!optimize || TARGET_LONG_DISPLACEMENT)
3504 return NULL_RTX;
3505
3506 if (GET_CODE (ad) == PLUS)
3507 {
3508 rtx tem = simplify_binary_operation (PLUS, Pmode,
3509 XEXP (ad, 0), XEXP (ad, 1));
3510 if (tem)
3511 ad = tem;
3512 }
3513
3514 if (GET_CODE (ad) == PLUS
3515 && GET_CODE (XEXP (ad, 0)) == REG
3516 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3517 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3518 {
3519 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3520 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3521 rtx cst, tem, new;
3522
3523 cst = GEN_INT (upper);
3524 if (!legitimate_reload_constant_p (cst))
3525 cst = force_const_mem (Pmode, cst);
3526
3527 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3528 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3529
3530 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3531 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3532 opnum, (enum reload_type) type);
3533 return new;
3534 }
3535
3536 return NULL_RTX;
3537 }
3538
3539 /* Emit code to move LEN bytes from SRC to DST. */
3540
3541 void
3542 s390_expand_movmem (rtx dst, rtx src, rtx len)
3543 {
3544 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3545 {
3546 if (INTVAL (len) > 0)
3547 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3548 }
3549
3550 else if (TARGET_MVCLE)
3551 {
3552 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3553 }
3554
3555 else
3556 {
3557 rtx dst_addr, src_addr, count, blocks, temp;
3558 rtx loop_start_label = gen_label_rtx ();
3559 rtx loop_end_label = gen_label_rtx ();
3560 rtx end_label = gen_label_rtx ();
3561 enum machine_mode mode;
3562
3563 mode = GET_MODE (len);
3564 if (mode == VOIDmode)
3565 mode = Pmode;
3566
3567 dst_addr = gen_reg_rtx (Pmode);
3568 src_addr = gen_reg_rtx (Pmode);
3569 count = gen_reg_rtx (mode);
3570 blocks = gen_reg_rtx (mode);
3571
3572 convert_move (count, len, 1);
3573 emit_cmp_and_jump_insns (count, const0_rtx,
3574 EQ, NULL_RTX, mode, 1, end_label);
3575
3576 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3577 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3578 dst = change_address (dst, VOIDmode, dst_addr);
3579 src = change_address (src, VOIDmode, src_addr);
3580
3581 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3582 if (temp != count)
3583 emit_move_insn (count, temp);
3584
3585 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3586 if (temp != blocks)
3587 emit_move_insn (blocks, temp);
3588
3589 emit_cmp_and_jump_insns (blocks, const0_rtx,
3590 EQ, NULL_RTX, mode, 1, loop_end_label);
3591
3592 emit_label (loop_start_label);
3593
3594 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3595 s390_load_address (dst_addr,
3596 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3597 s390_load_address (src_addr,
3598 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3599
3600 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3601 if (temp != blocks)
3602 emit_move_insn (blocks, temp);
3603
3604 emit_cmp_and_jump_insns (blocks, const0_rtx,
3605 EQ, NULL_RTX, mode, 1, loop_end_label);
3606
3607 emit_jump (loop_start_label);
3608 emit_label (loop_end_label);
3609
3610 emit_insn (gen_movmem_short (dst, src,
3611 convert_to_mode (Pmode, count, 1)));
3612 emit_label (end_label);
3613 }
3614 }
3615
3616 /* Emit code to set LEN bytes at DST to VAL.
3617 Make use of clrmem if VAL is zero. */
3618
3619 void
3620 s390_expand_setmem (rtx dst, rtx len, rtx val)
3621 {
3622 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3623 return;
3624
3625 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3626
3627 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3628 {
3629 if (val == const0_rtx && INTVAL (len) <= 256)
3630 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3631 else
3632 {
3633 /* Initialize memory by storing the first byte. */
3634 emit_move_insn (adjust_address (dst, QImode, 0), val);
3635
3636 if (INTVAL (len) > 1)
3637 {
3638 /* Initiate 1 byte overlap move.
3639 The first byte of DST is propagated through DSTP1.
3640 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3641 DST is set to size 1 so the rest of the memory location
3642 does not count as source operand. */
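/* E.g. for LEN == 5 the initial store sets DST[0] = VAL, and the
   overlapping MVC of length 4 from DST to DST + 1 then propagates
   that byte into DST[1] .. DST[4]. */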
3643 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3644 set_mem_size (dst, const1_rtx);
3645
3646 emit_insn (gen_movmem_short (dstp1, dst,
3647 GEN_INT (INTVAL (len) - 2)));
3648 }
3649 }
3650 }
3651
3652 else if (TARGET_MVCLE)
3653 {
3654 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3655 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
3656 }
3657
3658 else
3659 {
3660 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
3661 rtx loop_start_label = gen_label_rtx ();
3662 rtx loop_end_label = gen_label_rtx ();
3663 rtx end_label = gen_label_rtx ();
3664 enum machine_mode mode;
3665
3666 mode = GET_MODE (len);
3667 if (mode == VOIDmode)
3668 mode = Pmode;
3669
3670 dst_addr = gen_reg_rtx (Pmode);
3671 src_addr = gen_reg_rtx (Pmode);
3672 count = gen_reg_rtx (mode);
3673 blocks = gen_reg_rtx (mode);
3674
3675 convert_move (count, len, 1);
3676 emit_cmp_and_jump_insns (count, const0_rtx,
3677 EQ, NULL_RTX, mode, 1, end_label);
3678
3679 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3680 dst = change_address (dst, VOIDmode, dst_addr);
3681
3682 if (val == const0_rtx)
3683 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3684 else
3685 {
3686 dstp1 = adjust_address (dst, VOIDmode, 1);
3687 set_mem_size (dst, const1_rtx);
3688
3689 /* Initialize memory by storing the first byte. */
3690 emit_move_insn (adjust_address (dst, QImode, 0), val);
3691
3692 /* If count is 1 we are done. */
3693 emit_cmp_and_jump_insns (count, const1_rtx,
3694 EQ, NULL_RTX, mode, 1, end_label);
3695
3696 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3697 }
3698 if (temp != count)
3699 emit_move_insn (count, temp);
3700
3701 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3702 if (temp != blocks)
3703 emit_move_insn (blocks, temp);
3704
3705 emit_cmp_and_jump_insns (blocks, const0_rtx,
3706 EQ, NULL_RTX, mode, 1, loop_end_label);
3707
3708 emit_label (loop_start_label);
3709
3710 if (val == const0_rtx)
3711 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3712 else
3713 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
3714 s390_load_address (dst_addr,
3715 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3716
3717 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3718 if (temp != blocks)
3719 emit_move_insn (blocks, temp);
3720
3721 emit_cmp_and_jump_insns (blocks, const0_rtx,
3722 EQ, NULL_RTX, mode, 1, loop_end_label);
3723
3724 emit_jump (loop_start_label);
3725 emit_label (loop_end_label);
3726
3727 if (val == const0_rtx)
3728 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3729 else
3730 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
3731 emit_label (end_label);
3732 }
3733 }
3734
3735 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3736 and return the result in TARGET. */
3737
3738 void
3739 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3740 {
3741 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3742 rtx tmp;
3743
3744 /* As the result of CMPINT is inverted compared to what we need,
3745 we have to swap the operands. */
3746 tmp = op0; op0 = op1; op1 = tmp;
3747
3748 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3749 {
3750 if (INTVAL (len) > 0)
3751 {
3752 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3753 emit_insn (gen_cmpint (target, ccreg));
3754 }
3755 else
3756 emit_move_insn (target, const0_rtx);
3757 }
3758 else if (TARGET_MVCLE)
3759 {
3760 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3761 emit_insn (gen_cmpint (target, ccreg));
3762 }
3763 else
3764 {
3765 rtx addr0, addr1, count, blocks, temp;
3766 rtx loop_start_label = gen_label_rtx ();
3767 rtx loop_end_label = gen_label_rtx ();
3768 rtx end_label = gen_label_rtx ();
3769 enum machine_mode mode;
3770
3771 mode = GET_MODE (len);
3772 if (mode == VOIDmode)
3773 mode = Pmode;
3774
3775 addr0 = gen_reg_rtx (Pmode);
3776 addr1 = gen_reg_rtx (Pmode);
3777 count = gen_reg_rtx (mode);
3778 blocks = gen_reg_rtx (mode);
3779
3780 convert_move (count, len, 1);
3781 emit_cmp_and_jump_insns (count, const0_rtx,
3782 EQ, NULL_RTX, mode, 1, end_label);
3783
3784 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3785 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3786 op0 = change_address (op0, VOIDmode, addr0);
3787 op1 = change_address (op1, VOIDmode, addr1);
3788
3789 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3790 if (temp != count)
3791 emit_move_insn (count, temp);
3792
3793 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3794 if (temp != blocks)
3795 emit_move_insn (blocks, temp);
3796
3797 emit_cmp_and_jump_insns (blocks, const0_rtx,
3798 EQ, NULL_RTX, mode, 1, loop_end_label);
3799
3800 emit_label (loop_start_label);
3801
3802 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3803 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3804 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3805 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3806 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3807 emit_jump_insn (temp);
3808
3809 s390_load_address (addr0,
3810 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3811 s390_load_address (addr1,
3812 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3813
3814 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3815 if (temp != blocks)
3816 emit_move_insn (blocks, temp);
3817
3818 emit_cmp_and_jump_insns (blocks, const0_rtx,
3819 EQ, NULL_RTX, mode, 1, loop_end_label);
3820
3821 emit_jump (loop_start_label);
3822 emit_label (loop_end_label);
3823
3824 emit_insn (gen_cmpmem_short (op0, op1,
3825 convert_to_mode (Pmode, count, 1)));
3826 emit_label (end_label);
3827
3828 emit_insn (gen_cmpint (target, ccreg));
3829 }
3830 }
3831
3832
3833 /* Expand conditional increment or decrement using alc/slb instructions.
3834 Should generate code setting DST to either SRC or SRC + INCREMENT,
3835 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3836 Returns true if successful, false otherwise.
3837
3838 That makes it possible to implement some if-constructs without jumps e.g.:
3839 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3840 unsigned int a, b, c;
3841 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3842 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3843 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3844 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3845
3846 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3847 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3848 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3849 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3850 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
3851
3852 bool
3853 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3854 rtx dst, rtx src, rtx increment)
3855 {
3856 enum machine_mode cmp_mode;
3857 enum machine_mode cc_mode;
3858 rtx op_res;
3859 rtx insn;
3860 rtvec p;
3861 int ret;
3862
3863 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3864 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3865 cmp_mode = SImode;
3866 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3867 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3868 cmp_mode = DImode;
3869 else
3870 return false;
3871
3872 /* Try ADD LOGICAL WITH CARRY. */
3873 if (increment == const1_rtx)
3874 {
3875 /* Determine CC mode to use. */
3876 if (cmp_code == EQ || cmp_code == NE)
3877 {
3878 if (cmp_op1 != const0_rtx)
3879 {
3880 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3881 NULL_RTX, 0, OPTAB_WIDEN);
3882 cmp_op1 = const0_rtx;
3883 }
3884
3885 cmp_code = cmp_code == EQ ? LEU : GTU;
3886 }
3887
3888 if (cmp_code == LTU || cmp_code == LEU)
3889 {
3890 rtx tem = cmp_op0;
3891 cmp_op0 = cmp_op1;
3892 cmp_op1 = tem;
3893 cmp_code = swap_condition (cmp_code);
3894 }
3895
3896 switch (cmp_code)
3897 {
3898 case GTU:
3899 cc_mode = CCUmode;
3900 break;
3901
3902 case GEU:
3903 cc_mode = CCL3mode;
3904 break;
3905
3906 default:
3907 return false;
3908 }
3909
3910 /* Emit comparison instruction pattern. */
3911 if (!register_operand (cmp_op0, cmp_mode))
3912 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3913
3914 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3915 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3916 /* We use insn_invalid_p here to add clobbers if required. */
3917 ret = insn_invalid_p (emit_insn (insn));
3918 gcc_assert (!ret);
3919
3920 /* Emit ALC instruction pattern. */
3921 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3922 gen_rtx_REG (cc_mode, CC_REGNUM),
3923 const0_rtx);
3924
3925 if (src != const0_rtx)
3926 {
3927 if (!register_operand (src, GET_MODE (dst)))
3928 src = force_reg (GET_MODE (dst), src);
3929
3930 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
3931 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
3932 }
3933
3934 p = rtvec_alloc (2);
3935 RTVEC_ELT (p, 0) =
3936 gen_rtx_SET (VOIDmode, dst, op_res);
3937 RTVEC_ELT (p, 1) =
3938 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3939 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3940
3941 return true;
3942 }
3943
3944 /* Try SUBTRACT LOGICAL WITH BORROW. */
3945 if (increment == constm1_rtx)
3946 {
3947 /* Determine CC mode to use. */
3948 if (cmp_code == EQ || cmp_code == NE)
3949 {
3950 if (cmp_op1 != const0_rtx)
3951 {
3952 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3953 NULL_RTX, 0, OPTAB_WIDEN);
3954 cmp_op1 = const0_rtx;
3955 }
3956
3957 cmp_code = cmp_code == EQ ? LEU : GTU;
3958 }
3959
3960 if (cmp_code == GTU || cmp_code == GEU)
3961 {
3962 rtx tem = cmp_op0;
3963 cmp_op0 = cmp_op1;
3964 cmp_op1 = tem;
3965 cmp_code = swap_condition (cmp_code);
3966 }
3967
3968 switch (cmp_code)
3969 {
3970 case LEU:
3971 cc_mode = CCUmode;
3972 break;
3973
3974 case LTU:
3975 cc_mode = CCL3mode;
3976 break;
3977
3978 default:
3979 return false;
3980 }
3981
3982 /* Emit comparison instruction pattern. */
3983 if (!register_operand (cmp_op0, cmp_mode))
3984 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3985
3986 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3987 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3988 /* We use insn_invalid_p here to add clobbers if required. */
3989 ret = insn_invalid_p (emit_insn (insn));
3990 gcc_assert (!ret);
3991
3992 /* Emit SLB instruction pattern. */
3993 if (!register_operand (src, GET_MODE (dst)))
3994 src = force_reg (GET_MODE (dst), src);
3995
3996 op_res = gen_rtx_MINUS (GET_MODE (dst),
3997 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3998 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3999 gen_rtx_REG (cc_mode, CC_REGNUM),
4000 const0_rtx));
4001 p = rtvec_alloc (2);
4002 RTVEC_ELT (p, 0) =
4003 gen_rtx_SET (VOIDmode, dst, op_res);
4004 RTVEC_ELT (p, 1) =
4005 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4006 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4007
4008 return true;
4009 }
4010
4011 return false;
4012 }
4013
4014 /* Expand code for the insv template. Return true if successful, false otherwise. */
4015
4016 bool
4017 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4018 {
4019 int bitsize = INTVAL (op1);
4020 int bitpos = INTVAL (op2);
4021
4022 /* We need byte alignment. */
4023 if (bitsize % BITS_PER_UNIT)
4024 return false;
4025
4026 if (bitpos == 0
4027 && memory_operand (dest, VOIDmode)
4028 && (register_operand (src, word_mode)
4029 || const_int_operand (src, VOIDmode)))
4030 {
4031 /* Emit standard pattern if possible. */
4032 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4033 if (GET_MODE_BITSIZE (mode) == bitsize)
4034 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4035
4036 /* (set (ze (mem)) (const_int)). */
4037 else if (const_int_operand (src, VOIDmode))
4038 {
4039 int size = bitsize / BITS_PER_UNIT;
4040 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4041 GET_MODE_SIZE (word_mode) - size);
4042
4043 dest = adjust_address (dest, BLKmode, 0);
4044 set_mem_size (dest, GEN_INT (size));
4045 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4046 }
4047
4048 /* (set (ze (mem)) (reg)). */
4049 else if (register_operand (src, word_mode))
4050 {
4051 if (bitsize <= GET_MODE_BITSIZE (SImode))
4052 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4053 const0_rtx), src);
4054 else
4055 {
4056 /* Emit st,stcmh sequence. */
4057 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4058 int size = stcmh_width / BITS_PER_UNIT;
4059
4060 emit_move_insn (adjust_address (dest, SImode, size),
4061 gen_lowpart (SImode, src));
4062 set_mem_size (dest, GEN_INT (size));
4063 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4064 (stcmh_width), const0_rtx),
4065 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4066 (GET_MODE_BITSIZE (SImode))));
4067 }
4068 }
4069 else
4070 return false;
4071
4072 return true;
4073 }
4074
4075 /* (set (ze (reg)) (const_int)). */
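  /* Worked example of the loop below (illustrative): with TARGET_EXTIMM,
     BITPOS == 0 and BITSIZE == 64, two SImode insertions are emitted; the
     low 32 bits of VAL go to bit position 32 and the remaining 32 bits to
     bit position 0.  Without TARGET_EXTIMM the same range is filled with
     four HImode insertions instead.  */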
4076 if (TARGET_ZARCH
4077 && register_operand (dest, word_mode)
4078 && (bitpos % 16) == 0
4079 && (bitsize % 16) == 0
4080 && const_int_operand (src, VOIDmode))
4081 {
4082 HOST_WIDE_INT val = INTVAL (src);
4083 int regpos = bitpos + bitsize;
4084
4085 while (regpos > bitpos)
4086 {
4087 enum machine_mode putmode;
4088 int putsize;
4089
4090 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4091 putmode = SImode;
4092 else
4093 putmode = HImode;
4094
4095 putsize = GET_MODE_BITSIZE (putmode);
4096 regpos -= putsize;
4097 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4098 GEN_INT (putsize),
4099 GEN_INT (regpos)),
4100 gen_int_mode (val, putmode));
4101 val >>= putsize;
4102 }
4103 gcc_assert (regpos == bitpos);
4104 return true;
4105 }
4106
4107 return false;
4108 }
4109
4110 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4111 register that holds VAL of mode MODE shifted by COUNT bits. */
4112
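/* Worked example (illustrative values): for QImode VAL == 0xab and
   COUNT == 16 the result register holds (0xab & 0xff) << 16 == 0x00ab0000,
   i.e. the value masked to its mode and moved to the requested bit
   position within an SImode word.  */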
4113 static inline rtx
4114 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4115 {
4116 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4117 NULL_RTX, 1, OPTAB_DIRECT);
4118 return expand_simple_binop (SImode, ASHIFT, val, count,
4119 NULL_RTX, 1, OPTAB_DIRECT);
4120 }
4121
4122 /* Structure to hold the initial parameters for a compare_and_swap operation
4123 in HImode and QImode. */
4124
4125 struct alignment_context
4126 {
4127 rtx memsi; /* SI aligned memory location. */
4128 rtx shift; /* Bit offset with regard to lsb. */
4129 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4130 rtx modemaski; /* ~modemask */
4131 bool aligned; /* True if memory is aligned, false otherwise. */
4132 };
4133
4134 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4135 the structure AC for transparent simplification, if the memory alignment is known
4136 to be at least 32 bits. MEM is the memory location for the actual operation
4137 and MODE its mode. */
4138
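/* Worked example (illustrative, for the unaligned path below): for a QImode
   MEM whose address happens to be 1 modulo 4, memsi becomes the SImode word
   at (addr & -4), byteoffset is 1, and shift ends up as
   ((4 - 1) - 1) * BITS_PER_UNIT == 16, so modemask is 0xff << 16 and the
   byte of interest occupies bits 16..23 of that word.  */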
4139 static void
4140 init_alignment_context (struct alignment_context *ac, rtx mem,
4141 enum machine_mode mode)
4142 {
4143 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4144 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4145
4146 if (ac->aligned)
4147 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4148 else
4149 {
4150 /* Alignment is unknown. */
4151 rtx byteoffset, addr, align;
4152
4153 /* Force the address into a register. */
4154 addr = force_reg (Pmode, XEXP (mem, 0));
4155
4156 /* Align it to SImode. */
4157 align = expand_simple_binop (Pmode, AND, addr,
4158 GEN_INT (-GET_MODE_SIZE (SImode)),
4159 NULL_RTX, 1, OPTAB_DIRECT);
4160 /* Generate MEM. */
4161 ac->memsi = gen_rtx_MEM (SImode, align);
4162 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4163 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4164 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4165
4166 /* Calculate shiftcount. */
4167 byteoffset = expand_simple_binop (Pmode, AND, addr,
4168 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4169 NULL_RTX, 1, OPTAB_DIRECT);
4170 /* As we already have some offset, evaluate the remaining distance. */
4171 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4172 NULL_RTX, 1, OPTAB_DIRECT);
4173
4174 }
4175 /* Shift is the byte count, but we need the bitcount. */
4176 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4177 NULL_RTX, 1, OPTAB_DIRECT);
4178 /* Calculate masks. */
4179 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4180 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4181 NULL_RTX, 1, OPTAB_DIRECT);
4182 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4183 }
4184
4185 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4186 the memory location, CMP the old value to compare MEM with and NEW the value
4187 to set if CMP == MEM.
4188 CMP is never in memory for compare_and_swap_cc because
4189 expand_bool_compare_and_swap puts it into a register for later compare. */
4190
4191 void
4192 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4193 {
4194 struct alignment_context ac;
4195 rtx cmpv, newv, val, resv, cc;
4196 rtx res = gen_reg_rtx (SImode);
4197 rtx csloop = gen_label_rtx ();
4198 rtx csend = gen_label_rtx ();
4199
4200 gcc_assert (register_operand (target, VOIDmode));
4201 gcc_assert (MEM_P (mem));
4202
4203 init_alignment_context (&ac, mem, mode);
4204
4205 /* Shift the values to the correct bit positions. */
4206 if (!(ac.aligned && MEM_P (cmp)))
4207 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4208 if (!(ac.aligned && MEM_P (new)))
4209 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4210
4211 /* Load full word. Subsequent loads are performed by CS. */
4212 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4213 NULL_RTX, 1, OPTAB_DIRECT);
4214
4215 /* Start CS loop. */
4216 emit_label (csloop);
4217 /* val = "<mem>00..0<mem>"
4218 cmp = "00..0<cmp>00..0"
4219 new = "00..0<new>00..0"
4220 */
4221
4222 /* Patch cmp and new with val at correct position. */
4223 if (ac.aligned && MEM_P (cmp))
4224 {
4225 cmpv = force_reg (SImode, val);
4226 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4227 }
4228 else
4229 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4230 NULL_RTX, 1, OPTAB_DIRECT));
4231 if (ac.aligned && MEM_P (new))
4232 {
4233 newv = force_reg (SImode, val);
4234 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4235 }
4236 else
4237 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4238 NULL_RTX, 1, OPTAB_DIRECT));
4239
4240 /* Jump to end if we're done (likely?). */
4241 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4242 cmpv, newv));
4243
4244 /* Check for changes outside mode. */
4245 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4246 NULL_RTX, 1, OPTAB_DIRECT);
4247 cc = s390_emit_compare (NE, resv, val);
4248 emit_move_insn (val, resv);
4249 /* If so, loop again. */
4250 s390_emit_jump (csloop, cc);
4251
4252 emit_label (csend);
4253
4254 /* Return the correct part of the bitfield. */
4255 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4256 NULL_RTX, 1, OPTAB_DIRECT), 1);
4257 }
4258
4259 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4260 and VAL the value to play with. If AFTER is true then store the value
4261 MEM holds after the operation, if AFTER is false then store the value MEM
4262 holds before the operation. If TARGET is zero then discard that value, else
4263 store it to TARGET. */
4264
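/* For example (an assumption about how the expanders in s390.md use this
   routine, stated here only as illustration): a __sync_fetch_and_add on a
   short would arrive as CODE == PLUS with AFTER == false, so TARGET gets
   the old value, whereas __sync_add_and_fetch would pass AFTER == true and
   TARGET gets the freshly computed value.  */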
4265 void
4266 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4267 rtx target, rtx mem, rtx val, bool after)
4268 {
4269 struct alignment_context ac;
4270 rtx cmp;
4271 rtx new = gen_reg_rtx (SImode);
4272 rtx orig = gen_reg_rtx (SImode);
4273 rtx csloop = gen_label_rtx ();
4274
4275 gcc_assert (!target || register_operand (target, VOIDmode));
4276 gcc_assert (MEM_P (mem));
4277
4278 init_alignment_context (&ac, mem, mode);
4279
4280 /* Shift val to the correct bit positions.
4281 Preserve "icm", but prevent "ex icm". */
4282 if (!(ac.aligned && code == SET && MEM_P (val)))
4283 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4284
4285 /* Further preparation insns. */
4286 if (code == PLUS || code == MINUS)
4287 emit_move_insn (orig, val);
4288 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4289 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4290 NULL_RTX, 1, OPTAB_DIRECT);
4291
4292 /* Load full word. Subsequent loads are performed by CS. */
4293 cmp = force_reg (SImode, ac.memsi);
4294
4295 /* Start CS loop. */
4296 emit_label (csloop);
4297 emit_move_insn (new, cmp);
4298
4299 /* Patch new with val at correct position. */
4300 switch (code)
4301 {
4302 case PLUS:
4303 case MINUS:
4304 val = expand_simple_binop (SImode, code, new, orig,
4305 NULL_RTX, 1, OPTAB_DIRECT);
4306 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4307 NULL_RTX, 1, OPTAB_DIRECT);
4308 /* FALLTHRU */
4309 case SET:
4310 if (ac.aligned && MEM_P (val))
4311 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4312 else
4313 {
4314 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4315 NULL_RTX, 1, OPTAB_DIRECT);
4316 new = expand_simple_binop (SImode, IOR, new, val,
4317 NULL_RTX, 1, OPTAB_DIRECT);
4318 }
4319 break;
4320 case AND:
4321 case IOR:
4322 case XOR:
4323 new = expand_simple_binop (SImode, code, new, val,
4324 NULL_RTX, 1, OPTAB_DIRECT);
4325 break;
4326 case MULT: /* NAND */
4327 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4328 NULL_RTX, 1, OPTAB_DIRECT);
4329 new = expand_simple_binop (SImode, AND, new, val,
4330 NULL_RTX, 1, OPTAB_DIRECT);
4331 break;
4332 default:
4333 gcc_unreachable ();
4334 }
4335
4336 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4337 ac.memsi, cmp, new));
4338
4339 /* Return the correct part of the bitfield. */
4340 if (target)
4341 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4342 after ? new : cmp, ac.shift,
4343 NULL_RTX, 1, OPTAB_DIRECT), 1);
4344 }
4345
4346 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4347 We need to emit DTP-relative relocations. */
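/* Example (illustrative, using a made-up symbol name): for SIZE == 8 and X
   a SYMBOL_REF for "foo", the routine below emits "\t.quad\tfoo@DTPOFF".  */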
4348
4349 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4350
4351 static void
4352 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4353 {
4354 switch (size)
4355 {
4356 case 4:
4357 fputs ("\t.long\t", file);
4358 break;
4359 case 8:
4360 fputs ("\t.quad\t", file);
4361 break;
4362 default:
4363 gcc_unreachable ();
4364 }
4365 output_addr_const (file, x);
4366 fputs ("@DTPOFF", file);
4367 }
4368
4369 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4370 /* Implement TARGET_MANGLE_TYPE. */
4371
4372 static const char *
4373 s390_mangle_type (const_tree type)
4374 {
4375 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4376 && TARGET_LONG_DOUBLE_128)
4377 return "g";
4378
4379 /* For all other types, use normal C++ mangling. */
4380 return NULL;
4381 }
4382 #endif
4383
4384 /* In the name of slightly smaller debug output, and to cater to
4385 general assembler lossage, recognize various UNSPEC sequences
4386 and turn them back into a direct symbol reference. */
4387
4388 static rtx
4389 s390_delegitimize_address (rtx orig_x)
4390 {
4391 rtx x = orig_x, y;
4392
4393 if (GET_CODE (x) != MEM)
4394 return orig_x;
4395
4396 x = XEXP (x, 0);
4397 if (GET_CODE (x) == PLUS
4398 && GET_CODE (XEXP (x, 1)) == CONST
4399 && GET_CODE (XEXP (x, 0)) == REG
4400 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4401 {
4402 y = XEXP (XEXP (x, 1), 0);
4403 if (GET_CODE (y) == UNSPEC
4404 && XINT (y, 1) == UNSPEC_GOT)
4405 return XVECEXP (y, 0, 0);
4406 return orig_x;
4407 }
4408
4409 if (GET_CODE (x) == CONST)
4410 {
4411 y = XEXP (x, 0);
4412 if (GET_CODE (y) == UNSPEC
4413 && XINT (y, 1) == UNSPEC_GOTENT)
4414 return XVECEXP (y, 0, 0);
4415 return orig_x;
4416 }
4417
4418 return orig_x;
4419 }
4420
4421 /* Output operand OP to stdio stream FILE.
4422 OP is an address (register + offset) which is not used to address data;
4423 instead the rightmost bits are interpreted as the value. */
4424
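/* Example (illustrative, assuming s390_decompose_shift_count splits OP into
   that base and offset): for OP == (plus (reg %r1) (const_int 3)) this
   prints "3(%r1)", and for a plain (const_int 3) it prints just "3".  */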
4425 static void
4426 print_shift_count_operand (FILE *file, rtx op)
4427 {
4428 HOST_WIDE_INT offset;
4429 rtx base;
4430
4431 /* Extract base register and offset. */
4432 if (!s390_decompose_shift_count (op, &base, &offset))
4433 gcc_unreachable ();
4434
4435 /* Sanity check. */
4436 if (base)
4437 {
4438 gcc_assert (GET_CODE (base) == REG);
4439 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4440 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4441 }
4442
4443 /* Offsets are restricted to twelve bits. */
4444 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4445 if (base)
4446 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4447 }
4448
4449 /* See 'get_some_local_dynamic_name'. */
4450
4451 static int
4452 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4453 {
4454 rtx x = *px;
4455
4456 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4457 {
4458 x = get_pool_constant (x);
4459 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4460 }
4461
4462 if (GET_CODE (x) == SYMBOL_REF
4463 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4464 {
4465 cfun->machine->some_ld_name = XSTR (x, 0);
4466 return 1;
4467 }
4468
4469 return 0;
4470 }
4471
4472 /* Locate some local-dynamic symbol still in use by this function
4473 so that we can print its name in local-dynamic base patterns. */
4474
4475 static const char *
4476 get_some_local_dynamic_name (void)
4477 {
4478 rtx insn;
4479
4480 if (cfun->machine->some_ld_name)
4481 return cfun->machine->some_ld_name;
4482
4483 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4484 if (INSN_P (insn)
4485 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4486 return cfun->machine->some_ld_name;
4487
4488 gcc_unreachable ();
4489 }
4490
4491 /* Output machine-dependent UNSPECs occurring in address constant X
4492 in assembler syntax to stdio stream FILE. Returns true if the
4493 constant X could be recognized, false otherwise. */
4494
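/* Example (illustrative symbol name): for
   X == (unspec [(symbol_ref "foo")] UNSPEC_GOTENT) the routine prints
   "foo@GOTENT" and returns true; for any unrecognized RTX it returns
   false.  */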
4495 bool
4496 s390_output_addr_const_extra (FILE *file, rtx x)
4497 {
4498 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4499 switch (XINT (x, 1))
4500 {
4501 case UNSPEC_GOTENT:
4502 output_addr_const (file, XVECEXP (x, 0, 0));
4503 fprintf (file, "@GOTENT");
4504 return true;
4505 case UNSPEC_GOT:
4506 output_addr_const (file, XVECEXP (x, 0, 0));
4507 fprintf (file, "@GOT");
4508 return true;
4509 case UNSPEC_GOTOFF:
4510 output_addr_const (file, XVECEXP (x, 0, 0));
4511 fprintf (file, "@GOTOFF");
4512 return true;
4513 case UNSPEC_PLT:
4514 output_addr_const (file, XVECEXP (x, 0, 0));
4515 fprintf (file, "@PLT");
4516 return true;
4517 case UNSPEC_PLTOFF:
4518 output_addr_const (file, XVECEXP (x, 0, 0));
4519 fprintf (file, "@PLTOFF");
4520 return true;
4521 case UNSPEC_TLSGD:
4522 output_addr_const (file, XVECEXP (x, 0, 0));
4523 fprintf (file, "@TLSGD");
4524 return true;
4525 case UNSPEC_TLSLDM:
4526 assemble_name (file, get_some_local_dynamic_name ());
4527 fprintf (file, "@TLSLDM");
4528 return true;
4529 case UNSPEC_DTPOFF:
4530 output_addr_const (file, XVECEXP (x, 0, 0));
4531 fprintf (file, "@DTPOFF");
4532 return true;
4533 case UNSPEC_NTPOFF:
4534 output_addr_const (file, XVECEXP (x, 0, 0));
4535 fprintf (file, "@NTPOFF");
4536 return true;
4537 case UNSPEC_GOTNTPOFF:
4538 output_addr_const (file, XVECEXP (x, 0, 0));
4539 fprintf (file, "@GOTNTPOFF");
4540 return true;
4541 case UNSPEC_INDNTPOFF:
4542 output_addr_const (file, XVECEXP (x, 0, 0));
4543 fprintf (file, "@INDNTPOFF");
4544 return true;
4545 }
4546
4547 return false;
4548 }
4549
4550 /* Output address operand ADDR in assembler syntax to
4551 stdio stream FILE. */
4552
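/* Example (illustrative register and displacement values): an address with
   displacement 100, index %r3 and base %r2 is printed as "100(%r3,%r2)";
   with no index it becomes "100(%r2)", and a missing displacement is
   printed as "0".  */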
4553 void
4554 print_operand_address (FILE *file, rtx addr)
4555 {
4556 struct s390_address ad;
4557
4558 if (!s390_decompose_address (addr, &ad)
4559 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4560 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4561 output_operand_lossage ("cannot decompose address");
4562
4563 if (ad.disp)
4564 output_addr_const (file, ad.disp);
4565 else
4566 fprintf (file, "0");
4567
4568 if (ad.base && ad.indx)
4569 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4570 reg_names[REGNO (ad.base)]);
4571 else if (ad.base)
4572 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4573 }
4574
4575 /* Output operand X in assembler syntax to stdio stream FILE.
4576 CODE specifies the format flag. The following format flags
4577 are recognized:
4578
4579 'C': print opcode suffix for branch condition.
4580 'D': print opcode suffix for inverse branch condition.
4581 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4582 'G': print the size of the operand in bytes.
4583 'O': print only the displacement of a memory reference.
4584 'R': print only the base register of a memory reference.
4585 'S': print S-type memory reference (base+displacement).
4586 'N': print the second word of a DImode operand.
4587 'M': print the second word of a TImode operand.
4588 'Y': print shift count operand.
4589
4590 'b': print integer X as if it's an unsigned byte.
4591 'x': print integer X as if it's an unsigned halfword.
4592 'h': print integer X as if it's a signed halfword.
4593 'i': print the first nonzero HImode part of X.
4594 'j': print the first HImode part unequal to -1 of X.
4595 'k': print the first nonzero SImode part of X.
4596 'm': print the first SImode part unequal to -1 of X.
4597 'o': print integer X as if it's an unsigned 32-bit word. */
4598
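/* Worked examples for the CONST_INT codes (illustrative values): with
   X == (const_int 0x12348765), 'x' prints 34661 (the low halfword,
   unsigned), 'h' prints -30875 (the same halfword sign-extended) and
   'b' prints 101 (the low byte).  */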
4599 void
4600 print_operand (FILE *file, rtx x, int code)
4601 {
4602 switch (code)
4603 {
4604 case 'C':
4605 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4606 return;
4607
4608 case 'D':
4609 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4610 return;
4611
4612 case 'J':
4613 if (GET_CODE (x) == SYMBOL_REF)
4614 {
4615 fprintf (file, "%s", ":tls_load:");
4616 output_addr_const (file, x);
4617 }
4618 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4619 {
4620 fprintf (file, "%s", ":tls_gdcall:");
4621 output_addr_const (file, XVECEXP (x, 0, 0));
4622 }
4623 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4624 {
4625 fprintf (file, "%s", ":tls_ldcall:");
4626 assemble_name (file, get_some_local_dynamic_name ());
4627 }
4628 else
4629 gcc_unreachable ();
4630 return;
4631
4632 case 'G':
4633 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4634 return;
4635
4636 case 'O':
4637 {
4638 struct s390_address ad;
4639 int ret;
4640
4641 gcc_assert (GET_CODE (x) == MEM);
4642 ret = s390_decompose_address (XEXP (x, 0), &ad);
4643 gcc_assert (ret);
4644 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4645 gcc_assert (!ad.indx);
4646
4647 if (ad.disp)
4648 output_addr_const (file, ad.disp);
4649 else
4650 fprintf (file, "0");
4651 }
4652 return;
4653
4654 case 'R':
4655 {
4656 struct s390_address ad;
4657 int ret;
4658
4659 gcc_assert (GET_CODE (x) == MEM);
4660 ret = s390_decompose_address (XEXP (x, 0), &ad);
4661 gcc_assert (ret);
4662 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4663 gcc_assert (!ad.indx);
4664
4665 if (ad.base)
4666 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4667 else
4668 fprintf (file, "0");
4669 }
4670 return;
4671
4672 case 'S':
4673 {
4674 struct s390_address ad;
4675 int ret;
4676
4677 gcc_assert (GET_CODE (x) == MEM);
4678 ret = s390_decompose_address (XEXP (x, 0), &ad);
4679 gcc_assert (ret);
4680 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4681 gcc_assert (!ad.indx);
4682
4683 if (ad.disp)
4684 output_addr_const (file, ad.disp);
4685 else
4686 fprintf (file, "0");
4687
4688 if (ad.base)
4689 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4690 }
4691 return;
4692
4693 case 'N':
4694 if (GET_CODE (x) == REG)
4695 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4696 else if (GET_CODE (x) == MEM)
4697 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4698 else
4699 gcc_unreachable ();
4700 break;
4701
4702 case 'M':
4703 if (GET_CODE (x) == REG)
4704 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4705 else if (GET_CODE (x) == MEM)
4706 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4707 else
4708 gcc_unreachable ();
4709 break;
4710
4711 case 'Y':
4712 print_shift_count_operand (file, x);
4713 return;
4714 }
4715
4716 switch (GET_CODE (x))
4717 {
4718 case REG:
4719 fprintf (file, "%s", reg_names[REGNO (x)]);
4720 break;
4721
4722 case MEM:
4723 output_address (XEXP (x, 0));
4724 break;
4725
4726 case CONST:
4727 case CODE_LABEL:
4728 case LABEL_REF:
4729 case SYMBOL_REF:
4730 output_addr_const (file, x);
4731 break;
4732
4733 case CONST_INT:
4734 if (code == 'b')
4735 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4736 else if (code == 'x')
4737 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4738 else if (code == 'h')
4739 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4740 else if (code == 'i')
4741 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4742 s390_extract_part (x, HImode, 0));
4743 else if (code == 'j')
4744 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4745 s390_extract_part (x, HImode, -1));
4746 else if (code == 'k')
4747 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4748 s390_extract_part (x, SImode, 0));
4749 else if (code == 'm')
4750 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4751 s390_extract_part (x, SImode, -1));
4752 else if (code == 'o')
4753 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
4754 else
4755 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4756 break;
4757
4758 case CONST_DOUBLE:
4759 gcc_assert (GET_MODE (x) == VOIDmode);
4760 if (code == 'b')
4761 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4762 else if (code == 'x')
4763 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4764 else if (code == 'h')
4765 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4766 else
4767 gcc_unreachable ();
4768 break;
4769
4770 default:
4771 fatal_insn ("UNKNOWN in print_operand !?", x);
4772 break;
4773 }
4774 }
4775
4776 /* Target hook for assembling integer objects. We need to define it
4777 here to work around a bug in some versions of GAS, which couldn't
4778 handle values smaller than INT_MIN when printed in decimal. */
4779
4780 static bool
4781 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4782 {
4783 if (size == 8 && aligned_p
4784 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4785 {
4786 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4787 INTVAL (x));
4788 return true;
4789 }
4790 return default_assemble_integer (x, size, aligned_p);
4791 }
4792
4793 /* Returns true if register REGNO is used for forming
4794 a memory address in expression X. */
4795
4796 static bool
4797 reg_used_in_mem_p (int regno, rtx x)
4798 {
4799 enum rtx_code code = GET_CODE (x);
4800 int i, j;
4801 const char *fmt;
4802
4803 if (code == MEM)
4804 {
4805 if (refers_to_regno_p (regno, regno+1,
4806 XEXP (x, 0), 0))
4807 return true;
4808 }
4809 else if (code == SET
4810 && GET_CODE (SET_DEST (x)) == PC)
4811 {
4812 if (refers_to_regno_p (regno, regno+1,
4813 SET_SRC (x), 0))
4814 return true;
4815 }
4816
4817 fmt = GET_RTX_FORMAT (code);
4818 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4819 {
4820 if (fmt[i] == 'e'
4821 && reg_used_in_mem_p (regno, XEXP (x, i)))
4822 return true;
4823
4824 else if (fmt[i] == 'E')
4825 for (j = 0; j < XVECLEN (x, i); j++)
4826 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4827 return true;
4828 }
4829 return false;
4830 }
4831
4832 /* Returns true if expression DEP_RTX sets an address register
4833 used by instruction INSN to address memory. */
4834
4835 static bool
4836 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4837 {
4838 rtx target, pat;
4839
4840 if (GET_CODE (dep_rtx) == INSN)
4841 dep_rtx = PATTERN (dep_rtx);
4842
4843 if (GET_CODE (dep_rtx) == SET)
4844 {
4845 target = SET_DEST (dep_rtx);
4846 if (GET_CODE (target) == STRICT_LOW_PART)
4847 target = XEXP (target, 0);
4848 while (GET_CODE (target) == SUBREG)
4849 target = SUBREG_REG (target);
4850
4851 if (GET_CODE (target) == REG)
4852 {
4853 int regno = REGNO (target);
4854
4855 if (s390_safe_attr_type (insn) == TYPE_LA)
4856 {
4857 pat = PATTERN (insn);
4858 if (GET_CODE (pat) == PARALLEL)
4859 {
4860 gcc_assert (XVECLEN (pat, 0) == 2);
4861 pat = XVECEXP (pat, 0, 0);
4862 }
4863 gcc_assert (GET_CODE (pat) == SET);
4864 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4865 }
4866 else if (get_attr_atype (insn) == ATYPE_AGEN)
4867 return reg_used_in_mem_p (regno, PATTERN (insn));
4868 }
4869 }
4870 return false;
4871 }
4872
4873 /* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
4874
4875 int
4876 s390_agen_dep_p (rtx dep_insn, rtx insn)
4877 {
4878 rtx dep_rtx = PATTERN (dep_insn);
4879 int i;
4880
4881 if (GET_CODE (dep_rtx) == SET
4882 && addr_generation_dependency_p (dep_rtx, insn))
4883 return 1;
4884 else if (GET_CODE (dep_rtx) == PARALLEL)
4885 {
4886 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4887 {
4888 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4889 return 1;
4890 }
4891 }
4892 return 0;
4893 }
4894
4895 /* A C statement (sans semicolon) to update the integer scheduling priority
4896 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4897 reduce the priority to execute INSN later. Do not define this macro if
4898 you do not need to adjust the scheduling priorities of insns.
4899
4900 A STD instruction should be scheduled earlier,
4901 in order to use the bypass. */
4902
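/* Worked example (illustrative priorities): with one of the processors
   checked below, an FSTOREDF insn of priority 2 is boosted to 16 and a
   STORE insn of priority 2 to 4; all other insns keep their priority.  */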
4903 static int
4904 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4905 {
4906 if (! INSN_P (insn))
4907 return priority;
4908
4909 if (s390_tune != PROCESSOR_2084_Z990
4910 && s390_tune != PROCESSOR_2094_Z9_109)
4911 return priority;
4912
4913 switch (s390_safe_attr_type (insn))
4914 {
4915 case TYPE_FSTOREDF:
4916 case TYPE_FSTORESF:
4917 priority = priority << 3;
4918 break;
4919 case TYPE_STORE:
4920 case TYPE_STM:
4921 priority = priority << 1;
4922 break;
4923 default:
4924 break;
4925 }
4926 return priority;
4927 }
4928
4929 /* The number of instructions that can be issued per cycle. */
4930
4931 static int
4932 s390_issue_rate (void)
4933 {
4934 if (s390_tune == PROCESSOR_2084_Z990
4935 || s390_tune == PROCESSOR_2094_Z9_109)
4936 return 3;
4937 return 1;
4938 }
4939
4940 static int
4941 s390_first_cycle_multipass_dfa_lookahead (void)
4942 {
4943 return 4;
4944 }
4945
4946
4947 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4948 Fix up MEMs as required. */
4949
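/* Sketch of the rewrite performed below (illustrative, not a literal dump):
   a reference such as (mem (symbol_ref [in the literal pool])) becomes
   (mem (unspec [sym, base_reg] UNSPEC_LTREF)), while a reference through
   (const (plus sym offset)) keeps its offset outside the UNSPEC.  */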
4950 static void
4951 annotate_constant_pool_refs (rtx *x)
4952 {
4953 int i, j;
4954 const char *fmt;
4955
4956 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4957 || !CONSTANT_POOL_ADDRESS_P (*x));
4958
4959 /* Literal pool references can only occur inside a MEM ... */
4960 if (GET_CODE (*x) == MEM)
4961 {
4962 rtx memref = XEXP (*x, 0);
4963
4964 if (GET_CODE (memref) == SYMBOL_REF
4965 && CONSTANT_POOL_ADDRESS_P (memref))
4966 {
4967 rtx base = cfun->machine->base_reg;
4968 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4969 UNSPEC_LTREF);
4970
4971 *x = replace_equiv_address (*x, addr);
4972 return;
4973 }
4974
4975 if (GET_CODE (memref) == CONST
4976 && GET_CODE (XEXP (memref, 0)) == PLUS
4977 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4978 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4979 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4980 {
4981 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4982 rtx sym = XEXP (XEXP (memref, 0), 0);
4983 rtx base = cfun->machine->base_reg;
4984 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4985 UNSPEC_LTREF);
4986
4987 *x = replace_equiv_address (*x, plus_constant (addr, off));
4988 return;
4989 }
4990 }
4991
4992 /* ... or a load-address type pattern. */
4993 if (GET_CODE (*x) == SET)
4994 {
4995 rtx addrref = SET_SRC (*x);
4996
4997 if (GET_CODE (addrref) == SYMBOL_REF
4998 && CONSTANT_POOL_ADDRESS_P (addrref))
4999 {
5000 rtx base = cfun->machine->base_reg;
5001 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5002 UNSPEC_LTREF);
5003
5004 SET_SRC (*x) = addr;
5005 return;
5006 }
5007
5008 if (GET_CODE (addrref) == CONST
5009 && GET_CODE (XEXP (addrref, 0)) == PLUS
5010 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5011 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5012 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5013 {
5014 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5015 rtx sym = XEXP (XEXP (addrref, 0), 0);
5016 rtx base = cfun->machine->base_reg;
5017 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5018 UNSPEC_LTREF);
5019
5020 SET_SRC (*x) = plus_constant (addr, off);
5021 return;
5022 }
5023 }
5024
5025 /* Annotate LTREL_BASE as well. */
5026 if (GET_CODE (*x) == UNSPEC
5027 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5028 {
5029 rtx base = cfun->machine->base_reg;
5030 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5031 UNSPEC_LTREL_BASE);
5032 return;
5033 }
5034
5035 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5036 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5037 {
5038 if (fmt[i] == 'e')
5039 {
5040 annotate_constant_pool_refs (&XEXP (*x, i));
5041 }
5042 else if (fmt[i] == 'E')
5043 {
5044 for (j = 0; j < XVECLEN (*x, i); j++)
5045 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5046 }
5047 }
5048 }
5049
5050 /* Split all branches that exceed the maximum distance.
5051 Returns true if this created a new literal pool entry. */
5052
5053 static int
5054 s390_split_branches (void)
5055 {
5056 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5057 int new_literal = 0, ret;
5058 rtx insn, pat, tmp, target;
5059 rtx *label;
5060
5061 /* We need correct insn addresses. */
5062
5063 shorten_branches (get_insns ());
5064
5065 /* Find all branches that exceed 64KB, and split them. */
5066
5067 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5068 {
5069 if (GET_CODE (insn) != JUMP_INSN)
5070 continue;
5071
5072 pat = PATTERN (insn);
5073 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5074 pat = XVECEXP (pat, 0, 0);
5075 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5076 continue;
5077
5078 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5079 {
5080 label = &SET_SRC (pat);
5081 }
5082 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5083 {
5084 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5085 label = &XEXP (SET_SRC (pat), 1);
5086 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5087 label = &XEXP (SET_SRC (pat), 2);
5088 else
5089 continue;
5090 }
5091 else
5092 continue;
5093
5094 if (get_attr_length (insn) <= 4)
5095 continue;
5096
5097 /* We are going to use the return register as a scratch register;
5098 make sure it will be saved/restored by the prologue/epilogue. */
5099 cfun_frame_layout.save_return_addr_p = 1;
5100
5101 if (!flag_pic)
5102 {
5103 new_literal = 1;
5104 tmp = force_const_mem (Pmode, *label);
5105 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5106 INSN_ADDRESSES_NEW (tmp, -1);
5107 annotate_constant_pool_refs (&PATTERN (tmp));
5108
5109 target = temp_reg;
5110 }
5111 else
5112 {
5113 new_literal = 1;
5114 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5115 UNSPEC_LTREL_OFFSET);
5116 target = gen_rtx_CONST (Pmode, target);
5117 target = force_const_mem (Pmode, target);
5118 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5119 INSN_ADDRESSES_NEW (tmp, -1);
5120 annotate_constant_pool_refs (&PATTERN (tmp));
5121
5122 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5123 cfun->machine->base_reg),
5124 UNSPEC_LTREL_BASE);
5125 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5126 }
5127
5128 ret = validate_change (insn, label, target, 0);
5129 gcc_assert (ret);
5130 }
5131
5132 return new_literal;
5133 }
5134
5135
5136 /* Find an annotated literal pool symbol referenced in RTX X,
5137 and store it at REF. Will abort if X contains references to
5138 more than one such pool symbol; multiple references to the same
5139 symbol are allowed, however.
5140
5141 The rtx pointed to by REF must be initialized to NULL_RTX
5142 by the caller before calling this routine. */
5143
5144 static void
5145 find_constant_pool_ref (rtx x, rtx *ref)
5146 {
5147 int i, j;
5148 const char *fmt;
5149
5150 /* Ignore LTREL_BASE references. */
5151 if (GET_CODE (x) == UNSPEC
5152 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5153 return;
5154 /* Likewise POOL_ENTRY insns. */
5155 if (GET_CODE (x) == UNSPEC_VOLATILE
5156 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5157 return;
5158
5159 gcc_assert (GET_CODE (x) != SYMBOL_REF
5160 || !CONSTANT_POOL_ADDRESS_P (x));
5161
5162 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5163 {
5164 rtx sym = XVECEXP (x, 0, 0);
5165 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5166 && CONSTANT_POOL_ADDRESS_P (sym));
5167
5168 if (*ref == NULL_RTX)
5169 *ref = sym;
5170 else
5171 gcc_assert (*ref == sym);
5172
5173 return;
5174 }
5175
5176 fmt = GET_RTX_FORMAT (GET_CODE (x));
5177 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5178 {
5179 if (fmt[i] == 'e')
5180 {
5181 find_constant_pool_ref (XEXP (x, i), ref);
5182 }
5183 else if (fmt[i] == 'E')
5184 {
5185 for (j = 0; j < XVECLEN (x, i); j++)
5186 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5187 }
5188 }
5189 }
5190
5191 /* Replace every reference to the annotated literal pool
5192 symbol REF in X by its base plus OFFSET. */
5193
5194 static void
5195 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5196 {
5197 int i, j;
5198 const char *fmt;
5199
5200 gcc_assert (*x != ref);
5201
5202 if (GET_CODE (*x) == UNSPEC
5203 && XINT (*x, 1) == UNSPEC_LTREF
5204 && XVECEXP (*x, 0, 0) == ref)
5205 {
5206 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5207 return;
5208 }
5209
5210 if (GET_CODE (*x) == PLUS
5211 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5212 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5213 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5214 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5215 {
5216 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5217 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5218 return;
5219 }
5220
5221 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5222 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5223 {
5224 if (fmt[i] == 'e')
5225 {
5226 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5227 }
5228 else if (fmt[i] == 'E')
5229 {
5230 for (j = 0; j < XVECLEN (*x, i); j++)
5231 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5232 }
5233 }
5234 }
5235
5236 /* Check whether X contains an UNSPEC_LTREL_BASE.
5237 Return its constant pool symbol if found, NULL_RTX otherwise. */
5238
5239 static rtx
5240 find_ltrel_base (rtx x)
5241 {
5242 int i, j;
5243 const char *fmt;
5244
5245 if (GET_CODE (x) == UNSPEC
5246 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5247 return XVECEXP (x, 0, 0);
5248
5249 fmt = GET_RTX_FORMAT (GET_CODE (x));
5250 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5251 {
5252 if (fmt[i] == 'e')
5253 {
5254 rtx fnd = find_ltrel_base (XEXP (x, i));
5255 if (fnd)
5256 return fnd;
5257 }
5258 else if (fmt[i] == 'E')
5259 {
5260 for (j = 0; j < XVECLEN (x, i); j++)
5261 {
5262 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5263 if (fnd)
5264 return fnd;
5265 }
5266 }
5267 }
5268
5269 return NULL_RTX;
5270 }
5271
5272 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5273
5274 static void
5275 replace_ltrel_base (rtx *x)
5276 {
5277 int i, j;
5278 const char *fmt;
5279
5280 if (GET_CODE (*x) == UNSPEC
5281 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5282 {
5283 *x = XVECEXP (*x, 0, 1);
5284 return;
5285 }
5286
5287 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5288 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5289 {
5290 if (fmt[i] == 'e')
5291 {
5292 replace_ltrel_base (&XEXP (*x, i));
5293 }
5294 else if (fmt[i] == 'E')
5295 {
5296 for (j = 0; j < XVECLEN (*x, i); j++)
5297 replace_ltrel_base (&XVECEXP (*x, i, j));
5298 }
5299 }
5300 }
5301
5302
5303 /* We keep a list of constants which we have to add to internal
5304 constant tables in the middle of large functions. */
5305
5306 #define NR_C_MODES 11
5307 enum machine_mode constant_modes[NR_C_MODES] =
5308 {
5309 TFmode, TImode, TDmode,
5310 DFmode, DImode, DDmode,
5311 SFmode, SImode, SDmode,
5312 HImode,
5313 QImode
5314 };
5315
5316 struct constant
5317 {
5318 struct constant *next;
5319 rtx value;
5320 rtx label;
5321 };
5322
5323 struct constant_pool
5324 {
5325 struct constant_pool *next;
5326 rtx first_insn;
5327 rtx pool_insn;
5328 bitmap insns;
5329
5330 struct constant *constants[NR_C_MODES];
5331 struct constant *execute;
5332 rtx label;
5333 int size;
5334 };
5335
5336 /* Allocate new constant_pool structure. */
5337
5338 static struct constant_pool *
5339 s390_alloc_pool (void)
5340 {
5341 struct constant_pool *pool;
5342 int i;
5343
5344 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5345 pool->next = NULL;
5346 for (i = 0; i < NR_C_MODES; i++)
5347 pool->constants[i] = NULL;
5348
5349 pool->execute = NULL;
5350 pool->label = gen_label_rtx ();
5351 pool->first_insn = NULL_RTX;
5352 pool->pool_insn = NULL_RTX;
5353 pool->insns = BITMAP_ALLOC (NULL);
5354 pool->size = 0;
5355
5356 return pool;
5357 }
5358
5359 /* Create new constant pool covering instructions starting at INSN
5360 and chain it to the end of POOL_LIST. */
5361
5362 static struct constant_pool *
5363 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5364 {
5365 struct constant_pool *pool, **prev;
5366
5367 pool = s390_alloc_pool ();
5368 pool->first_insn = insn;
5369
5370 for (prev = pool_list; *prev; prev = &(*prev)->next)
5371 ;
5372 *prev = pool;
5373
5374 return pool;
5375 }
5376
5377 /* End range of instructions covered by POOL at INSN and emit
5378 placeholder insn representing the pool. */
5379
5380 static void
5381 s390_end_pool (struct constant_pool *pool, rtx insn)
5382 {
5383 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5384
5385 if (!insn)
5386 insn = get_last_insn ();
5387
5388 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5389 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5390 }
5391
5392 /* Add INSN to the list of insns covered by POOL. */
5393
5394 static void
5395 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5396 {
5397 bitmap_set_bit (pool->insns, INSN_UID (insn));
5398 }
5399
5400 /* Return pool out of POOL_LIST that covers INSN. */
5401
5402 static struct constant_pool *
5403 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5404 {
5405 struct constant_pool *pool;
5406
5407 for (pool = pool_list; pool; pool = pool->next)
5408 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5409 break;
5410
5411 return pool;
5412 }
5413
5414 /* Add constant VAL of mode MODE to the constant pool POOL. */
5415
5416 static void
5417 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5418 {
5419 struct constant *c;
5420 int i;
5421
5422 for (i = 0; i < NR_C_MODES; i++)
5423 if (constant_modes[i] == mode)
5424 break;
5425 gcc_assert (i != NR_C_MODES);
5426
5427 for (c = pool->constants[i]; c != NULL; c = c->next)
5428 if (rtx_equal_p (val, c->value))
5429 break;
5430
5431 if (c == NULL)
5432 {
5433 c = (struct constant *) xmalloc (sizeof *c);
5434 c->value = val;
5435 c->label = gen_label_rtx ();
5436 c->next = pool->constants[i];
5437 pool->constants[i] = c;
5438 pool->size += GET_MODE_SIZE (mode);
5439 }
5440 }
5441
5442 /* Find constant VAL of mode MODE in the constant pool POOL.
5443 Return an RTX describing the distance from the start of
5444 the pool to the location of the new constant. */
5445
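/* The returned offset has the shape (illustrative label names):
   (const (minus (label_ref Lconstant) (label_ref Lpool_base))), i.e. the
   distance of the constant's label from the pool base label.  */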
5446 static rtx
5447 s390_find_constant (struct constant_pool *pool, rtx val,
5448 enum machine_mode mode)
5449 {
5450 struct constant *c;
5451 rtx offset;
5452 int i;
5453
5454 for (i = 0; i < NR_C_MODES; i++)
5455 if (constant_modes[i] == mode)
5456 break;
5457 gcc_assert (i != NR_C_MODES);
5458
5459 for (c = pool->constants[i]; c != NULL; c = c->next)
5460 if (rtx_equal_p (val, c->value))
5461 break;
5462
5463 gcc_assert (c);
5464
5465 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5466 gen_rtx_LABEL_REF (Pmode, pool->label));
5467 offset = gen_rtx_CONST (Pmode, offset);
5468 return offset;
5469 }
5470
5471 /* Check whether INSN is an execute. Return the label_ref to its
5472 execute target template if so, NULL_RTX otherwise. */
5473
5474 static rtx
5475 s390_execute_label (rtx insn)
5476 {
5477 if (GET_CODE (insn) == INSN
5478 && GET_CODE (PATTERN (insn)) == PARALLEL
5479 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5480 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5481 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5482
5483 return NULL_RTX;
5484 }
5485
5486 /* Add execute target for INSN to the constant pool POOL. */
5487
5488 static void
5489 s390_add_execute (struct constant_pool *pool, rtx insn)
5490 {
5491 struct constant *c;
5492
5493 for (c = pool->execute; c != NULL; c = c->next)
5494 if (INSN_UID (insn) == INSN_UID (c->value))
5495 break;
5496
5497 if (c == NULL)
5498 {
5499 c = (struct constant *) xmalloc (sizeof *c);
5500 c->value = insn;
5501 c->label = gen_label_rtx ();
5502 c->next = pool->execute;
5503 pool->execute = c;
5504 pool->size += 6;
5505 }
5506 }
5507
5508 /* Find execute target for INSN in the constant pool POOL.
5509 Return an RTX describing the distance from the start of
5510 the pool to the location of the execute target. */
5511
5512 static rtx
5513 s390_find_execute (struct constant_pool *pool, rtx insn)
5514 {
5515 struct constant *c;
5516 rtx offset;
5517
5518 for (c = pool->execute; c != NULL; c = c->next)
5519 if (INSN_UID (insn) == INSN_UID (c->value))
5520 break;
5521
5522 gcc_assert (c);
5523
5524 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5525 gen_rtx_LABEL_REF (Pmode, pool->label));
5526 offset = gen_rtx_CONST (Pmode, offset);
5527 return offset;
5528 }
5529
5530 /* For an execute INSN, extract the execute target template. */
5531
5532 static rtx
5533 s390_execute_target (rtx insn)
5534 {
5535 rtx pattern = PATTERN (insn);
5536 gcc_assert (s390_execute_label (insn));
5537
5538 if (XVECLEN (pattern, 0) == 2)
5539 {
5540 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5541 }
5542 else
5543 {
5544 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5545 int i;
5546
5547 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5548 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5549
5550 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5551 }
5552
5553 return pattern;
5554 }
5555
5556 /* Indicate that INSN cannot be duplicated. This is the case for
5557 execute insns that carry a unique label. */
5558
5559 static bool
5560 s390_cannot_copy_insn_p (rtx insn)
5561 {
5562 rtx label = s390_execute_label (insn);
5563 return label && label != const0_rtx;
5564 }
5565
5566 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5567 do not emit the pool base label. */
5568
5569 static void
5570 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5571 {
5572 struct constant *c;
5573 rtx insn = pool->pool_insn;
5574 int i;
5575
5576 /* Switch to rodata section. */
5577 if (TARGET_CPU_ZARCH)
5578 {
5579 insn = emit_insn_after (gen_pool_section_start (), insn);
5580 INSN_ADDRESSES_NEW (insn, -1);
5581 }
5582
5583 /* Ensure minimum pool alignment. */
5584 if (TARGET_CPU_ZARCH)
5585 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5586 else
5587 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5588 INSN_ADDRESSES_NEW (insn, -1);
5589
5590 /* Emit pool base label. */
5591 if (!remote_label)
5592 {
5593 insn = emit_label_after (pool->label, insn);
5594 INSN_ADDRESSES_NEW (insn, -1);
5595 }
5596
5597 /* Dump constants in descending alignment requirement order,
5598 ensuring proper alignment for every constant. */
5599 for (i = 0; i < NR_C_MODES; i++)
5600 for (c = pool->constants[i]; c; c = c->next)
5601 {
5602 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5603 rtx value = copy_rtx (c->value);
5604 if (GET_CODE (value) == CONST
5605 && GET_CODE (XEXP (value, 0)) == UNSPEC
5606 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5607 && XVECLEN (XEXP (value, 0), 0) == 1)
5608 {
5609 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5610 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5611 value = gen_rtx_CONST (VOIDmode, value);
5612 }
5613
5614 insn = emit_label_after (c->label, insn);
5615 INSN_ADDRESSES_NEW (insn, -1);
5616
5617 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5618 gen_rtvec (1, value),
5619 UNSPECV_POOL_ENTRY);
5620 insn = emit_insn_after (value, insn);
5621 INSN_ADDRESSES_NEW (insn, -1);
5622 }
5623
5624 /* Ensure minimum alignment for instructions. */
5625 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5626 INSN_ADDRESSES_NEW (insn, -1);
5627
5628 /* Output in-pool execute template insns. */
5629 for (c = pool->execute; c; c = c->next)
5630 {
5631 insn = emit_label_after (c->label, insn);
5632 INSN_ADDRESSES_NEW (insn, -1);
5633
5634 insn = emit_insn_after (s390_execute_target (c->value), insn);
5635 INSN_ADDRESSES_NEW (insn, -1);
5636 }
5637
5638 /* Switch back to previous section. */
5639 if (TARGET_CPU_ZARCH)
5640 {
5641 insn = emit_insn_after (gen_pool_section_end (), insn);
5642 INSN_ADDRESSES_NEW (insn, -1);
5643 }
5644
5645 insn = emit_barrier_after (insn);
5646 INSN_ADDRESSES_NEW (insn, -1);
5647
5648 /* Remove placeholder insn. */
5649 remove_insn (pool->pool_insn);
5650 }
5651
5652 /* Free all memory used by POOL. */
5653
5654 static void
5655 s390_free_pool (struct constant_pool *pool)
5656 {
5657 struct constant *c, *next;
5658 int i;
5659
5660 for (i = 0; i < NR_C_MODES; i++)
5661 for (c = pool->constants[i]; c; c = next)
5662 {
5663 next = c->next;
5664 free (c);
5665 }
5666
5667 for (c = pool->execute; c; c = next)
5668 {
5669 next = c->next;
5670 free (c);
5671 }
5672
5673 BITMAP_FREE (pool->insns);
5674 free (pool);
5675 }
5676
5677
5678 /* Collect main literal pool. Return NULL on overflow. */
5679
5680 static struct constant_pool *
5681 s390_mainpool_start (void)
5682 {
5683 struct constant_pool *pool;
5684 rtx insn;
5685
5686 pool = s390_alloc_pool ();
5687
5688 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5689 {
5690 if (GET_CODE (insn) == INSN
5691 && GET_CODE (PATTERN (insn)) == SET
5692 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5693 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5694 {
5695 gcc_assert (!pool->pool_insn);
5696 pool->pool_insn = insn;
5697 }
5698
5699 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5700 {
5701 s390_add_execute (pool, insn);
5702 }
5703 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5704 {
5705 rtx pool_ref = NULL_RTX;
5706 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5707 if (pool_ref)
5708 {
5709 rtx constant = get_pool_constant (pool_ref);
5710 enum machine_mode mode = get_pool_mode (pool_ref);
5711 s390_add_constant (pool, constant, mode);
5712 }
5713 }
5714 }
5715
5716 gcc_assert (pool->pool_insn || pool->size == 0);
5717
5718 if (pool->size >= 4096)
5719 {
5720 /* We're going to chunkify the pool, so remove the main
5721 pool placeholder insn. */
5722 remove_insn (pool->pool_insn);
5723
5724 s390_free_pool (pool);
5725 pool = NULL;
5726 }
5727
5728 return pool;
5729 }
5730
5731 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5732 Modify the current function to output the pool constants as well as
5733 the pool register setup instruction. */
5734
5735 static void
5736 s390_mainpool_finish (struct constant_pool *pool)
5737 {
5738 rtx base_reg = cfun->machine->base_reg;
5739 rtx insn;
5740
5741 /* If the pool is empty, we're done. */
5742 if (pool->size == 0)
5743 {
5744 /* We don't actually need a base register after all. */
5745 cfun->machine->base_reg = NULL_RTX;
5746
5747 if (pool->pool_insn)
5748 remove_insn (pool->pool_insn);
5749 s390_free_pool (pool);
5750 return;
5751 }
5752
5753 /* We need correct insn addresses. */
5754 shorten_branches (get_insns ());
5755
5756 /* On zSeries, we use a LARL to load the pool register. The pool is
5757 located in the .rodata section, so we emit it after the function. */
5758 if (TARGET_CPU_ZARCH)
5759 {
5760 insn = gen_main_base_64 (base_reg, pool->label);
5761 insn = emit_insn_after (insn, pool->pool_insn);
5762 INSN_ADDRESSES_NEW (insn, -1);
5763 remove_insn (pool->pool_insn);
5764
5765 insn = get_last_insn ();
5766 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5767 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5768
5769 s390_dump_pool (pool, 0);
5770 }
5771
5772 /* On S/390, if the total size of the function's code plus literal pool
5773 does not exceed 4096 bytes, we use BASR to set up a function base
5774 pointer, and emit the literal pool at the end of the function. */
5775 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5776 + pool->size + 8 /* alignment slop */ < 4096)
5777 {
5778 insn = gen_main_base_31_small (base_reg, pool->label);
5779 insn = emit_insn_after (insn, pool->pool_insn);
5780 INSN_ADDRESSES_NEW (insn, -1);
5781 remove_insn (pool->pool_insn);
5782
5783 insn = emit_label_after (pool->label, insn);
5784 INSN_ADDRESSES_NEW (insn, -1);
5785
5786 insn = get_last_insn ();
5787 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5788 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5789
5790 s390_dump_pool (pool, 1);
5791 }
5792
5793 /* Otherwise, we emit an inline literal pool and use BASR to branch
5794 over it, setting up the pool register at the same time. */
5795 else
5796 {
5797 rtx pool_end = gen_label_rtx ();
5798
5799 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5800 insn = emit_insn_after (insn, pool->pool_insn);
5801 INSN_ADDRESSES_NEW (insn, -1);
5802 remove_insn (pool->pool_insn);
5803
5804 insn = emit_label_after (pool->label, insn);
5805 INSN_ADDRESSES_NEW (insn, -1);
5806
5807 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5808 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5809
5810 insn = emit_label_after (pool_end, pool->pool_insn);
5811 INSN_ADDRESSES_NEW (insn, -1);
5812
5813 s390_dump_pool (pool, 1);
5814 }
5815
5816
5817 /* Replace all literal pool references. */
5818
5819 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5820 {
5821 if (INSN_P (insn))
5822 replace_ltrel_base (&PATTERN (insn));
5823
5824 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5825 {
5826 rtx addr, pool_ref = NULL_RTX;
5827 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5828 if (pool_ref)
5829 {
5830 if (s390_execute_label (insn))
5831 addr = s390_find_execute (pool, insn);
5832 else
5833 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5834 get_pool_mode (pool_ref));
5835
5836 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5837 INSN_CODE (insn) = -1;
5838 }
5839 }
5840 }
5841
5842
5843 /* Free the pool. */
5844 s390_free_pool (pool);
5845 }
5846
5847 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5848 We have decided we cannot use this pool, so revert all changes
5849 to the current function that were done by s390_mainpool_start. */
5850 static void
5851 s390_mainpool_cancel (struct constant_pool *pool)
5852 {
5853 /* We didn't actually change the instruction stream, so simply
5854 free the pool memory. */
5855 s390_free_pool (pool);
5856 }
5857
5858
5859 /* Chunkify the literal pool. */
5860
5861 #define S390_POOL_CHUNK_MIN 0xc00
5862 #define S390_POOL_CHUNK_MAX 0xe00
5863
5864 static struct constant_pool *
5865 s390_chunkify_start (void)
5866 {
5867 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5868 int extra_size = 0;
5869 bitmap far_labels;
5870 rtx pending_ltrel = NULL_RTX;
5871 rtx insn;
5872
5873 rtx (*gen_reload_base) (rtx, rtx) =
5874 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5875
5876
5877 /* We need correct insn addresses. */
5878
5879 shorten_branches (get_insns ());
5880
5881 /* Scan all insns and move literals to pool chunks. */
5882
5883 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5884 {
5885 /* Check for pending LTREL_BASE. */
5886 if (INSN_P (insn))
5887 {
5888 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5889 if (ltrel_base)
5890 {
5891 gcc_assert (ltrel_base == pending_ltrel);
5892 pending_ltrel = NULL_RTX;
5893 }
5894 }
5895
5896 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5897 {
5898 if (!curr_pool)
5899 curr_pool = s390_start_pool (&pool_list, insn);
5900
5901 s390_add_execute (curr_pool, insn);
5902 s390_add_pool_insn (curr_pool, insn);
5903 }
5904 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5905 {
5906 rtx pool_ref = NULL_RTX;
5907 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5908 if (pool_ref)
5909 {
5910 rtx constant = get_pool_constant (pool_ref);
5911 enum machine_mode mode = get_pool_mode (pool_ref);
5912
5913 if (!curr_pool)
5914 curr_pool = s390_start_pool (&pool_list, insn);
5915
5916 s390_add_constant (curr_pool, constant, mode);
5917 s390_add_pool_insn (curr_pool, insn);
5918
5919 /* Don't split the pool chunk between a LTREL_OFFSET load
5920 and the corresponding LTREL_BASE. */
5921 if (GET_CODE (constant) == CONST
5922 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5923 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5924 {
5925 gcc_assert (!pending_ltrel);
5926 pending_ltrel = pool_ref;
5927 }
5928 }
5929 }
5930
5931 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5932 {
5933 if (curr_pool)
5934 s390_add_pool_insn (curr_pool, insn);
5935 /* An LTREL_BASE must follow within the same basic block. */
5936 gcc_assert (!pending_ltrel);
5937 }
5938
5939 if (!curr_pool
5940 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5941 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5942 continue;
5943
5944 if (TARGET_CPU_ZARCH)
5945 {
5946 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5947 continue;
5948
5949 s390_end_pool (curr_pool, NULL_RTX);
5950 curr_pool = NULL;
5951 }
5952 else
5953 {
5954 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5955 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5956 + extra_size;
5957
5958 /* We will later have to insert base register reload insns.
5959 Those will have an effect on code size, which we need to
5960 consider here. This calculation makes rather pessimistic
5961 worst-case assumptions. */
5962 if (GET_CODE (insn) == CODE_LABEL)
5963 extra_size += 6;
5964
5965 if (chunk_size < S390_POOL_CHUNK_MIN
5966 && curr_pool->size < S390_POOL_CHUNK_MIN)
5967 continue;
5968
5969 /* Pool chunks can only be inserted after BARRIERs ... */
5970 if (GET_CODE (insn) == BARRIER)
5971 {
5972 s390_end_pool (curr_pool, insn);
5973 curr_pool = NULL;
5974 extra_size = 0;
5975 }
5976
5977 /* ... so if we don't find one in time, create one. */
5978 else if ((chunk_size > S390_POOL_CHUNK_MAX
5979 || curr_pool->size > S390_POOL_CHUNK_MAX))
5980 {
5981 rtx label, jump, barrier;
5982
5983 /* We can insert the barrier only after a 'real' insn. */
5984 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5985 continue;
5986 if (get_attr_length (insn) == 0)
5987 continue;
5988
5989 /* Don't separate LTREL_BASE from the corresponding
5990 LTREL_OFFSET load. */
5991 if (pending_ltrel)
5992 continue;
5993
5994 label = gen_label_rtx ();
5995 jump = emit_jump_insn_after (gen_jump (label), insn);
5996 barrier = emit_barrier_after (jump);
5997 insn = emit_label_after (label, barrier);
5998 JUMP_LABEL (jump) = label;
5999 LABEL_NUSES (label) = 1;
6000
6001 INSN_ADDRESSES_NEW (jump, -1);
6002 INSN_ADDRESSES_NEW (barrier, -1);
6003 INSN_ADDRESSES_NEW (insn, -1);
6004
6005 s390_end_pool (curr_pool, barrier);
6006 curr_pool = NULL;
6007 extra_size = 0;
6008 }
6009 }
6010 }
6011
6012 if (curr_pool)
6013 s390_end_pool (curr_pool, NULL_RTX);
6014 gcc_assert (!pending_ltrel);
6015
6016 /* Find all labels that are branched into
6017 from an insn belonging to a different chunk. */
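/* At such labels the base register may still hold the base of a different
   chunk, so base register reload insns are inserted at them further down.  */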
6018
6019 far_labels = BITMAP_ALLOC (NULL);
6020
6021 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6022 {
6023 /* Labels marked with LABEL_PRESERVE_P can be targets
6024 of non-local jumps, so we have to mark them.
6025 The same holds for named labels.
6026
6027 Don't do that, however, if it is the label before
6028 a jump table. */
6029
6030 if (GET_CODE (insn) == CODE_LABEL
6031 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6032 {
6033 rtx vec_insn = next_real_insn (insn);
6034 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6035 PATTERN (vec_insn) : NULL_RTX;
6036 if (!vec_pat
6037 || !(GET_CODE (vec_pat) == ADDR_VEC
6038 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6039 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6040 }
6041
6042 /* If we have a direct jump (conditional or unconditional)
6043 or a casesi jump, check all potential targets. */
6044 else if (GET_CODE (insn) == JUMP_INSN)
6045 {
6046 rtx pat = PATTERN (insn);
6047 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6048 pat = XVECEXP (pat, 0, 0);
6049
6050 if (GET_CODE (pat) == SET)
6051 {
6052 rtx label = JUMP_LABEL (insn);
6053 if (label)
6054 {
6055 if (s390_find_pool (pool_list, label)
6056 != s390_find_pool (pool_list, insn))
6057 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6058 }
6059 }
6060 else if (GET_CODE (pat) == PARALLEL
6061 && XVECLEN (pat, 0) == 2
6062 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6063 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6064 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6065 {
6066 /* Find the jump table used by this casesi jump. */
6067 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6068 rtx vec_insn = next_real_insn (vec_label);
6069 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6070 PATTERN (vec_insn) : NULL_RTX;
6071 if (vec_pat
6072 && (GET_CODE (vec_pat) == ADDR_VEC
6073 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6074 {
6075 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6076
6077 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6078 {
6079 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6080
6081 if (s390_find_pool (pool_list, label)
6082 != s390_find_pool (pool_list, insn))
6083 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6084 }
6085 }
6086 }
6087 }
6088 }
6089
6090 /* Insert base register reload insns before every pool. */
6091
6092 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6093 {
6094 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6095 curr_pool->label);
6096 rtx insn = curr_pool->first_insn;
6097 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6098 }
6099
6100 /* Insert base register reload insns at every far label. */
6101
6102 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6103 if (GET_CODE (insn) == CODE_LABEL
6104 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6105 {
6106 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6107 if (pool)
6108 {
6109 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6110 pool->label);
6111 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6112 }
6113 }
6114
6115
6116 BITMAP_FREE (far_labels);
6117
6118
6119 /* Recompute insn addresses. */
6120
6121 init_insn_lengths ();
6122 shorten_branches (get_insns ());
6123
6124 return pool_list;
6125 }
6126
6127 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6128 After we have decided to use this list, finish implementing
6129 all changes to the current function as required. */
6130
6131 static void
6132 s390_chunkify_finish (struct constant_pool *pool_list)
6133 {
6134 struct constant_pool *curr_pool = NULL;
6135 rtx insn;
6136
6137
6138 /* Replace all literal pool references. */
6139
6140 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6141 {
6142 if (INSN_P (insn))
6143 replace_ltrel_base (&PATTERN (insn));
6144
6145 curr_pool = s390_find_pool (pool_list, insn);
6146 if (!curr_pool)
6147 continue;
6148
6149 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6150 {
6151 rtx addr, pool_ref = NULL_RTX;
6152 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6153 if (pool_ref)
6154 {
6155 if (s390_execute_label (insn))
6156 addr = s390_find_execute (curr_pool, insn);
6157 else
6158 addr = s390_find_constant (curr_pool,
6159 get_pool_constant (pool_ref),
6160 get_pool_mode (pool_ref));
6161
6162 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6163 INSN_CODE (insn) = -1;
6164 }
6165 }
6166 }
6167
6168 /* Dump out all literal pools. */
6169
6170 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6171 s390_dump_pool (curr_pool, 0);
6172
6173 /* Free pool list. */
6174
6175 while (pool_list)
6176 {
6177 struct constant_pool *next = pool_list->next;
6178 s390_free_pool (pool_list);
6179 pool_list = next;
6180 }
6181 }
6182
6183 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6184 We have decided we cannot use this list, so revert all changes
6185 to the current function that were done by s390_chunkify_start. */
6186
6187 static void
6188 s390_chunkify_cancel (struct constant_pool *pool_list)
6189 {
6190 struct constant_pool *curr_pool = NULL;
6191 rtx insn;
6192
6193 /* Remove all pool placeholder insns. */
6194
6195 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6196 {
6197 /* Did we insert an extra barrier? Remove it. */
6198 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6199 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6200 rtx label = NEXT_INSN (curr_pool->pool_insn);
6201
6202 if (jump && GET_CODE (jump) == JUMP_INSN
6203 && barrier && GET_CODE (barrier) == BARRIER
6204 && label && GET_CODE (label) == CODE_LABEL
6205 && GET_CODE (PATTERN (jump)) == SET
6206 && SET_DEST (PATTERN (jump)) == pc_rtx
6207 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6208 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6209 {
6210 remove_insn (jump);
6211 remove_insn (barrier);
6212 remove_insn (label);
6213 }
6214
6215 remove_insn (curr_pool->pool_insn);
6216 }
6217
6218 /* Remove all base register reload insns. */
6219
6220 for (insn = get_insns (); insn; )
6221 {
6222 rtx next_insn = NEXT_INSN (insn);
6223
6224 if (GET_CODE (insn) == INSN
6225 && GET_CODE (PATTERN (insn)) == SET
6226 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6227 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6228 remove_insn (insn);
6229
6230 insn = next_insn;
6231 }
6232
6233 /* Free pool list. */
6234
6235 while (pool_list)
6236 {
6237 struct constant_pool *next = pool_list->next;
6238 s390_free_pool (pool_list);
6239 pool_list = next;
6240 }
6241 }
6242
6243
6244 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6245
6246 void
6247 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6248 {
6249 REAL_VALUE_TYPE r;
6250
6251 switch (GET_MODE_CLASS (mode))
6252 {
6253 case MODE_FLOAT:
6254 case MODE_DECIMAL_FLOAT:
6255 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6256
6257 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6258 assemble_real (r, mode, align);
6259 break;
6260
6261 case MODE_INT:
6262 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6263 break;
6264
6265 default:
6266 gcc_unreachable ();
6267 }
6268 }
6269
6270
6271 /* Return an RTL expression representing the value of the return address
6272 for the frame COUNT steps up from the current frame. FRAME is the
6273 frame pointer of that frame. */
6274
6275 rtx
6276 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6277 {
6278 int offset;
6279 rtx addr;
6280
6281 /* Without backchain, we fail for all but the current frame. */
6282
6283 if (!TARGET_BACKCHAIN && count > 0)
6284 return NULL_RTX;
6285
6286 /* For the current frame, we need to make sure the initial
6287 value of RETURN_REGNUM is actually saved. */
6288
6289 if (count == 0)
6290 {
6291 /* On non-z architectures branch splitting could overwrite r14. */
6292 if (TARGET_CPU_ZARCH)
6293 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6294 else
6295 {
6296 cfun_frame_layout.save_return_addr_p = true;
6297 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6298 }
6299 }
6300
6301 if (TARGET_PACKED_STACK)
6302 offset = -2 * UNITS_PER_WORD;
6303 else
6304 offset = RETURN_REGNUM * UNITS_PER_WORD;
6305
6306 addr = plus_constant (frame, offset);
6307 addr = memory_address (Pmode, addr);
6308 return gen_rtx_MEM (Pmode, addr);
6309 }
6310
6311 /* Return an RTL expression representing the back chain stored in
6312 the current stack frame. */
6313
6314 rtx
6315 s390_back_chain_rtx (void)
6316 {
6317 rtx chain;
6318
6319 gcc_assert (TARGET_BACKCHAIN);
6320
6321 if (TARGET_PACKED_STACK)
6322 chain = plus_constant (stack_pointer_rtx,
6323 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6324 else
6325 chain = stack_pointer_rtx;
6326
6327 chain = gen_rtx_MEM (Pmode, chain);
6328 return chain;
6329 }
6330
6331 /* Find first call clobbered register unused in a function.
6332 This could be used as base register in a leaf function
6333 or for holding the return address before the epilogue. */
6334
6335 static int
6336 find_unused_clobbered_reg (void)
6337 {
6338 int i;
6339 for (i = 0; i < 6; i++)
6340 if (!df_regs_ever_live_p (i))
6341 return i;
6342 return 0;
6343 }
6344
6345
6346 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6347 clobbered hard regs in SETREG. */
6348
6349 static void
6350 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
6351 {
6352 int *regs_ever_clobbered = (int *)data;
6353 unsigned int i, regno;
6354 enum machine_mode mode = GET_MODE (setreg);
6355
6356 if (GET_CODE (setreg) == SUBREG)
6357 {
6358 rtx inner = SUBREG_REG (setreg);
6359 if (!GENERAL_REG_P (inner))
6360 return;
6361 regno = subreg_regno (setreg);
6362 }
6363 else if (GENERAL_REG_P (setreg))
6364 regno = REGNO (setreg);
6365 else
6366 return;
6367
6368 for (i = regno;
6369 i < regno + HARD_REGNO_NREGS (regno, mode);
6370 i++)
6371 regs_ever_clobbered[i] = 1;
6372 }
6373
6374 /* Walks through all basic blocks of the current function looking
6375 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6376 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6377 each of those regs. */
6378
6379 static void
6380 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6381 {
6382 basic_block cur_bb;
6383 rtx cur_insn;
6384 unsigned int i;
6385
6386 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6387
6388 /* For non-leaf functions we have to consider all call clobbered regs to be
6389 clobbered. */
6390 if (!current_function_is_leaf)
6391 {
6392 for (i = 0; i < 16; i++)
6393 regs_ever_clobbered[i] = call_really_used_regs[i];
6394 }
6395
6396 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6397 this work is done by liveness analysis (mark_regs_live_at_end).
6398 Special care is needed for functions containing landing pads. Landing pads
6399 may use the eh registers, but the code which sets these registers is not
6400 contained in that function. Hence s390_regs_ever_clobbered is not able to
6401 deal with this automatically. */
6402 if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
6403 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6404 if (current_function_calls_eh_return
6405 || (cfun->machine->has_landing_pad_p
6406 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
6407 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6408
6409 /* For nonlocal gotos all call-saved registers have to be saved.
6410 This flag is also set for the unwinding code in libgcc.
6411 See expand_builtin_unwind_init. For regs_ever_live this is done by
6412 reload. */
6413 if (current_function_has_nonlocal_label)
6414 for (i = 0; i < 16; i++)
6415 if (!call_really_used_regs[i])
6416 regs_ever_clobbered[i] = 1;
6417
6418 FOR_EACH_BB (cur_bb)
6419 {
6420 FOR_BB_INSNS (cur_bb, cur_insn)
6421 {
6422 if (INSN_P (cur_insn))
6423 note_stores (PATTERN (cur_insn),
6424 s390_reg_clobbered_rtx,
6425 regs_ever_clobbered);
6426 }
6427 }
6428 }
6429
6430 /* Determine the frame area which actually has to be accessed
6431 in the function epilogue. The values are stored at the
6432 given pointers AREA_BOTTOM (address of the lowest used stack
6433 address) and AREA_TOP (address of the first item which does
6434 not belong to the stack frame). */
6435
6436 static void
6437 s390_frame_area (int *area_bottom, int *area_top)
6438 {
6439 int b, t;
6440 int i;
6441
6442 b = INT_MAX;
6443 t = INT_MIN;
6444
6445 if (cfun_frame_layout.first_restore_gpr != -1)
6446 {
6447 b = (cfun_frame_layout.gprs_offset
6448 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6449 t = b + (cfun_frame_layout.last_restore_gpr
6450 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6451 }
6452
6453 if (TARGET_64BIT && cfun_save_high_fprs_p)
6454 {
6455 b = MIN (b, cfun_frame_layout.f8_offset);
6456 t = MAX (t, (cfun_frame_layout.f8_offset
6457 + cfun_frame_layout.high_fprs * 8));
6458 }
6459
6460 if (!TARGET_64BIT)
6461 for (i = 2; i < 4; i++)
6462 if (cfun_fpr_bit_p (i))
6463 {
6464 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6465 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6466 }
6467
6468 *area_bottom = b;
6469 *area_top = t;
6470 }
6471
6472 /* Fill cfun->machine with info about register usage of current function.
6473 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6474
6475 static void
6476 s390_register_info (int clobbered_regs[])
6477 {
6478 int i, j;
6479
6480 /* FPRs 8 - 15 are call-saved in the 64-bit ABI. */
6481 cfun_frame_layout.fpr_bitmap = 0;
6482 cfun_frame_layout.high_fprs = 0;
6483 if (TARGET_64BIT)
6484 for (i = 24; i < 32; i++)
6485 if (df_regs_ever_live_p (i) && !global_regs[i])
6486 {
6487 cfun_set_fpr_bit (i - 16);
6488 cfun_frame_layout.high_fprs++;
6489 }
6490
6491 /* Find first and last gpr to be saved. We trust regs_ever_live
6492 data, except that we don't save and restore global registers.
6493
6494 Also, all registers with special meaning to the compiler need
6495 to be handled separately. */
6496
6497 s390_regs_ever_clobbered (clobbered_regs);
6498
6499 for (i = 0; i < 16; i++)
6500 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6501
6502 if (frame_pointer_needed)
6503 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6504
6505 if (flag_pic)
6506 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6507 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
6508
6509 clobbered_regs[BASE_REGNUM]
6510 |= (cfun->machine->base_reg
6511 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
6512
6513 clobbered_regs[RETURN_REGNUM]
6514 |= (!current_function_is_leaf
6515 || TARGET_TPF_PROFILING
6516 || cfun->machine->split_branches_pending_p
6517 || cfun_frame_layout.save_return_addr_p
6518 || current_function_calls_eh_return
6519 || current_function_stdarg);
6520
6521 clobbered_regs[STACK_POINTER_REGNUM]
6522 |= (!current_function_is_leaf
6523 || TARGET_TPF_PROFILING
6524 || cfun_save_high_fprs_p
6525 || get_frame_size () > 0
6526 || current_function_calls_alloca
6527 || current_function_stdarg);
6528
6529 for (i = 6; i < 16; i++)
6530 if (df_regs_ever_live_p (i) || clobbered_regs[i])
6531 break;
6532 for (j = 15; j > i; j--)
6533 if (df_regs_ever_live_p (j) || clobbered_regs[j])
6534 break;
6535
6536 if (i == 16)
6537 {
6538 /* Nothing to save/restore. */
6539 cfun_frame_layout.first_save_gpr_slot = -1;
6540 cfun_frame_layout.last_save_gpr_slot = -1;
6541 cfun_frame_layout.first_save_gpr = -1;
6542 cfun_frame_layout.first_restore_gpr = -1;
6543 cfun_frame_layout.last_save_gpr = -1;
6544 cfun_frame_layout.last_restore_gpr = -1;
6545 }
6546 else
6547 {
6548 /* Save slots for gprs from i to j. */
6549 cfun_frame_layout.first_save_gpr_slot = i;
6550 cfun_frame_layout.last_save_gpr_slot = j;
6551
6552 for (i = cfun_frame_layout.first_save_gpr_slot;
6553 i < cfun_frame_layout.last_save_gpr_slot + 1;
6554 i++)
6555 if (clobbered_regs[i])
6556 break;
6557
6558 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6559 if (clobbered_regs[j])
6560 break;
6561
6562 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6563 {
6564 /* Nothing to save/restore. */
6565 cfun_frame_layout.first_save_gpr = -1;
6566 cfun_frame_layout.first_restore_gpr = -1;
6567 cfun_frame_layout.last_save_gpr = -1;
6568 cfun_frame_layout.last_restore_gpr = -1;
6569 }
6570 else
6571 {
6572 /* Save / Restore from gpr i to j. */
6573 cfun_frame_layout.first_save_gpr = i;
6574 cfun_frame_layout.first_restore_gpr = i;
6575 cfun_frame_layout.last_save_gpr = j;
6576 cfun_frame_layout.last_restore_gpr = j;
6577 }
6578 }
6579
6580 if (current_function_stdarg)
6581 {
6582 /* Varargs functions need to save gprs 2 to 6. */
6583 if (cfun->va_list_gpr_size
6584 && current_function_args_info.gprs < GP_ARG_NUM_REG)
6585 {
6586 int min_gpr = current_function_args_info.gprs;
6587 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6588 if (max_gpr > GP_ARG_NUM_REG)
6589 max_gpr = GP_ARG_NUM_REG;
6590
6591 if (cfun_frame_layout.first_save_gpr == -1
6592 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
6593 {
6594 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6595 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
6596 }
6597
6598 if (cfun_frame_layout.last_save_gpr == -1
6599 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
6600 {
6601 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
6602 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
6603 }
6604 }
6605
6606 /* Mark f0, f2 for 31 bit and f0-f6 for 64 bit to be saved. */
6607 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
6608 && current_function_args_info.fprs < FP_ARG_NUM_REG)
6609 {
6610 int min_fpr = current_function_args_info.fprs;
6611 int max_fpr = min_fpr + cfun->va_list_fpr_size;
6612 if (max_fpr > FP_ARG_NUM_REG)
6613 max_fpr = FP_ARG_NUM_REG;
6614
6615 /* ??? This is currently required to ensure proper location
6616 of the fpr save slots within the va_list save area. */
6617 if (TARGET_PACKED_STACK)
6618 min_fpr = 0;
6619
6620 for (i = min_fpr; i < max_fpr; i++)
6621 cfun_set_fpr_bit (i);
6622 }
6623 }
6624
6625 if (!TARGET_64BIT)
6626 for (i = 2; i < 4; i++)
6627 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
6628 cfun_set_fpr_bit (i);
6629 }
6630
6631 /* Fill cfun->machine with info about frame of current function. */
6632
6633 static void
6634 s390_frame_info (void)
6635 {
6636 int i;
6637
6638 cfun_frame_layout.frame_size = get_frame_size ();
6639 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6640 fatal_error ("total size of local variables exceeds architecture limit");
6641
6642 if (!TARGET_PACKED_STACK)
6643 {
6644 cfun_frame_layout.backchain_offset = 0;
6645 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6646 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6647 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6648 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
6649 * UNITS_PER_WORD);
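/* For example, under the 64-bit ABI (UNITS_PER_WORD == 8) this standard
   layout yields f0_offset == 128, f4_offset == 144 and
   gprs_offset == first_save_gpr_slot * 8.  */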
6650 }
6651 else if (TARGET_BACKCHAIN) /* kernel stack layout */
6652 {
6653 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6654 - UNITS_PER_WORD);
6655 cfun_frame_layout.gprs_offset
6656 = (cfun_frame_layout.backchain_offset
6657 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
6658 * UNITS_PER_WORD);
6659
6660 if (TARGET_64BIT)
6661 {
6662 cfun_frame_layout.f4_offset
6663 = (cfun_frame_layout.gprs_offset
6664 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6665
6666 cfun_frame_layout.f0_offset
6667 = (cfun_frame_layout.f4_offset
6668 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6669 }
6670 else
6671 {
6672 /* On 31 bit we have to take care of the alignment of the
6673 floating point register save slots to provide fastest access. */
6674 cfun_frame_layout.f0_offset
6675 = ((cfun_frame_layout.gprs_offset
6676 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6677 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6678
6679 cfun_frame_layout.f4_offset
6680 = (cfun_frame_layout.f0_offset
6681 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6682 }
6683 }
6684 else /* no backchain */
6685 {
6686 cfun_frame_layout.f4_offset
6687 = (STACK_POINTER_OFFSET
6688 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6689
6690 cfun_frame_layout.f0_offset
6691 = (cfun_frame_layout.f4_offset
6692 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6693
6694 cfun_frame_layout.gprs_offset
6695 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6696 }
6697
6698 if (current_function_is_leaf
6699 && !TARGET_TPF_PROFILING
6700 && cfun_frame_layout.frame_size == 0
6701 && !cfun_save_high_fprs_p
6702 && !current_function_calls_alloca
6703 && !current_function_stdarg)
6704 return;
6705
6706 if (!TARGET_PACKED_STACK)
6707 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
6708 + current_function_outgoing_args_size
6709 + cfun_frame_layout.high_fprs * 8);
6710 else
6711 {
6712 if (TARGET_BACKCHAIN)
6713 cfun_frame_layout.frame_size += UNITS_PER_WORD;
6714
6715 /* No alignment trouble here because f8-f15 are only saved under
6716 64 bit. */
6717 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6718 cfun_frame_layout.f4_offset),
6719 cfun_frame_layout.gprs_offset)
6720 - cfun_frame_layout.high_fprs * 8);
6721
6722 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6723
6724 for (i = 0; i < 8; i++)
6725 if (cfun_fpr_bit_p (i))
6726 cfun_frame_layout.frame_size += 8;
6727
6728 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6729
6730 /* If an odd number of gprs has to be saved under 31 bit, we have to adjust
6731 the frame size to maintain the 8-byte alignment of stack frames. */
6732 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6733 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6734 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6735
6736 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6737 }
6738 }
6739
6740 /* Generate frame layout. Fills in register and frame data for the current
6741 function in cfun->machine. This routine can be called multiple times;
6742 it will re-do the complete frame layout every time. */
6743
6744 static void
6745 s390_init_frame_layout (void)
6746 {
6747 HOST_WIDE_INT frame_size;
6748 int base_used;
6749 int clobbered_regs[16];
6750
6751 /* On S/390 machines, we may need to perform branch splitting, which
6752 will require both base and return address register. We have no
6753 choice but to assume we're going to need them until right at the
6754 end of the machine dependent reorg phase. */
6755 if (!TARGET_CPU_ZARCH)
6756 cfun->machine->split_branches_pending_p = true;
6757
6758 do
6759 {
6760 frame_size = cfun_frame_layout.frame_size;
6761
6762 /* Try to predict whether we'll need the base register. */
6763 base_used = cfun->machine->split_branches_pending_p
6764 || current_function_uses_const_pool
6765 || (!DISP_IN_RANGE (frame_size)
6766 && !CONST_OK_FOR_K (frame_size));
6767
6768 /* Decide which register to use as literal pool base. In small
6769 leaf functions, try to use an unused call-clobbered register
6770 as base register to avoid save/restore overhead. */
6771 if (!base_used)
6772 cfun->machine->base_reg = NULL_RTX;
6773 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
6774 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6775 else
6776 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6777
6778 s390_register_info (clobbered_regs);
6779 s390_frame_info ();
6780 }
6781 while (frame_size != cfun_frame_layout.frame_size);
6782 }
6783
6784 /* Update frame layout. Recompute actual register save data based on
6785 current info and update regs_ever_live for the special registers.
6786 May be called multiple times, but may never cause *more* registers
6787 to be saved than s390_init_frame_layout allocated room for. */
6788
6789 static void
6790 s390_update_frame_layout (void)
6791 {
6792 int clobbered_regs[16];
6793
6794 s390_register_info (clobbered_regs);
6795
6796 df_set_regs_ever_live (BASE_REGNUM,
6797 clobbered_regs[BASE_REGNUM] ? true : false);
6798 df_set_regs_ever_live (RETURN_REGNUM,
6799 clobbered_regs[RETURN_REGNUM] ? true : false);
6800 df_set_regs_ever_live (STACK_POINTER_REGNUM,
6801 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
6802
6803 if (cfun->machine->base_reg)
6804 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
6805 }
6806
6807 /* Return true if it is legal to put a value with MODE into REGNO. */
6808
6809 bool
6810 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
6811 {
6812 switch (REGNO_REG_CLASS (regno))
6813 {
6814 case FP_REGS:
6815 if (REGNO_PAIR_OK (regno, mode))
6816 {
6817 if (mode == SImode || mode == DImode)
6818 return true;
6819
6820 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
6821 return true;
6822 }
6823 break;
6824 case ADDR_REGS:
6825 if (FRAME_REGNO_P (regno) && mode == Pmode)
6826 return true;
6827
6828 /* fallthrough */
6829 case GENERAL_REGS:
6830 if (REGNO_PAIR_OK (regno, mode))
6831 {
6832 if (TARGET_64BIT
6833 || (mode != TFmode && mode != TCmode && mode != TDmode))
6834 return true;
6835 }
6836 break;
6837 case CC_REGS:
6838 if (GET_MODE_CLASS (mode) == MODE_CC)
6839 return true;
6840 break;
6841 case ACCESS_REGS:
6842 if (REGNO_PAIR_OK (regno, mode))
6843 {
6844 if (mode == SImode || mode == Pmode)
6845 return true;
6846 }
6847 break;
6848 default:
6849 return false;
6850 }
6851
6852 return false;
6853 }
6854
6855 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6856
6857 bool
6858 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
6859 {
6860 /* Once we've decided upon a register to use as base register, it must
6861 no longer be used for any other purpose. */
6862 if (cfun->machine->base_reg)
6863 if (REGNO (cfun->machine->base_reg) == old_reg
6864 || REGNO (cfun->machine->base_reg) == new_reg)
6865 return false;
6866
6867 return true;
6868 }
6869
6870 /* Maximum number of registers to represent a value of mode MODE
6871 in a register of class CLASS. */
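/* For example, a TFmode value (16 bytes) occupies
   (16 + 8 - 1) / 8 == 2 registers of class FP_REGS.  */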
6872
6873 int
6874 s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
6875 {
6876 switch (class)
6877 {
6878 case FP_REGS:
6879 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6880 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
6881 else
6882 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
6883 case ACCESS_REGS:
6884 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
6885 default:
6886 break;
6887 }
6888 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6889 }
6890
6891 /* Return true if register FROM can be eliminated via register TO. */
6892
6893 bool
6894 s390_can_eliminate (int from, int to)
6895 {
6896 /* On zSeries machines, we have not marked the base register as fixed.
6897 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
6898 If a function requires the base register, we say here that this
6899 elimination cannot be performed. This will cause reload to free
6900 up the base register (as if it were fixed). On the other hand,
6901 if the current function does *not* require the base register, we
6902 say here the elimination succeeds, which in turn allows reload
6903 to allocate the base register for any other purpose. */
6904 if (from == BASE_REGNUM && to == BASE_REGNUM)
6905 {
6906 if (TARGET_CPU_ZARCH)
6907 {
6908 s390_init_frame_layout ();
6909 return cfun->machine->base_reg == NULL_RTX;
6910 }
6911
6912 return false;
6913 }
6914
6915 /* Everything else must point into the stack frame. */
6916 gcc_assert (to == STACK_POINTER_REGNUM
6917 || to == HARD_FRAME_POINTER_REGNUM);
6918
6919 gcc_assert (from == FRAME_POINTER_REGNUM
6920 || from == ARG_POINTER_REGNUM
6921 || from == RETURN_ADDRESS_POINTER_REGNUM);
6922
6923 /* Make sure we actually saved the return address. */
6924 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6925 if (!current_function_calls_eh_return
6926 && !current_function_stdarg
6927 && !cfun_frame_layout.save_return_addr_p)
6928 return false;
6929
6930 return true;
6931 }
6932
6933 /* Return the offset between registers FROM and TO initially after the prologue. */
6934
6935 HOST_WIDE_INT
6936 s390_initial_elimination_offset (int from, int to)
6937 {
6938 HOST_WIDE_INT offset;
6939 int index;
6940
6941 /* ??? Why are we called for non-eliminable pairs? */
6942 if (!s390_can_eliminate (from, to))
6943 return 0;
6944
6945 switch (from)
6946 {
6947 case FRAME_POINTER_REGNUM:
6948 offset = (get_frame_size()
6949 + STACK_POINTER_OFFSET
6950 + current_function_outgoing_args_size);
6951 break;
6952
6953 case ARG_POINTER_REGNUM:
6954 s390_init_frame_layout ();
6955 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6956 break;
6957
6958 case RETURN_ADDRESS_POINTER_REGNUM:
6959 s390_init_frame_layout ();
6960 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
6961 gcc_assert (index >= 0);
6962 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6963 offset += index * UNITS_PER_WORD;
6964 break;
6965
6966 case BASE_REGNUM:
6967 offset = 0;
6968 break;
6969
6970 default:
6971 gcc_unreachable ();
6972 }
6973
6974 return offset;
6975 }
6976
6977 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6978 to register BASE. Return generated insn. */
6979
6980 static rtx
6981 save_fpr (rtx base, int offset, int regnum)
6982 {
6983 rtx addr;
6984 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6985
6986 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
6987 set_mem_alias_set (addr, get_varargs_alias_set ());
6988 else
6989 set_mem_alias_set (addr, get_frame_alias_set ());
6990
6991 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6992 }
6993
6994 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6995 to register BASE. Return generated insn. */
6996
6997 static rtx
6998 restore_fpr (rtx base, int offset, int regnum)
6999 {
7000 rtx addr;
7001 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7002 set_mem_alias_set (addr, get_frame_alias_set ());
7003
7004 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
7005 }
7006
7007 /* Generate insn to save registers FIRST to LAST into
7008 the register save area located at offset OFFSET
7009 relative to register BASE. */
7010
7011 static rtx
7012 save_gprs (rtx base, int offset, int first, int last)
7013 {
7014 rtx addr, insn, note;
7015 int i;
7016
7017 addr = plus_constant (base, offset);
7018 addr = gen_rtx_MEM (Pmode, addr);
7019
7020 set_mem_alias_set (addr, get_frame_alias_set ());
7021
7022 /* Special-case single register. */
7023 if (first == last)
7024 {
7025 if (TARGET_64BIT)
7026 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7027 else
7028 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7029
7030 RTX_FRAME_RELATED_P (insn) = 1;
7031 return insn;
7032 }
7033
7034
7035 insn = gen_store_multiple (addr,
7036 gen_rtx_REG (Pmode, first),
7037 GEN_INT (last - first + 1));
7038
7039 if (first <= 6 && current_function_stdarg)
7040 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7041 {
7042 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7043
7044 if (first + i <= 6)
7045 set_mem_alias_set (mem, get_varargs_alias_set ());
7046 }
7047
7048 /* We need to set the FRAME_RELATED flag on all SETs
7049 inside the store-multiple pattern.
7050
7051 However, we must not emit DWARF records for registers 2..5
7052 if they are stored for use by variable arguments ...
7053
7054 ??? Unfortunately, it is not enough to simply not set the
7055 FRAME_RELATED flags for those SETs, because the first SET
7056 of the PARALLEL is always treated as if it had the flag
7057 set, even if it does not. Therefore we emit a new pattern
7058 without those registers as REG_FRAME_RELATED_EXPR note. */
7059
7060 if (first >= 6)
7061 {
7062 rtx pat = PATTERN (insn);
7063
7064 for (i = 0; i < XVECLEN (pat, 0); i++)
7065 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7066 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7067
7068 RTX_FRAME_RELATED_P (insn) = 1;
7069 }
7070 else if (last >= 6)
7071 {
7072 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
7073 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7074 gen_rtx_REG (Pmode, 6),
7075 GEN_INT (last - 6 + 1));
7076 note = PATTERN (note);
7077
7078 REG_NOTES (insn) =
7079 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7080 note, REG_NOTES (insn));
7081
7082 for (i = 0; i < XVECLEN (note, 0); i++)
7083 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7084 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7085
7086 RTX_FRAME_RELATED_P (insn) = 1;
7087 }
7088
7089 return insn;
7090 }
7091
7092 /* Generate insn to restore registers FIRST to LAST from
7093 the register save area located at offset OFFSET
7094 relative to register BASE. */
7095
7096 static rtx
7097 restore_gprs (rtx base, int offset, int first, int last)
7098 {
7099 rtx addr, insn;
7100
7101 addr = plus_constant (base, offset);
7102 addr = gen_rtx_MEM (Pmode, addr);
7103 set_mem_alias_set (addr, get_frame_alias_set ());
7104
7105 /* Special-case single register. */
7106 if (first == last)
7107 {
7108 if (TARGET_64BIT)
7109 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7110 else
7111 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7112
7113 return insn;
7114 }
7115
7116 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7117 addr,
7118 GEN_INT (last - first + 1));
7119 return insn;
7120 }
7121
7122 /* Return insn sequence to load the GOT register. */
7123
7124 static GTY(()) rtx got_symbol;
7125 rtx
7126 s390_load_got (void)
7127 {
7128 rtx insns;
7129
7130 if (!got_symbol)
7131 {
7132 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7133 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7134 }
7135
7136 start_sequence ();
7137
7138 if (TARGET_CPU_ZARCH)
7139 {
7140 emit_move_insn (pic_offset_table_rtx, got_symbol);
7141 }
7142 else
7143 {
7144 rtx offset;
7145
7146 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7147 UNSPEC_LTREL_OFFSET);
7148 offset = gen_rtx_CONST (Pmode, offset);
7149 offset = force_const_mem (Pmode, offset);
7150
7151 emit_move_insn (pic_offset_table_rtx, offset);
7152
7153 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7154 UNSPEC_LTREL_BASE);
7155 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7156
7157 emit_move_insn (pic_offset_table_rtx, offset);
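/* At this point pic_offset_table_rtx (r12) holds the literal-pool-relative
   offset of the GOT plus the literal pool base, i.e. the address of the
   GOT itself; the UNSPEC_LTREL_* wrappers are resolved later by the
   literal pool machinery.  */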
7158 }
7159
7160 insns = get_insns ();
7161 end_sequence ();
7162 return insns;
7163 }
7164
7165 /* Expand the prologue into a bunch of separate insns. */
7166
7167 void
7168 s390_emit_prologue (void)
7169 {
7170 rtx insn, addr;
7171 rtx temp_reg;
7172 int i;
7173 int offset;
7174 int next_fpr = 0;
7175
7176 /* Complete frame layout. */
7177
7178 s390_update_frame_layout ();
7179
7180 /* Annotate all constant pool references to let the scheduler know
7181 they implicitly use the base register. */
7182
7183 push_topmost_sequence ();
7184
7185 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7186 if (INSN_P (insn))
7187 {
7188 annotate_constant_pool_refs (&PATTERN (insn));
7189 df_insn_rescan (insn);
7190 }
7191
7192 pop_topmost_sequence ();
7193
7194 /* Choose the best register to use for temporaries within the prologue.
7195 See below for why TPF must use register 1. */
7196
7197 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7198 && !current_function_is_leaf
7199 && !TARGET_TPF_PROFILING)
7200 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7201 else
7202 temp_reg = gen_rtx_REG (Pmode, 1);
7203
7204 /* Save call saved gprs. */
7205 if (cfun_frame_layout.first_save_gpr != -1)
7206 {
7207 insn = save_gprs (stack_pointer_rtx,
7208 cfun_frame_layout.gprs_offset +
7209 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7210 - cfun_frame_layout.first_save_gpr_slot),
7211 cfun_frame_layout.first_save_gpr,
7212 cfun_frame_layout.last_save_gpr);
7213 emit_insn (insn);
7214 }
7215
7216 /* Dummy insn to mark literal pool slot. */
7217
7218 if (cfun->machine->base_reg)
7219 emit_insn (gen_main_pool (cfun->machine->base_reg));
7220
7221 offset = cfun_frame_layout.f0_offset;
7222
7223 /* Save f0 and f2. */
7224 for (i = 0; i < 2; i++)
7225 {
7226 if (cfun_fpr_bit_p (i))
7227 {
7228 save_fpr (stack_pointer_rtx, offset, i + 16);
7229 offset += 8;
7230 }
7231 else if (!TARGET_PACKED_STACK)
7232 offset += 8;
7233 }
7234
7235 /* Save f4 and f6. */
7236 offset = cfun_frame_layout.f4_offset;
7237 for (i = 2; i < 4; i++)
7238 {
7239 if (cfun_fpr_bit_p (i))
7240 {
7241 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7242 offset += 8;
7243
7244 /* If f4 and f6 are call clobbered they are saved due to stdarg and
7245 therefore are not frame related. */
7246 if (!call_really_used_regs[i + 16])
7247 RTX_FRAME_RELATED_P (insn) = 1;
7248 }
7249 else if (!TARGET_PACKED_STACK)
7250 offset += 8;
7251 }
7252
7253 if (TARGET_PACKED_STACK
7254 && cfun_save_high_fprs_p
7255 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7256 {
7257 offset = (cfun_frame_layout.f8_offset
7258 + (cfun_frame_layout.high_fprs - 1) * 8);
7259
7260 for (i = 15; i > 7 && offset >= 0; i--)
7261 if (cfun_fpr_bit_p (i))
7262 {
7263 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7264
7265 RTX_FRAME_RELATED_P (insn) = 1;
7266 offset -= 8;
7267 }
7268 if (offset >= cfun_frame_layout.f8_offset)
7269 next_fpr = i + 16;
7270 }
7271
7272 if (!TARGET_PACKED_STACK)
7273 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7274
7275 /* Decrement stack pointer. */
7276
7277 if (cfun_frame_layout.frame_size > 0)
7278 {
7279 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7280
7281 if (s390_stack_size)
7282 {
7283 HOST_WIDE_INT stack_guard;
7284
7285 if (s390_stack_guard)
7286 stack_guard = s390_stack_guard;
7287 else
7288 {
7289 /* If no value for the stack guard is provided, the smallest power of 2
7290 no smaller than the current frame size is chosen. */
7291 stack_guard = 1;
7292 while (stack_guard < cfun_frame_layout.frame_size)
7293 stack_guard <<= 1;
7294 }
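/* For instance, a frame size of 0x1200 bytes yields a stack_guard
   of 0x2000.  */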
7295
7296 if (cfun_frame_layout.frame_size >= s390_stack_size)
7297 {
7298 warning (0, "frame size of function %qs is "
7299 HOST_WIDE_INT_PRINT_DEC
7300 " bytes exceeding user provided stack limit of "
7301 HOST_WIDE_INT_PRINT_DEC " bytes. "
7302 "An unconditional trap is added.",
7303 current_function_name(), cfun_frame_layout.frame_size,
7304 s390_stack_size);
7305 emit_insn (gen_trap ());
7306 }
7307 else
7308 {
7309 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7310 & ~(stack_guard - 1));
7311 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7312 GEN_INT (stack_check_mask));
7313 if (TARGET_64BIT)
7314 gen_cmpdi (t, const0_rtx);
7315 else
7316 gen_cmpsi (t, const0_rtx);
7317
7318 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7319 gen_rtx_REG (CCmode,
7320 CC_REGNUM),
7321 const0_rtx),
7322 const0_rtx));
7323 }
7324 }
7325
7326 if (s390_warn_framesize > 0
7327 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7328 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7329 current_function_name (), cfun_frame_layout.frame_size);
7330
7331 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7332 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7333
7334 /* Save incoming stack pointer into temp reg. */
7335 if (TARGET_BACKCHAIN || next_fpr)
7336 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7337
7338 /* Subtract frame size from stack pointer. */
7339
7340 if (DISP_IN_RANGE (INTVAL (frame_off)))
7341 {
7342 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7343 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7344 frame_off));
7345 insn = emit_insn (insn);
7346 }
7347 else
7348 {
7349 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7350 frame_off = force_const_mem (Pmode, frame_off);
7351
7352 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7353 annotate_constant_pool_refs (&PATTERN (insn));
7354 }
7355
7356 RTX_FRAME_RELATED_P (insn) = 1;
7357 REG_NOTES (insn) =
7358 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7359 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7360 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7361 GEN_INT (-cfun_frame_layout.frame_size))),
7362 REG_NOTES (insn));
7363
7364 /* Set backchain. */
7365
7366 if (TARGET_BACKCHAIN)
7367 {
7368 if (cfun_frame_layout.backchain_offset)
7369 addr = gen_rtx_MEM (Pmode,
7370 plus_constant (stack_pointer_rtx,
7371 cfun_frame_layout.backchain_offset));
7372 else
7373 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7374 set_mem_alias_set (addr, get_frame_alias_set ());
7375 insn = emit_insn (gen_move_insn (addr, temp_reg));
7376 }
7377
7378 /* If we support asynchronous exceptions (e.g. for Java),
7379 we need to make sure the backchain pointer is set up
7380 before any possibly trapping memory access. */
7381
7382 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7383 {
7384 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7385 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
7386 }
7387 }
7388
7389 /* Save fprs 8 - 15 (64 bit ABI). */
7390
7391 if (cfun_save_high_fprs_p && next_fpr)
7392 {
7393 insn = emit_insn (gen_add2_insn (temp_reg,
7394 GEN_INT (cfun_frame_layout.f8_offset)));
7395
7396 offset = 0;
7397
7398 for (i = 24; i <= next_fpr; i++)
7399 if (cfun_fpr_bit_p (i - 16))
7400 {
7401 rtx addr = plus_constant (stack_pointer_rtx,
7402 cfun_frame_layout.frame_size
7403 + cfun_frame_layout.f8_offset
7404 + offset);
7405
7406 insn = save_fpr (temp_reg, offset, i);
7407 offset += 8;
7408 RTX_FRAME_RELATED_P (insn) = 1;
7409 REG_NOTES (insn) =
7410 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7411 gen_rtx_SET (VOIDmode,
7412 gen_rtx_MEM (DFmode, addr),
7413 gen_rtx_REG (DFmode, i)),
7414 REG_NOTES (insn));
7415 }
7416 }
7417
7418 /* Set frame pointer, if needed. */
7419
7420 if (frame_pointer_needed)
7421 {
7422 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7423 RTX_FRAME_RELATED_P (insn) = 1;
7424 }
7425
7426 /* Set up got pointer, if needed. */
7427
7428 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
7429 {
7430 rtx insns = s390_load_got ();
7431
7432 for (insn = insns; insn; insn = NEXT_INSN (insn))
7433 annotate_constant_pool_refs (&PATTERN (insn));
7434
7435 emit_insn (insns);
7436 }
7437
7438 if (TARGET_TPF_PROFILING)
7439 {
7440 /* Generate a BAS instruction to serve as a function
7441 entry intercept to facilitate the use of tracing
7442 algorithms located at the branch target. */
7443 emit_insn (gen_prologue_tpf ());
7444
7445 /* Emit a blockage here so that all code
7446 lies between the profiling mechanisms. */
7447 emit_insn (gen_blockage ());
7448 }
7449 }
7450
7451 /* Expand the epilogue into a bunch of separate insns. */
7452
7453 void
7454 s390_emit_epilogue (bool sibcall)
7455 {
7456 rtx frame_pointer, return_reg;
7457 int area_bottom, area_top, offset = 0;
7458 int next_offset;
7459 rtvec p;
7460 int i;
7461
7462 if (TARGET_TPF_PROFILING)
7463 {
7464
7465 /* Generate a BAS instruction to serve as a function
7466 entry intercept to facilitate the use of tracing
7467 algorithms located at the branch target. */
7468
7469 /* Emit a blockage here so that all code
7470 lies between the profiling mechanisms. */
7471 emit_insn (gen_blockage ());
7472
7473 emit_insn (gen_epilogue_tpf ());
7474 }
7475
7476 /* Check whether to use frame or stack pointer for restore. */
7477
7478 frame_pointer = (frame_pointer_needed
7479 ? hard_frame_pointer_rtx : stack_pointer_rtx);
7480
7481 s390_frame_area (&area_bottom, &area_top);
7482
7483 /* Check whether we can access the register save area.
7484 If not, increment the frame pointer as required. */
7485
7486 if (area_top <= area_bottom)
7487 {
7488 /* Nothing to restore. */
7489 }
7490 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7491 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7492 {
7493 /* Area is in range. */
7494 offset = cfun_frame_layout.frame_size;
7495 }
7496 else
7497 {
7498 rtx insn, frame_off;
7499
7500 offset = area_bottom < 0 ? -area_bottom : 0;
7501 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7502
7503 if (DISP_IN_RANGE (INTVAL (frame_off)))
7504 {
7505 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7506 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7507 insn = emit_insn (insn);
7508 }
7509 else
7510 {
7511 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7512 frame_off = force_const_mem (Pmode, frame_off);
7513
7514 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7515 annotate_constant_pool_refs (&PATTERN (insn));
7516 }
7517 }
7518
7519 /* Restore call saved fprs. */
7520
7521 if (TARGET_64BIT)
7522 {
7523 if (cfun_save_high_fprs_p)
7524 {
7525 next_offset = cfun_frame_layout.f8_offset;
7526 for (i = 24; i < 32; i++)
7527 {
7528 if (cfun_fpr_bit_p (i - 16))
7529 {
7530 restore_fpr (frame_pointer,
7531 offset + next_offset, i);
7532 next_offset += 8;
7533 }
7534 }
7535 }
7536
7537 }
7538 else
7539 {
7540 next_offset = cfun_frame_layout.f4_offset;
7541 for (i = 18; i < 20; i++)
7542 {
7543 if (cfun_fpr_bit_p (i - 16))
7544 {
7545 restore_fpr (frame_pointer,
7546 offset + next_offset, i);
7547 next_offset += 8;
7548 }
7549 else if (!TARGET_PACKED_STACK)
7550 next_offset += 8;
7551 }
7552
7553 }
7554
7555 /* Return register. */
7556
7557 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7558
7559 /* Restore call saved gprs. */
7560
7561 if (cfun_frame_layout.first_restore_gpr != -1)
7562 {
7563 rtx insn, addr;
7564 int i;
7565
7566 /* Check for global registers and save them
7567 to the stack locations from which they will be restored. */
7568
7569 for (i = cfun_frame_layout.first_restore_gpr;
7570 i <= cfun_frame_layout.last_restore_gpr;
7571 i++)
7572 {
7573 /* These registers are special and need to be
7574 restored in any case. */
7575 if (i == STACK_POINTER_REGNUM
7576 || i == RETURN_REGNUM
7577 || i == BASE_REGNUM
7578 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7579 continue;
7580
7581 if (global_regs[i])
7582 {
7583 addr = plus_constant (frame_pointer,
7584 offset + cfun_frame_layout.gprs_offset
7585 + (i - cfun_frame_layout.first_save_gpr_slot)
7586 * UNITS_PER_WORD);
7587 addr = gen_rtx_MEM (Pmode, addr);
7588 set_mem_alias_set (addr, get_frame_alias_set ());
7589 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7590 }
7591 }
7592
7593 if (! sibcall)
7594 {
7595 /* Fetch the return address from the stack before the load multiple;
7596 this is beneficial for scheduling. */
7597
7598 if (cfun_frame_layout.save_return_addr_p
7599 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7600 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7601 {
7602 int return_regnum = find_unused_clobbered_reg();
7603 if (!return_regnum)
7604 return_regnum = 4;
7605 return_reg = gen_rtx_REG (Pmode, return_regnum);
7606
7607 addr = plus_constant (frame_pointer,
7608 offset + cfun_frame_layout.gprs_offset
7609 + (RETURN_REGNUM
7610 - cfun_frame_layout.first_save_gpr_slot)
7611 * UNITS_PER_WORD);
7612 addr = gen_rtx_MEM (Pmode, addr);
7613 set_mem_alias_set (addr, get_frame_alias_set ());
7614 emit_move_insn (return_reg, addr);
7615 }
7616 }
7617
7618 insn = restore_gprs (frame_pointer,
7619 offset + cfun_frame_layout.gprs_offset
7620 + (cfun_frame_layout.first_restore_gpr
7621 - cfun_frame_layout.first_save_gpr_slot)
7622 * UNITS_PER_WORD,
7623 cfun_frame_layout.first_restore_gpr,
7624 cfun_frame_layout.last_restore_gpr);
7625 emit_insn (insn);
7626 }
7627
7628 if (! sibcall)
7629 {
7630
7631 /* Return to caller. */
7632
7633 p = rtvec_alloc (2);
7634
7635 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7636 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7637 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7638 }
7639 }
7640
7641
7642 /* Return the size in bytes of a function argument of
7643 type TYPE and/or mode MODE. At least one of TYPE or
7644 MODE must be specified. */
7645
7646 static int
7647 s390_function_arg_size (enum machine_mode mode, const_tree type)
7648 {
7649 if (type)
7650 return int_size_in_bytes (type);
7651
7652 /* No type info available for some library calls ... */
7653 if (mode != BLKmode)
7654 return GET_MODE_SIZE (mode);
7655
7656 /* If we have neither type nor mode, abort. */
7657 gcc_unreachable ();
7658 }
7659
7660 /* Return true if a function argument of type TYPE and mode MODE
7661 is to be passed in a floating-point register, if available. */
7662
7663 static bool
7664 s390_function_arg_float (enum machine_mode mode, tree type)
7665 {
7666 int size = s390_function_arg_size (mode, type);
7667 if (size > 8)
7668 return false;
7669
7670 /* Soft-float changes the ABI: no floating-point registers are used. */
7671 if (TARGET_SOFT_FLOAT)
7672 return false;
7673
7674 /* No type info available for some library calls ... */
7675 if (!type)
7676 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
7677
7678 /* The ABI says that record types with a single member are treated
7679 just like that member would be. */
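/* E.g. an argument of type 'struct { double d; }' is passed like a
   plain double, i.e. in a floating-point register when available.  */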
7680 while (TREE_CODE (type) == RECORD_TYPE)
7681 {
7682 tree field, single = NULL_TREE;
7683
7684 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7685 {
7686 if (TREE_CODE (field) != FIELD_DECL)
7687 continue;
7688
7689 if (single == NULL_TREE)
7690 single = TREE_TYPE (field);
7691 else
7692 return false;
7693 }
7694
7695 if (single == NULL_TREE)
7696 return false;
7697 else
7698 type = single;
7699 }
7700
7701 return TREE_CODE (type) == REAL_TYPE;
7702 }
7703
7704 /* Return true if a function argument of type TYPE and mode MODE
7705 is to be passed in an integer register, or a pair of integer
7706 registers, if available. */
7707
7708 static bool
7709 s390_function_arg_integer (enum machine_mode mode, tree type)
7710 {
7711 int size = s390_function_arg_size (mode, type);
7712 if (size > 8)
7713 return false;
7714
7715 /* No type info available for some library calls ... */
7716 if (!type)
7717 return GET_MODE_CLASS (mode) == MODE_INT
7718 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
7719
7720 /* We accept small integral (and similar) types. */
7721 if (INTEGRAL_TYPE_P (type)
7722 || POINTER_TYPE_P (type)
7723 || TREE_CODE (type) == OFFSET_TYPE
7724 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7725 return true;
7726
7727 /* We also accept structs of size 1, 2, 4, 8 that are not
7728 passed in floating-point registers. */
7729 if (AGGREGATE_TYPE_P (type)
7730 && exact_log2 (size) >= 0
7731 && !s390_function_arg_float (mode, type))
7732 return true;
7733
7734 return false;
7735 }
7736
7737 /* Return 1 if a function argument of type TYPE and mode MODE
7738 is to be passed by reference. The ABI specifies that only
7739 structures of size 1, 2, 4, or 8 bytes are passed by value,
7740 all other structures (and complex numbers) are passed by
7741 reference. */
7742
7743 static bool
7744 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7745 enum machine_mode mode, const_tree type,
7746 bool named ATTRIBUTE_UNUSED)
7747 {
7748 int size = s390_function_arg_size (mode, type);
7749 if (size > 8)
7750 return true;
7751
7752 if (type)
7753 {
7754 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7755 return 1;
7756
7757 if (TREE_CODE (type) == COMPLEX_TYPE
7758 || TREE_CODE (type) == VECTOR_TYPE)
7759 return 1;
7760 }
7761
7762 return 0;
7763 }
7764
7765 /* Update the data in CUM to advance over an argument of mode MODE and
7766 data type TYPE. (TYPE is null for libcalls where that information
7767 may not be available.) The boolean NAMED specifies whether the
7768 argument is a named argument (as opposed to an unnamed argument
7769 matching an ellipsis). */
7770
7771 void
7772 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7773 tree type, int named ATTRIBUTE_UNUSED)
7774 {
7775 if (s390_function_arg_float (mode, type))
7776 {
7777 cum->fprs += 1;
7778 }
7779 else if (s390_function_arg_integer (mode, type))
7780 {
7781 int size = s390_function_arg_size (mode, type);
7782 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7783 }
7784 else
7785 gcc_unreachable ();
7786 }
7787
7788 /* Define where to put the arguments to a function.
7789 Value is zero to push the argument on the stack,
7790 or a hard register in which to store the argument.
7791
7792 MODE is the argument's machine mode.
7793 TYPE is the data type of the argument (as a tree).
7794 This is null for libcalls where that information may
7795 not be available.
7796 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7797 the preceding args and about the function being called.
7798 NAMED is nonzero if this argument is a named parameter
7799 (otherwise it is an extra parameter matching an ellipsis).
7800
7801 On S/390, we use general purpose registers 2 through 6 to
7802 pass integer, pointer, and certain structure arguments, and
7803 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7804 to pass floating point arguments. All remaining arguments
7805 are pushed to the stack. */
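/* As an illustration, for a call 'void f (int a, double d, int b)' the
   arguments would end up in r2, f0 and r3 respectively, since integer
   and floating-point arguments are counted independently.  */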
7806
7807 rtx
7808 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7809 int named ATTRIBUTE_UNUSED)
7810 {
7811 if (s390_function_arg_float (mode, type))
7812 {
7813 if (cum->fprs + 1 > FP_ARG_NUM_REG)
7814 return 0;
7815 else
7816 return gen_rtx_REG (mode, cum->fprs + 16);
7817 }
7818 else if (s390_function_arg_integer (mode, type))
7819 {
7820 int size = s390_function_arg_size (mode, type);
7821 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7822
7823 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
7824 return 0;
7825 else
7826 return gen_rtx_REG (mode, cum->gprs + 2);
7827 }
7828
7829 /* After the real arguments, expand_call calls us once again
7830 with a void_type_node type. Whatever we return here is
7831 passed as operand 2 to the call expanders.
7832
7833 We don't need this feature ... */
7834 else if (type == void_type_node)
7835 return const0_rtx;
7836
7837 gcc_unreachable ();
7838 }
7839
7840 /* Return true if return values of type TYPE should be returned
7841 in a memory buffer whose address is passed by the caller as
7842 hidden first argument. */
7843
7844 static bool
7845 s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
7846 {
7847 /* We accept small integral (and similar) types. */
7848 if (INTEGRAL_TYPE_P (type)
7849 || POINTER_TYPE_P (type)
7850 || TREE_CODE (type) == OFFSET_TYPE
7851 || TREE_CODE (type) == REAL_TYPE)
7852 return int_size_in_bytes (type) > 8;
7853
7854 /* Aggregates and similar constructs are always returned
7855 in memory. */
7856 if (AGGREGATE_TYPE_P (type)
7857 || TREE_CODE (type) == COMPLEX_TYPE
7858 || TREE_CODE (type) == VECTOR_TYPE)
7859 return true;
7860
7861 /* ??? We get called on all sorts of random stuff from
7862 aggregate_value_p. We can't abort, but it's not clear
7863 what's safe to return. Pretend it's a struct I guess. */
7864 return true;
7865 }
7866
7867 /* Define where to return a (scalar) value of type TYPE.
7868 If TYPE is null, define where to return a (scalar)
7869 value of mode MODE from a libcall. */
7870
7871 rtx
7872 s390_function_value (const_tree type, enum machine_mode mode)
7873 {
7874 if (type)
7875 {
7876 int unsignedp = TYPE_UNSIGNED (type);
7877 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7878 }
7879
7880 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
7881 gcc_assert (GET_MODE_SIZE (mode) <= 8);
7882
7883 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
7884 return gen_rtx_REG (mode, 16);
7885 else
7886 return gen_rtx_REG (mode, 2);
7887 }
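/* For example, an int, long, or pointer result is returned in %r2, and a
   float or double result in %f0 when hardware floating point is enabled;
   with soft-float a floating point result comes back in GPRs instead
   (%r2, or the pair %r2/%r3 for a double on 31-bit).  */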
7888
7889
7890 /* Create and return the va_list datatype.
7891
7892 On S/390, va_list is an array type equivalent to
7893
7894 typedef struct __va_list_tag
7895 {
7896 long __gpr;
7897 long __fpr;
7898 void *__overflow_arg_area;
7899 void *__reg_save_area;
7900 } va_list[1];
7901
7902 where __gpr and __fpr hold the number of general purpose
7903 or floating point arguments used up to now, respectively,
7904 __overflow_arg_area points to the stack location of the
7905 next argument passed on the stack, and __reg_save_area
7906 always points to the start of the register area in the
7907 call frame of the current function. The function prologue
7908 saves all registers used for argument passing into this
7909 area if the function uses variable arguments. */
7910
7911 static tree
7912 s390_build_builtin_va_list (void)
7913 {
7914 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7915
7916 record = lang_hooks.types.make_type (RECORD_TYPE);
7917
7918 type_decl =
7919 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7920
7921 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7922 long_integer_type_node);
7923 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7924 long_integer_type_node);
7925 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7926 ptr_type_node);
7927 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7928 ptr_type_node);
7929
7930 va_list_gpr_counter_field = f_gpr;
7931 va_list_fpr_counter_field = f_fpr;
7932
7933 DECL_FIELD_CONTEXT (f_gpr) = record;
7934 DECL_FIELD_CONTEXT (f_fpr) = record;
7935 DECL_FIELD_CONTEXT (f_ovf) = record;
7936 DECL_FIELD_CONTEXT (f_sav) = record;
7937
7938 TREE_CHAIN (record) = type_decl;
7939 TYPE_NAME (record) = type_decl;
7940 TYPE_FIELDS (record) = f_gpr;
7941 TREE_CHAIN (f_gpr) = f_fpr;
7942 TREE_CHAIN (f_fpr) = f_ovf;
7943 TREE_CHAIN (f_ovf) = f_sav;
7944
7945 layout_type (record);
7946
7947 /* The correct type is an array type of one element. */
7948 return build_array_type (record, build_index_type (size_zero_node));
7949 }
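/* The resulting va_list object is therefore 16 bytes on 31-bit and 32
   bytes on 64-bit (two longs plus two pointers); wrapping it in a
   one-element array makes va_list decay to a pointer, so it is passed
   to callees by reference.  */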
7950
7951 /* Implement va_start by filling the va_list structure VALIST.
7952 STDARG_P is always true, and ignored.
7953 NEXTARG points to the first anonymous stack argument.
7954
7955 The following global variables are used to initialize
7956 the va_list structure:
7957
7958 current_function_args_info:
7959 holds number of gprs and fprs used for named arguments.
7960 current_function_arg_offset_rtx:
7961 holds the offset of the first anonymous stack argument
7962 (relative to the virtual arg pointer). */
7963
7964 void
7965 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7966 {
7967 HOST_WIDE_INT n_gpr, n_fpr;
7968 int off;
7969 tree f_gpr, f_fpr, f_ovf, f_sav;
7970 tree gpr, fpr, ovf, sav, t;
7971
7972 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7973 f_fpr = TREE_CHAIN (f_gpr);
7974 f_ovf = TREE_CHAIN (f_fpr);
7975 f_sav = TREE_CHAIN (f_ovf);
7976
7977 valist = build_va_arg_indirect_ref (valist);
7978 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7979 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7980 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7981 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7982
7983 /* Count number of gp and fp argument registers used. */
7984
7985 n_gpr = current_function_args_info.gprs;
7986 n_fpr = current_function_args_info.fprs;
7987
7988 if (cfun->va_list_gpr_size)
7989 {
7990 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
7991 build_int_cst (NULL_TREE, n_gpr));
7992 TREE_SIDE_EFFECTS (t) = 1;
7993 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7994 }
7995
7996 if (cfun->va_list_fpr_size)
7997 {
7998 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
7999 build_int_cst (NULL_TREE, n_fpr));
8000 TREE_SIDE_EFFECTS (t) = 1;
8001 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8002 }
8003
8004 /* Find the overflow area. */
8005 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8006 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8007 {
8008 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8009
8010 off = INTVAL (current_function_arg_offset_rtx);
8011 off = off < 0 ? 0 : off;
8012 if (TARGET_DEBUG_ARG)
8013 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8014 (int)n_gpr, (int)n_fpr, off);
8015
8016 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
8017
8018 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
8019 TREE_SIDE_EFFECTS (t) = 1;
8020 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8021 }
8022
8023 /* Find the register save area. */
8024 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8025 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8026 {
8027 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
8028 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8029 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
8030
8031 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
8032 TREE_SIDE_EFFECTS (t) = 1;
8033 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8034 }
8035 }
8036
8037 /* Implement va_arg by updating the va_list structure
8038 VALIST as required to retrieve an argument of type
8039 TYPE, and returning that argument.
8040
8041 Generates code equivalent to:
8042
8043 if (integral value) {
8044 if ((size <= 4 && args.gpr < 5) ||
8045 (size > 4 && args.gpr < 4))
8046 ret = args.reg_save_area[args.gpr+8]
8047 else
8048 ret = *args.overflow_arg_area++;
8049 } else if (float value) {
8050 if (args.fpr < 2)
8051 ret = args.reg_save_area[args.fpr+64]
8052 else
8053 ret = *args.overflow_arg_area++;
8054 } else if (aggregate value) {
8055 if (args.gpr < 5)
8056 ret = *args.reg_save_area[args.gpr]
8057 else
8058 ret = **args.overflow_arg_area++;
8059 } */
8060
8061 static tree
8062 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8063 tree *post_p ATTRIBUTE_UNUSED)
8064 {
8065 tree f_gpr, f_fpr, f_ovf, f_sav;
8066 tree gpr, fpr, ovf, sav, reg, t, u;
8067 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8068 tree lab_false, lab_over, addr;
8069
8070 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8071 f_fpr = TREE_CHAIN (f_gpr);
8072 f_ovf = TREE_CHAIN (f_fpr);
8073 f_sav = TREE_CHAIN (f_ovf);
8074
8075 valist = build_va_arg_indirect_ref (valist);
8076 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8077 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8078 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8079 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8080
8081 size = int_size_in_bytes (type);
8082
8083 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8084 {
8085 if (TARGET_DEBUG_ARG)
8086 {
8087 fprintf (stderr, "va_arg: aggregate type");
8088 debug_tree (type);
8089 }
8090
8091 /* Aggregates are passed by reference. */
8092 indirect_p = 1;
8093 reg = gpr;
8094 n_reg = 1;
8095
8096 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8097 will be added by s390_frame_info because for va_args an even number
8098 of gprs always has to be saved (r2-r15 = 14 regs). */
8099 sav_ofs = 2 * UNITS_PER_WORD;
8100 sav_scale = UNITS_PER_WORD;
8101 size = UNITS_PER_WORD;
8102 max_reg = GP_ARG_NUM_REG - n_reg;
8103 }
8104 else if (s390_function_arg_float (TYPE_MODE (type), type))
8105 {
8106 if (TARGET_DEBUG_ARG)
8107 {
8108 fprintf (stderr, "va_arg: float type");
8109 debug_tree (type);
8110 }
8111
8112 /* FP args go in FP registers, if present. */
8113 indirect_p = 0;
8114 reg = fpr;
8115 n_reg = 1;
8116 sav_ofs = 16 * UNITS_PER_WORD;
8117 sav_scale = 8;
8118 max_reg = FP_ARG_NUM_REG - n_reg;
8119 }
8120 else
8121 {
8122 if (TARGET_DEBUG_ARG)
8123 {
8124 fprintf (stderr, "va_arg: other type");
8125 debug_tree (type);
8126 }
8127
8128 /* Otherwise into GP registers. */
8129 indirect_p = 0;
8130 reg = gpr;
8131 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8132
8133 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8134 will be added by s390_frame_info because for va_args an even number
8135 of gprs always has to be saved (r2-r15 = 14 regs). */
8136 sav_ofs = 2 * UNITS_PER_WORD;
8137
8138 if (size < UNITS_PER_WORD)
8139 sav_ofs += UNITS_PER_WORD - size;
8140
8141 sav_scale = UNITS_PER_WORD;
8142 max_reg = GP_ARG_NUM_REG - n_reg;
8143 }
8144
8145 /* Pull the value out of the saved registers ... */
8146
8147 lab_false = create_artificial_label ();
8148 lab_over = create_artificial_label ();
8149 addr = create_tmp_var (ptr_type_node, "addr");
8150 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
8151
8152 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8153 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8154 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8155 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8156 gimplify_and_add (t, pre_p);
8157
8158 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8159 size_int (sav_ofs));
8160 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8161 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8162 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
8163
8164 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8165 gimplify_and_add (t, pre_p);
8166
8167 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8168 gimplify_and_add (t, pre_p);
8169
8170 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8171 append_to_statement_list (t, pre_p);
8172
8173
8174 /* ... Otherwise out of the overflow area. */
8175
8176 t = ovf;
8177 if (size < UNITS_PER_WORD)
8178 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8179 size_int (UNITS_PER_WORD - size));
8180
8181 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8182
8183 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8184 gimplify_and_add (u, pre_p);
8185
8186 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8187 size_int (size));
8188 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
8189 gimplify_and_add (t, pre_p);
8190
8191 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8192 append_to_statement_list (t, pre_p);
8193
8194
8195 /* Increment register save count. */
8196
8197 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8198 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8199 gimplify_and_add (u, pre_p);
8200
8201 if (indirect_p)
8202 {
8203 t = build_pointer_type (build_pointer_type (type));
8204 addr = fold_convert (t, addr);
8205 addr = build_va_arg_indirect_ref (addr);
8206 }
8207 else
8208 {
8209 t = build_pointer_type (type);
8210 addr = fold_convert (t, addr);
8211 }
8212
8213 return build_va_arg_indirect_ref (addr);
8214 }
8215
8216
8217 /* Builtins. */
8218
8219 enum s390_builtin
8220 {
8221 S390_BUILTIN_THREAD_POINTER,
8222 S390_BUILTIN_SET_THREAD_POINTER,
8223
8224 S390_BUILTIN_max
8225 };
8226
8227 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8228 CODE_FOR_get_tp_64,
8229 CODE_FOR_set_tp_64
8230 };
8231
8232 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8233 CODE_FOR_get_tp_31,
8234 CODE_FOR_set_tp_31
8235 };
8236
8237 static void
8238 s390_init_builtins (void)
8239 {
8240 tree ftype;
8241
8242 ftype = build_function_type (ptr_type_node, void_list_node);
8243 add_builtin_function ("__builtin_thread_pointer", ftype,
8244 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8245 NULL, NULL_TREE);
8246
8247 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8248 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8249 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8250 NULL, NULL_TREE);
8251 }
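/* These builtins can be used directly from C code, e.g.:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   They expand to the get_tp/set_tp insn patterns selected below,
   depending on whether we generate 31-bit or 64-bit code.  */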
8252
8253 /* Expand an expression EXP that calls a built-in function,
8254 with result going to TARGET if that's convenient
8255 (and in mode MODE if that's convenient).
8256 SUBTARGET may be used as the target for computing one of EXP's operands.
8257 IGNORE is nonzero if the value is to be ignored. */
8258
8259 static rtx
8260 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8261 enum machine_mode mode ATTRIBUTE_UNUSED,
8262 int ignore ATTRIBUTE_UNUSED)
8263 {
8264 #define MAX_ARGS 2
8265
8266 unsigned int const *code_for_builtin =
8267 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8268
8269 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8270 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8271 enum insn_code icode;
8272 rtx op[MAX_ARGS], pat;
8273 int arity;
8274 bool nonvoid;
8275 tree arg;
8276 call_expr_arg_iterator iter;
8277
8278 if (fcode >= S390_BUILTIN_max)
8279 internal_error ("bad builtin fcode");
8280 icode = code_for_builtin[fcode];
8281 if (icode == 0)
8282 internal_error ("bad builtin fcode");
8283
8284 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8285
8286 arity = 0;
8287 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8288 {
8289 const struct insn_operand_data *insn_op;
8290
8291 if (arg == error_mark_node)
8292 return NULL_RTX;
8293 if (arity >= MAX_ARGS)
8294 return NULL_RTX;
8295
8296 insn_op = &insn_data[icode].operand[arity + nonvoid];
8297
8298 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8299
8300 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8301 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8302 arity++;
8303 }
8304
8305 if (nonvoid)
8306 {
8307 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8308 if (!target
8309 || GET_MODE (target) != tmode
8310 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8311 target = gen_reg_rtx (tmode);
8312 }
8313
8314 switch (arity)
8315 {
8316 case 0:
8317 pat = GEN_FCN (icode) (target);
8318 break;
8319 case 1:
8320 if (nonvoid)
8321 pat = GEN_FCN (icode) (target, op[0]);
8322 else
8323 pat = GEN_FCN (icode) (op[0]);
8324 break;
8325 case 2:
8326 pat = GEN_FCN (icode) (target, op[0], op[1]);
8327 break;
8328 default:
8329 gcc_unreachable ();
8330 }
8331 if (!pat)
8332 return NULL_RTX;
8333 emit_insn (pat);
8334
8335 if (nonvoid)
8336 return target;
8337 else
8338 return const0_rtx;
8339 }
8340
8341
8342 /* Output assembly code for the trampoline template to
8343 stdio stream FILE.
8344
8345 On S/390, we use gpr 1 internally in the trampoline code;
8346 gpr 0 is used to hold the static chain. */
8347
8348 void
8349 s390_trampoline_template (FILE *file)
8350 {
8351 rtx op[2];
8352 op[0] = gen_rtx_REG (Pmode, 0);
8353 op[1] = gen_rtx_REG (Pmode, 1);
8354
8355 if (TARGET_64BIT)
8356 {
8357 output_asm_insn ("basr\t%1,0", op);
8358 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8359 output_asm_insn ("br\t%1", op);
8360 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8361 }
8362 else
8363 {
8364 output_asm_insn ("basr\t%1,0", op);
8365 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8366 output_asm_insn ("br\t%1", op);
8367 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8368 }
8369 }
8370
8371 /* Emit RTL insns to initialize the variable parts of a trampoline.
8372 FNADDR is an RTX for the address of the function's pure code.
8373 CXT is an RTX for the static chain value for the function. */
8374
8375 void
8376 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8377 {
8378 emit_move_insn (gen_rtx_MEM (Pmode,
8379 memory_address (Pmode,
8380 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8381 emit_move_insn (gen_rtx_MEM (Pmode,
8382 memory_address (Pmode,
8383 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
8384 }
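/* The complete trampoline thus consists of the template emitted above
   followed by two variable slots: on 31-bit the static chain value is
   stored at offset 8 and the function address at offset 12, on 64-bit
   they are stored at offsets 16 and 24 respectively, matching the
   displacements used by the lm/lmg instruction in the template.  */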
8385
8386 /* Output assembler code to FILE to increment profiler label # LABELNO
8387 for profiling a function entry. */
8388
8389 void
8390 s390_function_profiler (FILE *file, int labelno)
8391 {
8392 rtx op[7];
8393
8394 char label[128];
8395 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8396
8397 fprintf (file, "# function profiler \n");
8398
8399 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8400 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8401 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8402
8403 op[2] = gen_rtx_REG (Pmode, 1);
8404 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8405 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8406
8407 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8408 if (flag_pic)
8409 {
8410 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8411 op[4] = gen_rtx_CONST (Pmode, op[4]);
8412 }
8413
8414 if (TARGET_64BIT)
8415 {
8416 output_asm_insn ("stg\t%0,%1", op);
8417 output_asm_insn ("larl\t%2,%3", op);
8418 output_asm_insn ("brasl\t%0,%4", op);
8419 output_asm_insn ("lg\t%0,%1", op);
8420 }
8421 else if (!flag_pic)
8422 {
8423 op[6] = gen_label_rtx ();
8424
8425 output_asm_insn ("st\t%0,%1", op);
8426 output_asm_insn ("bras\t%2,%l6", op);
8427 output_asm_insn (".long\t%4", op);
8428 output_asm_insn (".long\t%3", op);
8429 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8430 output_asm_insn ("l\t%0,0(%2)", op);
8431 output_asm_insn ("l\t%2,4(%2)", op);
8432 output_asm_insn ("basr\t%0,%0", op);
8433 output_asm_insn ("l\t%0,%1", op);
8434 }
8435 else
8436 {
8437 op[5] = gen_label_rtx ();
8438 op[6] = gen_label_rtx ();
8439
8440 output_asm_insn ("st\t%0,%1", op);
8441 output_asm_insn ("bras\t%2,%l6", op);
8442 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8443 output_asm_insn (".long\t%4-%l5", op);
8444 output_asm_insn (".long\t%3-%l5", op);
8445 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8446 output_asm_insn ("lr\t%0,%2", op);
8447 output_asm_insn ("a\t%0,0(%2)", op);
8448 output_asm_insn ("a\t%2,4(%2)", op);
8449 output_asm_insn ("basr\t%0,%0", op);
8450 output_asm_insn ("l\t%0,%1", op);
8451 }
8452 }
8453
8454 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8455 into its SYMBOL_REF_FLAGS. */
8456
8457 static void
8458 s390_encode_section_info (tree decl, rtx rtl, int first)
8459 {
8460 default_encode_section_info (decl, rtl, first);
8461
8462 /* If a variable has a forced alignment to < 2 bytes, mark it with
8463 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
8464 if (TREE_CODE (decl) == VAR_DECL
8465 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8466 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8467 }
8468
8469 /* Output thunk to FILE that implements a C++ virtual function call (with
8470 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8471 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8472 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8473 relative to the resulting this pointer. */
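/* In C terms the generated thunk behaves like the following sketch
   (illustrative only; the real adjustment is emitted as assembly below):

     this += DELTA;
     if (VCALL_OFFSET)
       this += *(ptrdiff_t *) (*(char **) this + VCALL_OFFSET);
     goto FUNCTION;  */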
8474
8475 static void
8476 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8477 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8478 tree function)
8479 {
8480 rtx op[10];
8481 int nonlocal = 0;
8482
8483 /* Operand 0 is the target function. */
8484 op[0] = XEXP (DECL_RTL (function), 0);
8485 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8486 {
8487 nonlocal = 1;
8488 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8489 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8490 op[0] = gen_rtx_CONST (Pmode, op[0]);
8491 }
8492
8493 /* Operand 1 is the 'this' pointer. */
8494 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8495 op[1] = gen_rtx_REG (Pmode, 3);
8496 else
8497 op[1] = gen_rtx_REG (Pmode, 2);
8498
8499 /* Operand 2 is the delta. */
8500 op[2] = GEN_INT (delta);
8501
8502 /* Operand 3 is the vcall_offset. */
8503 op[3] = GEN_INT (vcall_offset);
8504
8505 /* Operand 4 is the temporary register. */
8506 op[4] = gen_rtx_REG (Pmode, 1);
8507
8508 /* Operands 5 to 8 can be used as labels. */
8509 op[5] = NULL_RTX;
8510 op[6] = NULL_RTX;
8511 op[7] = NULL_RTX;
8512 op[8] = NULL_RTX;
8513
8514 /* Operand 9 can be used for temporary register. */
8515 op[9] = NULL_RTX;
8516
8517 /* Generate code. */
8518 if (TARGET_64BIT)
8519 {
8520 /* Setup literal pool pointer if required. */
8521 if ((!DISP_IN_RANGE (delta)
8522 && !CONST_OK_FOR_K (delta)
8523 && !CONST_OK_FOR_Os (delta))
8524 || (!DISP_IN_RANGE (vcall_offset)
8525 && !CONST_OK_FOR_K (vcall_offset)
8526 && !CONST_OK_FOR_Os (vcall_offset)))
8527 {
8528 op[5] = gen_label_rtx ();
8529 output_asm_insn ("larl\t%4,%5", op);
8530 }
8531
8532 /* Add DELTA to this pointer. */
8533 if (delta)
8534 {
8535 if (CONST_OK_FOR_J (delta))
8536 output_asm_insn ("la\t%1,%2(%1)", op);
8537 else if (DISP_IN_RANGE (delta))
8538 output_asm_insn ("lay\t%1,%2(%1)", op);
8539 else if (CONST_OK_FOR_K (delta))
8540 output_asm_insn ("aghi\t%1,%2", op);
8541 else if (CONST_OK_FOR_Os (delta))
8542 output_asm_insn ("agfi\t%1,%2", op);
8543 else
8544 {
8545 op[6] = gen_label_rtx ();
8546 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8547 }
8548 }
8549
8550 /* Perform vcall adjustment. */
8551 if (vcall_offset)
8552 {
8553 if (DISP_IN_RANGE (vcall_offset))
8554 {
8555 output_asm_insn ("lg\t%4,0(%1)", op);
8556 output_asm_insn ("ag\t%1,%3(%4)", op);
8557 }
8558 else if (CONST_OK_FOR_K (vcall_offset))
8559 {
8560 output_asm_insn ("lghi\t%4,%3", op);
8561 output_asm_insn ("ag\t%4,0(%1)", op);
8562 output_asm_insn ("ag\t%1,0(%4)", op);
8563 }
8564 else if (CONST_OK_FOR_Os (vcall_offset))
8565 {
8566 output_asm_insn ("lgfi\t%4,%3", op);
8567 output_asm_insn ("ag\t%4,0(%1)", op);
8568 output_asm_insn ("ag\t%1,0(%4)", op);
8569 }
8570 else
8571 {
8572 op[7] = gen_label_rtx ();
8573 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8574 output_asm_insn ("ag\t%4,0(%1)", op);
8575 output_asm_insn ("ag\t%1,0(%4)", op);
8576 }
8577 }
8578
8579 /* Jump to target. */
8580 output_asm_insn ("jg\t%0", op);
8581
8582 /* Output literal pool if required. */
8583 if (op[5])
8584 {
8585 output_asm_insn (".align\t4", op);
8586 targetm.asm_out.internal_label (file, "L",
8587 CODE_LABEL_NUMBER (op[5]));
8588 }
8589 if (op[6])
8590 {
8591 targetm.asm_out.internal_label (file, "L",
8592 CODE_LABEL_NUMBER (op[6]));
8593 output_asm_insn (".long\t%2", op);
8594 }
8595 if (op[7])
8596 {
8597 targetm.asm_out.internal_label (file, "L",
8598 CODE_LABEL_NUMBER (op[7]));
8599 output_asm_insn (".long\t%3", op);
8600 }
8601 }
8602 else
8603 {
8604 /* Setup base pointer if required. */
8605 if (!vcall_offset
8606 || (!DISP_IN_RANGE (delta)
8607 && !CONST_OK_FOR_K (delta)
8608 && !CONST_OK_FOR_Os (delta))
8609 || (!DISP_IN_RANGE (vcall_offset)
8610 && !CONST_OK_FOR_K (vcall_offset)
8611 && !CONST_OK_FOR_Os (vcall_offset)))
8612 {
8613 op[5] = gen_label_rtx ();
8614 output_asm_insn ("basr\t%4,0", op);
8615 targetm.asm_out.internal_label (file, "L",
8616 CODE_LABEL_NUMBER (op[5]));
8617 }
8618
8619 /* Add DELTA to this pointer. */
8620 if (delta)
8621 {
8622 if (CONST_OK_FOR_J (delta))
8623 output_asm_insn ("la\t%1,%2(%1)", op);
8624 else if (DISP_IN_RANGE (delta))
8625 output_asm_insn ("lay\t%1,%2(%1)", op);
8626 else if (CONST_OK_FOR_K (delta))
8627 output_asm_insn ("ahi\t%1,%2", op);
8628 else if (CONST_OK_FOR_Os (delta))
8629 output_asm_insn ("afi\t%1,%2", op);
8630 else
8631 {
8632 op[6] = gen_label_rtx ();
8633 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8634 }
8635 }
8636
8637 /* Perform vcall adjustment. */
8638 if (vcall_offset)
8639 {
8640 if (CONST_OK_FOR_J (vcall_offset))
8641 {
8642 output_asm_insn ("l\t%4,0(%1)", op);
8643 output_asm_insn ("a\t%1,%3(%4)", op);
8644 }
8645 else if (DISP_IN_RANGE (vcall_offset))
8646 {
8647 output_asm_insn ("l\t%4,0(%1)", op);
8648 output_asm_insn ("ay\t%1,%3(%4)", op);
8649 }
8650 else if (CONST_OK_FOR_K (vcall_offset))
8651 {
8652 output_asm_insn ("lhi\t%4,%3", op);
8653 output_asm_insn ("a\t%4,0(%1)", op);
8654 output_asm_insn ("a\t%1,0(%4)", op);
8655 }
8656 else if (CONST_OK_FOR_Os (vcall_offset))
8657 {
8658 output_asm_insn ("iilf\t%4,%3", op);
8659 output_asm_insn ("a\t%4,0(%1)", op);
8660 output_asm_insn ("a\t%1,0(%4)", op);
8661 }
8662 else
8663 {
8664 op[7] = gen_label_rtx ();
8665 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8666 output_asm_insn ("a\t%4,0(%1)", op);
8667 output_asm_insn ("a\t%1,0(%4)", op);
8668 }
8669
8670 /* We had to clobber the base pointer register.
8671 Re-setup the base pointer (with a different base). */
8672 op[5] = gen_label_rtx ();
8673 output_asm_insn ("basr\t%4,0", op);
8674 targetm.asm_out.internal_label (file, "L",
8675 CODE_LABEL_NUMBER (op[5]));
8676 }
8677
8678 /* Jump to target. */
8679 op[8] = gen_label_rtx ();
8680
8681 if (!flag_pic)
8682 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8683 else if (!nonlocal)
8684 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8685 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8686 else if (flag_pic == 1)
8687 {
8688 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8689 output_asm_insn ("l\t%4,%0(%4)", op);
8690 }
8691 else if (flag_pic == 2)
8692 {
8693 op[9] = gen_rtx_REG (Pmode, 0);
8694 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8695 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8696 output_asm_insn ("ar\t%4,%9", op);
8697 output_asm_insn ("l\t%4,0(%4)", op);
8698 }
8699
8700 output_asm_insn ("br\t%4", op);
8701
8702 /* Output literal pool. */
8703 output_asm_insn (".align\t4", op);
8704
8705 if (nonlocal && flag_pic == 2)
8706 output_asm_insn (".long\t%0", op);
8707 if (nonlocal)
8708 {
8709 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8710 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8711 }
8712
8713 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8714 if (!flag_pic)
8715 output_asm_insn (".long\t%0", op);
8716 else
8717 output_asm_insn (".long\t%0-%5", op);
8718
8719 if (op[6])
8720 {
8721 targetm.asm_out.internal_label (file, "L",
8722 CODE_LABEL_NUMBER (op[6]));
8723 output_asm_insn (".long\t%2", op);
8724 }
8725 if (op[7])
8726 {
8727 targetm.asm_out.internal_label (file, "L",
8728 CODE_LABEL_NUMBER (op[7]));
8729 output_asm_insn (".long\t%3", op);
8730 }
8731 }
8732 }
8733
8734 static bool
8735 s390_valid_pointer_mode (enum machine_mode mode)
8736 {
8737 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8738 }
8739
8740 /* Checks whether the given CALL_EXPR would use a caller
8741 saved register. This is used to decide whether sibling call
8742 optimization could be performed on the respective function
8743 call. */
8744
8745 static bool
8746 s390_call_saved_register_used (tree call_expr)
8747 {
8748 CUMULATIVE_ARGS cum;
8749 tree parameter;
8750 enum machine_mode mode;
8751 tree type;
8752 rtx parm_rtx;
8753 int reg, i;
8754
8755 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8756
8757 for (i = 0; i < call_expr_nargs (call_expr); i++)
8758 {
8759 parameter = CALL_EXPR_ARG (call_expr, i);
8760 gcc_assert (parameter);
8761
8762 /* For an undeclared variable passed as parameter we will get
8763 an ERROR_MARK node here. */
8764 if (TREE_CODE (parameter) == ERROR_MARK)
8765 return true;
8766
8767 type = TREE_TYPE (parameter);
8768 gcc_assert (type);
8769
8770 mode = TYPE_MODE (type);
8771 gcc_assert (mode);
8772
8773 if (pass_by_reference (&cum, mode, type, true))
8774 {
8775 mode = Pmode;
8776 type = build_pointer_type (type);
8777 }
8778
8779 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8780
8781 s390_function_arg_advance (&cum, mode, type, 0);
8782
8783 if (parm_rtx && REG_P (parm_rtx))
8784 {
8785 for (reg = 0;
8786 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8787 reg++)
8788 if (! call_used_regs[reg + REGNO (parm_rtx)])
8789 return true;
8790 }
8791 }
8792 return false;
8793 }
8794
8795 /* Return true if the given call expression can be
8796 turned into a sibling call.
8797 DECL holds the declaration of the function to be called whereas
8798 EXP is the call expression itself. */
8799
8800 static bool
8801 s390_function_ok_for_sibcall (tree decl, tree exp)
8802 {
8803 /* The TPF epilogue uses register 1. */
8804 if (TARGET_TPF_PROFILING)
8805 return false;
8806
8807 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8808 which would have to be restored before the sibcall. */
8809 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
8810 return false;
8811
8812 /* Register 6 on s390 is available as an argument register but is
8813 unfortunately call-saved. This makes functions needing this register
8814 for arguments not suitable for sibcalls. */
8815 return !s390_call_saved_register_used (exp);
8816 }
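/* For example, a call passing five integer arguments uses %r2 through
   %r6; since %r6 is call-saved, s390_call_saved_register_used returns
   true and the call is not turned into a sibling call.  */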
8817
8818 /* Return the fixed registers used for condition codes. */
8819
8820 static bool
8821 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
8822 {
8823 *p1 = CC_REGNUM;
8824 *p2 = INVALID_REGNUM;
8825
8826 return true;
8827 }
8828
8829 /* This function is used by the call expanders of the machine description.
8830 It emits the call insn itself together with the necessary operations
8831 to adjust the target address and returns the emitted insn.
8832 ADDR_LOCATION is the target address rtx
8833 TLS_CALL the location of the thread-local symbol
8834 RESULT_REG the register where the result of the call should be stored
8835 RETADDR_REG the register where the return address should be stored
8836 If this parameter is NULL_RTX the call is considered
8837 to be a sibling call. */
8838
8839 rtx
8840 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8841 rtx retaddr_reg)
8842 {
8843 bool plt_call = false;
8844 rtx insn;
8845 rtx call;
8846 rtx clobber;
8847 rtvec vec;
8848
8849 /* Direct function calls need special treatment. */
8850 if (GET_CODE (addr_location) == SYMBOL_REF)
8851 {
8852 /* When calling a global routine in PIC mode, we must
8853 replace the symbol itself with the PLT stub. */
8854 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8855 {
8856 addr_location = gen_rtx_UNSPEC (Pmode,
8857 gen_rtvec (1, addr_location),
8858 UNSPEC_PLT);
8859 addr_location = gen_rtx_CONST (Pmode, addr_location);
8860 plt_call = true;
8861 }
8862
8863 /* Unless we can use the bras(l) insn, force the
8864 routine address into a register. */
8865 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8866 {
8867 if (flag_pic)
8868 addr_location = legitimize_pic_address (addr_location, 0);
8869 else
8870 addr_location = force_reg (Pmode, addr_location);
8871 }
8872 }
8873
8874 /* If it is already an indirect call or the code above moved the
8875 SYMBOL_REF to somewhere else make sure the address can be found in
8876 register 1. */
8877 if (retaddr_reg == NULL_RTX
8878 && GET_CODE (addr_location) != SYMBOL_REF
8879 && !plt_call)
8880 {
8881 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8882 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
8883 }
8884
8885 addr_location = gen_rtx_MEM (QImode, addr_location);
8886 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8887
8888 if (result_reg != NULL_RTX)
8889 call = gen_rtx_SET (VOIDmode, result_reg, call);
8890
8891 if (retaddr_reg != NULL_RTX)
8892 {
8893 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8894
8895 if (tls_call != NULL_RTX)
8896 vec = gen_rtvec (3, call, clobber,
8897 gen_rtx_USE (VOIDmode, tls_call));
8898 else
8899 vec = gen_rtvec (2, call, clobber);
8900
8901 call = gen_rtx_PARALLEL (VOIDmode, vec);
8902 }
8903
8904 insn = emit_call_insn (call);
8905
8906 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8907 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8908 {
8909 /* s390_function_ok_for_sibcall should
8910 have denied sibcalls in this case. */
8911 gcc_assert (retaddr_reg != NULL_RTX);
8912
8913 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8914 }
8915 return insn;
8916 }
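/* A normal call with a return value is thus emitted as a PARALLEL of a
   (set RESULT_REG (call ...)) and a (clobber RETADDR_REG), plus a USE of
   the TLS symbol for TLS calls, while a sibling call (RETADDR_REG ==
   NULL_RTX) is emitted as a plain CALL with an indirect target forced
   into SIBCALL_REGNUM.  */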
8917
8918 /* Implement CONDITIONAL_REGISTER_USAGE. */
8919
8920 void
8921 s390_conditional_register_usage (void)
8922 {
8923 int i;
8924
8925 if (flag_pic)
8926 {
8927 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8928 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8929 }
8930 if (TARGET_CPU_ZARCH)
8931 {
8932 fixed_regs[BASE_REGNUM] = 0;
8933 call_used_regs[BASE_REGNUM] = 0;
8934 fixed_regs[RETURN_REGNUM] = 0;
8935 call_used_regs[RETURN_REGNUM] = 0;
8936 }
8937 if (TARGET_64BIT)
8938 {
8939 for (i = 24; i < 32; i++)
8940 call_used_regs[i] = call_really_used_regs[i] = 0;
8941 }
8942 else
8943 {
8944 for (i = 18; i < 20; i++)
8945 call_used_regs[i] = call_really_used_regs[i] = 0;
8946 }
8947
8948 if (TARGET_SOFT_FLOAT)
8949 {
8950 for (i = 16; i < 32; i++)
8951 call_used_regs[i] = fixed_regs[i] = 1;
8952 }
8953 }
8954
8955 /* Corresponding function to eh_return expander. */
8956
8957 static GTY(()) rtx s390_tpf_eh_return_symbol;
8958 void
8959 s390_emit_tpf_eh_return (rtx target)
8960 {
8961 rtx insn, reg;
8962
8963 if (!s390_tpf_eh_return_symbol)
8964 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8965
8966 reg = gen_rtx_REG (Pmode, 2);
8967
8968 emit_move_insn (reg, target);
8969 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8970 gen_rtx_REG (Pmode, RETURN_REGNUM));
8971 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8972
8973 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
8974 }
8975
8976 /* Rework the prologue/epilogue to avoid saving/restoring
8977 registers unnecessarily. */
8978
8979 static void
8980 s390_optimize_prologue (void)
8981 {
8982 rtx insn, new_insn, next_insn;
8983
8984 /* Do a final recompute of the frame-related data. */
8985
8986 s390_update_frame_layout ();
8987
8988 /* If all special registers are in fact used, there's nothing we
8989 can do, so no point in walking the insn list. */
8990
8991 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
8992 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
8993 && (TARGET_CPU_ZARCH
8994 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
8995 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
8996 return;
8997
8998 /* Search for prologue/epilogue insns and replace them. */
8999
9000 for (insn = get_insns (); insn; insn = next_insn)
9001 {
9002 int first, last, off;
9003 rtx set, base, offset;
9004
9005 next_insn = NEXT_INSN (insn);
9006
9007 if (GET_CODE (insn) != INSN)
9008 continue;
9009
9010 if (GET_CODE (PATTERN (insn)) == PARALLEL
9011 && store_multiple_operation (PATTERN (insn), VOIDmode))
9012 {
9013 set = XVECEXP (PATTERN (insn), 0, 0);
9014 first = REGNO (SET_SRC (set));
9015 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9016 offset = const0_rtx;
9017 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9018 off = INTVAL (offset);
9019
9020 if (GET_CODE (base) != REG || off < 0)
9021 continue;
9022 if (cfun_frame_layout.first_save_gpr != -1
9023 && (cfun_frame_layout.first_save_gpr < first
9024 || cfun_frame_layout.last_save_gpr > last))
9025 continue;
9026 if (REGNO (base) != STACK_POINTER_REGNUM
9027 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9028 continue;
9029 if (first > BASE_REGNUM || last < BASE_REGNUM)
9030 continue;
9031
9032 if (cfun_frame_layout.first_save_gpr != -1)
9033 {
9034 new_insn = save_gprs (base,
9035 off + (cfun_frame_layout.first_save_gpr
9036 - first) * UNITS_PER_WORD,
9037 cfun_frame_layout.first_save_gpr,
9038 cfun_frame_layout.last_save_gpr);
9039 new_insn = emit_insn_before (new_insn, insn);
9040 INSN_ADDRESSES_NEW (new_insn, -1);
9041 }
9042
9043 remove_insn (insn);
9044 continue;
9045 }
9046
9047 if (cfun_frame_layout.first_save_gpr == -1
9048 && GET_CODE (PATTERN (insn)) == SET
9049 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9050 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9051 || (!TARGET_CPU_ZARCH
9052 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9053 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
9054 {
9055 set = PATTERN (insn);
9056 first = REGNO (SET_SRC (set));
9057 offset = const0_rtx;
9058 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9059 off = INTVAL (offset);
9060
9061 if (GET_CODE (base) != REG || off < 0)
9062 continue;
9063 if (REGNO (base) != STACK_POINTER_REGNUM
9064 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9065 continue;
9066
9067 remove_insn (insn);
9068 continue;
9069 }
9070
9071 if (GET_CODE (PATTERN (insn)) == PARALLEL
9072 && load_multiple_operation (PATTERN (insn), VOIDmode))
9073 {
9074 set = XVECEXP (PATTERN (insn), 0, 0);
9075 first = REGNO (SET_DEST (set));
9076 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9077 offset = const0_rtx;
9078 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9079 off = INTVAL (offset);
9080
9081 if (GET_CODE (base) != REG || off < 0)
9082 continue;
9083 if (cfun_frame_layout.first_restore_gpr != -1
9084 && (cfun_frame_layout.first_restore_gpr < first
9085 || cfun_frame_layout.last_restore_gpr > last))
9086 continue;
9087 if (REGNO (base) != STACK_POINTER_REGNUM
9088 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9089 continue;
9090 if (first > BASE_REGNUM || last < BASE_REGNUM)
9091 continue;
9092
9093 if (cfun_frame_layout.first_restore_gpr != -1)
9094 {
9095 new_insn = restore_gprs (base,
9096 off + (cfun_frame_layout.first_restore_gpr
9097 - first) * UNITS_PER_WORD,
9098 cfun_frame_layout.first_restore_gpr,
9099 cfun_frame_layout.last_restore_gpr);
9100 new_insn = emit_insn_before (new_insn, insn);
9101 INSN_ADDRESSES_NEW (new_insn, -1);
9102 }
9103
9104 remove_insn (insn);
9105 continue;
9106 }
9107
9108 if (cfun_frame_layout.first_restore_gpr == -1
9109 && GET_CODE (PATTERN (insn)) == SET
9110 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9111 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9112 || (!TARGET_CPU_ZARCH
9113 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9114 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
9115 {
9116 set = PATTERN (insn);
9117 first = REGNO (SET_DEST (set));
9118 offset = const0_rtx;
9119 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9120 off = INTVAL (offset);
9121
9122 if (GET_CODE (base) != REG || off < 0)
9123 continue;
9124 if (REGNO (base) != STACK_POINTER_REGNUM
9125 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9126 continue;
9127
9128 remove_insn (insn);
9129 continue;
9130 }
9131 }
9132 }
9133
9134 /* Perform machine-dependent processing. */
9135
9136 static void
9137 s390_reorg (void)
9138 {
9139 bool pool_overflow = false;
9140
9141 /* Make sure all splits have been performed; splits after
9142 machine_dependent_reorg might confuse insn length counts. */
9143 split_all_insns_noflow ();
9144
9145 /* From here on decomposed literal pool addresses must be accepted. */
9146 cfun->machine->decomposed_literal_pool_addresses_ok_p = true;
9147
9148 /* Install the main literal pool and the associated base
9149 register load insns.
9150
9151 In addition, there are two problematic situations we need
9152 to correct:
9153
9154 - the literal pool might be > 4096 bytes in size, so that
9155 some of its elements cannot be directly accessed
9156
9157 - a branch target might be > 64K away from the branch, so that
9158 it is not possible to use a PC-relative instruction.
9159
9160 To fix those, we split the single literal pool into multiple
9161 pool chunks, reloading the pool base register at various
9162 points throughout the function to ensure it always points to
9163 the pool chunk the following code expects, and / or replace
9164 PC-relative branches by absolute branches.
9165
9166 However, the two problems are interdependent: splitting the
9167 literal pool can move a branch further away from its target,
9168 causing the 64K limit to overflow, and on the other hand,
9169 replacing a PC-relative branch by an absolute branch means
9170 we need to put the branch target address into the literal
9171 pool, possibly causing it to overflow.
9172
9173 So, we loop trying to fix up both problems until we manage
9174 to satisfy both conditions at the same time. Note that the
9175 loop is guaranteed to terminate as every pass of the loop
9176 strictly decreases the total number of PC-relative branches
9177 in the function. (This is not completely true as there
9178 might be branch-over-pool insns introduced by chunkify_start.
9179 Those never need to be split however.) */
9180
9181 for (;;)
9182 {
9183 struct constant_pool *pool = NULL;
9184
9185 /* Collect the literal pool. */
9186 if (!pool_overflow)
9187 {
9188 pool = s390_mainpool_start ();
9189 if (!pool)
9190 pool_overflow = true;
9191 }
9192
9193 /* If literal pool overflowed, start to chunkify it. */
9194 if (pool_overflow)
9195 pool = s390_chunkify_start ();
9196
9197 /* Split out-of-range branches. If this has created new
9198 literal pool entries, cancel current chunk list and
9199 recompute it. zSeries machines have large branch
9200 instructions, so we never need to split a branch. */
9201 if (!TARGET_CPU_ZARCH && s390_split_branches ())
9202 {
9203 if (pool_overflow)
9204 s390_chunkify_cancel (pool);
9205 else
9206 s390_mainpool_cancel (pool);
9207
9208 continue;
9209 }
9210
9211 /* If we made it up to here, both conditions are satisfied.
9212 Finish up literal pool related changes. */
9213 if (pool_overflow)
9214 s390_chunkify_finish (pool);
9215 else
9216 s390_mainpool_finish (pool);
9217
9218 /* We're done splitting branches. */
9219 cfun->machine->split_branches_pending_p = false;
9220 break;
9221 }
9222
9223 /* Generate out-of-pool execute target insns. */
9224 if (TARGET_CPU_ZARCH)
9225 {
9226 rtx insn, label, target;
9227
9228 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9229 {
9230 label = s390_execute_label (insn);
9231 if (!label)
9232 continue;
9233
9234 gcc_assert (label != const0_rtx);
9235
9236 target = emit_label (XEXP (label, 0));
9237 INSN_ADDRESSES_NEW (target, -1);
9238
9239 target = emit_insn (s390_execute_target (insn));
9240 INSN_ADDRESSES_NEW (target, -1);
9241 }
9242 }
9243
9244 /* Try to optimize prologue and epilogue further. */
9245 s390_optimize_prologue ();
9246 }
9247
9248
9249 /* Initialize GCC target structure. */
9250
9251 #undef TARGET_ASM_ALIGNED_HI_OP
9252 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
9253 #undef TARGET_ASM_ALIGNED_DI_OP
9254 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
9255 #undef TARGET_ASM_INTEGER
9256 #define TARGET_ASM_INTEGER s390_assemble_integer
9257
9258 #undef TARGET_ASM_OPEN_PAREN
9259 #define TARGET_ASM_OPEN_PAREN ""
9260
9261 #undef TARGET_ASM_CLOSE_PAREN
9262 #define TARGET_ASM_CLOSE_PAREN ""
9263
9264 #undef TARGET_DEFAULT_TARGET_FLAGS
9265 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
9266 #undef TARGET_HANDLE_OPTION
9267 #define TARGET_HANDLE_OPTION s390_handle_option
9268
9269 #undef TARGET_ENCODE_SECTION_INFO
9270 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
9271
9272 #ifdef HAVE_AS_TLS
9273 #undef TARGET_HAVE_TLS
9274 #define TARGET_HAVE_TLS true
9275 #endif
9276 #undef TARGET_CANNOT_FORCE_CONST_MEM
9277 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
9278
9279 #undef TARGET_DELEGITIMIZE_ADDRESS
9280 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
9281
9282 #undef TARGET_RETURN_IN_MEMORY
9283 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
9284
9285 #undef TARGET_INIT_BUILTINS
9286 #define TARGET_INIT_BUILTINS s390_init_builtins
9287 #undef TARGET_EXPAND_BUILTIN
9288 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
9289
9290 #undef TARGET_ASM_OUTPUT_MI_THUNK
9291 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
9292 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
9293 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
9294
9295 #undef TARGET_SCHED_ADJUST_PRIORITY
9296 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
9297 #undef TARGET_SCHED_ISSUE_RATE
9298 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
9299 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
9300 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
9301
9302 #undef TARGET_CANNOT_COPY_INSN_P
9303 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
9304 #undef TARGET_RTX_COSTS
9305 #define TARGET_RTX_COSTS s390_rtx_costs
9306 #undef TARGET_ADDRESS_COST
9307 #define TARGET_ADDRESS_COST s390_address_cost
9308
9309 #undef TARGET_MACHINE_DEPENDENT_REORG
9310 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
9311
9312 #undef TARGET_VALID_POINTER_MODE
9313 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
9314
9315 #undef TARGET_BUILD_BUILTIN_VA_LIST
9316 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
9317 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
9318 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
9319
9320 #undef TARGET_PROMOTE_FUNCTION_ARGS
9321 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
9322 #undef TARGET_PROMOTE_FUNCTION_RETURN
9323 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
9324 #undef TARGET_PASS_BY_REFERENCE
9325 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
9326
9327 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
9328 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
9329
9330 #undef TARGET_FIXED_CONDITION_CODE_REGS
9331 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
9332
9333 #undef TARGET_CC_MODES_COMPATIBLE
9334 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
9335
9336 #undef TARGET_INVALID_WITHIN_DOLOOP
9337 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null
9338
9339 #ifdef HAVE_AS_TLS
9340 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
9341 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
9342 #endif
9343
9344 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
9345 #undef TARGET_MANGLE_TYPE
9346 #define TARGET_MANGLE_TYPE s390_mangle_type
9347 #endif
9348
9349 #undef TARGET_SCALAR_MODE_SUPPORTED_P
9350 #define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
9351
9352 #undef TARGET_SECONDARY_RELOAD
9353 #define TARGET_SECONDARY_RELOAD s390_secondary_reload
9354
9355 #undef TARGET_LIBGCC_CMP_RETURN_MODE
9356 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
9357
9358 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE
9359 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
9360
9361 struct gcc_target targetm = TARGET_INITIALIZER;
9362
9363 #include "gt-s390.h"