/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "diagnostic-core.h"
#include "basic-block.h"
#include "target-def.h"
#include "langhooks.h"
/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of multiplication in TFmode.  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;     /* cost of division in TFmode.  */
  const int ddbr;     /* cost of division in DFmode.  */
  const int debr;     /* cost of division in SFmode.  */
  const int dlgr;     /* cost of a DLGR instruction.  */
  const int dlr;      /* cost of a DLR instruction.  */
  const int dr;       /* cost of a DR instruction.  */
  const int dsgfr;    /* cost of a DSGFR instruction.  */
  const int dsgr;     /* cost of a DSGR instruction.  */
};
const struct processor_costs *s390_cost;
static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR */
  COSTS_N_INSNS (30),    /* DDBR */
  COSTS_N_INSNS (27),    /* DEBR */
  COSTS_N_INSNS (220),   /* DLGR */
  COSTS_N_INSNS (34),    /* DLR */
  COSTS_N_INSNS (34),    /* DR */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR */
};
static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (176),   /* DLGR */
  COSTS_N_INSNS (31),    /* DLR */
  COSTS_N_INSNS (31),    /* DR */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR */
};
static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (30),    /* DLGR */
  COSTS_N_INSNS (23),    /* DLR */
  COSTS_N_INSNS (23),    /* DR */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR */
};
static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (10),    /* MH    */
  COSTS_N_INSNS (10),    /* MHI   */
  COSTS_N_INSNS (10),    /* ML    */
  COSTS_N_INSNS (10),    /* MR    */
  COSTS_N_INSNS (10),    /* MS    */
  COSTS_N_INSNS (10),    /* MSG   */
  COSTS_N_INSNS (10),    /* MSGF  */
  COSTS_N_INSNS (10),    /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (10),    /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR */
  COSTS_N_INSNS (120),   /* SQXBR */
  COSTS_N_INSNS (52),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (111),   /* DXBR */
  COSTS_N_INSNS (39),    /* DDBR */
  COSTS_N_INSNS (32),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR */
  COSTS_N_INSNS (71),    /* DLR */
  COSTS_N_INSNS (71),    /* DR */
  COSTS_N_INSNS (71),    /* DSGFR */
  COSTS_N_INSNS (71),    /* DSGR */
};
static const
struct processor_costs z196_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (101),   /* DXBR B+101 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};
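
/* Editor's note (illustrative, not part of the original file):
   COSTS_N_INSNS (n) scales an instruction count into the middle-end's
   cost units -- rtl.h defines it as ((n) * 4) -- so the tables above
   stay comparable with generic costs.  For instance, on z900 an MSG
   at COSTS_N_INSNS (15) is weighted three times as heavy as an MS at
   COSTS_N_INSNS (5) when rtx costs are computed further below.  */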
extern int reload_completed;

/* Kept up to date using the SCHED_VARIABLE_ISSUE hook.  */
static rtx last_scheduled_insn;
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct GTY (()) s390_frame_layout
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr where slots in the register
     save area are reserved for.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};
/* Define the structure for the machine field in struct function.  */

struct GTY(()) machine_function
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};
/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_LONG)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))
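
/* Editor's sketch (not part of the original file): given the
   fpr_bitmap layout documented above, recording that f4 must be
   saved means setting bit 2:

     cfun_set_fpr_bit (2);      // record that f4 needs a save slot
     if (cfun_fpr_bit_p (2))    // later: emit the save/restore
       ...

   The BITNUM argument is the bit position from the table, not the
   FPR number itself.  */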
/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

/* That's the read ahead of the dynamic branch prediction unit in
   bytes on a z10 (or higher) CPU.  */
#define PREDICT_DISTANCE (TARGET_Z10 ? 384 : 2048)
/* Return the alignment for LABEL.  We default to the -falign-labels
   value except for the literal pool base label.  */
int
s390_label_align (rtx label)
{
  rtx prev_insn = prev_active_insn (label);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = single_set (prev_insn);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = SET_SRC (prev_insn);

  /* Don't align literal pool base labels.  */
  if (GET_CODE (prev_insn) == UNSPEC
      && XINT (prev_insn, 1) == UNSPEC_MAIN_BASE)
    return 0;

 old:
  return align_labels_log;
}
static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_unwind_word_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}
/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  /* In contrast to the default implementation reject TImode constants on 31bit
     TARGET_ZARCH for ABI compliance.  */
  if (!TARGET_64BIT && TARGET_ZARCH && mode == TImode)
    return false;

  if (DECIMAL_FLOAT_MODE_P (mode))
    return default_decimal_float_supported_p ();

  return default_scalar_mode_supported_p (mode);
}
/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}
/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return true;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return false;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return false;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return false;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}
/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}
/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative. So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero. Knowing the sign of the
         constant the overflow behavior gets predictable. e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}
/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }
  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }
  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }
  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }
  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ; break;
        case NE: new_code = NE; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }
  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }
  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx cc;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are compatible.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    {
      gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
                  == GET_MODE (op0));
      cc = op0;
    }
  else
    {
      cc = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
}
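
/* Editor's sketch (not part of the original file): a typical expander
   pairs s390_emit_compare with s390_emit_jump, e.g.

     rtx cond = s390_emit_compare (GT, op0, op1);
     s390_emit_jump (label, cond);

   so the returned (GT cc 0) rtx ends up as the IF_THEN_ELSE condition
   of the emitted conditional branch.  */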
/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
   matches CMP.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem,
                            rtx cmp, rtx new_rtx)
{
  emit_insn (gen_atomic_compare_and_swapsi_internal (old, mem, cmp, new_rtx));
  return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM),
                            const0_rtx);
}
/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC1;
        case NE:        return CC0 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC2;
        case NE:        return CC0 | CC1 | CC3;
        default:        return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC3;
        case NE:        return CC0 | CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0 | CC2;
        case NE:        return CC1 | CC3;
        default:        return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU:       return CC2 | CC3;  /* carry */
        case GEU:       return CC0 | CC1;  /* no carry */
        default:        return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU:       return CC0 | CC1;  /* borrow */
        case LEU:       return CC2 | CC3;  /* no borrow */
        default:        return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0 | CC2;
        case NE:        return CC1 | CC3;
        case LTU:       return CC1;
        case GTU:       return CC3;
        case LEU:       return CC1 | CC2;
        case GEU:       return CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LTU:       return CC1;
        case GTU:       return CC2;
        case LEU:       return CC0 | CC1;
        case GEU:       return CC0 | CC2;
        default:        return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LTU:       return CC2;
        case GTU:       return CC1;
        case LEU:       return CC0 | CC2;
        case GEU:       return CC0 | CC1;
        default:        return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1 | CC3;
        case GT:        return CC2;
        case LE:        return CC0 | CC1 | CC3;
        case GE:        return CC0 | CC2;
        default:        return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2 | CC3;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}
/* Return branch condition mask to implement a compare and branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_compare_and_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;

  switch (GET_CODE (code))
    {
    case EQ:
      return CC0;
    case NE:
      return CC1 | CC2;
    case LT:
    case LTU:
      return CC1;
    case GT:
    case GTU:
      return CC2;
    case LE:
    case LEU:
      return CC0 | CC1;
    case GE:
    case GEU:
      return CC0 | CC2;
    default:
      gcc_unreachable ();
    }
  return -1;
}
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */

bool
s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
                           int *pos, int *length)
{
  int tmp_pos = 0;
  int tmp_length = 0;
  int i;
  unsigned HOST_WIDE_INT mask = 1ULL;
  bool contiguous = false;

  for (i = 0; i < size; mask <<= 1, i++)
    {
      if (contiguous)
        {
          if (mask & in)
            tmp_length++;
          else
            break;
        }
      else
        {
          if (mask & in)
            {
              contiguous = true;
              tmp_length++;
            }
          else
            tmp_pos++;
        }
    }

  if (!tmp_length)
    return false;

  /* Calculate a mask for all bits beyond the contiguous bits.  */
  mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));

  if (mask & in)
    return false;

  if (tmp_length + tmp_pos - 1 > size)
    return false;

  if (length)
    *length = tmp_length;

  if (pos)
    *pos = tmp_pos;

  return true;
}
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}
/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block operations.
     Overlapping operations would otherwise be recognized by the S/390 hardware
     and would fall back to a slower implementation. Allowing overlapping
     operations would lead to slow code but not to wrong code. Therefore we are
     somewhat optimistic if we cannot prove that the memory blocks are
     overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}
/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}
1410 s390_expand_logical_operator (enum rtx_code code
, enum machine_mode mode
,
1413 enum machine_mode wmode
= mode
;
1414 rtx dst
= operands
[0];
1415 rtx src1
= operands
[1];
1416 rtx src2
= operands
[2];
1419 /* If we cannot handle the operation directly, use a temp register. */
1420 if (!s390_logical_operator_ok_p (operands
))
1421 dst
= gen_reg_rtx (mode
);
1423 /* QImode and HImode patterns make sense only if we have a destination
1424 in memory. Otherwise perform the operation in SImode. */
1425 if ((mode
== QImode
|| mode
== HImode
) && GET_CODE (dst
) != MEM
)
1428 /* Widen operands if required. */
1431 if (GET_CODE (dst
) == SUBREG
1432 && (tem
= simplify_subreg (wmode
, dst
, mode
, 0)) != 0)
1434 else if (REG_P (dst
))
1435 dst
= gen_rtx_SUBREG (wmode
, dst
, 0);
1437 dst
= gen_reg_rtx (wmode
);
1439 if (GET_CODE (src1
) == SUBREG
1440 && (tem
= simplify_subreg (wmode
, src1
, mode
, 0)) != 0)
1442 else if (GET_MODE (src1
) != VOIDmode
)
1443 src1
= gen_rtx_SUBREG (wmode
, force_reg (mode
, src1
), 0);
1445 if (GET_CODE (src2
) == SUBREG
1446 && (tem
= simplify_subreg (wmode
, src2
, mode
, 0)) != 0)
1448 else if (GET_MODE (src2
) != VOIDmode
)
1449 src2
= gen_rtx_SUBREG (wmode
, force_reg (mode
, src2
), 0);
1452 /* Emit the instruction. */
1453 op
= gen_rtx_SET (VOIDmode
, dst
, gen_rtx_fmt_ee (code
, wmode
, src1
, src2
));
1454 clob
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
1455 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, op
, clob
)));
1457 /* Fix up the destination if needed. */
1458 if (dst
!= operands
[0])
1459 emit_move_insn (operands
[0], gen_lowpart (mode
, dst
));
/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}
/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
static void
s390_option_override (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Set the march default in case it hasn't been specified on
     cmdline.  */
  if (s390_arch == PROCESSOR_max)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_arch = TARGET_ZARCH ? PROCESSOR_2064_Z900 : PROCESSOR_9672_G5;
      s390_arch_flags = processor_flags_table[(int)s390_arch];
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  /* Use hardware DFP if available and not explicitly disabled by
     user.  E.g. with -m31 -march=z10 -mzarch.  */
  if (!(target_flags_explicit & MASK_HARD_DFP) && TARGET_DFP)
    target_flags |= MASK_HARD_DFP;

  if (TARGET_HARD_DFP && !TARGET_DFP)
    {
      if (target_flags_explicit & MASK_HARD_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags &= ~MASK_HARD_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can%'t be used in conjunction with -msoft-float");

      target_flags &= ~MASK_HARD_DFP;
    }

  /* Set processor cost function.  */
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
      s390_cost = &z990_cost;
      break;
    case PROCESSOR_2094_Z9_109:
      s390_cost = &z9_109_cost;
      break;
    case PROCESSOR_2097_Z10:
      s390_cost = &z10_cost;
      break;
    case PROCESSOR_2817_Z196:
      s390_cost = &z196_cost;
      break;
    default:
      s390_cost = &z900_cost;
    }

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif

  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      maybe_set_param_value (PARAM_MAX_UNROLLED_INSNS, 100,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_UNROLL_TIMES, 32,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEELED_INSNS, 2000,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEEL_TIMES, 64,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
    }

  maybe_set_param_value (PARAM_MAX_PENDING_LIST_LENGTH, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* values for loop prefetching */
  maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_L1_CACHE_SIZE, 128,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* s390 has more than 2 levels and the size is much larger.  Since
     we are always running virtualized assume that we only get a small
     part of the caches above l1.  */
  maybe_set_param_value (PARAM_L2_CACHE_SIZE, 1500,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_PREFETCH_MIN_INSN_TO_MEM_RATIO, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES, 6,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* This cannot reside in s390_option_optimization_table since HAVE_prefetch
     requires the arch flags to be evaluated already.  Since prefetching
     is beneficial on s390, we enable it if available.  */
  if (flag_prefetch_loop_arrays < 0 && HAVE_prefetch && optimize >= 3)
    flag_prefetch_loop_arrays = 1;

  /* Use the alternative scheduling-pressure algorithm by default.  */
  maybe_set_param_value (PARAM_SCHED_PRESSURE_ALGORITHM, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  if (TARGET_TPF)
    {
      /* Don't emit DWARF3/4 unless specifically selected.  The TPF
         debuggers do not yet support DWARF 3/4.  */
      if (!global_options_set.x_dwarf_strict)
        dwarf_strict = 1;
      if (!global_options_set.x_dwarf_version)
        dwarf_version = 2;
    }
}
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};
/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}
/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Without the long displacement facility we don't need to
     distinguish between long and short displacement.  */
  if (!TARGET_LONG_DISPLACEMENT)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static bool
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;       /* index + base */
              base = op1;
            }
          else
            {
              base = op0;       /* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0); /* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        return false;
    }

  else
    disp = addr;                /* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = true;

      if ((reload_completed || reload_in_progress)
          && indx == cfun->machine->base_reg)
        pointer = indx_ptr = literal_pool = true;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (!disp)
    {
      /* If virtual registers are involved, the displacement will change later
         anyway as the virtual registers get eliminated.  This could make a
         valid displacement invalid, but it is more likely to make an invalid
         displacement valid, because we sometimes access the register save area
         via negative offsets to one of those registers.
         Thus we don't check the displacement for validity here.  If after
         elimination the displacement turns out to be invalid after all,
         this is fixed up by reload in any case.  */
      if (base != arg_pointer_rtx
          && indx != arg_pointer_rtx
          && base != return_address_pointer_rtx
          && indx != return_address_pointer_rtx
          && base != frame_pointer_rtx
          && indx != frame_pointer_rtx
          && base != virtual_stack_vars_rtx
          && indx != virtual_stack_vars_rtx)
        if (!DISP_IN_RANGE (offset))
          return false;
    }
  else
    {
      /* All the special cases are pointers.  */
      pointer = true;

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      if (GET_CODE (disp) == UNSPEC
          && (XINT (disp, 1) == UNSPEC_GOT
              || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
          && flag_pic == 1)
        {
          ;
        }

      /* Accept pool label offsets.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
        ;

      /* Accept literal pool references.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
        {
          /* In case CSE pulled a non literal pool reference out of
             the pool we have to reject the address.  This is
             especially important when loading the GOT pointer on non
             zarch CPUs.  In this case the literal pool contains an lt
             relative offset to the _GLOBAL_OFFSET_TABLE_ label which
             will most likely exceed the displacement.  */
          if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (XVECEXP (disp, 0, 0)))
            return false;

          orig_disp = gen_rtx_CONST (Pmode, disp);
          if (offset)
            {
              /* If we have an offset, make sure it does not
                 exceed the size of the constant pool entry.  */
              rtx sym = XVECEXP (disp, 0, 0);
              if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
                return false;

              orig_disp = plus_constant (Pmode, orig_disp, offset);
            }
        }

      else
        return false;
    }

  if (!base && !indx)
    pointer = true;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = orig_disp;
      out->pointer = pointer;
      out->literal_pool = literal_pool;
    }

  return true;
}
/* Decompose a RTL expression OP for a shift count into its components,
   and return the base register in BASE and the offset in OFFSET.

   Return true if OP is a valid shift count, false if not.  */

bool
s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT off = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      off = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (op && GET_CODE (op) != REG)
    return false;

  if (offset)
    *offset = off;
  if (base)
    *base = op;

  return true;
}
2077 s390_legitimate_address_without_index_p (rtx op
)
2079 struct s390_address addr
;
2081 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
2090 /* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
2091 and return these parts in SYMREF and ADDEND. You can pass NULL in
2092 SYMREF and/or ADDEND if you are not interested in these values.
2093 Literal pool references are *not* considered symbol references. */
2096 s390_symref_operand_p (rtx addr
, rtx
*symref
, HOST_WIDE_INT
*addend
)
2098 HOST_WIDE_INT tmpaddend
= 0;
2100 if (GET_CODE (addr
) == CONST
)
2101 addr
= XEXP (addr
, 0);
2103 if (GET_CODE (addr
) == PLUS
)
2105 if (GET_CODE (XEXP (addr
, 0)) == SYMBOL_REF
2106 && !CONSTANT_POOL_ADDRESS_P (XEXP (addr
, 0))
2107 && CONST_INT_P (XEXP (addr
, 1)))
2109 tmpaddend
= INTVAL (XEXP (addr
, 1));
2110 addr
= XEXP (addr
, 0);
2116 if (GET_CODE (addr
) != SYMBOL_REF
|| CONSTANT_POOL_ADDRESS_P (addr
))
2122 *addend
= tmpaddend
;
/* Return TRUE if ADDR is an operand valid for a load/store relative
   instruction.  Be aware that the alignment of the operand needs to
   be checked separately.  */

static bool
s390_loadrelative_operand_p (rtx addr)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  /* Enable load relative for symbol@GOTENT.  */
  if (GET_CODE (addr) == UNSPEC
      && XINT (addr, 1) == UNSPEC_GOTENT)
    return true;

  return s390_symref_operand_p (addr, NULL, NULL);
}
/* Return true if the address in OP is valid for constraint letter C
   if wrapped in a MEM rtx.  Set LIT_POOL_OK to true if literal
   pool MEMs should be accepted.  Only the Q, R, S, T constraint
   letters are allowed for C.  */

static int
s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
{
  struct s390_address addr;
  bool decomposed = false;

  /* This check makes sure that no symbolic address (except literal
     pool references) are accepted by the R or T constraints.  */
  if (s390_loadrelative_operand_p (op))
    return 0;

  /* Ensure literal pool references are only accepted if LIT_POOL_OK.  */
  if (!lit_pool_ok)
    {
      if (!s390_decompose_address (op, &addr))
        return 0;
      if (addr.literal_pool)
        return 0;
      decomposed = true;
    }

  switch (c)
    {
    case 'Q': /* no index short displacement */
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (!s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'R': /* with index short displacement */
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!decomposed && !s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      break;

    case 'S': /* no index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T': /* with index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if ((decomposed || s390_decompose_address (op, &addr))
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    default:
      return 0;
    }
  return 1;
}
2221 ([A|B|Z](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for
2222 the constraint given in STR, or 0 else. */
2225 s390_mem_constraint (const char *str
, rtx op
)
2232 /* Check for offsettable variants of memory constraints. */
2233 if (!MEM_P (op
) || MEM_VOLATILE_P (op
))
2235 if ((reload_completed
|| reload_in_progress
)
2236 ? !offsettable_memref_p (op
) : !offsettable_nonstrict_memref_p (op
))
2238 return s390_check_qrst_address (str
[1], XEXP (op
, 0), true);
2240 /* Check for non-literal-pool variants of memory constraints. */
2243 return s390_check_qrst_address (str
[1], XEXP (op
, 0), false);
2248 if (GET_CODE (op
) != MEM
)
2250 return s390_check_qrst_address (c
, XEXP (op
, 0), true);
2252 return (s390_check_qrst_address ('Q', op
, true)
2253 || s390_check_qrst_address ('R', op
, true));
2255 return (s390_check_qrst_address ('S', op
, true)
2256 || s390_check_qrst_address ('T', op
, true));
2258 /* Simply check for the basic form of a shift count. Reload will
2259 take care of making sure we have a proper base register. */
2260 if (!s390_decompose_shift_count (op
, NULL
, NULL
))
2264 return s390_check_qrst_address (str
[1], op
, true);
/* Evaluates constraint strings starting with letter O.  Input
   parameter C is the second letter following the "O" in the constraint
   string.  Returns 1 if VALUE meets the respective constraint and 0
   otherwise.  */

int
s390_O_constraint_str (const char c, HOST_WIDE_INT value)
{
  if (!TARGET_EXTIMM)
    return 0;

  switch (c)
    {
    case 's':
      return trunc_int_for_mode (value, SImode) == value;

    case 'p':
      return value == 0
        || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;

    case 'n':
      return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;

    default:
      gcc_unreachable ();
    }
}
/* Evaluates constraint strings starting with letter N.  Parameter STR
   contains the letters following letter "N" in the constraint string.
   Returns true if VALUE matches the constraint.  */

int
s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
{
  enum machine_mode mode, part_mode;
  int def;
  int part, part_goal;

  if (str[0] == 'x')
    part_goal = -1;
  else
    part_goal = str[0] - '0';

  switch (str[1])
    {
    case 'Q':
      part_mode = QImode;
      break;
    case 'H':
      part_mode = HImode;
      break;
    case 'S':
      part_mode = SImode;
      break;
    default:
      return 0;
    }

  switch (str[2])
    {
    case 'H':
      mode = HImode;
      break;
    case 'S':
      mode = SImode;
      break;
    case 'D':
      mode = DImode;
      break;
    default:
      return 0;
    }

  switch (str[3])
    {
    case '0':
      def = 0;
      break;
    case 'F':
      def = -1;
      break;
    default:
      return 0;
    }

  if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
    return 0;

  part = s390_single_part (GEN_INT (value), mode, part_mode, def);
  if (part < 0)
    return 0;
  if (part_goal != -1 && part_goal != part)
    return 0;

  return 1;
}
2376 s390_float_const_zero_p (rtx value
)
2378 return (GET_MODE_CLASS (GET_MODE (value
)) == MODE_FLOAT
2379 && value
== CONST0_RTX (GET_MODE (value
)));
/* Implement TARGET_REGISTER_MOVE_COST.  */

static int
s390_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                         reg_class_t from, reg_class_t to)
{
  /* On s390, copy between fprs and gprs is expensive.  */
  if ((reg_classes_intersect_p (from, GENERAL_REGS)
       && reg_classes_intersect_p (to, FP_REGS))
      || (reg_classes_intersect_p (from, FP_REGS)
          && reg_classes_intersect_p (to, GENERAL_REGS)))
    return 10;

  return 1;
}
/* Implement TARGET_MEMORY_MOVE_COST.  */

static int
s390_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       reg_class_t rclass ATTRIBUTE_UNUSED,
                       bool in ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE contains GET_CODE (x), OUTER_CODE contains the code
   of the superexpression of x.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case MEM:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
    case AND:
    case IOR:
    case XOR:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return false;

    case PLUS:
    case MINUS:
      *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      switch (GET_MODE (x))
	{
	case SImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (GET_CODE (right) == CONST_INT
		&& CONST_OK_FOR_K (INTVAL (right)))
	      *total = s390_cost->mhi;
	    else if (GET_CODE (left) == SIGN_EXTEND)
	      *total = s390_cost->mh;
	    else
	      *total = s390_cost->ms;  /* msr, ms, msy */
	    break;
	  }
	case DImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (TARGET_ZARCH)
	      {
		if (GET_CODE (right) == CONST_INT
		    && CONST_OK_FOR_K (INTVAL (right)))
		  *total = s390_cost->mghi;
		else if (GET_CODE (left) == SIGN_EXTEND)
		  *total = s390_cost->msgf;
		else
		  *total = s390_cost->msg;  /* msgr, msg */
	      }
	    else /* TARGET_31BIT */
	      {
		if (GET_CODE (left) == SIGN_EXTEND
		    && GET_CODE (right) == SIGN_EXTEND)
		  /* mulsidi case: mr, m */
		  *total = s390_cost->m;
		else if (GET_CODE (left) == ZERO_EXTEND
			 && GET_CODE (right) == ZERO_EXTEND
			 && TARGET_CPU_ZARCH)
		  /* umulsidi case: ml, mlr */
		  *total = s390_cost->ml;
		else
		  /* Complex calculation is required.  */
		  *total = COSTS_N_INSNS (40);
	      }
	    break;
	  }
	case SFmode:
	case DFmode:
	  *total = s390_cost->mult_df;
	  break;
	case TFmode:
	  *total = s390_cost->mxbr;
	  break;
	default:
	  return false;
	}
      return false;

    case FMA:
      switch (GET_MODE (x))
	{
	case DFmode:
	  *total = s390_cost->madbr;
	  break;
	case SFmode:
	  *total = s390_cost->maebr;
	  break;
	default:
	  return false;
	}
      /* Negate in the third argument is free: FMSUB.  */
      if (GET_CODE (XEXP (x, 2)) == NEG)
	{
	  *total += (rtx_cost (XEXP (x, 0), FMA, 0, speed)
		     + rtx_cost (XEXP (x, 1), FMA, 1, speed)
		     + rtx_cost (XEXP (XEXP (x, 2), 0), FMA, 2, speed));
	  return true;
	}
      return false;

    case UDIV:
    case UMOD:
      if (GET_MODE (x) == TImode)	       /* 128 bit division */
	*total = s390_cost->dlgr;
      else if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    *total = s390_cost->dlr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dlgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      return false;

    case DIV:
    case MOD:
      if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    if (TARGET_ZARCH)
	      *total = s390_cost->dsgfr;
	    else
	      *total = s390_cost->dr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dsgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      else if (GET_MODE (x) == SFmode)
	{
	  *total = s390_cost->debr;
	}
      else if (GET_MODE (x) == DFmode)
	{
	  *total = s390_cost->ddbr;
	}
      else if (GET_MODE (x) == TFmode)
	{
	  *total = s390_cost->dxbr;
	}
      return false;

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = s390_cost->sqebr;
      else if (GET_MODE (x) == DFmode)
	*total = s390_cost->sqdbr;
      else /* TFmode */
	*total = s390_cost->sqxbr;
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (outer_code == MULT || outer_code == DIV || outer_code == MOD
	  || outer_code == PLUS || outer_code == MINUS
	  || outer_code == COMPARE)
	*total = 0;
      return false;

    case COMPARE:
      *total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 0)) == AND
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 0);
	  rtx op1 = XEXP (XEXP (x, 0), 1);
	  rtx op2 = XEXP (x, 1);
	  if (memory_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
	    return true;
	  if (register_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
	    return true;
	}
      return false;

    default:
      return false;
    }
}
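/* Worked example (a sketch): with speed costs, a DImode multiply

     (mult:DI (reg:DI 60) (const_int 3))

   is priced by the TARGET_ZARCH arm above at s390_cost->mghi, since
   the constant passes CONST_OK_FOR_K (a signed 16-bit immediate);
   a register-register multiply would instead be charged
   s390_cost->msg.  */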
/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr, enum machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}
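/* E.g. a base+displacement address such as 8(%r2) costs
   COSTS_N_INSNS (1) here, while an indexed form like 8(%r1,%r2) is
   charged one unit more, mildly steering address generation away from
   index registers when they buy nothing (the operand examples are
   illustrative).  */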
/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

int
tls_symbolic_operand (rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
/* Split DImode access register reference REG (on 64-bit) into its constituent
   low and high parts, and store them into LO and HI.  Note that gen_lowpart/
   gen_highpart cannot be used as they assume all registers are word-sized,
   while our access registers have only half that size.  */

void
s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
{
  gcc_assert (TARGET_64BIT);
  gcc_assert (ACCESS_REG_P (reg));
  gcc_assert (GET_MODE (reg) == DImode);
  gcc_assert (!(REGNO (reg) & 1));

  *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
  *hi = gen_rtx_REG (SImode, REGNO (reg));
}
/* Return true if OP contains a symbol reference */

int
symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP contains a reference to a thread-local symbol.  */

bool
tls_symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF)
    return tls_symbolic_operand (op);

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return true;
	}

      else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
	return true;
    }

  return false;
}
/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_symbolic_move.  */
  return 0;
}
/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

static bool
s390_legitimate_constant_p (enum machine_mode mode, rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return true;

  /* Accept immediate LARL operands.  */
  if (TARGET_CPU_ZARCH && larl_operand (op, mode))
    return true;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return false;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return true;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return true;
}
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).  */

static bool
s390_cannot_force_const_mem (enum machine_mode mode, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
	 non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0));
    case PLUS:
    case MINUS:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0))
	     || s390_cannot_force_const_mem (mode, XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.  */
	case UNSPEC_LTREL_OFFSET:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLTOFF:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	/* If the literal pool shares the code section, we can put
	   execute template placeholders into the pool as well.  */
	case UNSPEC_INSN:
	  return TARGET_CPU_ZARCH;

	default:
	  return true;
	}
      break;

    default:
      gcc_unreachable ();
    }
}
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.
   This function accepts all constants which can be loaded directly
   into a GPR.  */

bool
legitimate_reload_constant_p (rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return true;

  /* Accept l(g)hi/l(g)fi operands.  */
  if (GET_CODE (op) == CONST_INT
      && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
    return true;

  /* Accept lliXX operands.  */
  if (TARGET_ZARCH
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, HImode, 0) >= 0)
    return true;

  /* Accept llihf/llilf operands.  */
  if (TARGET_EXTIMM
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, SImode, 0) >= 0)
    return true;

  /* Accept larl operands.  */
  if (TARGET_CPU_ZARCH
      && larl_operand (op, VOIDmode))
    return true;

  /* Accept floating-point zero operands that fit into a single GPR.  */
  if (GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op)
      && GET_MODE_SIZE (GET_MODE (op)) <= UNITS_PER_WORD)
    return true;

  /* Accept double-word operands that can be split.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
    {
      enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
      rtx hi = operand_subword (op, 0, 0, dword_mode);
      rtx lo = operand_subword (op, 1, 0, dword_mode);
      return legitimate_reload_constant_p (hi)
	     && legitimate_reload_constant_p (lo);
    }

  /* Everything else cannot be handled without reload.  */
  return false;
}
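/* Illustrative cases (example values): 0xfff is accepted through
   DISP_IN_RANGE (la/lay), -30000 through CONST_OK_FOR_K (lhi/lghi),
   and a constant with a single nonzero 16-bit quarter such as
   0x7fff0000 through the lliXX family on zarch; a symbol passing
   larl_operand is loaded PC-relative rather than through the
   literal pool.  */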
/* Returns true if the constant value OP is a legitimate fp operand
   during and after reload.
   This function accepts all constants which can be loaded directly
   into an FPR.  */

static bool
legitimate_reload_fp_constant_p (rtx op)
{
  /* Accept floating-point zero operands if the load zero instruction
     exists.  */
  if (TARGET_Z196
      && GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op))
    return true;

  return false;
}
/* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
   return the class of reg to actually use.  */

static reg_class_t
s390_preferred_reload_class (rtx op, reg_class_t rclass)
{
  switch (GET_CODE (op))
    {
      /* Constants we cannot reload into general registers
	 must be forced into the literal pool.  */
      case CONST_DOUBLE:
      case CONST_INT:
	if (reg_class_subset_p (GENERAL_REGS, rclass)
	    && legitimate_reload_constant_p (op))
	  return GENERAL_REGS;
	else if (reg_class_subset_p (ADDR_REGS, rclass)
		 && legitimate_reload_constant_p (op))
	  return ADDR_REGS;
	else if (reg_class_subset_p (FP_REGS, rclass)
		 && legitimate_reload_fp_constant_p (op))
	  return FP_REGS;
	return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (!legitimate_reload_constant_p (op))
	  return NO_REGS;
	/* fallthrough */
      case PLUS:
	/* load address will be used.  */
	if (reg_class_subset_p (ADDR_REGS, rclass))
	  return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  return rclass;
}
/* Return true if ADDR is SYMBOL_REF + addend with addend being a
   multiple of ALIGNMENT and the SYMBOL_REF being naturally
   aligned.  */

bool
s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT addend;
  rtx symref;

  /* Accept symbol@GOTENT with pointer size alignment.  */
  if (GET_CODE (addr) == CONST
      && GET_CODE (XEXP (addr, 0)) == UNSPEC
      && XINT (XEXP (addr, 0), 1) == UNSPEC_GOTENT
      && alignment <= UNITS_PER_LONG)
    return true;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    return false;

  return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
	  && !(addend & (alignment - 1)));
}
/* ADDR is moved into REG using larl.  If ADDR isn't a valid larl
   operand SCRATCH is used to reload the even part of the address and
   the odd part is added afterwards.  */

void
s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
{
  HOST_WIDE_INT addend;
  rtx symref;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    gcc_unreachable ();

  if (!(addend & 1))
    /* Easy case.  The addend is even so larl will do fine.  */
    emit_move_insn (reg, addr);
  else
    {
      /* We can leave the scratch register untouched if the target
	 register is a valid base register.  */
      if (REGNO (reg) < FIRST_PSEUDO_REGISTER
	  && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
	scratch = reg;

      gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);

      if (addend != 1)
	emit_move_insn (scratch,
			gen_rtx_CONST (Pmode,
				       gen_rtx_PLUS (Pmode, symref,
						     GEN_INT (addend - 1))));
      else
	emit_move_insn (scratch, symref);

      /* Increment the address using la in order to avoid clobbering cc.  */
      emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
    }
}
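/* As an asm-level illustration (register number arbitrary), an odd
   addend such as sym+5 is reloaded as

	larl	%r1, sym+4	# even part, a valid larl operand
	la	%r1, 1(%r1)	# add the remaining 1; la leaves CC alone

   which is exactly the CONST/PLUS plus const1_rtx pair built above.  */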
/* Generate what is necessary to move between REG and MEM using
   SCRATCH.  The direction is given by TOMEM.  */

void
s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
{
  /* Reload might have pulled a constant out of the literal pool.
     Force it back in.  */
  if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
      || GET_CODE (mem) == CONST)
    mem = force_const_mem (GET_MODE (reg), mem);

  gcc_assert (MEM_P (mem));

  /* For a load from memory we can leave the scratch register
     untouched if the target register is a valid base register.  */
  if (!tomem
      && REGNO (reg) < FIRST_PSEUDO_REGISTER
      && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
      && GET_MODE (reg) == GET_MODE (scratch))
    scratch = reg;

  /* Load address into scratch register.  Since we can't have a
     secondary reload for a secondary reload we have to cover the case
     where larl would need a secondary reload here as well.  */
  s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);

  /* Now we can use a standard load/store to do the move.  */
  if (tomem)
    emit_move_insn (replace_equiv_address (mem, scratch), reg);
  else
    emit_move_insn (reg, replace_equiv_address (mem, scratch));
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch or immediate register.  Return the class
   needed for the immediate register.  */

static reg_class_t
s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Intermediate register needed.  */
  if (reg_classes_intersect_p (CC_REGS, rclass))
    return GENERAL_REGS;

  if (TARGET_Z10)
    {
      HOST_WIDE_INT offset;
      rtx symref;

      /* On z10 several optimizer steps may generate larl operands with
	 an odd addend.  */
      if (in_p
	  && s390_symref_operand_p (x, &symref, &offset)
	  && mode == Pmode
	  && !SYMBOL_REF_ALIGN1_P (symref)
	  && (offset & 1) == 1)
	sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
		      : CODE_FOR_reloadsi_larl_odd_addend_z10);

      /* On z10 we need a scratch register when moving QI, TI or floating
	 point mode values from or to a memory location with a SYMBOL_REF
	 or if the symref addend of a SI or DI move is not aligned to the
	 width of the access.  */
      if (MEM_P (x)
	  && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
	  && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
	      || (!TARGET_ZARCH && mode == DImode)
	      || ((mode == HImode || mode == SImode || mode == DImode)
		  && (!s390_check_symref_alignment (XEXP (x, 0),
						    GET_MODE_SIZE (mode))))))
	{
#define __SECONDARY_RELOAD_CASE(M,m)					\
	  case M##mode:							\
	    if (TARGET_64BIT)						\
	      sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 :	\
				  CODE_FOR_reload##m##di_tomem_z10;	\
	    else							\
	      sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 :	\
				  CODE_FOR_reload##m##si_tomem_z10;	\
	    break;

	  switch (GET_MODE (x))
	    {
	      __SECONDARY_RELOAD_CASE (QI, qi);
	      __SECONDARY_RELOAD_CASE (HI, hi);
	      __SECONDARY_RELOAD_CASE (SI, si);
	      __SECONDARY_RELOAD_CASE (DI, di);
	      __SECONDARY_RELOAD_CASE (TI, ti);
	      __SECONDARY_RELOAD_CASE (SF, sf);
	      __SECONDARY_RELOAD_CASE (DF, df);
	      __SECONDARY_RELOAD_CASE (TF, tf);
	      __SECONDARY_RELOAD_CASE (SD, sd);
	      __SECONDARY_RELOAD_CASE (DD, dd);
	      __SECONDARY_RELOAD_CASE (TD, td);
	    default:
	      gcc_unreachable ();
	    }
#undef __SECONDARY_RELOAD_CASE
	}
    }

  /* We need a scratch register when loading a PLUS expression which
     is not a legitimate operand of the LOAD ADDRESS instruction.  */
  if (in_p && s390_plus_operand (x, mode))
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);

  /* Performing a multiword move from or to memory we have to make sure the
     second chunk in memory is addressable without causing a displacement
     overflow.  If that would be the case we calculate the address in
     a scratch register.  */
  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
			 + GET_MODE_SIZE (mode) - 1))
    {
      /* For GENERAL_REGS a displacement overflow is no problem if occurring
	 in a s_operand address since we may fallback to lm/stm.  So we only
	 have to care about overflows in the b+i+d case.  */
      if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
	   && s390_class_max_nregs (GENERAL_REGS, mode) > 1
	   && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
	  /* For FP_REGS no lm/stm is available so this check is triggered
	     for displacement overflows in b+i+d and b+d like addresses.  */
	  || (reg_classes_intersect_p (FP_REGS, rclass)
	      && s390_class_max_nregs (FP_REGS, mode) > 1))
	{
	  if (in_p)
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_in :
			  CODE_FOR_reloadsi_nonoffmem_in);
	  else
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_out :
			  CODE_FOR_reloadsi_nonoffmem_out);
	}
    }

  /* A scratch address register is needed when a symbolic constant is
     copied to r0 compiling with -fPIC.  In other cases the target
     register might be used as temporary (see legitimize_pic_address).  */
  if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_PIC_addr :
		  CODE_FOR_reloadsi_PIC_addr);

  /* Either scratch or no register needed.  */
  return NO_REGS;
}
/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (rtx target, rtx src,
			  rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  gcc_assert (GET_CODE (src) == PLUS);
  gcc_assert (GET_MODE (src) == Pmode);

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    {
      /* Otherwise, one of the operands cannot be an address register;
	 we reload its value into the scratch register.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
	 in reload.c, it should never happen (at least on s390) that
	 *neither* of the PLUS components, after find_replacements
	 was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	gcc_unreachable ();

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
/* Return true if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

static bool
s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
{
  struct s390_address ad;

  if (TARGET_Z10
      && larl_operand (addr, VOIDmode)
      && (mode == VOIDmode
	  || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
    return true;

  if (!s390_decompose_address (addr, &ad))
    return false;

  if (strict)
    {
      if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	return false;

      if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
	return false;
    }
  else
    {
      if (ad.base
	  && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
	return false;

      if (ad.indx
	  && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
	return false;
    }

  return true;
}
/* Return true if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

bool
legitimate_la_operand_p (rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return false;

  return (TARGET_64BIT || addr.pointer);
}
/* Return true if it is valid *and* preferable to use LA to
   compute the sum of OP1 and OP2.  */

bool
preferred_la_operand_p (rtx op1, rtx op2)
{
  struct s390_address addr;

  if (op2 != const0_rtx)
    op1 = gen_rtx_PLUS (Pmode, op1, op2);

  if (!s390_decompose_address (op1, &addr))
    return false;
  if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
    return false;
  if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
    return false;

  /* Avoid LA instructions with index register on z196; it is
     preferable to use regular add instructions when possible.  */
  if (addr.indx && s390_tune == PROCESSOR_2817_Z196)
    return false;

  if (!TARGET_64BIT && !addr.pointer)
    return false;

  if (addr.pointer)
    return true;

  if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
      || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
    return true;

  return false;
}
/* Emit a forced load-address operation to load SRC into DST.
   This will use the LOAD ADDRESS instruction even in situations
   where legitimate_la_operand_p (SRC) returns false.  */

void
s390_load_address (rtx dst, rtx src)
{
  if (TARGET_64BIT)
    emit_move_insn (dst, src);
  else
    emit_insn (gen_force_la_31 (dst, src));
}
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      used as the address of the data.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
   reg also appears in the address.  */

rtx
legitimize_pic_address (rtx orig, rtx reg)
{
  rtx addr = orig;
  rtx new_rtx = orig;
  rtx base;

  gcc_assert (!TLS_SYMBOLIC_CONST (addr));

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
    {
      /* This is a local symbol.  */
      if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
	{
	  /* Access local symbols PC-relative via LARL.
	     This is the same as in the non-PIC case, so it is
	     handled automatically ...  */
	}
      else
	{
	  /* Access local symbols relative to the GOT.  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  if (reg != 0)
	    {
	      s390_load_address (reg, new_rtx);
	      new_rtx = reg;
	    }
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
	{
	  /* Assume GOT offset < 4k.  This is handled the same way
	     in both 31- and 64-bit code (@GOT).  */

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else if (TARGET_CPU_ZARCH)
	{
	  /* If the GOT offset might be >= 4k, we determine the position
	     of the GOT entry via a PC-relative LARL (@GOTENT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);

	  if (!TARGET_Z10)
	    {
	      emit_move_insn (temp, new_rtx);
	      new_rtx = gen_const_mem (Pmode, temp);
	    }
	  else
	    new_rtx = gen_const_mem (GET_MODE (reg), new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else
	{
	  /* If the GOT offset might be >= 4k, we have to load it
	     from the literal pool (@GOT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
    }
  else
    {
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      gcc_assert (XVECLEN (addr, 0) == 1);
	      switch (XINT (addr, 1))
		{
		/* If someone moved a GOT-relative UNSPEC
		   out of the literal pool, force them back in.  */
		case UNSPEC_GOTOFF:
		case UNSPEC_PLTOFF:
		  new_rtx = force_const_mem (Pmode, orig);
		  break;

		/* @GOT is OK as is if small.  */
		case UNSPEC_GOT:
		  if (flag_pic == 2)
		    new_rtx = force_const_mem (Pmode, orig);
		  break;

		/* @GOTENT is OK as is.  */
		case UNSPEC_GOTENT:
		  break;

		/* @PLT is OK as is on 64-bit, must be converted to
		   GOT-relative @PLTOFF on 31-bit.  */
		case UNSPEC_PLT:
		  if (!TARGET_CPU_ZARCH)
		    {
		      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		      if (reload_in_progress || reload_completed)
			df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

		      addr = XVECEXP (addr, 0, 0);
		      addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					     UNSPEC_PLTOFF);
		      addr = gen_rtx_CONST (Pmode, addr);
		      addr = force_const_mem (Pmode, addr);
		      emit_move_insn (temp, addr);

		      new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		      if (reg != 0)
			{
			  s390_load_address (reg, new_rtx);
			  new_rtx = reg;
			}
		    }
		  break;

		/* Everything else cannot happen.  */
		default:
		  gcc_unreachable ();
		}
	    }
	  else
	    gcc_assert (GET_CODE (addr) == PLUS);
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);

	  gcc_assert (!TLS_SYMBOLIC_CONST (op0));
	  gcc_assert (!TLS_SYMBOLIC_CONST (op1));

	  /* Check first to see if this is a constant offset
	     from a local symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      if (TARGET_CPU_ZARCH
		  && larl_operand (op0, VOIDmode)
		  && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
		  && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
		{
		  if (INTVAL (op1) & 1)
		    {
		      /* LARL can't handle odd offsets, so emit a
			 pair of LARL and LA.  */
		      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		      if (!DISP_IN_RANGE (INTVAL (op1)))
			{
			  HOST_WIDE_INT even = INTVAL (op1) - 1;
			  op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
			  op0 = gen_rtx_CONST (Pmode, op0);
			  op1 = const1_rtx;
			}

		      emit_move_insn (temp, op0);
		      new_rtx = gen_rtx_PLUS (Pmode, temp, op1);

		      if (reg != 0)
			{
			  s390_load_address (reg, new_rtx);
			  new_rtx = reg;
			}
		    }
		  else
		    {
		      /* If the offset is even, we can just use LARL.
			 This will happen automatically.  */
		    }
		}
	      else
		{
		  /* Access local symbols relative to the GOT.  */

		  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		  if (reload_in_progress || reload_completed)
		    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

		  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
					 UNSPEC_GOTOFF);
		  addr = gen_rtx_PLUS (Pmode, addr, op1);
		  addr = gen_rtx_CONST (Pmode, addr);
		  addr = force_const_mem (Pmode, addr);
		  emit_move_insn (temp, addr);

		  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		  if (reg != 0)
		    {
		      s390_load_address (reg, new_rtx);
		      new_rtx = reg;
		    }
		}
	    }

	  /* Now, check whether it is a GOT relative symbol plus offset
	     that was pulled out of the literal pool.  Force it back in.  */

	  else if (GET_CODE (op0) == UNSPEC
		   && GET_CODE (op1) == CONST_INT
		   && XINT (op0, 1) == UNSPEC_GOTOFF)
	    {
	      gcc_assert (XVECLEN (op0, 0) == 1);

	      new_rtx = force_const_mem (Pmode, orig);
	    }

	  /* Otherwise, compute the sum.  */
	  else
	    {
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new_rtx = legitimize_pic_address (XEXP (addr, 1),
						base == reg ? NULL_RTX : reg);
	      if (GET_CODE (new_rtx) == CONST_INT)
		new_rtx = plus_constant (Pmode, base, INTVAL (new_rtx));
	      else
		{
		  if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
		      new_rtx = XEXP (new_rtx, 1);
		    }
		  new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
		}

	      if (GET_CODE (new_rtx) == CONST)
		new_rtx = XEXP (new_rtx, 0);
	      new_rtx = force_operand (new_rtx, 0);
	    }
	}
    }
  return new_rtx;
}
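/* For illustration (register number arbitrary), the @GOTENT path above
   typically assembles on 64-bit zarch to

	larl	%r5, sym@GOTENT		# position of the GOT slot
	lg	%r5, 0(%r5)		# load the address stored there

   while with -fpic (flag_pic == 1) a single @GOT slot is addressed off
   the PIC register instead.  */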
/* Load the thread pointer into a register.  */

rtx
s390_get_thread_pointer (void)
{
  rtx tp = gen_reg_rtx (Pmode);

  emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
  mark_reg_pointer (tp, BITS_PER_WORD);

  return tp;
}
/* Emit a tls call insn.  The call target is the SYMBOL_REF stored
   in s390_tls_symbol which always refers to __tls_get_offset.
   The returned offset is written to RESULT_REG and an USE rtx is
   generated for TLS_CALL.  */

static GTY(()) rtx s390_tls_symbol;

static void
s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
{
  rtx insn;

  if (!flag_pic)
    emit_insn (s390_load_got ());

  if (!s390_tls_symbol)
    s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");

  insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
			 gen_rtx_REG (Pmode, RETURN_REGNUM));

  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
  RTL_CONST_CALL_P (insn) = 1;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new_rtx, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	base = gen_reg_rtx (Pmode);
	s390_load_address (base, new_rtx);

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, base, temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_INITIAL_EXEC:
	if (flag_pic == 1)
	  {
	    /* Assume GOT offset < 4k.  This is handled the same way
	       in both 31- and 64-bit code.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (TARGET_CPU_ZARCH)
	  {
	    /* If the GOT offset might be >= 4k, we determine the position
	       of the GOT entry via a PC-relative LARL.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_const_mem (Pmode, temp);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (flag_pic)
	  {
	    /* If the GOT offset might be >= 4k, we have to load it
	       from the literal pool.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	    new_rtx = gen_const_mem (Pmode, new_rtx);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }
	else
	  {
	    /* In position-dependent code, load the absolute address of
	       the GOT entry from the literal pool.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = temp;
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_EXEC:
	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      default:
	gcc_unreachable ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (addr, 0), 1))
	{
	case UNSPEC_INDNTPOFF:
	  gcc_assert (TARGET_CPU_ZARCH);
	  new_rtx = addr;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
    {
      new_rtx = XEXP (XEXP (addr, 0), 0);
      if (GET_CODE (new_rtx) != SYMBOL_REF)
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);

      new_rtx = legitimize_tls_address (new_rtx, reg);
      new_rtx = plus_constant (Pmode, new_rtx,
			       INTVAL (XEXP (XEXP (addr, 0), 1)));
      new_rtx = force_operand (new_rtx, 0);
    }

  else
    gcc_unreachable ();  /* for now ... */

  return new_rtx;
}
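/* Sketch of the simplest model above, local-exec, in pseudo-C (names
   illustrative; sym@NTPOFF stands for the constant emitted via
   UNSPEC_NTPOFF):

     char *tp   = <thread pointer>;     -- s390_get_thread_pointer ()
     long  off  = sym@NTPOFF;           -- loaded from the literal pool
     char *addr = tp + off;

   The other models differ only in how OFF (or the full address) is
   obtained: through __tls_get_offset calls or GOT slots, as above.  */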
/* Emit insns making the address in operands[1] valid for a standard
   move to operands[0].  operands[1] is replaced by an address which
   should be used instead of the former RTX to emit the move
   pattern.  */

void
emit_symbolic_move (rtx *operands)
{
  rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);

  if (GET_CODE (operands[0]) == MEM)
    operands[1] = force_reg (Pmode, operands[1]);
  else if (TLS_SYMBOLIC_CONST (operands[1]))
    operands[1] = legitimize_tls_address (operands[1], temp);
  else if (flag_pic)
    operands[1] = legitimize_pic_address (operands[1], temp);
}
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

static rtx
s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx constant_term = const0_rtx;

  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }
  else if (GET_CODE (x) == PLUS
	   && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
	       || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
    {
      return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
	  || (GET_CODE (x) == PLUS
	      && (SYMBOLIC_CONST (XEXP (x, 0))
		  || SYMBOLIC_CONST (XEXP (x, 1)))))
	x = legitimize_pic_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }

  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && !TARGET_LONG_DISPLACEMENT
      && !DISP_IN_RANGE (INTVAL (constant_term))
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val  = force_operand (GEN_INT (upper), temp);
      if (val != temp)
	emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val  = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val  = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
	}
    }

  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);
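
  return x;
}

/* Numeric example: a displacement of 0x12345 splits into

     lower = 0x12345 & 0xfff  = 0x345      -- fits the 12-bit field
     upper = 0x12345 ^ lower  = 0x12000    -- multiple of 4K, CSE-able

   and upper + lower reassembles the original displacement.  */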
/* Try a machine-dependent way of reloading an illegitimate address AD
   operand.  If we find one, push the reload and return the new address.

   MODE is the mode of the enclosing MEM.  OPNUM is the operand number
   and TYPE is the reload type of the current reload.  */

rtx
legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
			   int opnum, int type)
{
  if (!optimize || TARGET_LONG_DISPLACEMENT)
    return NULL_RTX;

  if (GET_CODE (ad) == PLUS)
    {
      rtx tem = simplify_binary_operation (PLUS, Pmode,
					   XEXP (ad, 0), XEXP (ad, 1));
      if (tem)
	ad = tem;
    }

  if (GET_CODE (ad) == PLUS
      && GET_CODE (XEXP (ad, 0)) == REG
      && GET_CODE (XEXP (ad, 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
    {
      HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
      rtx cst, tem, new_rtx;

      cst = GEN_INT (upper);
      if (!legitimate_reload_constant_p (cst))
	cst = force_const_mem (Pmode, cst);

      tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
      new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));

      push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      return new_rtx;
    }

  return NULL_RTX;
}
/* Emit code to move LEN bytes from SRC to DST.  */

bool
s390_expand_movmem (rtx dst, rtx src, rtx len)
{
  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k we will
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return false;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
				   const0_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);

	  /* Issue a write prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
				   const1_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);
	}

      emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
			 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_movmem_short (dst, src,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
  return true;
}
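/* Shape of the emitted loop, as a plain-C sketch; mvc () stands in for
   the movmem_short pattern and all names are illustrative:

     count  = len - 1;                  -- MVC encodes length minus one
     blocks = count >> 8;               -- number of full 256-byte chunks
     while (blocks--)
       {
	 mvc (dst, src, 256);           -- movmem_short with GEN_INT (255)
	 dst += 256;  src += 256;       -- via la, so the CC stays intact
       }
     mvc (dst, src, (count & 0xff) + 1);   -- residue driven by COUNT

   On z10 the loop body is preceded by read/write prefetches three
   cache lines (768 bytes) ahead, as emitted above.  */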
/* Emit code to set LEN bytes at DST to VAL.
   Make use of clrmem if VAL is zero.  */

void
s390_expand_setmem (rtx dst, rtx len, rtx val)
{
  if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
    return;

  gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);

  if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
    {
      if (val == const0_rtx && INTVAL (len) <= 256)
	emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
      else
	{
	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  if (INTVAL (len) > 1)
	    {
	      /* Initiate 1 byte overlap move.
		 The first byte of DST is propagated through DSTP1.
		 Prepare a movmem for:  DST+1 = DST (length = LEN - 1).
		 DST is set to size 1 so the rest of the memory location
		 does not count as source operand.  */
	      rtx dstp1 = adjust_address (dst, VOIDmode, 1);
	      set_mem_size (dst, 1);

	      emit_insn (gen_movmem_short (dstp1, dst,
					   GEN_INT (INTVAL (len) - 2)));
	    }
	}
    }

  else if (TARGET_MVCLE)
    {
      val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
      emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
    }

  else
    {
      rtx dst_addr, count, blocks, temp, dstp1 = NULL_RTX;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);

      if (val == const0_rtx)
	temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			     OPTAB_DIRECT);
      else
	{
	  dstp1 = adjust_address (dst, VOIDmode, 1);
	  set_mem_size (dst, 1);

	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  /* If count is 1 we are done.  */
	  emit_cmp_and_jump_insns (count, const1_rtx,
				   EQ, NULL_RTX, mode, 1, end_label);

	  temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1,
			       OPTAB_DIRECT);
	}
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
	{
	  /* Issue a write prefetch for the +4 cache line.  */
	  rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
						     GEN_INT (1024)),
				       const1_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
      else
	emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
      else
	emit_insn (gen_movmem_short (dstp1, dst,
				     convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
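/* The one-byte-overlap MVC works because MVC copies strictly left to
   right, so each destination byte re-reads the byte just stored; as a
   plain-C sketch:

     dst[0] = val;                      -- seed byte
     for (i = 1; i < len; i++)
       dst[i] = dst[i - 1];             -- MVC dst+1(len-1),dst

   which propagates VAL through the whole block.  */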
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET.  */

bool
s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
{
  rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
  rtx tmp;

  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k we will
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return false;

  /* As the result of CMPINT is inverted compared to what we need,
     we have to swap the operands.  */
  tmp = op0; op0 = op1; op1 = tmp;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	{
	  emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
	  emit_insn (gen_cmpint (target, ccreg));
	}
      else
	emit_move_insn (target, const0_rtx);
    }
  else if (TARGET_MVCLE)
    {
      emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
      emit_insn (gen_cmpint (target, ccreg));
    }
  else
    {
      rtx addr0, addr1, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +2 cache line of operand 1.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;

	  /* Issue a read prefetch for the +2 cache line of operand 2.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
      temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
				   gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
			 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
			 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_cmpmem_short (op0, op1,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);

      emit_insn (gen_cmpint (target, ccreg));
    }
  return true;
}
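/* Rough shape of the generated compare, as a sketch (clc () stands in
   for the cmpmem_short pattern):

     while (blocks--)
       {
	 if (clc (op0, op1, 256) != 0)	-- CLC leaves its result in the CC
	   break;			-- jump straight to the CMPINT
	 op0 += 256;  op1 += 256;
       }
     clc (op0, op1, residue);
     target = cmpint (CC);		-- negative/zero/positive, in the
					   style of memcmp

   The operand swap at the top of the function compensates for the
   inversion noted in the comment there.  */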
/* Expand conditional increment or decrement using alc/slb instructions.
   Should generate code setting DST to either SRC or SRC + INCREMENT,
   depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
   Returns true if successful, false otherwise.

   That makes it possible to implement some if-constructs without jumps e.g.:
   (borrow = CC0 | CC1 and carry = CC2 | CC3)
   unsigned int a, b, c;
   if (a < b)  c++; -> CCU  b > a  -> CC2;    c += carry;
   if (a < b)  c--; -> CCL3 a - b  -> borrow; c -= borrow;
   if (a <= b) c++; -> CCL3 b - a  -> borrow; c += carry;
   if (a <= b) c--; -> CCU  a <= b -> borrow; c -= borrow;

   Checks for EQ and NE with a nonzero value need an additional xor e.g.:
   if (a == b) c++; -> CCL3 a ^= b; 0 - a  -> borrow;    c += carry;
   if (a == b) c--; -> CCU  a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
   if (a != b) c++; -> CCU  a ^= b; a > 0  -> CC2;       c += carry;
   if (a != b) c--; -> CCL3 a ^= b; 0 - a  -> borrow;    c -= borrow; */

bool
s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
		   rtx dst, rtx src, rtx increment)
{
  enum machine_mode cmp_mode;
  enum machine_mode cc_mode;
  rtx op_res;
  rtx insn;
  rtvec p;
  int ret;

  if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
      && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = SImode;
  else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
	   && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = DImode;
  else
    return false;

  /* Try ADD LOGICAL WITH CARRY.  */
  if (increment == const1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == LTU || cmp_code == LEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	  case GTU:
	    cc_mode = CCUmode;
	    break;

	  case GEU:
	    cc_mode = CCL3mode;
	    break;

	  default:
	    return false;
	}

      /* Emit comparison instruction pattern. */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit ALC instruction pattern.  */
      op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
			       gen_rtx_REG (cc_mode, CC_REGNUM),
			       const0_rtx);

      if (src != const0_rtx)
	{
	  if (!register_operand (src, GET_MODE (dst)))
	    src = force_reg (GET_MODE (dst), src);

	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
	}

      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  /* Try SUBTRACT LOGICAL WITH BORROW.  */
  if (increment == constm1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == GTU || cmp_code == GEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	  case LEU:
	    cc_mode = CCUmode;
	    break;

	  case LTU:
	    cc_mode = CCL3mode;
	    break;

	  default:
	    return false;
	}

      /* Emit comparison instruction pattern. */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit SLB instruction pattern.  */
      if (!register_operand (src, GET_MODE (dst)))
	src = force_reg (GET_MODE (dst), src);

      op_res = gen_rtx_MINUS (GET_MODE (dst),
			      gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
			      gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
					      gen_rtx_REG (cc_mode, CC_REGNUM),
					      const0_rtx));
      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  return false;
}
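/* Net effect at the C level (a sketch):

     ALC path (INCREMENT == const1_rtx):    dst = src + carry;
     SLB path (INCREMENT == constm1_rtx):   dst = src - borrow;

   where carry/borrow is 1 exactly when CMP_OP0 CMP_CODE CMP_OP1 holds,
   so e.g. `if (a < b) c++;' becomes an unsigned compare followed by a
   single alc/alcr, with no branch at all.  */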
/* Expand code for the insv template.  Return true if successful.  */

bool
s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
{
  int bitsize = INTVAL (op1);
  int bitpos = INTVAL (op2);
  enum machine_mode mode = GET_MODE (dest);
  enum machine_mode smode;
  int smode_bsize, mode_bsize;
  rtx op, clobber;

  /* Generate INSERT IMMEDIATE (IILL et al).  */
  /* (set (ze (reg)) (const_int)).  */
  if (TARGET_ZARCH
      && register_operand (dest, word_mode)
      && (bitpos % 16) == 0
      && (bitsize % 16) == 0
      && const_int_operand (src, VOIDmode))
    {
      HOST_WIDE_INT val = INTVAL (src);
      int regpos = bitpos + bitsize;

      while (regpos > bitpos)
	{
	  enum machine_mode putmode;
	  int putsize;

	  if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
	    putmode = SImode;
	  else
	    putmode = HImode;

	  putsize = GET_MODE_BITSIZE (putmode);
	  regpos -= putsize;
	  emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						GEN_INT (putsize),
						GEN_INT (regpos)),
			  gen_int_mode (val, putmode));
	  val >>= putsize;
	}
      gcc_assert (regpos == bitpos);
      return true;
    }

  smode = smallest_mode_for_size (bitsize, MODE_INT);
  smode_bsize = GET_MODE_BITSIZE (smode);
  mode_bsize = GET_MODE_BITSIZE (mode);

  /* Generate STORE CHARACTERS UNDER MASK (STCM et al).  */
  if (bitpos == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && MEM_P (dest)
      && (register_operand (src, word_mode)
	  || const_int_operand (src, VOIDmode)))
    {
      /* Emit standard pattern if possible.  */
      if (smode_bsize == bitsize)
	{
	  emit_move_insn (adjust_address (dest, smode, 0),
			  gen_lowpart (smode, src));
	  return true;
	}

      /* (set (ze (mem)) (const_int)).  */
      else if (const_int_operand (src, VOIDmode))
	{
	  int size = bitsize / BITS_PER_UNIT;
	  rtx src_mem = adjust_address (force_const_mem (word_mode, src),
					BLKmode,
					UNITS_PER_WORD - size);

	  dest = adjust_address (dest, BLKmode, 0);
	  set_mem_size (dest, size);
	  s390_expand_movmem (dest, src_mem, GEN_INT (size));
	  return true;
	}

      /* (set (ze (mem)) (reg)).  */
      else if (register_operand (src, word_mode))
	{
	  if (bitsize <= 32)
	    emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
						  const0_rtx), src);
	  else
	    {
	      /* Emit st,stcmh sequence.  */
	      int stcmh_width = bitsize - 32;
	      int size = stcmh_width / BITS_PER_UNIT;

	      emit_move_insn (adjust_address (dest, SImode, size),
			      gen_lowpart (SImode, src));
	      set_mem_size (dest, size);
	      emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						    GEN_INT (stcmh_width),
						    const0_rtx),
			      gen_rtx_LSHIFTRT (word_mode, src, GEN_INT (32)));
	    }
	  return true;
	}
    }

  /* Generate INSERT CHARACTERS UNDER MASK (IC, ICM et al).  */
  if ((bitpos % BITS_PER_UNIT) == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && (bitpos & 32) == ((bitpos + bitsize - 1) & 32)
      && MEM_P (src)
      && (mode == DImode || mode == SImode)
      && register_operand (dest, mode))
    {
      /* Emit a strict_low_part pattern if possible.  */
      if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
	{
	  op = gen_rtx_STRICT_LOW_PART (VOIDmode, gen_lowpart (smode, dest));
	  op = gen_rtx_SET (VOIDmode, op, gen_lowpart (smode, src));
	  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
	  return true;
	}

      /* ??? There are more powerful versions of ICM that are not
	 completely represented in the md file.  */
    }

  /* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al).  */
  if (TARGET_Z10 && (mode == DImode || mode == SImode))
    {
      enum machine_mode mode_s = GET_MODE (src);

      if (mode_s == VOIDmode)
	{
	  /* Assume const_int etc already in the proper mode.  */
	  src = force_reg (mode, src);
	}
      else if (mode_s != mode)
	{
	  gcc_assert (GET_MODE_BITSIZE (mode_s) >= bitsize);
	  src = force_reg (mode_s, src);
	  src = gen_lowpart (mode, src);
	}

      op = gen_rtx_ZERO_EXTRACT (mode, dest, op1, op2);
      op = gen_rtx_SET (VOIDmode, op, src);
      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));

      return true;
    }

  return false;
}
/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
   register that holds VAL of mode MODE shifted by COUNT bits.  */

static inline rtx
s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
{
  val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
                             NULL_RTX, 1, OPTAB_DIRECT);
  return expand_simple_binop (SImode, ASHIFT, val, count,
                              NULL_RTX, 1, OPTAB_DIRECT);
}
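/* Illustrative sketch, not part of the original sources: for QImode the
   helper above computes the plain C expression

     (val & 0xff) << count

   i.e. VAL is first masked down to the width of MODE and then moved to
   the bit position COUNT within the containing SImode word.  */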
/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode.  */

struct alignment_context
{
  rtx memsi;      /* SI aligned memory location.  */
  rtx shift;      /* Bit offset with regard to lsb.  */
  rtx modemask;   /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
  bool aligned;   /* True if memory is aligned, false else.  */
};
/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
   structure AC for transparent simplifying, if the memory alignment is known
   to be at least 32bit.  MEM is the memory location for the actual operation
   and MODE its mode.  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem,
                        enum machine_mode mode)
{
  ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
  ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (ac->aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx byteoffset, addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
                                   GEN_INT (-GET_MODE_SIZE (SImode)),
                                   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      /* Calculate shiftcount.  */
      byteoffset = expand_simple_binop (Pmode, AND, addr,
                                        GEN_INT (GET_MODE_SIZE (SImode) - 1),
                                        NULL_RTX, 1, OPTAB_DIRECT);
      /* As we already have some offset, evaluate the remaining distance.  */
      ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
                                       NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Shift is the byte count, but we need the bitcount.  */
  ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift, GEN_INT (3),
                                   NULL_RTX, 1, OPTAB_DIRECT);
  /* Calculate masks.  */
  ac->modemask = expand_simple_binop (SImode, ASHIFT,
                                      GEN_INT (GET_MODE_MASK (mode)),
                                      ac->shift, NULL_RTX, 1, OPTAB_DIRECT);
  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask,
                                      NULL_RTX, 1);
}
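/* Illustrative sketch, not part of the original sources: S/390 is
   big-endian, so for a MODE-sized value at byte offset B within its
   aligned SImode word the shift computed above amounts to

     shift = (4 - size_of_mode - B) * 8

   e.g. a halfword at byte offset 0 is shifted by 16 bits, one at byte
   offset 2 by 0 bits.  B corresponds to the byteoffset value computed
   when the alignment is unknown.  */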
/* A subroutine of s390_expand_cs_hqi.  Insert INS into VAL.  If possible,
   use a single insv insn into SEQ2.  Otherwise, put prep insns in SEQ1 and
   perform the merge in SEQ2.  */

static rtx
s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
                    enum machine_mode mode, rtx val, rtx ins)
{
  rtx tmp;

  start_sequence ();
  tmp = copy_to_mode_reg (SImode, val);
  if (s390_expand_insv (tmp, GEN_INT (GET_MODE_BITSIZE (mode)),
                        ac->shift, ins))
    {
      *seq1 = NULL;
      *seq2 = get_insns ();
      end_sequence ();
      return tmp;
    }
  end_sequence ();

  /* Failed to use insv.  Generate a two part shift and mask.  */
  start_sequence ();
  tmp = s390_expand_mask_and_shift (ins, mode, ac->shift);
  *seq1 = get_insns ();
  end_sequence ();

  start_sequence ();
  tmp = expand_simple_binop (SImode, IOR, tmp, val, NULL_RTX, 1, OPTAB_DIRECT);
  *seq2 = get_insns ();
  end_sequence ();

  return tmp;
}
/* Expand an atomic compare and swap operation for HImode and QImode.  MEM is
   the memory location, CMP the old value to compare MEM with and NEW_RTX the
   value to set if CMP == MEM.  */

void
s390_expand_cs_hqi (enum machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
                    rtx cmp, rtx new_rtx, bool is_weak)
{
  struct alignment_context ac;
  rtx cmpv, newv, val, cc, seq0, seq1, seq2, seq3;
  rtx res = gen_reg_rtx (SImode);
  rtx csloop = NULL, csend = NULL;

  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Load full word.  Subsequent loads are performed by CS.  */
  val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
                             NULL_RTX, 1, OPTAB_DIRECT);

  /* Prepare insertions of cmp and new_rtx into the loaded value.  When
     possible, we try to use insv to make this happen efficiently.  If
     that fails we'll generate code both inside and outside the loop.  */
  cmpv = s390_two_part_insv (&ac, &seq0, &seq2, mode, val, cmp);
  newv = s390_two_part_insv (&ac, &seq1, &seq3, mode, val, new_rtx);

  if (seq0)
    emit_insn (seq0);
  if (seq1)
    emit_insn (seq1);

  /* Start CS loop.  */
  if (!is_weak)
    {
      /* Begin assuming success.  */
      emit_move_insn (btarget, const1_rtx);

      csloop = gen_label_rtx ();
      csend = gen_label_rtx ();
      emit_label (csloop);
    }

  /* val = "<mem>00..0<mem>"
   * cmp = "00..0<cmp>00..0"
   * new = "00..0<new>00..0"
   */

  emit_insn (seq2);
  emit_insn (seq3);

  cc = s390_emit_compare_and_swap (EQ, res, ac.memsi, cmpv, newv);
  if (is_weak)
    emit_insn (gen_cstorecc4 (btarget, cc, XEXP (cc, 0), XEXP (cc, 1)));
  else
    {
      rtx tmp;

      /* Jump to end if we're done (likely?).  */
      s390_emit_jump (csend, cc);

      /* Check for changes outside mode, and loop internal if so.
         Arrange the moves so that the compare is adjacent to the
         branch so that we can generate CRJ.  */
      tmp = copy_to_reg (val);
      force_expand_binop (SImode, and_optab, res, ac.modemaski, val,
                          1, OPTAB_DIRECT);
      cc = s390_emit_compare (NE, val, tmp);
      s390_emit_jump (csloop, cc);

      /* Failed.  */
      emit_move_insn (btarget, const0_rtx);
      emit_label (csend);
    }

  /* Return the correct part of the bitfield.  */
  convert_move (vtarget, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
                                              NULL_RTX, 1, OPTAB_DIRECT), 1);
}
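/* Illustrative sketch, not part of the original sources: the loop built
   above emulates a sub-word compare-and-swap on top of the word-sized CS
   instruction, roughly

     word = *aligned & ~mask;                 -- bits outside the operand
     for (;;) {
       old = word | (cmp << shift);
       new = word | (new_val << shift);
       res = CS (aligned, old, new);          -- full-word compare-and-swap
       if (res == old) { success; break; }
       if ((res & ~mask) == word) { failure; break; }
       word = res & ~mask;                    -- outside bits changed: retry
     }

   All identifiers here are made up; mask and shift correspond to
   ac.modemask and ac.shift computed by init_alignment_context.  */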
/* Expand an atomic operation CODE of mode MODE.  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value
   MEM holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  */

void
s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
                    rtx target, rtx mem, rtx val, bool after)
{
  struct alignment_context ac;
  rtx cmp;
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = gen_reg_rtx (SImode);
  rtx csloop = gen_label_rtx ();

  gcc_assert (!target || register_operand (target, VOIDmode));
  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Shift val to the correct bit positions.
     Preserve "icm", but prevent "ex icm".  */
  if (!(ac.aligned && code == SET && MEM_P (val)))
    val = s390_expand_mask_and_shift (val, mode, ac.shift);

  /* Further preparation insns.  */
  if (code == PLUS || code == MINUS)
    emit_move_insn (orig, val);
  else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
    val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
                               NULL_RTX, 1, OPTAB_DIRECT);

  /* Load full word.  Subsequent loads are performed by CS.  */
  cmp = force_reg (SImode, ac.memsi);

  /* Start CS loop.  */
  emit_label (csloop);
  emit_move_insn (new_rtx, cmp);

  /* Patch new with val at correct position.  */
  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, new_rtx, orig,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      if (ac.aligned && MEM_P (val))
        store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0,
                         0, 0, SImode, val);
      else
        {
          new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
                                         NULL_RTX, 1, OPTAB_DIRECT);
          new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
                                         NULL_RTX, 1, OPTAB_DIRECT);
        }
      break;
    case AND:
    case IOR:
    case XOR:
      new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    case MULT: /* NAND */
      new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    default:
      gcc_unreachable ();
    }

  s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
                                                      ac.memsi, cmp, new_rtx));

  /* Return the correct part of the bitfield.  */
  if (target)
    convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
                                               after ? new_rtx : cmp, ac.shift,
                                               NULL_RTX, 1, OPTAB_DIRECT), 1);
}
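/* Illustrative sketch, not part of the original sources: the MULT case
   above encodes atomic NAND.  Within the masked sub-word it computes

     new = (old & val) ^ mask;   -- i.e. ~(old & val), restricted to mask

   and the earlier XOR of VAL with ~mask guarantees that the AND leaves
   the bits outside the operand untouched.  */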
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;

static void
s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs ("\t.quad\t", file);
      break;
    default:
      gcc_unreachable ();
    }
  output_addr_const (file, x);
  fputs ("@DTPOFF", file);
}
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
/* Implement TARGET_MANGLE_TYPE.  */

static const char *
s390_mangle_type (const_tree type)
{
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_LONG_DOUBLE_128)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
#endif
/* In the name of slightly smaller debug output, and to cater to
   general assembler lossage, recognize various UNSPEC sequences
   and turn them back into a direct symbol reference.  */

static rtx
s390_delegitimize_address (rtx orig_x)
{
  rtx x, y;

  orig_x = delegitimize_mem_from_attrs (orig_x);
  x = orig_x;

  /* Extract the symbol ref from:
     (plus:SI (reg:SI 12 %r12)
              (const:SI (unspec:SI [(symbol_ref/f:SI ("*.LC0"))]
                                    UNSPEC_GOTOFF/PLTOFF)))
     and
     (plus:SI (reg:SI 12 %r12)
              (const:SI (plus:SI (unspec:SI [(symbol_ref:SI ("L"))]
                                             UNSPEC_GOTOFF/PLTOFF)
                                 (const_int 4 [0x4]))))  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      HOST_WIDE_INT offset = 0;

      /* The const operand.  */
      y = XEXP (XEXP (x, 1), 0);

      if (GET_CODE (y) == PLUS
          && GET_CODE (XEXP (y, 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (y, 1));
          y = XEXP (y, 0);
        }

      if (GET_CODE (y) == UNSPEC
          && (XINT (y, 1) == UNSPEC_GOTOFF
              || XINT (y, 1) == UNSPEC_PLTOFF))
        return plus_constant (Pmode, XVECEXP (y, 0, 0), offset);
    }

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    {
      y = XEXP (XEXP (x, 1), 0);
      if (GET_CODE (y) == UNSPEC
          && XINT (y, 1) == UNSPEC_GOT)
        y = XVECEXP (y, 0, 0);
      else
        return orig_x;
    }
  else if (GET_CODE (x) == CONST)
    {
      /* Extract the symbol ref from:
         (mem:QI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))]
                                       UNSPEC_PLT/GOTENT)))  */

      y = XEXP (x, 0);
      if (GET_CODE (y) == UNSPEC
          && (XINT (y, 1) == UNSPEC_GOTENT
              || XINT (y, 1) == UNSPEC_PLT))
        y = XVECEXP (y, 0, 0);
      else
        return orig_x;
    }
  else
    return orig_x;

  if (GET_MODE (orig_x) != Pmode)
    {
      if (GET_MODE (orig_x) == BLKmode)
        return orig_x;
      y = lowpart_subreg (GET_MODE (orig_x), y, Pmode);
      if (y == NULL_RTX)
        return orig_x;
    }
  return y;
}
/* Output operand OP to stdio stream FILE.
   OP is an address (register + offset) which is not used to address data;
   instead the rightmost bits are interpreted as the value.  */

static void
print_shift_count_operand (FILE *file, rtx op)
{
  HOST_WIDE_INT offset;
  rtx base;

  /* Extract base register and offset.  */
  if (!s390_decompose_shift_count (op, &base, &offset))
    gcc_unreachable ();

  /* Sanity check.  */
  if (base)
    {
      gcc_assert (GET_CODE (base) == REG);
      gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
    }

  /* Offsets are restricted to twelve bits.  */
  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
  if (base)
    fprintf (file, "(%s)", reg_names[REGNO (base)]);
}
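/* Illustrative example, not part of the original sources: for an operand
   representing %r3 + 4098 this prints "2(%r3)", since only the low twelve
   bits of the offset survive (4098 & 4095 == 2).  */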
/* See 'get_some_local_dynamic_name'.  */

static int
get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      x = get_pool_constant (x);
      return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
    }

  if (GET_CODE (x) == SYMBOL_REF
      && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
    {
      cfun->machine->some_ld_name = XSTR (x, 0);
      return 1;
    }

  return 0;
}

/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in local-dynamic base patterns.  */

static const char *
get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  gcc_unreachable ();
}
/* Output machine-dependent UNSPECs occurring in address constant X
   in assembler syntax to stdio stream FILE.  Returns true if the
   constant X could be recognized, false otherwise.  */

static bool
s390_output_addr_const_extra (FILE *file, rtx x)
{
  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
    switch (XINT (x, 1))
      {
      case UNSPEC_GOTENT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTENT");
        return true;
      case UNSPEC_GOT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOT");
        return true;
      case UNSPEC_GOTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTOFF");
        return true;
      case UNSPEC_PLT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLT");
        return true;
      case UNSPEC_PLTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLTOFF");
        return true;
      case UNSPEC_TLSGD:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@TLSGD");
        return true;
      case UNSPEC_TLSLDM:
        assemble_name (file, get_some_local_dynamic_name ());
        fprintf (file, "@TLSLDM");
        return true;
      case UNSPEC_DTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@DTPOFF");
        return true;
      case UNSPEC_NTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@NTPOFF");
        return true;
      case UNSPEC_GOTNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTNTPOFF");
        return true;
      case UNSPEC_INDNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@INDNTPOFF");
        return true;
      }

  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
    switch (XINT (x, 1))
      {
      case UNSPEC_POOL_OFFSET:
        x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
        output_addr_const (file, x);
        return true;
      }

  return false;
}
/* Output address operand ADDR in assembler syntax to
   stdio stream FILE.  */

void
print_operand_address (FILE *file, rtx addr)
{
  struct s390_address ad;

  if (s390_loadrelative_operand_p (addr))
    {
      if (!TARGET_Z10)
        {
          output_operand_lossage ("symbolic memory references are "
                                  "only supported on z10 or later");
          return;
        }
      output_addr_const (file, addr);
      return;
    }

  if (!s390_decompose_address (addr, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    output_operand_lossage ("cannot decompose address");

  if (ad.disp)
    output_addr_const (file, ad.disp);
  else
    fprintf (file, "0");

  if (ad.base && ad.indx)
    fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
             reg_names[REGNO (ad.base)]);
  else if (ad.base)
    fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
}
/* Output operand X in assembler syntax to stdio stream FILE.
   CODE specifies the format flag.  The following format flags
   are recognized:

   'C': print opcode suffix for branch condition.
   'D': print opcode suffix for inverse branch condition.
   'E': print opcode suffix for branch on index instruction.
   'J': print tls_load/tls_gdcall/tls_ldcall suffix
   'G': print the size of the operand in bytes.
   'O': print only the displacement of a memory reference.
   'R': print only the base register of a memory reference.
   'S': print S-type memory reference (base+displacement).
   'N': print the second word of a DImode operand.
   'M': print the second word of a TImode operand.
   'Y': print shift count operand.

   'b': print integer X as if it's an unsigned byte.
   'c': print integer X as if it's a signed byte.
   'x': print integer X as if it's an unsigned halfword.
   'h': print integer X as if it's a signed halfword.
   'i': print the first nonzero HImode part of X.
   'j': print the first HImode part unequal to -1 of X.
   'k': print the first nonzero SImode part of X.
   'm': print the first SImode part unequal to -1 of X.
   'o': print integer X as if it's an unsigned 32bit word.  */

void
print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'C':
      fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
      return;

    case 'D':
      fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
      return;

    case 'E':
      if (GET_CODE (x) == LE)
        fprintf (file, "l");
      else if (GET_CODE (x) == GT)
        fprintf (file, "h");
      else
        output_operand_lossage ("invalid comparison operator "
                                "for 'E' output modifier");
      return;

    case 'J':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          fprintf (file, "%s", ":tls_load:");
          output_addr_const (file, x);
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
        {
          fprintf (file, "%s", ":tls_gdcall:");
          output_addr_const (file, XVECEXP (x, 0, 0));
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
        {
          fprintf (file, "%s", ":tls_ldcall:");
          assemble_name (file, get_some_local_dynamic_name ());
        }
      else
        output_operand_lossage ("invalid reference for 'J' output modifier");
      return;

    case 'G':
      fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
      return;

    case 'O':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'O' output modifier");
            return;
          }

        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'O' output modifier");
            return;
          }

        if (ad.disp)
          output_addr_const (file, ad.disp);
        else
          fprintf (file, "0");
      }
      return;

    case 'R':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'R' output modifier");
            return;
          }

        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'R' output modifier");
            return;
          }

        if (ad.base)
          fprintf (file, "%s", reg_names[REGNO (ad.base)]);
        else
          fprintf (file, "0");
      }
      return;

    case 'S':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'S' output modifier");
            return;
          }
        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'S' output modifier");
            return;
          }

        if (ad.disp)
          output_addr_const (file, ad.disp);
        else
          fprintf (file, "0");

        if (ad.base)
          fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
      }
      return;

    case 'N':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode,
                            plus_constant (Pmode, XEXP (x, 0), 4));
      else
        output_operand_lossage ("register or memory expression expected "
                                "for 'N' output modifier");
      break;

    case 'M':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode,
                            plus_constant (Pmode, XEXP (x, 0), 8));
      else
        output_operand_lossage ("register or memory expression expected "
                                "for 'M' output modifier");
      break;

    case 'Y':
      print_shift_count_operand (file, x);
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fprintf (file, "%s", reg_names[REGNO (x)]);
      break;

    case MEM:
      output_address (XEXP (x, 0));
      break;

    case CONST:
    case CODE_LABEL:
    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    case CONST_INT:
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
      else if (code == 'c')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
      else if (code == 'i')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, 0));
      else if (code == 'j')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, -1));
      else if (code == 'k')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, SImode, 0));
      else if (code == 'm')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, SImode, -1));
      else if (code == 'o')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST_DOUBLE:
      gcc_assert (GET_MODE (x) == VOIDmode);
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
        {
          if (code == 0)
            output_operand_lossage ("invalid constant - try using "
                                    "an output modifier");
          else
            output_operand_lossage ("invalid constant for output modifier '%c'",
                                    code);
        }
      break;

    default:
      if (code == 0)
        output_operand_lossage ("invalid expression - try using "
                                "an output modifier");
      else
        output_operand_lossage ("invalid expression for output "
                                "modifier '%c'", code);
      break;
    }
}
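/* Illustrative example, not part of the original sources: a machine
   description template such as

     "lm\t%0,%N0,%S1"

   would print operand 0, the register following it ('N'), and an S-type
   base+displacement rendering of memory operand 1 ('S'), giving e.g.
   "lm %r2,%r3,96(%r15)".  The concrete mnemonic is only an example.  */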
/* Target hook for assembling integer objects.  We need to define it
   here to work around a bug in some versions of GAS, which couldn't
   handle values smaller than INT_MIN when printed in decimal.  */

static bool
s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == 8 && aligned_p
      && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
    {
      fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
               INTVAL (x));
      return true;
    }
  return default_assemble_integer (x, size, aligned_p);
}
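/* Illustrative example, not part of the original sources: for the 64-bit
   constant -2147483649 (one below INT_MIN) this hook emits

     .quad 0xffffffff7fffffff

   instead of the decimal spelling that the buggy assemblers rejected.  */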
/* Returns true if register REGNO is used for forming
   a memory address in expression X.  */

static bool
reg_used_in_mem_p (int regno, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == MEM)
    {
      if (refers_to_regno_p (regno, regno+1,
                             XEXP (x, 0), 0))
        return true;
    }
  else if (code == SET
           && GET_CODE (SET_DEST (x)) == PC)
    {
      if (refers_to_regno_p (regno, regno+1,
                             SET_SRC (x), 0))
        return true;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && reg_used_in_mem_p (regno, XEXP (x, i)))
        return true;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
            return true;
    }
  return false;
}
/* Returns true if expression DEP_RTX sets an address register
   used by instruction INSN to address memory.  */

static bool
addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
  rtx target, pat;

  if (GET_CODE (dep_rtx) == INSN)
    dep_rtx = PATTERN (dep_rtx);

  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);
      if (GET_CODE (target) == STRICT_LOW_PART)
        target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
        {
          int regno = REGNO (target);

          if (s390_safe_attr_type (insn) == TYPE_LA)
            {
              pat = PATTERN (insn);
              if (GET_CODE (pat) == PARALLEL)
                {
                  gcc_assert (XVECLEN (pat, 0) == 2);
                  pat = XVECEXP (pat, 0, 0);
                }
              gcc_assert (GET_CODE (pat) == SET);
              return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
            }
          else if (get_attr_atype (insn) == ATYPE_AGEN)
            return reg_used_in_mem_p (regno, PATTERN (insn));
        }
    }
  return false;
}
/* Return 1, if dep_insn sets register used in insn in the agen unit.  */

int
s390_agen_dep_p (rtx dep_insn, rtx insn)
{
  rtx dep_rtx = PATTERN (dep_insn);
  int i;

  if (GET_CODE (dep_rtx) == SET
      && addr_generation_dependency_p (dep_rtx, insn))
    return 1;
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
        {
          if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
            return 1;
        }
    }
  return 0;
}
/* A C statement (sans semicolon) to update the integer scheduling priority
   INSN_PRIORITY (INSN).  Increase the priority to execute the INSN earlier,
   reduce the priority to execute INSN later.  Do not define this macro if
   you do not need to adjust the scheduling priorities of insns.

   A STD instruction should be scheduled earlier,
   in order to use the bypass.  */
static int
s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  if (! INSN_P (insn))
    return priority;

  if (s390_tune != PROCESSOR_2084_Z990
      && s390_tune != PROCESSOR_2094_Z9_109
      && s390_tune != PROCESSOR_2097_Z10
      && s390_tune != PROCESSOR_2817_Z196)
    return priority;

  switch (s390_safe_attr_type (insn))
    {
      case TYPE_FSTOREDF:
      case TYPE_FSTORESF:
        priority = priority << 3;
        break;
      case TYPE_STORE:
      case TYPE_STM:
        priority = priority << 1;
        break;
      default:
        break;
    }
  return priority;
}
/* The number of instructions that can be issued per cycle.  */

static int
s390_issue_rate (void)
{
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
    case PROCESSOR_2094_Z9_109:
    case PROCESSOR_2817_Z196:
      return 3;
    case PROCESSOR_2097_Z10:
      return 2;
    default:
      return 1;
    }
}

static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return 4;
}
/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
   Fix up MEMs as required.  */

static void
annotate_constant_pool_refs (rtx *x)
{
  int i, j;
  const char *fmt;

  gcc_assert (GET_CODE (*x) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (*x));

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      if (GET_CODE (memref) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (memref))
        {
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
                                     UNSPEC_LTREF);

          *x = replace_equiv_address (*x, addr);
          return;
        }

      if (GET_CODE (memref) == CONST
          && GET_CODE (XEXP (memref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
          rtx sym = XEXP (XEXP (memref, 0), 0);
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
                                     UNSPEC_LTREF);

          *x = replace_equiv_address (*x, plus_constant (Pmode, addr, off));
          return;
        }
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      if (GET_CODE (addrref) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (addrref))
        {
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
                                     UNSPEC_LTREF);

          SET_SRC (*x) = addr;
          return;
        }

      if (GET_CODE (addrref) == CONST
          && GET_CODE (XEXP (addrref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
          rtx sym = XEXP (XEXP (addrref, 0), 0);
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
                                     UNSPEC_LTREF);

          SET_SRC (*x) = plus_constant (Pmode, addr, off);
          return;
        }
    }

  /* Annotate LTREL_BASE as well.  */
  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      rtx base = cfun->machine->base_reg;
      *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
                           UNSPEC_LTREL_BASE);
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          annotate_constant_pool_refs (&XEXP (*x, i));
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            annotate_constant_pool_refs (&XVECEXP (*x, i, j));
        }
    }
}
/* Split all branches that exceed the maximum distance.
   Returns true if this created a new literal pool entry.  */

static int
s390_split_branches (void)
{
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  int new_literal = 0, ret;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
        pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
        continue;

      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
        {
          label = &SET_SRC (pat);
        }
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
        {
          if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 1);
          else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 2);
          else
            continue;
        }
      else
        continue;

      if (get_attr_length (insn) <= 4)
        continue;

      /* We are going to use the return register as scratch register,
         make sure it will be saved/restored by the prologue/epilogue.  */
      cfun_frame_layout.save_return_addr_p = 1;

      if (!flag_pic)
        {
          new_literal = 1;
          tmp = force_const_mem (Pmode, *label);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
          INSN_ADDRESSES_NEW (tmp, -1);
          annotate_constant_pool_refs (&PATTERN (tmp));

          target = temp_reg;
        }
      else
        {
          new_literal = 1;
          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
                                   UNSPEC_LTREL_OFFSET);
          target = gen_rtx_CONST (Pmode, target);
          target = force_const_mem (Pmode, target);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
          INSN_ADDRESSES_NEW (tmp, -1);
          annotate_constant_pool_refs (&PATTERN (tmp));

          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
                                                     cfun->machine->base_reg),
                                   UNSPEC_LTREL_BASE);
          target = gen_rtx_PLUS (Pmode, temp_reg, target);
        }

      ret = validate_change (insn, label, target, 0);
      gcc_assert (ret);
    }

  return new_literal;
}
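/* Illustrative sketch, not part of the original sources: relative branches
   on S/390 encode a signed 16-bit halfword offset, hence the 64KB limit
   checked above.  An out-of-range branch is conceptually rewritten as

     l   %r14,<pool slot holding the address of the label>
     b   0(%r14)

   with the label address parked in the literal pool; the register shown
   matches RETURN_REGNUM only by way of example.  */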
/* Find an annotated literal pool symbol referenced in RTX X,
   and store it at REF.  Will abort if X contains references to
   more than one such pool symbol; multiple references to the same
   symbol are allowed, however.

   The rtx pointed to by REF must be initialized to NULL_RTX
   by the caller before calling this routine.  */

static void
find_constant_pool_ref (rtx x, rtx *ref)
{
  int i, j;
  const char *fmt;

  /* Ignore LTREL_BASE references.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return;
  /* Likewise POOL_ENTRY insns.  */
  if (GET_CODE (x) == UNSPEC_VOLATILE
      && XINT (x, 1) == UNSPECV_POOL_ENTRY)
    return;

  gcc_assert (GET_CODE (x) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (x));

  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
    {
      rtx sym = XVECEXP (x, 0, 0);
      gcc_assert (GET_CODE (sym) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (sym));

      if (*ref == NULL_RTX)
        *ref = sym;
      else
        gcc_assert (*ref == sym);

      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          find_constant_pool_ref (XEXP (x, i), ref);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            find_constant_pool_ref (XVECEXP (x, i, j), ref);
        }
    }
}
/* Replace every reference to the annotated literal pool
   symbol REF in X by its base plus OFFSET.  */

static void
replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
{
  int i, j;
  const char *fmt;

  gcc_assert (*x != ref);

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREF
      && XVECEXP (*x, 0, 0) == ref)
    {
      *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
      return;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 1)) == CONST_INT
      && GET_CODE (XEXP (*x, 0)) == UNSPEC
      && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
      && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
    {
      rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
      *x = plus_constant (Pmode, addr, INTVAL (XEXP (*x, 1)));
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
        }
    }
}
/* Check whether X contains an UNSPEC_LTREL_BASE.
   Return its constant pool symbol if found, NULL_RTX otherwise.  */

static rtx
find_ltrel_base (rtx x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return XVECEXP (x, 0, 0);

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx fnd = find_ltrel_base (XEXP (x, i));
          if (fnd)
            return fnd;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
              if (fnd)
                return fnd;
            }
        }
    }

  return NULL_RTX;
}
6085 replace_ltrel_base (rtx
*x
)
6090 if (GET_CODE (*x
) == UNSPEC
6091 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
6093 *x
= XVECEXP (*x
, 0, 1);
6097 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
6098 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
6102 replace_ltrel_base (&XEXP (*x
, i
));
6104 else if (fmt
[i
] == 'E')
6106 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
6107 replace_ltrel_base (&XVECEXP (*x
, i
, j
));
/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */

#define NR_C_MODES 11
enum machine_mode constant_modes[NR_C_MODES] =
{
  TFmode, TImode, TDmode,
  DFmode, DImode, DDmode,
  SFmode, SImode, SDmode,
  HImode,
  QImode
};

struct constant
{
  struct constant *next;
  rtx value;
  rtx label;
};

struct constant_pool
{
  struct constant_pool *next;
  rtx first_insn;
  rtx pool_insn;
  bitmap insns;
  rtx emit_pool_after;

  struct constant *constants[NR_C_MODES];
  struct constant *execute;
  rtx label;
  int size;
};
/* Allocate new constant_pool structure.  */

static struct constant_pool *
s390_alloc_pool (void)
{
  struct constant_pool *pool;
  int i;

  pool = (struct constant_pool *) xmalloc (sizeof *pool);
  pool->next = NULL;
  for (i = 0; i < NR_C_MODES; i++)
    pool->constants[i] = NULL;

  pool->execute = NULL;
  pool->label = gen_label_rtx ();
  pool->first_insn = NULL_RTX;
  pool->pool_insn = NULL_RTX;
  pool->insns = BITMAP_ALLOC (NULL);
  pool->size = 0;
  pool->emit_pool_after = NULL_RTX;

  return pool;
}
/* Create new constant pool covering instructions starting at INSN
   and chain it to the end of POOL_LIST.  */

static struct constant_pool *
s390_start_pool (struct constant_pool **pool_list, rtx insn)
{
  struct constant_pool *pool, **prev;

  pool = s390_alloc_pool ();
  pool->first_insn = insn;

  for (prev = pool_list; *prev; prev = &(*prev)->next)
    ;
  *prev = pool;

  return pool;
}
/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  */

static void
s390_end_pool (struct constant_pool *pool, rtx insn)
{
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}
/* Add INSN to the list of insns covered by POOL.  */

static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}
/* Return pool out of POOL_LIST that covers INSN.  */

static struct constant_pool *
s390_find_pool (struct constant_pool *pool_list, rtx insn)
{
  struct constant_pool *pool;

  for (pool = pool_list; pool; pool = pool->next)
    if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
      break;

  return pool;
}
/* Add constant VAL of mode MODE to the constant pool POOL.  */

static void
s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = val;
      c->label = gen_label_rtx ();
      c->next = pool->constants[i];
      pool->constants[i] = c;
      pool->size += GET_MODE_SIZE (mode);
    }
}
/* Return an rtx that represents the offset of X from the start of
   pool POOL.  */

static rtx
s390_pool_offset (struct constant_pool *pool, rtx x)
{
  rtx label;

  label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
  x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
                      UNSPEC_POOL_OFFSET);
  return gen_rtx_CONST (GET_MODE (x), x);
}
/* Find constant VAL of mode MODE in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the new constant.  */

static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
                    enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}
/* Check whether INSN is an execute.  Return the label_ref to its
   execute target template if so, NULL_RTX otherwise.  */

static rtx
s390_execute_label (rtx insn)
{
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == PARALLEL
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
      && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
    return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);

  return NULL_RTX;
}
/* Add execute target for INSN to the constant pool POOL.  */

static void
s390_add_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = insn;
      c->label = gen_label_rtx ();
      c->next = pool->execute;
      pool->execute = c;
      pool->size += 6;
    }
}
/* Find execute target for INSN in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the execute target.  */

static rtx
s390_find_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}
/* For an execute INSN, extract the execute target template.  */

static rtx
s390_execute_target (rtx insn)
{
  rtx pattern = PATTERN (insn);
  gcc_assert (s390_execute_label (insn));

  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
        RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  return pattern;
}
/* Indicate that INSN cannot be duplicated.  This is the case for
   execute insns that carry a unique label.  */

static bool
s390_cannot_copy_insn_p (rtx insn)
{
  rtx label = s390_execute_label (insn);
  return label && label != const0_rtx;
}
/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static void
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn = pool->pool_insn;
  int i;

  /* Switch to rodata section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_start (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Ensure minimum pool alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
  else
    insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit pool base label.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
        /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
        rtx value = copy_rtx (c->value);
        if (GET_CODE (value) == CONST
            && GET_CODE (XEXP (value, 0)) == UNSPEC
            && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
            && XVECLEN (XEXP (value, 0), 0) == 1)
          value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));

        insn = emit_label_after (c->label, insn);
        INSN_ADDRESSES_NEW (insn, -1);

        value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
                                         gen_rtvec (1, value),
                                         UNSPECV_POOL_ENTRY);
        insn = emit_insn_after (value, insn);
        INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Ensure minimum alignment for instructions.  */
  insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Output in-pool execute template insns.  */
  for (c = pool->execute; c; c = c->next)
    {
      insn = emit_label_after (c->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = emit_insn_after (s390_execute_target (c->value), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Switch back to previous section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_end (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);
}
/* Free all memory used by POOL.  */

static void
s390_free_pool (struct constant_pool *pool)
{
  struct constant *c, *next;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = next)
      {
        next = c->next;
        free (c);
      }

  for (c = pool->execute; c; c = next)
    {
      next = c->next;
      free (c);
    }

  BITMAP_FREE (pool->insns);
  free (pool);
}
/* Collect main literal pool.  Return NULL on overflow.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
        {
          gcc_assert (!pool->pool_insn);
          pool->pool_insn = insn;
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          s390_add_execute (pool, insn);
        }
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);
              s390_add_constant (pool, constant, mode);
            }
        }

      /* If hot/cold partitioning is enabled we have to make sure that
         the literal pool is emitted in the same section where the
         initialization of the literal pool base pointer takes place.
         emit_pool_after is only used in the non-overflow case on non
         Z cpus where we can emit the literal pool at the end of the
         function body within the text section.  */
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
          && !pool->emit_pool_after)
        pool->emit_pool_after = PREV_INSN (insn);
    }

  gcc_assert (pool->pool_insn || pool->size == 0);

  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
         pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  /* If the function ends with the section where the literal pool
     should be emitted set the marker to its end.  */
  if (pool && !pool->emit_pool_after)
    pool->emit_pool_after = get_last_insn ();

  return pool;
}
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  */

static void
s390_mainpool_finish (struct constant_pool *pool)
{
  rtx base_reg = cfun->machine->base_reg;
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      /* We don't actually need a base register after all.  */
      cfun->machine->base_reg = NULL_RTX;

      if (pool->pool_insn)
        remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
           + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      /* emit_pool_after will be set by s390_mainpool_start to the
         last insn of the section where the literal pool should be
         emitted.  */
      insn = pool->emit_pool_after;

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_jump_insn_after (insn, pool->pool_insn);
      JUMP_LABEL (insn) = pool_end;
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }


  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn));

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              if (s390_execute_label (insn))
                addr = s390_find_execute (pool, insn);
              else
                addr = s390_find_constant (pool, get_pool_constant (pool_ref),
                                           get_pool_mode (pool_ref));

              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
/* Chunkify the literal pool.  */

#define S390_POOL_CHUNK_MIN	0xc00
#define S390_POOL_CHUNK_MAX	0xe00
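/* Illustrative note, not from the original sources: base+displacement
   addressing reaches 4096 bytes, so a chunk must stay well below that.
   Capping chunks at 0xe00 (3584) bytes leaves slack for alignment padding
   and execute templates, while 0xc00 (3072) marks the point below which
   it is not yet worth forcing a chunk boundary.  */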
static struct constant_pool *
s390_chunkify_start (void)
{
  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx pending_ltrel = NULL_RTX;
  rtx insn;

  rtx (*gen_reload_base) (rtx, rtx) =
    TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;


  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      bool section_switch_p = false;

      /* Check for pending LTREL_BASE.  */
      if (INSN_P (insn))
        {
          rtx ltrel_base = find_ltrel_base (PATTERN (insn));
          if (ltrel_base)
            {
              gcc_assert (ltrel_base == pending_ltrel);
              pending_ltrel = NULL_RTX;
            }
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          if (!curr_pool)
            curr_pool = s390_start_pool (&pool_list, insn);

          s390_add_execute (curr_pool, insn);
          s390_add_pool_insn (curr_pool, insn);
        }
      else if (GET_CODE (insn) == INSN || CALL_P (insn))
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);

              if (!curr_pool)
                curr_pool = s390_start_pool (&pool_list, insn);

              s390_add_constant (curr_pool, constant, mode);
              s390_add_pool_insn (curr_pool, insn);

              /* Don't split the pool chunk between a LTREL_OFFSET load
                 and the corresponding LTREL_BASE.  */
              if (GET_CODE (constant) == CONST
                  && GET_CODE (XEXP (constant, 0)) == UNSPEC
                  && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
                {
                  gcc_assert (!pending_ltrel);
                  pending_ltrel = pool_ref;
                }
            }
        }

      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
        {
          if (curr_pool)
            s390_add_pool_insn (curr_pool, insn);
          /* An LTREL_BASE must follow within the same basic block.  */
          gcc_assert (!pending_ltrel);
        }

      if (NOTE_P (insn))
        switch (NOTE_KIND (insn))
          {
          case NOTE_INSN_SWITCH_TEXT_SECTIONS:
            section_switch_p = true;
            break;
          case NOTE_INSN_VAR_LOCATION:
          case NOTE_INSN_CALL_ARG_LOCATION:
            continue;
          default:
            break;
          }

      if (!curr_pool
          || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
          || INSN_ADDRESSES (INSN_UID (insn)) == -1)
        continue;

      if (TARGET_CPU_ZARCH)
        {
          if (curr_pool->size < S390_POOL_CHUNK_MAX)
            continue;

          s390_end_pool (curr_pool, NULL_RTX);
          curr_pool = NULL;
        }
      else
        {
          int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
                           - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
                           + extra_size;

          /* We will later have to insert base register reload insns.
             Those will have an effect on code size, which we need to
             consider here.  This calculation makes rather pessimistic
             worst-case assumptions.  */
          if (GET_CODE (insn) == CODE_LABEL)
            extra_size += 6;

          if (chunk_size < S390_POOL_CHUNK_MIN
              && curr_pool->size < S390_POOL_CHUNK_MIN
              && !section_switch_p)
            continue;

          /* Pool chunks can only be inserted after BARRIERs ...  */
          if (GET_CODE (insn) == BARRIER)
            {
              s390_end_pool (curr_pool, insn);
              curr_pool = NULL;
              extra_size = 0;
            }

          /* ... so if we don't find one in time, create one.  */
          else if (chunk_size > S390_POOL_CHUNK_MAX
                   || curr_pool->size > S390_POOL_CHUNK_MAX
                   || section_switch_p)
            {
              rtx label, jump, barrier, next, prev;

              if (!section_switch_p)
                {
                  /* We can insert the barrier only after a 'real' insn.  */
                  if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
                    continue;
                  if (get_attr_length (insn) == 0)
                    continue;
                  /* Don't separate LTREL_BASE from the corresponding
                     LTREL_OFFSET load.  */
                  if (pending_ltrel)
                    continue;
                  next = insn;
                  do
                    {
                      insn = next;
                      next = NEXT_INSN (insn);
                    }
                  while (next
                         && NOTE_P (next)
                         && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
                             || NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION));
                }
              else
                {
                  gcc_assert (!pending_ltrel);

                  /* The old pool has to end before the section switch
                     note in order to make it part of the current
                     section.  */
                  insn = PREV_INSN (insn);
                }

              label = gen_label_rtx ();
              prev = insn;
              if (prev && NOTE_P (prev))
                prev = prev_nonnote_insn (prev);
              if (prev)
                jump = emit_jump_insn_after_setloc (gen_jump (label), insn,
                                                    INSN_LOCATION (prev));
              else
                jump = emit_jump_insn_after_noloc (gen_jump (label), insn);
              barrier = emit_barrier_after (jump);
              insn = emit_label_after (label, barrier);
              JUMP_LABEL (jump) = label;
              LABEL_NUSES (label) = 1;

              INSN_ADDRESSES_NEW (jump, -1);
              INSN_ADDRESSES_NEW (barrier, -1);
              INSN_ADDRESSES_NEW (insn, -1);

              s390_end_pool (curr_pool, barrier);
              curr_pool = NULL;
              extra_size = 0;
            }
        }
    }

  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);
  gcc_assert (!pending_ltrel);

  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_ALLOC (NULL);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
         of non-local jumps, so we have to mark them.
         The same holds for named labels.

         Don't do that, however, if it is the label before
         a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
          && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
        {
          rtx vec_insn = next_real_insn (insn);
          rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                        PATTERN (vec_insn) : NULL_RTX;
          if (!vec_pat
              || !(GET_CODE (vec_pat) == ADDR_VEC
                   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
            bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
        }

      /* If we have a direct jump (conditional or unconditional)
         or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
        {
          rtx pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
            pat = XVECEXP (pat, 0, 0);

          if (GET_CODE (pat) == SET)
            {
              rtx label = JUMP_LABEL (insn);
              if (label)
                {
                  if (s390_find_pool (pool_list, label)
                      != s390_find_pool (pool_list, insn))
                    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                }
            }
          else if (GET_CODE (pat) == PARALLEL
                   && XVECLEN (pat, 0) == 2
                   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
                   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
                   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
            {
              /* Find the jump table used by this casesi jump.  */
              rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
              rtx vec_insn = next_real_insn (vec_label);
              rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                            PATTERN (vec_insn) : NULL_RTX;
              if (vec_pat
                  && (GET_CODE (vec_pat) == ADDR_VEC
                      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
                {
                  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

                  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
                    {
                      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

                      if (s390_find_pool (pool_list, label)
                          != s390_find_pool (pool_list, insn))
                        bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                    }
                }
            }
        }
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                      curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
        struct constant_pool *pool = s390_find_pool (pool_list, insn);
        if (pool)
          {
            rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                            pool->label);
            INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
          }
      }


  BITMAP_FREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required.  */

static void
s390_chunkify_finish (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn));

      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
        continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              if (s390_execute_label (insn))
                addr = s390_find_execute (curr_pool, insn);
              else
                addr = s390_find_constant (curr_pool,
                                           get_pool_constant (pool_ref),
                                           get_pool_mode (pool_ref));

              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool, 0);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
          && barrier && GET_CODE (barrier) == BARRIER
          && label && GET_CODE (label) == CODE_LABEL
          && GET_CODE (PATTERN (jump)) == SET
          && SET_DEST (PATTERN (jump)) == pc_rtx
          && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
          && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
        {
          remove_insn (jump);
          remove_insn (barrier);
          remove_insn (label);
        }

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns.  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
        remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN.  */

void
s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
{
  REAL_VALUE_TYPE r;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      gcc_assert (GET_CODE (exp) == CONST_DOUBLE);

      REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
      assemble_real (r, mode, align);
      break;

    case MODE_INT:
      assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
      mark_symbol_refs_as_used (exp);
      break;

    default:
      gcc_unreachable ();
    }
}
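
/* Illustrative example (not part of GCC): for a DFmode entry holding 1.0
   the MODE_FLOAT path above ends up emitting the IEEE bit pattern, while
   an SImode entry referencing a symbol goes through assemble_integer:

       .quad 0x3ff0000000000000    # DFmode constant 1.0
       .long foo                   # SImode reference to foo

   The exact assembler directives depend on the configured assembler;
   the lines above are illustrative only.  */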
/* Return an RTL expression representing the value of the return address
   for the frame COUNT steps up from the current frame.  FRAME is the
   frame pointer of that frame.  */

rtx
s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
{
  int offset;
  rtx addr;

  /* Without backchain, we fail for all but the current frame.  */

  if (!TARGET_BACKCHAIN && count > 0)
    return NULL_RTX;

  /* For the current frame, we need to make sure the initial
     value of RETURN_REGNUM is actually saved.  */

  if (count == 0)
    {
      /* On non-z architectures branch splitting could overwrite r14.  */
      if (TARGET_CPU_ZARCH)
        return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
      else
        {
          cfun_frame_layout.save_return_addr_p = true;
          return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
        }
    }

  if (TARGET_PACKED_STACK)
    offset = -2 * UNITS_PER_LONG;
  else
    offset = RETURN_REGNUM * UNITS_PER_LONG;

  addr = plus_constant (Pmode, frame, offset);
  addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}
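
/* Illustrative sketch (not part of GCC): the user-level builtin that the
   hook above implements.  COUNT == 0 is always supported; non-zero
   counts require -mbackchain, otherwise the hook returns NULL.  */
#if 0
void *
callers_return_address (void)
{
  /* Expands via s390_return_addr_rtx (0, ...).  */
  return __builtin_return_address (0);
}
#endif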
/* Return an RTL expression representing the back chain stored in
   the current stack frame.  */

rtx
s390_back_chain_rtx (void)
{
  rtx chain;

  gcc_assert (TARGET_BACKCHAIN);

  if (TARGET_PACKED_STACK)
    chain = plus_constant (Pmode, stack_pointer_rtx,
                           STACK_POINTER_OFFSET - UNITS_PER_LONG);
  else
    chain = stack_pointer_rtx;

  chain = gen_rtx_MEM (Pmode, chain);
  return chain;
}
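
/* Illustrative sketch (not part of GCC): with -mbackchain every frame
   stores its caller's stack pointer, so the stack can be walked by
   chasing that slot.  The struct layout below is a hypothetical
   simplification; the real slot position differs between the standard
   and the packed stack layout, as computed above.  */
#if 0
struct frame_chain { struct frame_chain *back_chain; };

static unsigned int
count_stack_frames (struct frame_chain *fp)
{
  unsigned int n = 0;
  while (fp)                       /* the chain ends at a NULL pointer */
    {
      n++;
      fp = fp->back_chain;
    }
  return n;
}
#endif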
/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */

static int
find_unused_clobbered_reg (void)
{
  int i;
  for (i = 0; i < 6; i++)
    if (!df_regs_ever_live_p (i))
      return i;
  return 0;
}
/* Helper function for s390_regs_ever_clobbered.  Sets the fields in DATA
   for all clobbered hard regs in SETREG.  */

static void
s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
{
  int *regs_ever_clobbered = (int *)data;
  unsigned int i, regno;
  enum machine_mode mode = GET_MODE (setreg);

  if (GET_CODE (setreg) == SUBREG)
    {
      rtx inner = SUBREG_REG (setreg);
      if (!GENERAL_REG_P (inner))
        return;
      regno = subreg_regno (setreg);
    }
  else if (GENERAL_REG_P (setreg))
    regno = REGNO (setreg);
  else
    return;

  for (i = regno;
       i < regno + HARD_REGNO_NREGS (regno, mode);
       i++)
    regs_ever_clobbered[i] = 1;
}
/* Walks through all basic blocks of the current function looking
   for clobbered hard regs using s390_reg_clobbered_rtx.  The fields
   of the passed integer array REGS_EVER_CLOBBERED are set to one for
   each of those regs.  */

static void
s390_regs_ever_clobbered (int *regs_ever_clobbered)
{
  basic_block cur_bb;
  rtx cur_insn;
  unsigned int i;

  memset (regs_ever_clobbered, 0, 16 * sizeof (int));

  /* For non-leaf functions we have to consider all call clobbered regs to be
     clobbered.  */
  if (!crtl->is_leaf)
    {
      for (i = 0; i < 16; i++)
        regs_ever_clobbered[i] = call_really_used_regs[i];
    }

  /* Make the "magic" eh_return registers live if necessary.  For regs_ever_live
     this work is done by liveness analysis (mark_regs_live_at_end).
     Special care is needed for functions containing landing pads.  Landing pads
     may use the eh registers, but the code which sets these registers is not
     contained in that function.  Hence s390_regs_ever_clobbered is not able to
     deal with this automatically.  */
  if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
    for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
      if (crtl->calls_eh_return
          || (cfun->machine->has_landing_pad_p
              && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
        regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;

  /* For nonlocal gotos all call-saved registers have to be saved.
     This flag is also set for the unwinding code in libgcc.
     See expand_builtin_unwind_init.  For regs_ever_live this is done by
     reload.  */
  if (cfun->has_nonlocal_label)
    for (i = 0; i < 16; i++)
      if (!call_really_used_regs[i])
        regs_ever_clobbered[i] = 1;

  FOR_EACH_BB (cur_bb)
    {
      FOR_BB_INSNS (cur_bb, cur_insn)
        {
          if (INSN_P (cur_insn))
            note_stores (PATTERN (cur_insn),
                         s390_reg_clobbered_rtx,
                         regs_ever_clobbered);
        }
    }
}
/* Determine the frame area which actually has to be accessed
   in the function epilogue.  The values are stored at the
   given pointers AREA_BOTTOM (address of the lowest used stack
   address) and AREA_TOP (address of the first item which does
   not belong to the stack frame).  */

static void
s390_frame_area (int *area_bottom, int *area_top)
{
  int b, t;
  int i;

  b = INT_MAX;
  t = INT_MIN;

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      b = (cfun_frame_layout.gprs_offset
           + cfun_frame_layout.first_restore_gpr * UNITS_PER_LONG);
      t = b + (cfun_frame_layout.last_restore_gpr
               - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_LONG;
    }

  if (TARGET_64BIT && cfun_save_high_fprs_p)
    {
      b = MIN (b, cfun_frame_layout.f8_offset);
      t = MAX (t, (cfun_frame_layout.f8_offset
                   + cfun_frame_layout.high_fprs * 8));
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (cfun_fpr_bit_p (i))
        {
          b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
          t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
        }

  *area_bottom = b;
  *area_top = t;
}
/* Fill cfun->machine with info about register usage of current function.
   Return in CLOBBERED_REGS which GPRs are currently considered set.  */

static void
s390_register_info (int clobbered_regs[])
{
  int i, j;

  /* fprs 8 - 15 are call saved for 64 Bit ABI.  */
  cfun_frame_layout.fpr_bitmap = 0;
  cfun_frame_layout.high_fprs = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (df_regs_ever_live_p (i) && !global_regs[i])
        {
          cfun_set_fpr_bit (i - 16);
          cfun_frame_layout.high_fprs++;
        }

  /* Find first and last gpr to be saved.  We trust regs_ever_live
     data, except that we don't save and restore global registers.

     Also, all registers with special meaning to the compiler need
     to be handled extra.  */

  s390_regs_ever_clobbered (clobbered_regs);

  for (i = 0; i < 16; i++)
    clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];

  if (frame_pointer_needed)
    clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;

  if (flag_pic)
    clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
      |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);

  clobbered_regs[BASE_REGNUM]
    |= (cfun->machine->base_reg
        && REGNO (cfun->machine->base_reg) == BASE_REGNUM);

  clobbered_regs[RETURN_REGNUM]
    |= (!crtl->is_leaf
        || TARGET_TPF_PROFILING
        || cfun->machine->split_branches_pending_p
        || cfun_frame_layout.save_return_addr_p
        || crtl->calls_eh_return
        || cfun->stdarg);

  clobbered_regs[STACK_POINTER_REGNUM]
    |= (!crtl->is_leaf
        || TARGET_TPF_PROFILING
        || cfun_save_high_fprs_p
        || get_frame_size () > 0
        || cfun->calls_alloca
        || cfun->stdarg);

  for (i = 6; i < 16; i++)
    if (df_regs_ever_live_p (i) || clobbered_regs[i])
      break;
  for (j = 15; j > i; j--)
    if (df_regs_ever_live_p (j) || clobbered_regs[j])
      break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      cfun_frame_layout.first_save_gpr_slot = -1;
      cfun_frame_layout.last_save_gpr_slot = -1;
      cfun_frame_layout.first_save_gpr = -1;
      cfun_frame_layout.first_restore_gpr = -1;
      cfun_frame_layout.last_save_gpr = -1;
      cfun_frame_layout.last_restore_gpr = -1;
    }
  else
    {
      /* Save slots for gprs from i to j.  */
      cfun_frame_layout.first_save_gpr_slot = i;
      cfun_frame_layout.last_save_gpr_slot = j;

      for (i = cfun_frame_layout.first_save_gpr_slot;
           i < cfun_frame_layout.last_save_gpr_slot + 1;
           i++)
        if (clobbered_regs[i])
          break;

      for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
        if (clobbered_regs[j])
          break;

      if (i == cfun_frame_layout.last_save_gpr_slot + 1)
        {
          /* Nothing to save/restore.  */
          cfun_frame_layout.first_save_gpr = -1;
          cfun_frame_layout.first_restore_gpr = -1;
          cfun_frame_layout.last_save_gpr = -1;
          cfun_frame_layout.last_restore_gpr = -1;
        }
      else
        {
          /* Save / Restore from gpr i to j.  */
          cfun_frame_layout.first_save_gpr = i;
          cfun_frame_layout.first_restore_gpr = i;
          cfun_frame_layout.last_save_gpr = j;
          cfun_frame_layout.last_restore_gpr = j;
        }
    }

  if (cfun->stdarg)
    {
      /* Varargs functions need to save gprs 2 to 6.  */
      if (cfun->va_list_gpr_size
          && crtl->args.info.gprs < GP_ARG_NUM_REG)
        {
          int min_gpr = crtl->args.info.gprs;
          int max_gpr = min_gpr + cfun->va_list_gpr_size;
          if (max_gpr > GP_ARG_NUM_REG)
            max_gpr = GP_ARG_NUM_REG;

          if (cfun_frame_layout.first_save_gpr == -1
              || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
            {
              cfun_frame_layout.first_save_gpr = 2 + min_gpr;
              cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
            }

          if (cfun_frame_layout.last_save_gpr == -1
              || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
            {
              cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
              cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
            }
        }

      /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved.  */
      if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
          && crtl->args.info.fprs < FP_ARG_NUM_REG)
        {
          int min_fpr = crtl->args.info.fprs;
          int max_fpr = min_fpr + cfun->va_list_fpr_size;
          if (max_fpr > FP_ARG_NUM_REG)
            max_fpr = FP_ARG_NUM_REG;

          /* ??? This is currently required to ensure proper location
             of the fpr save slots within the va_list save area.  */
          if (TARGET_PACKED_STACK)
            min_fpr = 0;

          for (i = min_fpr; i < max_fpr; i++)
            cfun_set_fpr_bit (i);
        }
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
        cfun_set_fpr_bit (i);
}
/* Fill cfun->machine with info about frame of current function.  */

static void
s390_frame_info (void)
{
  int i;

  cfun_frame_layout.frame_size = get_frame_size ();
  if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
    fatal_error ("total size of local variables exceeds architecture limit");

  if (!TARGET_PACKED_STACK)
    {
      cfun_frame_layout.backchain_offset = 0;
      cfun_frame_layout.f0_offset = 16 * UNITS_PER_LONG;
      cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
      cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
      cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
                                       * UNITS_PER_LONG);
    }
  else if (TARGET_BACKCHAIN) /* kernel stack layout */
    {
      cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
                                            - UNITS_PER_LONG);
      cfun_frame_layout.gprs_offset
        = (cfun_frame_layout.backchain_offset
           - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
           * UNITS_PER_LONG);

      if (TARGET_64BIT)
        {
          cfun_frame_layout.f4_offset
            = (cfun_frame_layout.gprs_offset
               - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

          cfun_frame_layout.f0_offset
            = (cfun_frame_layout.f4_offset
               - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
        }
      else
        {
          /* On 31 bit we have to care about alignment of the
             floating point regs to provide fastest access.  */
          cfun_frame_layout.f0_offset
            = ((cfun_frame_layout.gprs_offset
                & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
               - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

          cfun_frame_layout.f4_offset
            = (cfun_frame_layout.f0_offset
               - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
        }
    }
  else /* no backchain */
    {
      cfun_frame_layout.f4_offset
        = (STACK_POINTER_OFFSET
           - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

      cfun_frame_layout.f0_offset
        = (cfun_frame_layout.f4_offset
           - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

      cfun_frame_layout.gprs_offset
        = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
    }

  if (crtl->is_leaf
      && !TARGET_TPF_PROFILING
      && cfun_frame_layout.frame_size == 0
      && !cfun_save_high_fprs_p
      && !cfun->calls_alloca
      && !cfun->stdarg)
    return;

  if (!TARGET_PACKED_STACK)
    cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
                                     + crtl->outgoing_args_size
                                     + cfun_frame_layout.high_fprs * 8);
  else
    {
      if (TARGET_BACKCHAIN)
        cfun_frame_layout.frame_size += UNITS_PER_LONG;

      /* No alignment trouble here because f8-f15 are only saved under
         64 bit.  */
      cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
                                               cfun_frame_layout.f4_offset),
                                          cfun_frame_layout.gprs_offset)
                                     - cfun_frame_layout.high_fprs * 8);

      cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;

      for (i = 0; i < 8; i++)
        if (cfun_fpr_bit_p (i))
          cfun_frame_layout.frame_size += 8;

      cfun_frame_layout.frame_size += cfun_gprs_save_area_size;

      /* If under 31 bit an odd number of gprs has to be saved we have to adjust
         the frame size to sustain 8 byte alignment of stack frames.  */
      cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
                                       STACK_BOUNDARY / BITS_PER_UNIT - 1)
                                      & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));

      cfun_frame_layout.frame_size += crtl->outgoing_args_size;
    }
}
/* Generate frame layout.  Fills in register and frame data for the current
   function in cfun->machine.  This routine can be called multiple times;
   it will re-do the complete frame layout every time.  */

static void
s390_init_frame_layout (void)
{
  HOST_WIDE_INT frame_size;
  int base_used;
  int clobbered_regs[16];

  /* On S/390 machines, we may need to perform branch splitting, which
     will require both base and return address register.  We have no
     choice but to assume we're going to need them until right at the
     end of the machine dependent reorg phase.  */
  if (!TARGET_CPU_ZARCH)
    cfun->machine->split_branches_pending_p = true;

  do
    {
      frame_size = cfun_frame_layout.frame_size;

      /* Try to predict whether we'll need the base register.  */
      base_used = cfun->machine->split_branches_pending_p
                  || crtl->uses_const_pool
                  || (!DISP_IN_RANGE (frame_size)
                      && !CONST_OK_FOR_K (frame_size));

      /* Decide which register to use as literal pool base.  In small
         leaf functions, try to use an unused call-clobbered register
         as base register to avoid save/restore overhead.  */
      if (!base_used)
        cfun->machine->base_reg = NULL_RTX;
      else if (crtl->is_leaf && !df_regs_ever_live_p (5))
        cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
      else
        cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);

      s390_register_info (clobbered_regs);
      s390_frame_info ();
    }
  while (frame_size != cfun_frame_layout.frame_size);
}
/* Update frame layout.  Recompute actual register save data based on
   current info and update regs_ever_live for the special registers.
   May be called multiple times, but may never cause *more* registers
   to be saved than s390_init_frame_layout allocated room for.  */

static void
s390_update_frame_layout (void)
{
  int clobbered_regs[16];

  s390_register_info (clobbered_regs);

  df_set_regs_ever_live (BASE_REGNUM,
                         clobbered_regs[BASE_REGNUM] ? true : false);
  df_set_regs_ever_live (RETURN_REGNUM,
                         clobbered_regs[RETURN_REGNUM] ? true : false);
  df_set_regs_ever_live (STACK_POINTER_REGNUM,
                         clobbered_regs[STACK_POINTER_REGNUM] ? true : false);

  if (cfun->machine->base_reg)
    df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
}
/* Return true if it is legal to put a value with MODE into REGNO.  */

bool
s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (REGNO_REG_CLASS (regno))
    {
    case FP_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (mode == SImode || mode == DImode)
            return true;

          if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
            return true;
        }
      break;
    case ADDR_REGS:
      if (FRAME_REGNO_P (regno) && mode == Pmode)
        return true;

      /* fallthrough */
    case GENERAL_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (TARGET_ZARCH
              || (mode != TFmode && mode != TCmode && mode != TDmode))
            return true;
        }
      break;
    case CC_REGS:
      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
      break;
    case ACCESS_REGS:
      if (REGNO_PAIR_OK (regno, mode))
        {
          if (mode == SImode || mode == Pmode)
            return true;
        }
      break;
    default:
      return false;
    }

  return false;
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bool
s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
{
   /* Once we've decided upon a register to use as base register, it must
      no longer be used for any other purpose.  */
  if (cfun->machine->base_reg)
    if (REGNO (cfun->machine->base_reg) == old_reg
        || REGNO (cfun->machine->base_reg) == new_reg)
      return false;

  return true;
}
/* Maximum number of registers to represent a value of mode MODE
   in a register of class RCLASS.  */

int
s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
{
  switch (rclass)
    {
    case FP_REGS:
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
      else
        return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
    case ACCESS_REGS:
      return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
    default:
      break;
    }
  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
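
/* Illustrative examples (not part of GCC): with 8-byte FPRs, a TFmode
   value (16 bytes) needs (16 + 8 - 1) / 8 = 2 registers of class
   FP_REGS, while a TCmode value (32 bytes, complex) needs
   2 * ((32 / 2 + 8 - 1) / 8) = 4.  */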
/* Return true if register FROM can be eliminated via register TO.  */

static bool
s390_can_eliminate (const int from, const int to)
{
  /* On zSeries machines, we have not marked the base register as fixed.
     Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
     If a function requires the base register, we say here that this
     elimination cannot be performed.  This will cause reload to free
     up the base register (as if it were fixed).  On the other hand,
     if the current function does *not* require the base register, we
     say here the elimination succeeds, which in turn allows reload
     to allocate the base register for any other purpose.  */
  if (from == BASE_REGNUM && to == BASE_REGNUM)
    {
      if (TARGET_CPU_ZARCH)
        {
          s390_init_frame_layout ();
          return cfun->machine->base_reg == NULL_RTX;
        }

      return false;
    }

  /* Everything else must point into the stack frame.  */
  gcc_assert (to == STACK_POINTER_REGNUM
              || to == HARD_FRAME_POINTER_REGNUM);

  gcc_assert (from == FRAME_POINTER_REGNUM
              || from == ARG_POINTER_REGNUM
              || from == RETURN_ADDRESS_POINTER_REGNUM);

  /* Make sure we actually saved the return address.  */
  if (from == RETURN_ADDRESS_POINTER_REGNUM)
    if (!crtl->calls_eh_return
        && !cfun->stdarg
        && !cfun_frame_layout.save_return_addr_p)
      return false;

  return true;
}
/* Return offset between register FROM and TO initially after prolog.  */

HOST_WIDE_INT
s390_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset;
  int index;

  /* ??? Why are we called for non-eliminable pairs?  */
  if (!s390_can_eliminate (from, to))
    return 0;

  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      offset = (get_frame_size()
                + STACK_POINTER_OFFSET
                + crtl->outgoing_args_size);
      break;

    case ARG_POINTER_REGNUM:
      s390_init_frame_layout ();
      offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
      break;

    case RETURN_ADDRESS_POINTER_REGNUM:
      s390_init_frame_layout ();
      index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
      gcc_assert (index >= 0);
      offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
      offset += index * UNITS_PER_LONG;
      break;

    case BASE_REGNUM:
      offset = 0;
      break;

    default:
      gcc_unreachable ();
    }

  return offset;
}
/* Emit insn to save fpr REGNUM at offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));

  if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
    set_mem_alias_set (addr, get_varargs_alias_set ());
  else
    set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}
/* Emit insn to restore fpr REGNUM from offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
  set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}
/* Return true if REGNO is a global register, but not one
   of the special ones that need to be saved/restored anyway.  */

static inline bool
global_not_special_regno_p (int regno)
{
  return (global_regs[regno]
          /* These registers are special and need to be
             restored in any case.  */
          && !(regno == STACK_POINTER_REGNUM
               || regno == RETURN_REGNUM
               || regno == BASE_REGNUM
               || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
}
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);

  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
        insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      if (!global_not_special_regno_p (first))
        RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }

  insn = gen_store_multiple (addr,
                             gen_rtx_REG (Pmode, first),
                             GEN_INT (last - first + 1));

  if (first <= 6 && cfun->stdarg)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);

        if (first + i <= 6)
          set_mem_alias_set (mem, get_varargs_alias_set ());
      }

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply clear the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6 && !global_not_special_regno_p (first))
    {
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) == SET
            && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
                                                                     0, i)))))
          RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      int start;

      for (start = first >= 6 ? first : 6; start <= last; start++)
        if (!global_not_special_regno_p (start))
          break;

      if (start > last)
        return insn;

      addr = plus_constant (Pmode, base,
                            offset + (start - first) * UNITS_PER_LONG);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
                                 gen_rtx_REG (Pmode, start),
                                 GEN_INT (last - start + 1));
      note = PATTERN (note);

      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);

      for (i = 0; i < XVECLEN (note, 0); i++)
        if (GET_CODE (XVECEXP (note, 0, i)) == SET
            && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
                                                                     0, i)))))
          RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
/* Generate insn to restore registers FIRST to LAST from
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
restore_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
      else
        insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }

  insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
                            addr,
                            GEN_INT (last - first + 1));
  RTX_FRAME_RELATED_P (insn) = 1;
  return insn;
}
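
/* Illustrative example (not part of GCC): the store-multiple and
   load-multiple patterns generated above correspond to the STM/LM
   (31 bit) and STMG/LMG (64 bit) instructions.  A typical 64-bit
   prologue/epilogue pair looks like

       stmg  %r14,%r15,112(%r15)   # save_gprs (sp, 112, 14, 15)
       aghi  %r15,-160             # allocate frame
       ...
       lmg   %r14,%r15,272(%r15)   # restore_gprs (sp, 272, 14, 15)

   The offsets shown assume the standard 64-bit frame layout
   (gprs_offset = 14 * 8 = 112, frame size 160) and are illustrative
   only.  */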
/* Return insn sequence to load the GOT register.  */

static GTY(()) rtx got_symbol;

rtx
s390_load_got (void)
{
  rtx insns;

  /* We cannot use pic_offset_table_rtx here since we use this
     function also for non-pic if __tls_get_offset is called and in
     that case PIC_OFFSET_TABLE_REGNUM as well as pic_offset_table_rtx
     aren't usable.  */
  rtx got_rtx = gen_rtx_REG (Pmode, 12);

  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  start_sequence ();

  if (TARGET_CPU_ZARCH)
    {
      emit_move_insn (got_rtx, got_symbol);
    }
  else
    {
      rtx offset;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
                               UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      emit_move_insn (got_rtx, offset);

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
                               UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, got_rtx, offset);

      emit_move_insn (got_rtx, offset);
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
s390_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
                           gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;
  int offset;
  int next_fpr = 0;

  /* Complete frame layout.  */

  s390_update_frame_layout ();

  /* Annotate all constant pool references to let the scheduler know
     they implicitly use the base register.  */

  push_topmost_sequence ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        annotate_constant_pool_refs (&PATTERN (insn));
        df_insn_rescan (insn);
      }

  pop_topmost_sequence ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use the register 1.  */

  if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
      && !crtl->is_leaf
      && !TARGET_TPF_PROFILING)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */
  if (cfun_frame_layout.first_save_gpr != -1)
    {
      insn = save_gprs (stack_pointer_rtx,
                        cfun_frame_layout.gprs_offset +
                        UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
                                          - cfun_frame_layout.first_save_gpr_slot),
                        cfun_frame_layout.first_save_gpr,
                        cfun_frame_layout.last_save_gpr);
      emit_insn (insn);
    }

  /* Dummy insn to mark literal pool slot.  */

  if (cfun->machine->base_reg)
    emit_insn (gen_main_pool (cfun->machine->base_reg));

  offset = cfun_frame_layout.f0_offset;

  /* Save f0 and f2.  */
  for (i = 0; i < 2; i++)
    {
      if (cfun_fpr_bit_p (i))
        {
          save_fpr (stack_pointer_rtx, offset, i + 16);
          offset += 8;
        }
      else if (!TARGET_PACKED_STACK)
        offset += 8;
    }

  /* Save f4 and f6.  */
  offset = cfun_frame_layout.f4_offset;
  for (i = 2; i < 4; i++)
    {
      if (cfun_fpr_bit_p (i))
        {
          insn = save_fpr (stack_pointer_rtx, offset, i + 16);
          offset += 8;

          /* If f4 and f6 are call clobbered they are saved due to stdargs and
             therefore are not frame related.  */
          if (!call_really_used_regs[i + 16])
            RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (!TARGET_PACKED_STACK)
        offset += 8;
    }

  if (TARGET_PACKED_STACK
      && cfun_save_high_fprs_p
      && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
    {
      offset = (cfun_frame_layout.f8_offset
                + (cfun_frame_layout.high_fprs - 1) * 8);

      for (i = 15; i > 7 && offset >= 0; i--)
        if (cfun_fpr_bit_p (i))
          {
            insn = save_fpr (stack_pointer_rtx, offset, i + 16);

            RTX_FRAME_RELATED_P (insn) = 1;
            offset -= 8;
          }
      if (offset >= cfun_frame_layout.f8_offset)
        next_fpr = i + 16;
    }

  if (!TARGET_PACKED_STACK)
    next_fpr = cfun_save_high_fprs_p ? 31 : 0;

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun_frame_layout.frame_size;

  /* Decrement stack pointer.  */

  if (cfun_frame_layout.frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      rtx real_frame_off;

      if (s390_stack_size)
        {
          HOST_WIDE_INT stack_guard;

          if (s390_stack_guard)
            stack_guard = s390_stack_guard;
          else
            {
              /* If no value for stack guard is provided the smallest power of 2
                 larger than the current frame size is chosen.  */
              stack_guard = 1;
              while (stack_guard < cfun_frame_layout.frame_size)
                stack_guard <<= 1;
            }

          if (cfun_frame_layout.frame_size >= s390_stack_size)
            {
              warning (0, "frame size of function %qs is %wd"
                       " bytes exceeding user provided stack limit of "
                       "%d bytes.  "
                       "An unconditional trap is added.",
                       current_function_name(), cfun_frame_layout.frame_size,
                       s390_stack_size);
              emit_insn (gen_trap ());
            }
          else
            {
              /* stack_guard has to be smaller than s390_stack_size.
                 Otherwise we would emit an AND with zero which would
                 not match the test under mask pattern.  */
              if (stack_guard >= s390_stack_size)
                {
                  warning (0, "frame size of function %qs is %wd"
                           " bytes which is more than half the stack size. "
                           "The dynamic check would not be reliable. "
                           "No check emitted for this function.",
                           current_function_name(),
                           cfun_frame_layout.frame_size);
                }
              else
                {
                  HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
                                                    & ~(stack_guard - 1));

                  rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
                                       GEN_INT (stack_check_mask));
                  if (TARGET_64BIT)
                    emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
                                                         t, const0_rtx),
                                             t, const0_rtx, const0_rtx));
                  else
                    emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
                                                         t, const0_rtx),
                                             t, const0_rtx, const0_rtx));
                }
            }
        }

      if (s390_warn_framesize > 0
          && cfun_frame_layout.frame_size >= s390_warn_framesize)
        warning (0, "frame size of %qs is %wd bytes",
                 current_function_name (), cfun_frame_layout.frame_size);

      if (s390_warn_dynamicstack_p && cfun->calls_alloca)
        warning (0, "%qs uses dynamic stack allocation", current_function_name ());

      /* Save incoming stack pointer into temp reg.  */
      if (TARGET_BACKCHAIN || next_fpr)
        insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
        {
          insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                            frame_off));
          insn = emit_insn (insn);
        }
      else
        {
          if (!CONST_OK_FOR_K (INTVAL (frame_off)))
            frame_off = force_const_mem (Pmode, frame_off);

          insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
          annotate_constant_pool_refs (&PATTERN (insn));
        }

      RTX_FRAME_RELATED_P (insn) = 1;
      real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                                               real_frame_off)));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
        {
          if (cfun_frame_layout.backchain_offset)
            addr = gen_rtx_MEM (Pmode,
                                plus_constant (Pmode, stack_pointer_rtx,
                                  cfun_frame_layout.backchain_offset));
          else
            addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
          set_mem_alias_set (addr, get_frame_alias_set ());
          insn = emit_insn (gen_move_insn (addr, temp_reg));
        }

      /* If we support non-call exceptions (e.g. for Java),
         we need to make sure the backchain pointer is set up
         before any possibly trapping memory access.  */
      if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
        {
          addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
          emit_clobber (addr);
        }
    }

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (cfun_save_high_fprs_p && next_fpr)
    {
      /* If the stack might be accessed through a different register
         we have to make sure that the stack pointer decrement is not
         moved below the use of the stack slots.  */
      s390_emit_stack_tie ();

      insn = emit_insn (gen_add2_insn (temp_reg,
                                       GEN_INT (cfun_frame_layout.f8_offset)));

      offset = 0;

      for (i = 24; i <= next_fpr; i++)
        if (cfun_fpr_bit_p (i - 16))
          {
            rtx addr = plus_constant (Pmode, stack_pointer_rtx,
                                      cfun_frame_layout.frame_size
                                      + cfun_frame_layout.f8_offset
                                      + offset);

            insn = save_fpr (temp_reg, offset, i);
            offset += 8;
            RTX_FRAME_RELATED_P (insn) = 1;
            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
                          gen_rtx_SET (VOIDmode,
                                       gen_rtx_MEM (DFmode, addr),
                                       gen_rtx_REG (DFmode, i)));
          }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    {
      rtx insns = s390_load_got ();

      for (insn = insns; insn; insn = NEXT_INSN (insn))
        annotate_constant_pool_refs (&PATTERN (insn));

      emit_insn (insns);
    }

  if (TARGET_TPF_PROFILING)
    {
      /* Generate a BAS instruction to serve as a function
         entry intercept to facilitate the use of tracing
         algorithms located at the branch target.  */
      emit_insn (gen_prologue_tpf ());

      /* Emit a blockage here so that all code
         lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
/* Expand the epilogue into a bunch of separate insns.  */

void
s390_emit_epilogue (bool sibcall)
{
  rtx frame_pointer, return_reg, cfa_restores = NULL_RTX;
  int area_bottom, area_top, offset = 0;
  int next_offset;
  rtvec p;
  int i;

  if (TARGET_TPF_PROFILING)
    {
      /* Generate a BAS instruction to serve as a function
         entry intercept to facilitate the use of tracing
         algorithms located at the branch target.  */

      /* Emit a blockage here so that all code
         lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());

      emit_insn (gen_epilogue_tpf ());
    }

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = (frame_pointer_needed
                   ? hard_frame_pointer_rtx : stack_pointer_rtx);

  s390_frame_area (&area_bottom, &area_top);

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
           && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
    {
      /* Area is in range.  */
      offset = cfun_frame_layout.frame_size;
    }
  else
    {
      rtx insn, frame_off, cfa;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);

      cfa = gen_rtx_SET (VOIDmode, frame_pointer,
                         gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
      if (DISP_IN_RANGE (INTVAL (frame_off)))
        {
          insn = gen_rtx_SET (VOIDmode, frame_pointer,
                              gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
          insn = emit_insn (insn);
        }
      else
        {
          if (!CONST_OK_FOR_K (INTVAL (frame_off)))
            frame_off = force_const_mem (Pmode, frame_off);

          insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
          annotate_constant_pool_refs (&PATTERN (insn));
        }
      add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      if (cfun_save_high_fprs_p)
        {
          next_offset = cfun_frame_layout.f8_offset;
          for (i = 24; i < 32; i++)
            {
              if (cfun_fpr_bit_p (i - 16))
                {
                  restore_fpr (frame_pointer,
                               offset + next_offset, i);
                  cfa_restores
                    = alloc_reg_note (REG_CFA_RESTORE,
                                      gen_rtx_REG (DFmode, i), cfa_restores);
                  next_offset += 8;
                }
            }
        }
    }
  else
    {
      next_offset = cfun_frame_layout.f4_offset;
      for (i = 18; i < 20; i++)
        {
          if (cfun_fpr_bit_p (i - 16))
            {
              restore_fpr (frame_pointer,
                           offset + next_offset, i);
              cfa_restores
                = alloc_reg_note (REG_CFA_RESTORE,
                                  gen_rtx_REG (DFmode, i), cfa_restores);
              next_offset += 8;
            }
          else if (!TARGET_PACKED_STACK)
            next_offset += 8;
        }
    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      rtx insn, addr;
      int i;

      /* Check for global register and save them
         to stack location from where they get restored.  */

      for (i = cfun_frame_layout.first_restore_gpr;
           i <= cfun_frame_layout.last_restore_gpr;
           i++)
        {
          if (global_not_special_regno_p (i))
            {
              addr = plus_constant (Pmode, frame_pointer,
                                    offset + cfun_frame_layout.gprs_offset
                                    + (i - cfun_frame_layout.first_save_gpr_slot)
                                    * UNITS_PER_LONG);
              addr = gen_rtx_MEM (Pmode, addr);
              set_mem_alias_set (addr, get_frame_alias_set ());
              emit_move_insn (addr, gen_rtx_REG (Pmode, i));
            }
          else
            cfa_restores
              = alloc_reg_note (REG_CFA_RESTORE,
                                gen_rtx_REG (Pmode, i), cfa_restores);
        }

      if (! sibcall)
        {
          /* Fetch return address from stack before load multiple,
             this will do good for scheduling.  */

          if (cfun_frame_layout.save_return_addr_p
              || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
                  && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
            {
              int return_regnum = find_unused_clobbered_reg();
              if (!return_regnum)
                return_regnum = 4;
              return_reg = gen_rtx_REG (Pmode, return_regnum);

              addr = plus_constant (Pmode, frame_pointer,
                                    offset + cfun_frame_layout.gprs_offset
                                    + (RETURN_REGNUM
                                       - cfun_frame_layout.first_save_gpr_slot)
                                    * UNITS_PER_LONG);
              addr = gen_rtx_MEM (Pmode, addr);
              set_mem_alias_set (addr, get_frame_alias_set ());
              emit_move_insn (return_reg, addr);
            }
        }

      insn = restore_gprs (frame_pointer,
                           offset + cfun_frame_layout.gprs_offset
                           + (cfun_frame_layout.first_restore_gpr
                              - cfun_frame_layout.first_save_gpr_slot)
                           * UNITS_PER_LONG,
                           cfun_frame_layout.first_restore_gpr,
                           cfun_frame_layout.last_restore_gpr);
      insn = emit_insn (insn);
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA,
                    plus_constant (Pmode, stack_pointer_rtx,
                                   STACK_POINTER_OFFSET));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (! sibcall)
    {
      /* Return to caller.  */

      p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = ret_rtx;
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
/* Return the size in bytes of a function argument of
   type TYPE and/or mode MODE.  At least one of TYPE or
   MODE must be specified.  */

static int
s390_function_arg_size (enum machine_mode mode, const_tree type)
{
  if (type)
    return int_size_in_bytes (type);

  /* No type info available for some library calls ...  */
  if (mode != BLKmode)
    return GET_MODE_SIZE (mode);

  /* If we have neither type nor mode, abort.  */
  gcc_unreachable ();
}
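
/* Illustrative examples (not part of GCC): with type info,
   s390_function_arg_size consults the tree, so a struct of 12 chars
   yields int_size_in_bytes = 12; for a libcall without type info,
   s390_function_arg_size (DImode, NULL) falls back to
   GET_MODE_SIZE (DImode) = 8.  */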
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in a floating-point register, if available.  */

static bool
s390_function_arg_float (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* Soft-float changes the ABI: no floating-point registers are used.  */
  if (TARGET_SOFT_FLOAT)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;

  /* The ABI says that record types with a single member are treated
     just like that member would be.  */
  while (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field, single = NULL_TREE;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (single == NULL_TREE)
            single = TREE_TYPE (field);
          else
            return false;
        }

      if (single == NULL_TREE)
        return false;
      else
        type = single;
    }

  return TREE_CODE (type) == REAL_TYPE;
}
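
/* Illustrative examples (not part of GCC) of the single-member record
   rule above; the type names are hypothetical:

       struct s1 { double d; };         // passed like a double: FPR
       struct s2 { struct s1 s; };      // still a single member: FPR
       struct s3 { double d; int i; };  // two members: not an FPR arg
*/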
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in an integer register, or a pair of integer
   registers, if available.  */

static bool
s390_function_arg_integer (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return GET_MODE_CLASS (mode) == MODE_INT
           || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));

  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == NULLPTR_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
    return true;

  /* We also accept structs of size 1, 2, 4, 8 that are not
     passed in floating-point registers.  */
  if (AGGREGATE_TYPE_P (type)
      && exact_log2 (size) >= 0
      && !s390_function_arg_float (mode, type))
    return true;

  return false;
}
/* Return 1 if a function argument of type TYPE and mode MODE
   is to be passed by reference.  The ABI specifies that only
   structures of size 1, 2, 4, or 8 bytes are passed by value,
   all other structures (and complex numbers) are passed by
   reference.  */

static bool
s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
                        enum machine_mode mode, const_tree type,
                        bool named ATTRIBUTE_UNUSED)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return true;

  if (type)
    {
      if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
        return true;

      if (TREE_CODE (type) == COMPLEX_TYPE
          || TREE_CODE (type) == VECTOR_TYPE)
        return true;
    }

  return false;
}
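
/* Illustrative examples (not part of GCC) of the rules above; the type
   names are hypothetical:

       struct a { char c[3]; };   // size 3, not a power of two: by reference
       struct b { char c[4]; };   // size 4: by value
       struct c { char c[16]; };  // size > 8: by reference
       _Complex double z;         // complex: always by reference
*/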
/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.)  The boolean NAMED specifies whether the
   argument is a named argument (as opposed to an unnamed argument
   matching an ellipsis).  */

static void
s390_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                           const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    {
      cum->fprs += 1;
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      cum->gprs += ((size + UNITS_PER_LONG - 1) / UNITS_PER_LONG);
    }
  else
    gcc_unreachable ();
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On S/390, we use general purpose registers 2 through 6 to
   pass integer, pointer, and certain structure arguments, and
   floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
   to pass floating point arguments.  All remaining arguments
   are pushed to the stack.  */

static rtx
s390_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    {
      if (cum->fprs + 1 > FP_ARG_NUM_REG)
        return 0;
      else
        return gen_rtx_REG (mode, cum->fprs + 16);
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      int n_gprs = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
        return 0;
      else if (n_gprs == 1 || UNITS_PER_WORD == UNITS_PER_LONG)
        return gen_rtx_REG (mode, cum->gprs + 2);
      else if (n_gprs == 2)
        {
          rtvec p = rtvec_alloc (2);

          RTVEC_ELT (p, 0)
            = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 2),
                                 const0_rtx);
          RTVEC_ELT (p, 1)
            = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 3),
                                 GEN_INT (4));

          return gen_rtx_PARALLEL (mode, p);
        }
    }

  /* After the real arguments, expand_call calls us once again
     with a void_type_node type.  Whatever we return here is
     passed as operand 2 to the call expanders.

     We don't need this feature ...  */
  else if (type == void_type_node)
    return const0_rtx;

  gcc_unreachable ();
}
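
/* Illustrative example (not part of GCC): register assignment for a
   call f (1, 2.0, &x, 3) under the scheme above, starting from an
   empty CUMULATIVE_ARGS:

       1    -> r2  (integer, cum->gprs was 0)
       2.0  -> f0  (float,   cum->fprs was 0)
       &x   -> r3  (pointer, cum->gprs was 1)
       3    -> r4  (integer, cum->gprs was 2)
*/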
/* Return true if return values of type TYPE should be returned
   in a memory buffer whose address is passed by the caller as
   hidden first argument.  */

static bool
s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
{
  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || TREE_CODE (type) == REAL_TYPE)
    return int_size_in_bytes (type) > 8;

  /* Aggregates and similar constructs are always returned
     in memory.  */
  if (AGGREGATE_TYPE_P (type)
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    return true;

  /* ??? We get called on all sorts of random stuff from
     aggregate_value_p.  We can't abort, but it's not clear
     what's safe to return.  Pretend it's a struct I guess.  */
  return true;
}
/* Function arguments and return values are promoted to word size.  */

static enum machine_mode
s390_promote_function_mode (const_tree type, enum machine_mode mode,
                            int *punsignedp,
                            const_tree fntype ATTRIBUTE_UNUSED,
                            int for_return ATTRIBUTE_UNUSED)
{
  if (INTEGRAL_MODE_P (mode)
      && GET_MODE_SIZE (mode) < UNITS_PER_LONG)
    {
      if (type != NULL_TREE && POINTER_TYPE_P (type))
        *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return Pmode;
    }

  return mode;
}
/* Define where to return a (scalar) value of type RET_TYPE.
   If RET_TYPE is null, define where to return a (scalar)
   value of mode MODE from a libcall.  */

static rtx
s390_function_and_libcall_value (enum machine_mode mode,
                                 const_tree ret_type,
                                 const_tree fntype_or_decl,
                                 bool outgoing ATTRIBUTE_UNUSED)
{
  /* For normal functions perform the promotion as
     promote_function_mode would do.  */
  if (ret_type)
    {
      int unsignedp = TYPE_UNSIGNED (ret_type);
      mode = promote_function_mode (ret_type, mode, &unsignedp,
                                    fntype_or_decl, 1);
    }

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
  gcc_assert (GET_MODE_SIZE (mode) <= 8);

  if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
    return gen_rtx_REG (mode, 16);
  else if (GET_MODE_SIZE (mode) <= UNITS_PER_LONG
           || UNITS_PER_LONG == UNITS_PER_WORD)
    return gen_rtx_REG (mode, 2);
  else if (GET_MODE_SIZE (mode) == 2 * UNITS_PER_LONG)
    {
      /* This case is triggered when returning a 64 bit value with
         -m31 -mzarch.  Although the value would fit into a single
         register it has to be forced into a 32 bit register pair in
         order to match the ABI.  */
      rtvec p = rtvec_alloc (2);

      RTVEC_ELT (p, 0)
        = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
      RTVEC_ELT (p, 1)
        = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 3), GEN_INT (4));

      return gen_rtx_PARALLEL (mode, p);
    }

  gcc_unreachable ();
}
/* Define where to return a scalar return value of type RET_TYPE.  */

static rtx
s390_function_value (const_tree ret_type, const_tree fn_decl_or_type,
                     bool outgoing)
{
  return s390_function_and_libcall_value (TYPE_MODE (ret_type), ret_type,
                                          fn_decl_or_type, outgoing);
}
/* Define where to return a scalar libcall return value of mode
   MODE.  */

static rtx
s390_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return s390_function_and_libcall_value (mode, NULL_TREE,
                                          NULL_TREE, true);
}
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;
        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

static tree
s390_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl =
    build_decl (BUILTINS_LOCATION,
                TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (BUILTINS_LOCATION,
                      FIELD_DECL, get_identifier ("__gpr"),
                      long_integer_type_node);
  f_fpr = build_decl (BUILTINS_LOCATION,
                      FIELD_DECL, get_identifier ("__fpr"),
                      long_integer_type_node);
  f_ovf = build_decl (BUILTINS_LOCATION,
                      FIELD_DECL, get_identifier ("__overflow_arg_area"),
                      ptr_type_node);
  f_sav = build_decl (BUILTINS_LOCATION,
                      FIELD_DECL, get_identifier ("__reg_save_area"),
                      ptr_type_node);

  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  DECL_CHAIN (f_gpr) = f_fpr;
  DECL_CHAIN (f_fpr) = f_ovf;
  DECL_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     crtl->args.info:
       holds number of gprs and fprs used for named arguments.
     crtl->args.arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

static void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_simple_mem_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = crtl->args.info.gprs;
  n_fpr = crtl->args.info.fprs;

  if (cfun->va_list_gpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
                  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
                  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
      || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
    {
      t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

      off = INTVAL (crtl->args.arg_offset_rtx);
      off = off < 0 ? 0 : off;
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
                 (int)n_gpr, (int)n_fpr, off);

      t = fold_build_pointer_plus_hwi (t, off);

      t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the register save area.  */
  if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
      || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
    {
      t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
      t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);

      t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
}
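
/* Illustrative sketch (not part of GCC): the effect of the expansion
   above, written as plain C over the __va_list_tag layout.  All
   lower-case parameter names below are hypothetical stand-ins for
   values the compiler knows at expand time.  */
#if 0
struct example_va_list_tag
{
  long __gpr;
  long __fpr;
  void *__overflow_arg_area;
  void *__reg_save_area;
};

static void
example_va_start (struct example_va_list_tag *ap,
                  long n_named_gprs, long n_named_fprs,
                  char *incoming_args, long first_anon_offset,
                  void *reg_save_base)
{
  ap->__gpr = n_named_gprs;      /* crtl->args.info.gprs */
  ap->__fpr = n_named_fprs;      /* crtl->args.info.fprs */
  ap->__overflow_arg_area = incoming_args + first_anon_offset;
  ap->__reg_save_area = reg_save_base;
}
#endif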
9077 /* Implement va_arg by updating the va_list structure
9078 VALIST as required to retrieve an argument of type
9079 TYPE, and returning that argument.
9081 Generates code equivalent to:
9083 if (integral value) {
9084 if (size <= 4 && args.gpr < 5 ||
9085 size > 4 && args.gpr < 4 )
9086 ret = args.reg_save_area[args.gpr+8]
9088 ret = *args.overflow_arg_area++;
9089 } else if (float value) {
9091 ret = args.reg_save_area[args.fpr+64]
9093 ret = *args.overflow_arg_area++;
9094 } else if (aggregate value) {
9096 ret = *args.reg_save_area[args.gpr]
9098 ret = **args.overflow_arg_area++;
9102 s390_gimplify_va_arg (tree valist
, tree type
, gimple_seq
*pre_p
,
9103 gimple_seq
*post_p ATTRIBUTE_UNUSED
)
9105 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
9106 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
9107 int indirect_p
, size
, n_reg
, sav_ofs
, sav_scale
, max_reg
;
9108 tree lab_false
, lab_over
, addr
;
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* The tree for args* cannot be shared between gpr/fpr and ovf since
     both appear on a lhs.  */
  valist = unshare_expr (valist);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  size = int_size_in_bytes (type);

  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
	 will be added by s390_frame_info because for va_args always an even
	 number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;
      sav_scale = UNITS_PER_LONG;
      size = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_LONG;
      sav_scale = 8;
      max_reg = FP_ARG_NUM_REG - n_reg;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
	 will be added by s390_frame_info because for va_args always an even
	 number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;

      if (size < UNITS_PER_LONG)
	sav_ofs += UNITS_PER_LONG - size;

      sav_scale = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }
  /* Pull the value out of the saved registers ...  */

  lab_false = create_artificial_label (UNKNOWN_LOCATION);
  lab_over = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, "addr");

  t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
  t = build2 (GT_EXPR, boolean_type_node, reg, t);
  u = build1 (GOTO_EXPR, void_type_node, lab_false);
  t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
  gimplify_and_add (t, pre_p);
  t = fold_build_pointer_plus_hwi (sav, sav_ofs);
  u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
	      fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
  t = fold_build_pointer_plus (t, u);

  gimplify_assign (addr, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
  /* ... Otherwise out of the overflow area.  */

  t = ovf;
  if (size < UNITS_PER_LONG)
    t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);

  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  gimplify_assign (addr, t, pre_p);

  t = fold_build_pointer_plus_hwi (t, size);
  gimplify_assign (ovf, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
  /* Increment register save count.  */

  u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	      fold_convert (TREE_TYPE (reg), size_int (n_reg)));
  gimplify_and_add (u, pre_p);
  if (indirect_p)
    {
      t = build_pointer_type_for_mode (build_pointer_type (type),
				       ptr_mode, true);
      addr = fold_convert (t, addr);
      addr = build_va_arg_indirect_ref (addr);
    }
  else
    {
      t = build_pointer_type_for_mode (type, ptr_mode, true);
      addr = fold_convert (t, addr);
    }

  return build_va_arg_indirect_ref (addr);
}
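/* Illustrative sketch, not part of GCC: a variadic callee such as the
   one below exercises all three classification paths above (integral
   via the GPR save area, float via the FPR save area, and a large
   aggregate passed by reference through a GPR).  */
#if 0
#include <stdarg.h>

struct big { char buf[64]; };	/* forces the pass-by-reference path */

static int
consume (int n, ...)
{
  va_list ap;
  int acc;
  struct big b;

  va_start (ap, n);
  acc = va_arg (ap, int);		/* integral: GPR or overflow area */
  acc += (int) va_arg (ap, double);	/* float: FPR or overflow area */
  b = va_arg (ap, struct big);		/* aggregate: loaded indirectly */
  acc += b.buf[0];
  va_end (ap);
  return acc;
}
#endif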
/* Builtins.  */

enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

static enum insn_code const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

static enum insn_code const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};
static void
s390_init_builtins (void)
{
  tree ftype;

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_thread_pointer", ftype,
			S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
			NULL, NULL_TREE);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_set_thread_pointer", ftype,
			S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
			NULL, NULL_TREE);
}
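/* Illustrative sketch, not part of GCC: user code reaches the two
   builtins registered above directly by name.  */
#if 0
void *
get_tp (void)
{
  return __builtin_thread_pointer ();
}

void
set_tp (void *tp)
{
  __builtin_set_thread_pointer (tp);
}
#endif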
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */
static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		     enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2

  enum insn_code const *code_for_builtin =
    TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
  tree arg;
  call_expr_arg_iterator iter;

  if (fcode >= S390_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = code_for_builtin[fcode];
  if (icode == 0)
    internal_error ("bad builtin fcode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
	return NULL_RTX;
      if (arity > MAX_ARGS)
	return NULL_RTX;

      insn_op = &insn_data[icode].operand[arity + nonvoid];

      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[arity], insn_op->mode))
	op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
      arity++;
    }
  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
	pat = GEN_FCN (icode) (target, op[0]);
      else
	pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
/* Output assembly code for the trampoline template to
   stdio stream FILE.

   On S/390, we use gpr 1 internally in the trampoline code;
   gpr 0 is used to hold the static chain.  */

static void
s390_asm_trampoline_template (FILE *file)
{
  rtx op[2];

  op[0] = gen_rtx_REG (Pmode, 0);
  op[1] = gen_rtx_REG (Pmode, 1);

  if (TARGET_64BIT)
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lmg\t%0,%1,14(%1)", op);  /* 6 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
    }
  else
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lm\t%0,%1,6(%1)", op);    /* 4 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
    }
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

static void
s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (2 * UNITS_PER_LONG), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, Pmode, 2 * UNITS_PER_LONG);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, Pmode, 3 * UNITS_PER_LONG);
  emit_move_insn (mem, fnaddr);
}
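/* Illustrative sketch, not part of GCC: after s390_trampoline_init
   runs, the 64-bit trampoline block looks like

     offset  0:  basr %r1,0               (2 bytes; %r1 := offset 2)
                 lmg  %r0,%r1,14(%r1)     (6 bytes; loads offsets 16/24)
                 br   %r1                 (2 bytes)
                 <padding>
     offset 16:  static chain value       (-> %r0)
     offset 24:  target function address  (-> %r1)

   since 14(%r1) with %r1 == trampoline+2 addresses trampoline+16,
   i.e. 2 * UNITS_PER_LONG.  The 31-bit variant uses lm with
   displacement 6 and 4-byte slots at offsets 8 and 12.  */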
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (Pmode, op[1], UNITS_PER_LONG));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (TREE_CODE (decl) == VAR_DECL)
    {
      /* If a variable has a forced alignment to < 2 bytes, mark it
	 with SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL
	 operand.  */
      if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
	SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
      if (!DECL_SIZE (decl)
	  || !DECL_ALIGN (decl)
	  || !host_integerp (DECL_SIZE (decl), 0)
	  || (DECL_ALIGN (decl) <= 64
	      && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
	SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
    }

  /* Literal pool references don't have a decl so they are handled
     differently here.  We rely on the information in the MEM_ALIGN
     entry to decide upon natural alignment.  */
  if (MEM_P (rtl)
      && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
      && (MEM_ALIGN (rtl) == 0
	  || GET_MODE_BITSIZE (GET_MODE (rtl)) == 0
	  || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
}
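/* Illustrative sketch, not part of GCC: declarations that would set
   the two flags above.  */
#if 0
/* User-forced alignment below 2 bytes: SYMBOL_FLAG_ALIGN1 is set, so
   the address is never used as a LARL operand (LARL only produces
   even addresses).  */
char odd_aligned __attribute__ ((aligned (1)));

/* Size (3 bytes) differs from the (<= 64 bit) alignment, so the
   symbol is marked SYMBOL_FLAG_NOT_NATURALLY_ALIGNED.  */
char triple[3];
#endif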
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
		      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
		      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Make sure unwind info is emitted for the thunk if needed.  */
  final_start_function (emit_barrier (), file, 1);

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
			      TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
	   && !CONST_OK_FOR_K (delta)
	   && !CONST_OK_FOR_Os (delta))
	  || (!DISP_IN_RANGE (vcall_offset)
	      && !CONST_OK_FOR_K (vcall_offset)
	      && !CONST_OK_FOR_Os (vcall_offset)))
	{
	  op[5] = gen_label_rtx ();
	  output_asm_insn ("larl\t%4,%5", op);
	}

      /* Add DELTA to this pointer.  */
      if (delta)
	{
	  if (CONST_OK_FOR_J (delta))
	    output_asm_insn ("la\t%1,%2(%1)", op);
	  else if (DISP_IN_RANGE (delta))
	    output_asm_insn ("lay\t%1,%2(%1)", op);
	  else if (CONST_OK_FOR_K (delta))
	    output_asm_insn ("aghi\t%1,%2", op);
	  else if (CONST_OK_FOR_Os (delta))
	    output_asm_insn ("agfi\t%1,%2", op);
	  else
	    {
	      op[6] = gen_label_rtx ();
	      output_asm_insn ("agf\t%1,%6-%5(%4)", op);
	    }
	}

      /* Perform vcall adjustment.  */
      if (vcall_offset)
	{
	  if (DISP_IN_RANGE (vcall_offset))
	    {
	      output_asm_insn ("lg\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,%3(%4)", op);
	    }
	  else if (CONST_OK_FOR_K (vcall_offset))
	    {
	      output_asm_insn ("lghi\t%4,%3", op);
	      output_asm_insn ("ag\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,0(%4)", op);
	    }
	  else if (CONST_OK_FOR_Os (vcall_offset))
	    {
	      output_asm_insn ("lgfi\t%4,%3", op);
	      output_asm_insn ("ag\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,0(%4)", op);
	    }
	  else
	    {
	      op[7] = gen_label_rtx ();
	      output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
	      output_asm_insn ("ag\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,0(%4)", op);
	    }
	}

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
	{
	  output_asm_insn (".align\t4", op);
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[5]));
	}
      if (op[6])
	{
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[6]));
	  output_asm_insn (".long\t%2", op);
	}
      if (op[7])
	{
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[7]));
	  output_asm_insn (".long\t%3", op);
	}
    }
  else
    {
      /* Setup base pointer if required.  */
      if (!vcall_offset
	  || (!DISP_IN_RANGE (delta)
	      && !CONST_OK_FOR_K (delta)
	      && !CONST_OK_FOR_Os (delta))
	  || (!DISP_IN_RANGE (delta)
	      && !CONST_OK_FOR_K (vcall_offset)
	      && !CONST_OK_FOR_Os (vcall_offset)))
	{
	  op[5] = gen_label_rtx ();
	  output_asm_insn ("basr\t%4,0", op);
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[5]));
	}

      /* Add DELTA to this pointer.  */
      if (delta)
	{
	  if (CONST_OK_FOR_J (delta))
	    output_asm_insn ("la\t%1,%2(%1)", op);
	  else if (DISP_IN_RANGE (delta))
	    output_asm_insn ("lay\t%1,%2(%1)", op);
	  else if (CONST_OK_FOR_K (delta))
	    output_asm_insn ("ahi\t%1,%2", op);
	  else if (CONST_OK_FOR_Os (delta))
	    output_asm_insn ("afi\t%1,%2", op);
	  else
	    {
	      op[6] = gen_label_rtx ();
	      output_asm_insn ("a\t%1,%6-%5(%4)", op);
	    }
	}

      /* Perform vcall adjustment.  */
      if (vcall_offset)
	{
	  if (CONST_OK_FOR_J (vcall_offset))
	    {
	      output_asm_insn ("l\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,%3(%4)", op);
	    }
	  else if (DISP_IN_RANGE (vcall_offset))
	    {
	      output_asm_insn ("l\t%4,0(%1)", op);
	      output_asm_insn ("ay\t%1,%3(%4)", op);
	    }
	  else if (CONST_OK_FOR_K (vcall_offset))
	    {
	      output_asm_insn ("lhi\t%4,%3", op);
	      output_asm_insn ("a\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,0(%4)", op);
	    }
	  else if (CONST_OK_FOR_Os (vcall_offset))
	    {
	      output_asm_insn ("iilf\t%4,%3", op);
	      output_asm_insn ("a\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,0(%4)", op);
	    }
	  else
	    {
	      op[7] = gen_label_rtx ();
	      output_asm_insn ("l\t%4,%7-%5(%4)", op);
	      output_asm_insn ("a\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,0(%4)", op);
	    }

	  /* We had to clobber the base pointer register.
	     Re-setup the base pointer (with a different base).  */
	  op[5] = gen_label_rtx ();
	  output_asm_insn ("basr\t%4,0", op);
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[5]));
	}

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
	output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
	output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
	{
	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
	  output_asm_insn ("l\t%4,%0(%4)", op);
	}
      else if (flag_pic == 2)
	{
	  op[9] = gen_rtx_REG (Pmode, 0);
	  output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
	  output_asm_insn ("ar\t%4,%9", op);
	  output_asm_insn ("l\t%4,0(%4)", op);
	}

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
	output_asm_insn (".long\t%0", op);
      if (nonlocal)
	{
	  op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
	  SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
	}

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
	output_asm_insn (".long\t%0", op);
      else
	output_asm_insn (".long\t%0-%5", op);

      if (op[6])
	{
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[6]));
	  output_asm_insn (".long\t%2", op);
	}
      if (op[7])
	{
	  targetm.asm_out.internal_label (file, "L",
					  CODE_LABEL_NUMBER (op[7]));
	  output_asm_insn (".long\t%3", op);
	}
    }
  final_end_function ();
}
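/* Illustrative sketch, not part of GCC: for a C++ hierarchy such as

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { virtual void g (); };

   a call to g() through a B* pointing into a C object dispatches to a
   thunk emitted by the function above: it adjusts the incoming this
   pointer by DELTA (the offset of the B subobject within C) and jumps
   to C::g, applying a further vtable-supplied adjustment when
   VCALL_OFFSET is nonzero (virtual inheritance).  */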
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
/* Checks whether the given CALL_EXPR would use a caller
   saved register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree call_expr)
{
  CUMULATIVE_ARGS cum_v;
  cumulative_args_t cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg, i;

  INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
  cum = pack_cumulative_args (&cum_v);

  for (i = 0; i < call_expr_nargs (call_expr); i++)
    {
      parameter = CALL_EXPR_ARG (call_expr, i);
      gcc_assert (parameter);

      /* For an undeclared variable passed as parameter we will get
	 an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
	return true;

      type = TREE_TYPE (parameter);
      gcc_assert (type);

      mode = TYPE_MODE (type);
      gcc_assert (mode);

      if (pass_by_reference (&cum_v, mode, type, true))
	{
	  mode = Pmode;
	  type = build_pointer_type (type);
	}

      parm_rtx = s390_function_arg (cum, mode, type, 0);

      s390_function_arg_advance (cum, mode, type, 0);

      if (!parm_rtx)
	continue;

      if (REG_P (parm_rtx))
	{
	  for (reg = 0;
	       reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
	       reg++)
	    if (!call_used_regs[reg + REGNO (parm_rtx)])
	      return true;
	}

      if (GET_CODE (parm_rtx) == PARALLEL)
	{
	  int i;

	  for (i = 0; i < XVECLEN (parm_rtx, 0); i++)
	    {
	      rtx r = XEXP (XVECEXP (parm_rtx, 0, i), 0);

	      gcc_assert (REG_P (r));

	      for (reg = 0;
		   reg < HARD_REGNO_NREGS (REGNO (r), GET_MODE (r));
		   reg++)
		if (!call_used_regs[reg + REGNO (r)])
		  return true;
	    }
	}
    }
  return false;
}
/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but unfortunately
     "caller saved".  This makes functions needing this register for arguments
     not suitable for sibcalls.  */
  return !s390_call_saved_register_used (exp);
}
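/* Illustrative sketch, not part of GCC: with the s390 ABI passing the
   first five integer arguments in %r2-%r6, the tail call below puts
   its fifth argument into the call-saved register %r6, so
   s390_call_saved_register_used returns true and the call is not
   compiled as a sibling call.  */
#if 0
extern long callee (long, long, long, long, long);

long
caller (long a, long b, long c, long d, long e)
{
  return callee (a, b, c, d, e);	/* fifth argument lands in %r6 */
}
#endif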
/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}
/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
		rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
	 replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
	{
	  if (retaddr_reg != NULL_RTX)
	    {
	      addr_location = gen_rtx_UNSPEC (Pmode,
					      gen_rtvec (1, addr_location),
					      UNSPEC_PLT);
	      addr_location = gen_rtx_CONST (Pmode, addr_location);
	      plt_call = true;
	    }
	  else
	    /* For -fpic code the PLT entries might use r12 which is
	       call-saved.  Therefore we cannot do a sibcall when
	       calling directly using a symbol ref.  When reaching
	       this point we decided (in s390_function_ok_for_sibcall)
	       to do a sibcall for a function pointer but one of the
	       optimizers was able to get rid of the function pointer
	       by propagating the symbol ref into the call.  This
	       optimization is illegal for S/390 so we turn the direct
	       call into a indirect call again.  */
	    addr_location = force_reg (Pmode, addr_location);
	}

      /* Unless we can use the bras(l) insn, force the
	 routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
	{
	  if (flag_pic)
	    addr_location = legitimize_pic_address (addr_location, 0);
	  else
	    addr_location = force_reg (Pmode, addr_location);
	}
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
	vec = gen_rtvec (3, call, clobber,
			 gen_rtx_USE (VOIDmode, tls_call));
      else
	vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
	 have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);
      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, 12));
    }
  return insn;
}
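/* Illustrative sketch, not part of GCC: for a normal call with a
   result register and a return-address register the pattern emitted
   above has the shape

     (parallel [(set (reg 2) (call (mem:QI <addr>) (const_int 0)))
		(clobber (reg 14))])

   whereas a sibling call (retaddr_reg == NULL_RTX) omits the clobber
   and, for indirect addresses, first forces the target address into
   SIBCALL_REGNUM.  */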
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */

static void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
	call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
	call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
	call_used_regs[i] = fixed_regs[i] = 1;
    }
}
/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;

void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
			 gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}
/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
	  || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
	      && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);
      if (GET_CODE (insn) != INSN)
	continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
	  && store_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_SRC (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
	  off = INTVAL (offset);

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (cfun_frame_layout.first_save_gpr != -1
	      && (cfun_frame_layout.first_save_gpr < first
		  || cfun_frame_layout.last_save_gpr > last))
	    continue;
	  if (REGNO (base) != STACK_POINTER_REGNUM
	      && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
	    continue;
	  if (first > BASE_REGNUM || last < BASE_REGNUM)
	    continue;

	  if (cfun_frame_layout.first_save_gpr != -1)
	    {
	      new_insn = save_gprs (base,
				    off + (cfun_frame_layout.first_save_gpr
					   - first) * UNITS_PER_LONG,
				    cfun_frame_layout.first_save_gpr,
				    cfun_frame_layout.last_save_gpr);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}

      if (cfun_frame_layout.first_save_gpr == -1
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == REG
	  && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
	      || (!TARGET_CPU_ZARCH
		  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
	  && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
	{
	  set = PATTERN (insn);
	  first = REGNO (SET_SRC (set));
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
	  off = INTVAL (offset);

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (REGNO (base) != STACK_POINTER_REGNUM
	      && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
	    continue;

	  remove_insn (insn);
	  continue;
	}

      if (GET_CODE (PATTERN (insn)) == PARALLEL
	  && load_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_DEST (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
	  off = INTVAL (offset);

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (cfun_frame_layout.first_restore_gpr != -1
	      && (cfun_frame_layout.first_restore_gpr < first
		  || cfun_frame_layout.last_restore_gpr > last))
	    continue;
	  if (REGNO (base) != STACK_POINTER_REGNUM
	      && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
	    continue;
	  if (first > BASE_REGNUM || last < BASE_REGNUM)
	    continue;

	  if (cfun_frame_layout.first_restore_gpr != -1)
	    {
	      new_insn = restore_gprs (base,
				       off + (cfun_frame_layout.first_restore_gpr
					      - first) * UNITS_PER_LONG,
				       cfun_frame_layout.first_restore_gpr,
				       cfun_frame_layout.last_restore_gpr);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}

      if (cfun_frame_layout.first_restore_gpr == -1
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	  && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
	      || (!TARGET_CPU_ZARCH
		  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
	  && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
	{
	  set = PATTERN (insn);
	  first = REGNO (SET_DEST (set));
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
	  off = INTVAL (offset);

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (REGNO (base) != STACK_POINTER_REGNUM
	      && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
	    continue;

	  remove_insn (insn);
	  continue;
	}
    }
}
/* On z10 and later the dynamic branch prediction must see the
   backward jump within a certain window.  If not it falls back to
   the static prediction.  This function rearranges the loop backward
   branch in a way which makes the static prediction always correct.
   The function returns true if it added an instruction.  */

static bool
s390_fix_long_loop_prediction (rtx insn)
{
  rtx set = single_set (insn);
  rtx code_label, label_ref, new_label;
  rtx uncond_jump;
  rtx cur_insn;
  rtx tmp;
  int distance;

  /* This will exclude branch on count and branch on index patterns
     since these are correctly statically predicted.  */
  if (!set
      || SET_DEST (set) != pc_rtx
      || GET_CODE (SET_SRC (set)) != IF_THEN_ELSE)
    return false;

  label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
	       XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));

  gcc_assert (GET_CODE (label_ref) == LABEL_REF);

  code_label = XEXP (label_ref, 0);

  if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
      || INSN_ADDRESSES (INSN_UID (insn)) == -1
      || (INSN_ADDRESSES (INSN_UID (insn))
	  - INSN_ADDRESSES (INSN_UID (code_label)) < PREDICT_DISTANCE))
    return false;

  for (distance = 0, cur_insn = PREV_INSN (insn);
       distance < PREDICT_DISTANCE - 6;
       distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
    if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
      return false;

  new_label = gen_label_rtx ();
  uncond_jump = emit_jump_insn_after (
		  gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_LABEL_REF (VOIDmode, code_label)),
		  insn);
  emit_label_after (new_label, uncond_jump);

  tmp = XEXP (SET_SRC (set), 1);
  XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
  XEXP (SET_SRC (set), 2) = tmp;
  INSN_CODE (insn) = -1;

  XEXP (label_ref, 0) = new_label;
  JUMP_LABEL (insn) = new_label;
  JUMP_LABEL (uncond_jump) = code_label;

  return true;
}
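/* Illustrative sketch, not part of GCC: the rewrite above turns

     Lhead:  ...
	     <conditional branch backward to Lhead>   (outside the
						       prediction window)

   into

     Lhead:  ...
	     <inverted conditional branch to Lnew>
	     <unconditional branch backward to Lhead>
     Lnew:

   so the remaining backward branch is unconditional and therefore
   always predicted correctly by the static predictor.  */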
/* Returns 1 if INSN reads the value of REG for purposes not related
   to addressing of memory, and 0 otherwise.  */

static int
s390_non_addr_reg_read_p (rtx reg, rtx insn)
{
  return reg_referenced_p (reg, PATTERN (insn))
	 && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
}
/* Starting from INSN find_cond_jump looks downwards in the insn
   stream for a single jump insn which is the last user of the
   condition code set in INSN.  */

static rtx
find_cond_jump (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    {
      rtx ite, cc;

      if (LABEL_P (insn))
	break;

      if (!JUMP_P (insn))
	{
	  if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
	    break;
	  continue;
	}

      /* This will be triggered by a return.  */
      if (GET_CODE (PATTERN (insn)) != SET)
	break;

      gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
      ite = SET_SRC (PATTERN (insn));

      if (GET_CODE (ite) != IF_THEN_ELSE)
	break;

      cc = XEXP (XEXP (ite, 0), 0);
      if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
	break;

      if (find_reg_note (insn, REG_DEAD, cc))
	return insn;
      break;
    }

  return NULL_RTX;
}
/* Swap the condition in COND and the operands in OP0 and OP1 so that
   the semantics does not change.  If NULL_RTX is passed as COND the
   function tries to find the conditional jump starting with INSN.  */

static void
s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx insn)
{
  rtx tmp;

  if (cond == NULL_RTX)
    {
      rtx jump = find_cond_jump (NEXT_INSN (insn));
      jump = jump ? single_set (jump) : NULL_RTX;

      if (jump == NULL_RTX)
	return;

      cond = XEXP (XEXP (jump, 1), 0);
    }

  tmp = *op0;
  *op0 = *op1;
  *op1 = tmp;

  PUT_CODE (cond, swap_condition (GET_CODE (cond)));
}
/* On z10, instructions of the compare-and-branch family have the
   property to access the register occurring as second operand with
   its bits complemented.  If such a compare is grouped with a second
   instruction that accesses the same register non-complemented, and
   if that register's value is delivered via a bypass, then the
   pipeline recycles, thereby causing significant performance decline.
   This function locates such situations and exchanges the two
   operands of the compare.  The function returns true whenever it
   added an insn.  */

static bool
s390_z10_optimize_cmp (rtx insn)
{
  rtx prev_insn, next_insn;
  bool insn_added_p = false;
  rtx cond, *op0, *op1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      /* Handle compare and branch and branch on count
	 instructions.  */
      rtx pattern = single_set (insn);

      if (!pattern
	  || SET_DEST (pattern) != pc_rtx
	  || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
	return false;

      cond = XEXP (SET_SRC (pattern), 0);
      op0 = &XEXP (cond, 0);
      op1 = &XEXP (cond, 1);
    }
  else if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx src, dest;

      /* Handle normal compare instructions.  */
      src = SET_SRC (PATTERN (insn));
      dest = SET_DEST (PATTERN (insn));

      if (!REG_P (dest)
	  || !CC_REGNO_P (REGNO (dest))
	  || GET_CODE (src) != COMPARE)
	return false;

      /* s390_swap_cmp will try to find the conditional
	 jump when passing NULL_RTX as condition.  */
      cond = NULL_RTX;
      op0 = &XEXP (src, 0);
      op1 = &XEXP (src, 1);
    }
  else
    return false;

  if (!REG_P (*op0) || !REG_P (*op1))
    return false;

  if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
    return false;

  /* Swap the COMPARE arguments and its mask if there is a
     conflicting access in the previous insn.  */
  prev_insn = prev_active_insn (insn);
  if (prev_insn != NULL_RTX && INSN_P (prev_insn)
      && reg_referenced_p (*op1, PATTERN (prev_insn)))
    s390_swap_cmp (cond, op0, op1, insn);

  /* Check if there is a conflict with the next insn.  If there
     was no conflict with the previous insn, then swap the
     COMPARE arguments and its mask.  If we already swapped
     the operands, or if swapping them would cause a conflict
     with the previous insn, issue a NOP after the COMPARE in
     order to separate the two instructions.  */
  next_insn = next_active_insn (insn);
  if (next_insn != NULL_RTX && INSN_P (next_insn)
      && s390_non_addr_reg_read_p (*op1, next_insn))
    {
      if (prev_insn != NULL_RTX && INSN_P (prev_insn)
	  && s390_non_addr_reg_read_p (*op0, prev_insn))
	{
	  if (REGNO (*op1) == 0)
	    emit_insn_after (gen_nop1 (), insn);
	  else
	    emit_insn_after (gen_nop (), insn);
	  insn_added_p = true;
	}
      else
	s390_swap_cmp (cond, op0, op1, insn);
    }
  return insn_added_p;
}
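/* Illustrative sketch, not part of GCC: for a group like

     lr   %r3,%r9		   ; %r3 delivered via bypass
     crj  %r2,%r3,<mask>,Ltarget   ; second operand read complemented

   the function above either swaps the compare operands (adjusting the
   condition with swap_condition) or, when both orders would conflict,
   emits a NOP after the compare so the two instructions end up in
   different groups.  */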
/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
	{
	  pool = s390_mainpool_start ();
	  if (!pool)
	    pool_overflow = true;
	}

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
	pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
	 literal pool entries, cancel current chunk list and
	 recompute it.  zSeries machines have large branch
	 instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
	{
	  if (pool_overflow)
	    s390_chunkify_cancel (pool);
	  else
	    s390_mainpool_cancel (pool);

	  continue;
	}

      /* If we made it up to here, both conditions are satisfied.
	 Finish up literal pool related changes.  */
      if (pool_overflow)
	s390_chunkify_finish (pool);
      else
	s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  label = s390_execute_label (insn);
	  if (!label)
	    continue;

	  gcc_assert (label != const0_rtx);

	  target = emit_label (XEXP (label, 0));
	  INSN_ADDRESSES_NEW (target, -1);

	  target = emit_insn (s390_execute_target (insn));
	  INSN_ADDRESSES_NEW (target, -1);
	}
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();

  /* Walk over the insns and do some >=z10 specific changes.  */
  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      rtx insn;
      bool insn_added_p = false;

      /* The insn lengths and addresses have to be up to date for the
	 following manipulations.  */
      shorten_branches (get_insns ());

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
	    continue;

	  if (JUMP_P (insn))
	    insn_added_p |= s390_fix_long_loop_prediction (insn);

	  if ((GET_CODE (PATTERN (insn)) == PARALLEL
	       || GET_CODE (PATTERN (insn)) == SET)
	      && s390_tune == PROCESSOR_2097_Z10)
	    insn_added_p |= s390_z10_optimize_cmp (insn);
	}

      /* Adjust branches if we added new instructions.  */
      if (insn_added_p)
	shorten_branches (get_insns ());
    }
}
/* Return true if INSN is a fp load insn writing register REGNO.  */

static inline bool
s390_fpload_toreg (rtx insn, unsigned int regno)
{
  rtx set;
  enum attr_type flag = s390_safe_attr_type (insn);

  if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
    return false;

  set = single_set (insn);

  if (set == NULL_RTX)
    return false;

  if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
    return false;

  if (REGNO (SET_DEST (set)) != regno)
    return false;

  return true;
}
/* This value describes the distance to be avoided between an
   arithmetic fp instruction and an fp load writing the same register.
   Z10_EARLYLOAD_DISTANCE - 1 as well as Z10_EARLYLOAD_DISTANCE + 1 is
   fine but the exact value has to be avoided.  Otherwise the FP
   pipeline will throw an exception causing a major penalty.  */
#define Z10_EARLYLOAD_DISTANCE 7

/* Rearrange the ready list in order to avoid the situation described
   for Z10_EARLYLOAD_DISTANCE.  A problematic load instruction is
   moved to the very end of the ready list.  */

static void
s390_z10_prevent_earlyload_conflicts (rtx *ready, int *nready_p)
{
  unsigned int regno;
  int nready = *nready_p;
  rtx tmp;
  int i;
  rtx insn;
  rtx set;
  enum attr_type flag;
  int distance;

  /* Skip DISTANCE - 1 active insns.  */
  for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
       distance > 0 && insn != NULL_RTX;
       distance--, insn = prev_active_insn (insn))
    if (CALL_P (insn) || JUMP_P (insn))
      return;

  if (insn == NULL_RTX)
    return;

  set = single_set (insn);

  if (set == NULL_RTX || !REG_P (SET_DEST (set))
      || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
    return;

  flag = s390_safe_attr_type (insn);

  if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
    return;

  regno = REGNO (SET_DEST (set));
  i = nready - 1;

  while (!s390_fpload_toreg (ready[i], regno) && i > 0)
    i--;

  if (!i)
    return;

  tmp = ready[i];
  memmove (&ready[1], &ready[0], sizeof (rtx) * i);
  ready[0] = tmp;
}
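/* Illustrative sketch, not part of GCC: with Z10_EARLYLOAD_DISTANCE
   == 7, suppose the insn issued 6 cycles earlier was an FP arithmetic
   instruction writing %f0 and the ready list is { A, B, LOADf0 },
   where the scheduler would issue the last element next.  The
   rotation above yields { LOADf0, A, B }, postponing the conflicting
   load for as long as possible.  */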
/* This function is called via hook TARGET_SCHED_REORDER before
   issuing one insn from list READY which contains *NREADYP entries.
   For target z10 it reorders load instructions to avoid early load
   conflicts in the floating point pipeline.  */

static int
s390_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
		    rtx *ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
{
  if (s390_tune == PROCESSOR_2097_Z10)
    if (reload_completed && *nreadyp > 1)
      s390_z10_prevent_earlyload_conflicts (ready, nreadyp);

  return s390_issue_rate ();
}
/* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
   the scheduler has issued INSN.  It stores the last issued insn into
   last_scheduled_insn in order to make it available for
   s390_sched_reorder.  */

static int
s390_sched_variable_issue (FILE *file ATTRIBUTE_UNUSED,
			   int verbose ATTRIBUTE_UNUSED,
			   rtx insn, int more)
{
  last_scheduled_insn = insn;

  if (GET_CODE (PATTERN (insn)) != USE
      && GET_CODE (PATTERN (insn)) != CLOBBER)
    return more - 1;
  else
    return more;
}

static void
s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
		 int verbose ATTRIBUTE_UNUSED,
		 int max_ready ATTRIBUTE_UNUSED)
{
  last_scheduled_insn = NULL_RTX;
}
/* This function checks the whole of insn X for memory references.  The
   function always returns zero because the framework it is called
   from would stop recursively analyzing the insn upon a return value
   other than zero.  The real result of this function is updating
   counter variable MEM_COUNT.  */

static int
check_dpu (rtx *x, unsigned *mem_count)
{
  if (*x != NULL_RTX && MEM_P (*x))
    (*mem_count)++;
  return 0;
}
/* This target hook implementation for TARGET_LOOP_UNROLL_ADJUST calculates
   the new number of times LOOP should be unrolled when tuning for cpus
   with a built-in stride prefetcher.
   The loop is analyzed for memory accesses by calling check_dpu for
   each rtx of the loop.  Depending on the loop_depth and the amount of
   memory accesses a new number <= nunroll is returned to improve the
   behaviour of the hardware prefetch unit.  */

static unsigned
s390_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
{
  basic_block *bbs;
  rtx insn;
  unsigned i;
  unsigned mem_count = 0;

  if (s390_tune != PROCESSOR_2097_Z10 && s390_tune != PROCESSOR_2817_Z196)
    return nunroll;

  /* Count the number of memory references within the loop body.  */
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    {
      for (insn = BB_HEAD (bbs[i]); insn != BB_END (bbs[i]);
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn) && INSN_CODE (insn) != -1)
	  for_each_rtx (&insn, (rtx_function) check_dpu, &mem_count);
    }
  free (bbs);

  /* Prevent division by zero, and we do not need to adjust nunroll in
     this case.  */
  if (mem_count == 0)
    return nunroll;

  switch (loop_depth (loop))
    {
    case 1:
      return MIN (nunroll, 28 / mem_count);
    case 2:
      return MIN (nunroll, 22 / mem_count);
    default:
      return MIN (nunroll, 16 / mem_count);
    }
}
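/* Illustrative worked example, not part of GCC: a depth-1 loop
   containing 4 memory references yields MIN (nunroll, 28 / 4)
   = MIN (nunroll, 7), so a requested unroll factor of 8 is capped
   at 7 to keep the stride prefetcher effective.  */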
/* Initialize GCC target structure.  */

#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef  TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef  TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE s390_option_override

#undef	TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA s390_output_addr_const_extra

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER s390_sched_reorder
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT s390_sched_init

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST s390_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST s390_memory_move_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG s390_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE s390_function_arg_advance
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE s390_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE s390_libcall_value

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#undef  TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS s390_preferred_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P s390_legitimate_constant_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE s390_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE s390_conditional_register_usage

#undef TARGET_LOOP_UNROLL_ADJUST
#define TARGET_LOOP_UNROLL_ADJUST s390_loop_unroll_adjust

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT s390_trampoline_init

#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE s390_unwind_word_mode

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"