/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "diagnostic-core.h"
#include "basic-block.h"
#include "target-def.h"
#include "langhooks.h"
/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction.  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;
  const int ddbr;
  const int debr;
  const int dlgr;
  const int dlr;
  const int dr;
  const int dsgfr;
  const int dsgr;
};
const struct processor_costs *s390_cost;
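/* Illustrative note (added, not part of the original sources): every
   entry in the tables below is expressed via COSTS_N_INSNS, which
   scales an instruction count into the unit used by the RTX cost
   hooks (COSTS_N_INSNS (n) expands to n * 4 in rtl.h).  A minimal
   sketch of how the active table is consumed, assuming z10 tuning
   has been selected by s390_option_override below:

     s390_cost = &z10_cost;
     int mul_cost = s390_cost->msgr;   /* cost of a 64x64 bit MSGR */
*/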
static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR */
  COSTS_N_INSNS (30),    /* DDBR */
  COSTS_N_INSNS (27),    /* DEBR */
  COSTS_N_INSNS (220),   /* DLGR */
  COSTS_N_INSNS (34),    /* DLR */
  COSTS_N_INSNS (34),    /* DR */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR */
};
static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (176),   /* DLGR */
  COSTS_N_INSNS (31),    /* DLR */
  COSTS_N_INSNS (31),    /* DR */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR */
};
static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (30),    /* DLGR */
  COSTS_N_INSNS (23),    /* DLR */
  COSTS_N_INSNS (23),    /* DR */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR */
};
static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (10),    /* MH    */
  COSTS_N_INSNS (10),    /* MHI   */
  COSTS_N_INSNS (10),    /* ML    */
  COSTS_N_INSNS (10),    /* MR    */
  COSTS_N_INSNS (10),    /* MS    */
  COSTS_N_INSNS (10),    /* MSG   */
  COSTS_N_INSNS (10),    /* MSGF  */
  COSTS_N_INSNS (10),    /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (10),    /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR */
  COSTS_N_INSNS (120),   /* SQXBR */
  COSTS_N_INSNS (52),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (111),   /* DXBR */
  COSTS_N_INSNS (39),    /* DDBR */
  COSTS_N_INSNS (32),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR */
  COSTS_N_INSNS (71),    /* DLR */
  COSTS_N_INSNS (71),    /* DR */
  COSTS_N_INSNS (71),    /* DSGFR */
  COSTS_N_INSNS (71),    /* DSGR */
};
static const
struct processor_costs z196_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (101),   /* DXBR B+101 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};
extern int reload_completed;
/* Kept up to date using the SCHED_VARIABLE_ISSUE hook.  */
static rtx last_scheduled_insn;
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};
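/* Illustrative example (added, not part of the original sources): the
   RTL address (plus (plus (reg %r3) (reg %r2)) (const_int 100)) is
   decomposed by s390_decompose_address below into base = %r2,
   index = %r3 and displacement = 100, matching the assembler operand
   syntax 100(%r3,%r2).  */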
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct GTY (()) s390_frame_layout
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr where slots in the register
     save area are reserved for.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;
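  /* Example reading of the table above (added for illustration): if
     only f4 has to be saved, fpr_bitmap == 1 << 2, since f4 is hard
     register 18 and therefore maps to bit 2.  */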
  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};
/* Define the structure for the machine field in struct function.  */

struct GTY(()) machine_function
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};
/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_LONG)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))
/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)
/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
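/* For illustration (added, not part of the original sources): a
   multi-register mode such as TImode occupies two GPRs, so
   REGNO_PAIR_OK accepts only even starting register numbers for it,
   matching the even/odd register pairs the hardware requires;
   single-register modes are accepted at any register number.  */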
/* That's the read ahead of the dynamic branch prediction unit in
   bytes on a z10 (or higher) CPU.  */
#define PREDICT_DISTANCE (TARGET_Z10 ? 384 : 2048)
/* Return the alignment for LABEL.  We default to the -falign-labels
   value except for the literal pool base label.  */
int
s390_label_align (rtx label)
{
  rtx prev_insn = prev_active_insn (label);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = single_set (prev_insn);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = SET_SRC (prev_insn);

  /* Don't align literal pool base labels.  */
  if (GET_CODE (prev_insn) == UNSPEC
      && XINT (prev_insn, 1) == UNSPEC_MAIN_BASE)
    return 0;

 old:
  return align_labels_log;
}
static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_unwind_word_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}
/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  /* In contrast to the default implementation reject TImode constants on 31bit
     TARGET_ZARCH for ABI compliance.  */
  if (!TARGET_64BIT
      && TARGET_ZARCH
      && mode == TImode)
    return false;

  if (DECIMAL_FLOAT_MODE_P (mode))
    return default_decimal_float_supported_p ();

  return default_scalar_mode_supported_p (mode);
}
/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}
/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}
/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}
/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative. So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero. Knowing the sign of the
         constant the overflow behavior gets predictable. e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}
/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;
      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx cc;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are compatible.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    {
      gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
                  == GET_MODE (op0));
      cc = op0;
    }
  else
    {
      cc = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
}
/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
   matches CMP.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem,
                            rtx cmp, rtx new_rtx)
{
  emit_insn (gen_atomic_compare_and_swapsi_internal (old, mem, cmp, new_rtx));
  return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM),
                            const0_rtx);
}
/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC1;
        case NE:        return CC0 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC2;
        case NE:        return CC0 | CC1 | CC3;
        default:        return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC3;
        case NE:        return CC0 | CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0 | CC2;
        case NE:        return CC1 | CC3;
        default:        return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU:       return CC2 | CC3;  /* carry */
        case GEU:       return CC0 | CC1;  /* no carry */
        default:        return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU:       return CC0 | CC1;  /* borrow */
        case LEU:       return CC2 | CC3;  /* no borrow */
        default:        return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0 | CC2;
        case NE:        return CC1 | CC3;
        case LTU:       return CC1;
        case GTU:       return CC3;
        case LEU:       return CC1 | CC2;
        case GEU:       return CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LTU:       return CC1;
        case GTU:       return CC2;
        case LEU:       return CC0 | CC1;
        case GEU:       return CC0 | CC2;
        default:        return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LTU:       return CC2;
        case GTU:       return CC1;
        case LEU:       return CC0 | CC2;
        case GEU:       return CC0 | CC1;
        default:        return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1 | CC3;
        case GT:        return CC2;
        case LE:        return CC0 | CC1 | CC3;
        case GE:        return CC0 | CC2;
        default:        return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2 | CC3;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2 | CC3;
        default:        return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}
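/* Worked example (added for illustration): an NE test on CCZmode
   yields CC1 | CC2 | CC3 == 7, i.e. "branch if any of condition
   codes 1, 2 or 3 is set".  This value indexes the mnemonic table in
   s390_branch_condition_mnemonic below, giving "ne", and is also the
   mask operand emitted for BRC/BRCL.  */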
/* Return branch condition mask to implement a compare and branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_compare_and_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;

  switch (GET_CODE (code))
    {
    case EQ:    return CC0;
    case NE:    return CC1 | CC2;
    case LT:
    case LTU:   return CC1;
    case GT:
    case GTU:   return CC2;
    case LE:
    case LEU:   return CC0 | CC1;
    case GE:
    case GEU:   return CC0 | CC2;
    default:    gcc_unreachable ();
    }
  return -1;
}
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}
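/* Illustrative call (added, not part of the original sources):
   s390_extract_part (GEN_INT (0x0000ff00), HImode, 0) scans the
   HImode parts starting at the least significant end; the low part
   0xff00 already differs from DEF (zero), so 0xff00 is returned.  */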
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part. Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */

bool
s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
                           int *pos, int *length)
{
  int tmp_pos = 0;
  int tmp_length = 0;
  int i;
  unsigned HOST_WIDE_INT mask = 1ULL;
  bool contiguous = false;

  for (i = 0; i < size; mask <<= 1, i++)
    {
      if (contiguous)
        {
          if (mask & in)
            tmp_length++;
          else
            break;
        }
      else
        {
          if (mask & in)
            {
              contiguous = true;
              tmp_length++;
            }
          else
            tmp_pos++;
        }
    }

  if (!tmp_length)
    return false;

  /* Calculate a mask for all bits beyond the contiguous bits.  */
  mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));

  if (mask & in)
    return false;

  if (tmp_length + tmp_pos - 1 > size)
    return false;

  if (length)
    *length = tmp_length;

  if (pos)
    *pos = tmp_pos;

  return true;
}
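/* Illustrative call (added, not part of the original sources):
   IN == 0x0ff0 with SIZE == 32 yields *POS == 4 and *LENGTH == 8,
   while IN == 0x0f0f fails because the set bits are not
   contiguous.  */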
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}
/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block operations.
     Overlapping operations would otherwise be recognized by the S/390 hardware
     and would fall back to a slower implementation. Allowing overlapping
     operations would lead to slow code but not to wrong code. Therefore we are
     somewhat optimistic if we cannot prove that the memory blocks are
     overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}
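/* Illustrative example (added, not part of the original sources):
   with MEM1 at (reg %r2) and MEM2 at (plus (reg %r2) (const_int 4)),
   the address delta folds to 4; for SIZE == 8 the blocks provably
   overlap, while for SIZE <= 4 the function returns false.  */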
/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}
/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (wmode != mode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}
/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}
/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}
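/* Illustrative example (added, not part of the original sources): for
   an SImode AND of a memory operand with 0xfffffffe, only the lowest
   byte differs from the AND default 0xff, so the operation is
   rewritten to a QImode AND of byte 3 (big-endian numbering) with
   mask 0xfe -- i.e. a single NI instruction.  */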
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
/* Implement TARGET_OPTION_OVERRIDE.  */

static void
s390_option_override (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Set the march default in case it hasn't been specified on
     cmdline.  */
  if (s390_arch == PROCESSOR_max)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_arch = TARGET_ZARCH ? PROCESSOR_2064_Z900 : PROCESSOR_9672_G5;
      s390_arch_flags = processor_flags_table[(int)s390_arch];
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  /* Use hardware DFP if available and not explicitly disabled by
     user.  E.g. with -m31 -march=z10 -mzarch.  */
  if (!(target_flags_explicit & MASK_HARD_DFP) && TARGET_DFP)
    target_flags |= MASK_HARD_DFP;

  if (TARGET_HARD_DFP && !TARGET_DFP)
    {
      if (target_flags_explicit & MASK_HARD_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags &= ~MASK_HARD_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can%'t be used in conjunction with -msoft-float");

      target_flags &= ~MASK_HARD_DFP;
    }

  /* Set processor cost function.  */
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
      s390_cost = &z990_cost;
      break;
    case PROCESSOR_2094_Z9_109:
      s390_cost = &z9_109_cost;
      break;
    case PROCESSOR_2097_Z10:
      s390_cost = &z10_cost;
      break;
    case PROCESSOR_2817_Z196:
      s390_cost = &z196_cost;
      break;
    default:
      s390_cost = &z900_cost;
    }

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif

  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      maybe_set_param_value (PARAM_MAX_UNROLLED_INSNS, 100,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_UNROLL_TIMES, 32,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEELED_INSNS, 2000,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEEL_TIMES, 64,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
    }

  maybe_set_param_value (PARAM_MAX_PENDING_LIST_LENGTH, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* values for loop prefetching */
  maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_L1_CACHE_SIZE, 128,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* s390 has more than 2 levels and the size is much larger.  Since
     we are always running virtualized assume that we only get a small
     part of the caches above l1.  */
  maybe_set_param_value (PARAM_L2_CACHE_SIZE, 1500,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_PREFETCH_MIN_INSN_TO_MEM_RATIO, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES, 6,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* This cannot reside in s390_option_optimization_table since HAVE_prefetch
     requires the arch flags to be evaluated already.  Since prefetching
     is beneficial on s390, we enable it if available.  */
  if (flag_prefetch_loop_arrays < 0 && HAVE_prefetch && optimize >= 3)
    flag_prefetch_loop_arrays = 1;

  /* Use the alternative scheduling-pressure algorithm by default.  */
  maybe_set_param_value (PARAM_SCHED_PRESSURE_ALGORITHM, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  if (TARGET_TPF)
    {
      /* Don't emit DWARF3/4 unless specifically selected.  The TPF
         debuggers do not yet support DWARF 3/4.  */
      if (!global_options_set.x_dwarf_strict)
        dwarf_strict = 1;
      if (!global_options_set.x_dwarf_version)
        dwarf_version = 2;
    }
}
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};
/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}
/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Without the long displacement facility we don't need to
     distinguish between long and short displacement.  */
  if (!TARGET_LONG_DISPLACEMENT)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;       /* index + base */
              base = op1;
            }
          else
            {
              base = op0;       /* base + displacement */
              disp = op1;
            }
        }
      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0); /* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }
      else
        return false;
    }

  else
    disp = addr;                /* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = true;

      if ((reload_completed || reload_in_progress)
          && indx == cfun->machine->base_reg)
        pointer = indx_ptr = literal_pool = true;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (!disp)
    {
      /* If virtual registers are involved, the displacement will change later
         anyway as the virtual registers get eliminated.  This could make a
         valid displacement invalid, but it is more likely to make an invalid
         displacement valid, because we sometimes access the register save area
         via negative offsets to one of those registers.
         Thus we don't check the displacement for validity here.  If after
         elimination the displacement turns out to be invalid after all,
         this is fixed up by reload in any case.  */
      if (base != arg_pointer_rtx
          && indx != arg_pointer_rtx
          && base != return_address_pointer_rtx
          && indx != return_address_pointer_rtx
          && base != frame_pointer_rtx
          && indx != frame_pointer_rtx
          && base != virtual_stack_vars_rtx
          && indx != virtual_stack_vars_rtx)
        if (!DISP_IN_RANGE (offset))
          return false;
    }
  else
    {
      /* All the special cases are pointers.  */
      pointer = true;

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      if (GET_CODE (disp) == UNSPEC
          && (XINT (disp, 1) == UNSPEC_GOT
              || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
          && flag_pic == 1)
        {
          ;
        }

      /* Accept pool label offsets.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
        ;

      /* Accept literal pool references.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
        {
          /* In case CSE pulled a non literal pool reference out of
             the pool we have to reject the address.  This is
             especially important when loading the GOT pointer on non
             zarch CPUs.  In this case the literal pool contains an lt
             relative offset to the _GLOBAL_OFFSET_TABLE_ label which
             will most likely exceed the displacement.  */
          if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (XVECEXP (disp, 0, 0)))
            return false;

          orig_disp = gen_rtx_CONST (Pmode, disp);
          if (offset)
            {
              /* If we have an offset, make sure it does not
                 exceed the size of the constant pool entry.  */
              rtx sym = XVECEXP (disp, 0, 0);
              if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
                return false;

              orig_disp = plus_constant (Pmode, orig_disp, offset);
            }
        }

      else
        return false;
    }

  if (!base && !indx)
    pointer = true;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = orig_disp;
      out->pointer = pointer;
      out->literal_pool = literal_pool;
    }

  return true;
}
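/* Illustrative example (added, not part of the original sources):
   decomposing (plus (reg %r15) (const_int 96)) yields base = %r15
   (recognized as a pointer, since it is the stack pointer), no index,
   and integer displacement 96.  */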
/* Decompose a RTL expression OP for a shift count into its components,
   and return the base register in BASE and the offset in OFFSET.

   Return true if OP is a valid shift count, false if not.  */

static bool
s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT off = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      off = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (op && GET_CODE (op) != REG)
    return false;

  if (offset)
    *offset = off;
  if (base)
    *base = op;

  return true;
}
/* Return true if CODE is a valid address without index.  */

bool
s390_legitimate_address_without_index_p (rtx op)
{
  struct s390_address addr;

  if (!s390_decompose_address (XEXP (op, 0), &addr))
    return false;
  if (addr.indx)
    return false;

  return true;
}
/* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
   and return these parts in SYMREF and ADDEND.  You can pass NULL in
   SYMREF and/or ADDEND if you are not interested in these values.
   Literal pool references are *not* considered symbol references.  */

static bool
s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
{
  HOST_WIDE_INT tmpaddend = 0;

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
          && !CONSTANT_POOL_ADDRESS_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          tmpaddend = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      else
        return false;
    }
  else
    if (GET_CODE (addr) != SYMBOL_REF || CONSTANT_POOL_ADDRESS_P (addr))
      return false;

  if (symref)
    *symref = addr;
  if (addend)
    *addend = tmpaddend;

  return true;
}
/* Return TRUE if ADDR is an operand valid for a load/store relative
   instruction.  Be aware that the alignment of the operand needs to
   be checked separately.  */

static bool
s390_loadrelative_operand_p (rtx addr)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  /* Enable load relative for symbol@GOTENT.  */
  if (GET_CODE (addr) == UNSPEC
      && XINT (addr, 1) == UNSPEC_GOTENT)
    return true;

  return s390_symref_operand_p (addr, NULL, NULL);
}
/* Return true if the address in OP is valid for constraint letter C
   if wrapped in a MEM rtx.  Set LIT_POOL_OK to true if literal
   pool MEMs should be accepted.  Only the Q, R, S, T constraint
   letters are allowed for C.  */

static int
s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
{
  struct s390_address addr;
  bool decomposed = false;

  /* This check makes sure that no symbolic address (except literal
     pool references) are accepted by the R or T constraints.  */
  if (s390_loadrelative_operand_p (op))
    return 0;

  /* Ensure literal pool references are only accepted if LIT_POOL_OK.  */
  if (!lit_pool_ok)
    {
      if (!s390_decompose_address (op, &addr))
        return 0;
      if (addr.literal_pool)
        return 0;
      decomposed = true;
    }

  switch (c)
    {
    case 'Q': /* no index short displacement */
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (!s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'R': /* with index short displacement */
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!decomposed && !s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      break;

    case 'S': /* no index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T': /* with index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if ((decomposed || s390_decompose_address (op, &addr))
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    default:
      return 0;
    }
  return 1;
}
/* Evaluates constraint strings described by the regular expression
   ([A|B|Z](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for
   the constraint given in STR, or 0 else.  */

int
s390_mem_constraint (const char *str, rtx op)
{
  char c = str[0];

  switch (c)
    {
    case 'A':
      /* Check for offsettable variants of memory constraints.  */
      if (!MEM_P (op) || MEM_VOLATILE_P (op))
        return 0;
      if ((reload_completed || reload_in_progress)
          ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
        return 0;
      return s390_check_qrst_address (str[1], XEXP (op, 0), true);
    case 'B':
      /* Check for non-literal-pool variants of memory constraints.  */
      if (!MEM_P (op))
        return 0;
      return s390_check_qrst_address (str[1], XEXP (op, 0), false);
    case 'Q':
    case 'R':
    case 'S':
    case 'T':
      if (GET_CODE (op) != MEM)
        return 0;
      return s390_check_qrst_address (c, XEXP (op, 0), true);
    case 'U':
      return (s390_check_qrst_address ('Q', op, true)
              || s390_check_qrst_address ('R', op, true));
    case 'W':
      return (s390_check_qrst_address ('S', op, true)
              || s390_check_qrst_address ('T', op, true));
    case 'Y':
      /* Simply check for the basic form of a shift count.  Reload will
         take care of making sure we have a proper base register.  */
      if (!s390_decompose_shift_count (op, NULL, NULL))
        return 0;
      break;
    case 'Z':
      return s390_check_qrst_address (str[1], op, true);
    default:
      return 0;
    }
  return 1;
}
/* Evaluates constraint strings starting with letter O.  Input
   parameter C is the second letter following the "O" in the constraint
   string.  Returns 1 if VALUE meets the respective constraint and 0
   otherwise.  */

int
s390_O_constraint_str (const char c, HOST_WIDE_INT value)
{
  if (!TARGET_EXTIMM)
    return 0;

  switch (c)
    {
    case 's':
      return trunc_int_for_mode (value, SImode) == value;

    case 'p':
      return value == 0
        || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;

    case 'n':
      return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;

    default:
      gcc_unreachable ();
    }
}
/* Evaluates constraint strings starting with letter N.  Parameter STR
   contains the letters following letter "N" in the constraint string.
   Returns true if VALUE matches the constraint.  */

int
s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
{
  enum machine_mode mode, part_mode;
  int def;
  int part, part_goal;

  if (str[0] == 'x')
    part_goal = -1;
  else
    part_goal = str[0] - '0';

  switch (str[1])
    {
    case 'Q': part_mode = QImode; break;
    case 'H': part_mode = HImode; break;
    case 'S': part_mode = SImode; break;
    default:  return 0;
    }

  switch (str[2])
    {
    case 'H': mode = HImode; break;
    case 'S': mode = SImode; break;
    case 'D': mode = DImode; break;
    default:  return 0;
    }

  switch (str[3])
    {
    case '0': def = 0;  break;
    case 'F': def = -1; break;
    default:  return 0;
    }

  if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
    return 0;

  part = s390_single_part (GEN_INT (value), mode, part_mode, def);
  if (part < 0)
    return 0;
  if (part_goal != -1 && part_goal != part)
    return 0;

  return 1;
}
/* Returns true if the input parameter VALUE is a float zero.  */

int
s390_float_const_zero_p (rtx value)
{
  return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
          && value == CONST0_RTX (GET_MODE (value)));
}
/* Implement TARGET_REGISTER_MOVE_COST.  */

static int
s390_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                         reg_class_t from, reg_class_t to)
{
  /* On s390, copy between fprs and gprs is expensive.  */
  if ((reg_classes_intersect_p (from, GENERAL_REGS)
       && reg_classes_intersect_p (to, FP_REGS))
      || (reg_classes_intersect_p (from, FP_REGS)
          && reg_classes_intersect_p (to, GENERAL_REGS)))
    return 10;

  return 1;
}
/* Implement TARGET_MEMORY_MOVE_COST.  */

static int
s390_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       reg_class_t rclass ATTRIBUTE_UNUSED,
                       bool in ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE contains GET_CODE (x), OUTER_CODE contains the code
   of the superexpression of x.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case MEM:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
    case AND:
    case IOR:
    case XOR:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return false;

    case PLUS:
    case MINUS:
      *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      switch (GET_MODE (x))
	{
	case SImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (GET_CODE (right) == CONST_INT
		&& CONST_OK_FOR_K (INTVAL (right)))
	      *total = s390_cost->mhi;
	    else if (GET_CODE (left) == SIGN_EXTEND)
	      *total = s390_cost->mh;
	    else
	      *total = s390_cost->ms;  /* msr, ms, msy */
	    break;
	  }
	case DImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (TARGET_ZARCH)
	      {
		if (GET_CODE (right) == CONST_INT
		    && CONST_OK_FOR_K (INTVAL (right)))
		  *total = s390_cost->mghi;
		else if (GET_CODE (left) == SIGN_EXTEND)
		  *total = s390_cost->msgf;
		else
		  *total = s390_cost->msg;  /* msgr, msg */
	      }
	    else /* TARGET_31BIT */
	      {
		if (GET_CODE (left) == SIGN_EXTEND
		    && GET_CODE (right) == SIGN_EXTEND)
		  /* mulsidi case: mr, m */
		  *total = s390_cost->m;
		else if (GET_CODE (left) == ZERO_EXTEND
			 && GET_CODE (right) == ZERO_EXTEND
			 && TARGET_CPU_ZARCH)
		  /* umulsidi case: ml, mlr */
		  *total = s390_cost->ml;
		else
		  /* Complex calculation is required.  */
		  *total = COSTS_N_INSNS (40);
	      }
	    break;
	  }
	case SFmode:
	case DFmode:
	  *total = s390_cost->mult_df;
	  break;
	case TFmode:
	  *total = s390_cost->mxbr;
	  break;
	default:
	  return false;
	}
      return false;

    case FMA:
      switch (GET_MODE (x))
	{
	case DFmode:
	  *total = s390_cost->madbr;
	  break;
	case SFmode:
	  *total = s390_cost->maebr;
	  break;
	default:
	  return false;
	}
      /* Negate in the third argument is free: FMSUB.  */
      if (GET_CODE (XEXP (x, 2)) == NEG)
	{
	  *total += (rtx_cost (XEXP (x, 0), FMA, 0, speed)
		     + rtx_cost (XEXP (x, 1), FMA, 1, speed)
		     + rtx_cost (XEXP (XEXP (x, 2), 0), FMA, 2, speed));
	  return true;
	}
      return false;

    case UDIV:
    case UMOD:
      if (GET_MODE (x) == TImode)	       /* 128 bit division */
	*total = s390_cost->dlgr;
      else if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    *total = s390_cost->dlr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dlgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      return false;

    case DIV:
    case MOD:
      if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    if (TARGET_ZARCH)
	      *total = s390_cost->dsgfr;
	    else
	      *total = s390_cost->dr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dsgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      else if (GET_MODE (x) == SFmode)
	*total = s390_cost->debr;
      else if (GET_MODE (x) == DFmode)
	*total = s390_cost->ddbr;
      else if (GET_MODE (x) == TFmode)
	*total = s390_cost->dxbr;
      return false;

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = s390_cost->sqebr;
      else if (GET_MODE (x) == DFmode)
	*total = s390_cost->sqdbr;
      else /* TFmode */
	*total = s390_cost->sqxbr;
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (outer_code == MULT || outer_code == DIV || outer_code == MOD
	  || outer_code == PLUS || outer_code == MINUS
	  || outer_code == COMPARE)
	*total = 0;
      return false;

    case COMPARE:
      *total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 0)) == AND
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 0);
	  rtx op1 = XEXP (XEXP (x, 0), 1);
	  rtx op2 = XEXP (x, 1);

	  if (memory_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
	    return true;
	  if (register_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
	    return true;
	}
      return false;

    default:
      return false;
    }
}
/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}
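/* Illustrative example (not part of the original source): for
       (plus (reg 11) (const_int 40))
   the cost is COSTS_N_INSNS (1), while the indexed form
       (plus (plus (reg 11) (reg 2)) (const_int 40))
   costs COSTS_N_INSNS (1) + 1, slightly biasing address selection
   towards index-free addresses.  */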
/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

static int
tls_symbolic_operand (rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
/* Split DImode access register reference REG (on 64-bit) into its constituent
   low and high parts, and store them into LO and HI.  Note that gen_lowpart/
   gen_highpart cannot be used as they assume all registers are word-sized,
   while our access registers have only half that size.  */

void
s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
{
  gcc_assert (TARGET_64BIT);
  gcc_assert (ACCESS_REG_P (reg));
  gcc_assert (GET_MODE (reg) == DImode);
  gcc_assert (!(REGNO (reg) & 1));

  *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
  *hi = gen_rtx_REG (SImode, REGNO (reg));
}
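/* Illustrative example (register numbers assumed): for a DImode
   access-register pair starting at an even access register A0, the
   call s390_split_access_reg (reg, &lo, &hi) yields HI = (reg:SI A0)
   and LO = (reg:SI A1), i.e. the more significant half lives in the
   lower-numbered 32-bit access register.  */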
/* Return true if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP contains a reference to a thread-local symbol.  */

int
tls_symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF)
    return tls_symbolic_operand (op);

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_symbolic_move.  */
  return 0;
}
/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

static bool
s390_legitimate_constant_p (enum machine_mode mode, rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return true;

  /* Accept immediate LARL operands.  */
  if (TARGET_CPU_ZARCH && larl_operand (op, mode))
    return true;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return false;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return true;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return false;
}
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).  */

static bool
s390_cannot_force_const_mem (enum machine_mode mode, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
	 non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0));
    case PLUS:
    case MINUS:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0))
	     || s390_cannot_force_const_mem (mode, XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.  */
	case UNSPEC_LTREL_OFFSET:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLTOFF:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	/* If the literal pool shares the code section, we put
	   execute template placeholders into the pool as well.  */
	case UNSPEC_INSN:
	  return TARGET_CPU_ZARCH;

	default:
	  return true;
	}
      break;

    default:
      gcc_unreachable ();
    }
}
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.
   This function accepts all constants which can be loaded directly
   into a GPR.  */

static bool
legitimate_reload_constant_p (rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return true;

  /* Accept l(g)hi/l(g)fi operands.  */
  if (GET_CODE (op) == CONST_INT
      && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
    return true;

  /* Accept lliXX operands.  */
  if (TARGET_ZARCH
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, HImode, 0) >= 0)
    return true;

  /* Accept llihf/llilf operands.  */
  if (TARGET_EXTIMM
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, SImode, 0) >= 0)
    return true;

  /* Accept larl operands.  */
  if (TARGET_CPU_ZARCH
      && larl_operand (op, VOIDmode))
    return true;

  /* Accept floating-point zero operands that fit into a single GPR.  */
  if (GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op)
      && GET_MODE_SIZE (GET_MODE (op)) <= UNITS_PER_WORD)
    return true;

  /* Accept double-word operands that can be split.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
    {
      enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
      rtx hi = operand_subword (op, 0, 0, dword_mode);
      rtx lo = operand_subword (op, 1, 0, dword_mode);
      return legitimate_reload_constant_p (hi)
	     && legitimate_reload_constant_p (lo);
    }

  /* Everything else cannot be handled without reload.  */
  return false;
}
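/* Illustrative example (31-bit, word_mode == SImode, values assumed):
   the DImode constant 0x0000100000002000 splits into the subwords
   0x00001000 and 0x00002000; both satisfy CONST_OK_FOR_K, so the
   double-word constant is accepted and can later be loaded as two
   l(g)hi-style word moves.  */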
/* Returns true if the constant value OP is a legitimate fp operand
   during and after reload.
   This function accepts all constants which can be loaded directly
   into an FPR.  */

static bool
legitimate_reload_fp_constant_p (rtx op)
{
  /* Accept floating-point zero operands if the load zero instruction
     exists.  */
  if (TARGET_Z196
      && GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op))
    return true;

  return false;
}
/* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
   return the class of reg to actually use.  */

static reg_class_t
s390_preferred_reload_class (rtx op, reg_class_t rclass)
{
  switch (GET_CODE (op))
    {
      /* Constants we cannot reload into general registers
	 must be forced into the literal pool.  */
      case CONST_DOUBLE:
      case CONST_INT:
	if (reg_class_subset_p (GENERAL_REGS, rclass)
	    && legitimate_reload_constant_p (op))
	  return GENERAL_REGS;
	else if (reg_class_subset_p (ADDR_REGS, rclass)
		 && legitimate_reload_constant_p (op))
	  return ADDR_REGS;
	else if (reg_class_subset_p (FP_REGS, rclass)
		 && legitimate_reload_fp_constant_p (op))
	  return FP_REGS;
	return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (!legitimate_reload_constant_p (op))
	  return NO_REGS;
	/* fallthrough */
      case PLUS:
	/* load address will be used.  */
	if (reg_class_subset_p (ADDR_REGS, rclass))
	  return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  return rclass;
}
/* Return true if ADDR is SYMBOL_REF + addend with addend being a
   multiple of ALIGNMENT and the SYMBOL_REF being naturally
   aligned.  */

bool
s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT addend;
  rtx symref;

  /* Accept symbol@GOTENT with pointer size alignment.  */
  if (GET_CODE (addr) == CONST
      && GET_CODE (XEXP (addr, 0)) == UNSPEC
      && XINT (XEXP (addr, 0), 1) == UNSPEC_GOTENT
      && alignment <= UNITS_PER_LONG)
    return true;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    return false;

  return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
	  && !(addend & (alignment - 1)));
}
/* ADDR is moved into REG using larl.  If ADDR isn't a valid larl
   operand SCRATCH is used to reload the even part of the address and
   the odd part is added afterwards.  */

void
s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
{
  HOST_WIDE_INT addend;
  rtx symref;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    gcc_unreachable ();

  if (!(addend & 1))
    /* Easy case.  The addend is even so larl will do fine.  */
    emit_move_insn (reg, addr);
  else
    {
      /* We can leave the scratch register untouched if the target
	 register is a valid base register.  */
      if (REGNO (reg) < FIRST_PSEUDO_REGISTER
	  && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
	scratch = reg;

      gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);

      if (addend != 1)
	emit_move_insn (scratch,
			gen_rtx_CONST (Pmode,
				       gen_rtx_PLUS (Pmode, symref,
						     GEN_INT (addend - 1))));
      else
	emit_move_insn (scratch, symref);

      /* Increment the address using la in order to avoid clobbering cc.  */
      emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
    }
}
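/* Illustrative sketch (assembly invented for illustration): reloading
   the odd-addend address sym + 15 into %r1 would emit

       larl  %r1,sym+14        # even part, a valid larl operand
       la    %r1,1(%r1)        # add the odd bit without clobbering cc

   instead of a single larl, which only accepts even addends.  */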
/* Generate what is necessary to move between REG and MEM using
   SCRATCH.  The direction is given by TOMEM.  */

void
s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
{
  /* Reload might have pulled a constant out of the literal pool.
     Force it back in.  */
  if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
      || GET_CODE (mem) == CONST)
    mem = force_const_mem (GET_MODE (reg), mem);

  gcc_assert (MEM_P (mem));

  /* For a load from memory we can leave the scratch register
     untouched if the target register is a valid base register.  */
  if (!tomem
      && REGNO (reg) < FIRST_PSEUDO_REGISTER
      && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
      && GET_MODE (reg) == GET_MODE (scratch))
    scratch = reg;

  /* Load address into scratch register.  Since we can't have a
     secondary reload for a secondary reload we have to cover the case
     where larl would need a secondary reload here as well.  */
  s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);

  /* Now we can use a standard load/store to do the move.  */
  if (tomem)
    emit_move_insn (replace_equiv_address (mem, scratch), reg);
  else
    emit_move_insn (reg, replace_equiv_address (mem, scratch));
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch or immediate register.  Return the class
   needed for the immediate register.  */

static reg_class_t
s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Intermediate register needed.  */
  if (reg_classes_intersect_p (CC_REGS, rclass))
    return GENERAL_REGS;

  if (TARGET_Z10)
    {
      HOST_WIDE_INT offset;
      rtx symref;

      /* On z10 several optimizer steps may generate larl operands with
	 an odd addend.  */
      if (in_p
	  && s390_symref_operand_p (x, &symref, &offset)
	  && mode == Pmode
	  && !SYMBOL_REF_ALIGN1_P (symref)
	  && (offset & 1) == 1)
	sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
		      : CODE_FOR_reloadsi_larl_odd_addend_z10);

      /* On z10 we need a scratch register when moving QI, TI or floating
	 point mode values from or to a memory location with a SYMBOL_REF
	 or if the symref addend of a SI or DI move is not aligned to the
	 width of the access.  */
      if (MEM_P (x)
	  && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
	  && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
	      || (!TARGET_ZARCH && mode == DImode)
	      || ((mode == HImode || mode == SImode || mode == DImode)
		  && (!s390_check_symref_alignment (XEXP (x, 0),
						    GET_MODE_SIZE (mode))))))
	{
#define __SECONDARY_RELOAD_CASE(M,m)					\
	  case M##mode:							\
	    if (TARGET_64BIT)						\
	      sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 :	\
				  CODE_FOR_reload##m##di_tomem_z10;	\
	    else							\
	      sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 :	\
				  CODE_FOR_reload##m##si_tomem_z10;	\
	    break;

	  switch (GET_MODE (x))
	    {
	      __SECONDARY_RELOAD_CASE (QI, qi);
	      __SECONDARY_RELOAD_CASE (HI, hi);
	      __SECONDARY_RELOAD_CASE (SI, si);
	      __SECONDARY_RELOAD_CASE (DI, di);
	      __SECONDARY_RELOAD_CASE (TI, ti);
	      __SECONDARY_RELOAD_CASE (SF, sf);
	      __SECONDARY_RELOAD_CASE (DF, df);
	      __SECONDARY_RELOAD_CASE (TF, tf);
	      __SECONDARY_RELOAD_CASE (SD, sd);
	      __SECONDARY_RELOAD_CASE (DD, dd);
	      __SECONDARY_RELOAD_CASE (TD, td);

	    default:
	      gcc_unreachable ();
	    }
#undef __SECONDARY_RELOAD_CASE
	}
    }

  /* We need a scratch register when loading a PLUS expression which
     is not a legitimate operand of the LOAD ADDRESS instruction.  */
  if (in_p && s390_plus_operand (x, mode))
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);

  /* Performing a multiword move from or to memory we have to make sure the
     second chunk in memory is addressable without causing a displacement
     overflow.  If that would be the case we calculate the address in
     a scratch register.  */
  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
			 + GET_MODE_SIZE (mode) - 1))
    {
      /* For GENERAL_REGS a displacement overflow is no problem if occurring
	 in a s_operand address since we may fallback to lm/stm.  So we only
	 have to care about overflows in the b+i+d case.  */
      if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
	   && s390_class_max_nregs (GENERAL_REGS, mode) > 1
	   && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
	  /* For FP_REGS no lm/stm is available so this check is triggered
	     for displacement overflows in b+i+d and b+d like addresses.  */
	  || (reg_classes_intersect_p (FP_REGS, rclass)
	      && s390_class_max_nregs (FP_REGS, mode) > 1))
	{
	  if (in_p)
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_in :
			  CODE_FOR_reloadsi_nonoffmem_in);
	  else
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_out :
			  CODE_FOR_reloadsi_nonoffmem_out);
	}
    }

  /* A scratch address register is needed when a symbolic constant is
     copied to r0 compiling with -fPIC.  In other cases the target
     register might be used as temporary (see legitimize_pic_address).  */
  if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_PIC_addr :
		  CODE_FOR_reloadsi_PIC_addr);

  /* Either scratch or no register needed.  */
  return NO_REGS;
}
/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (rtx target, rtx src,
			  rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  gcc_assert (GET_CODE (src) == PLUS);
  gcc_assert (GET_MODE (src) == Pmode);

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    {
      /* Otherwise, one of the operands cannot be an address register;
	 we reload its value into the scratch register.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
	 in reload.c, it should never happen (at least on s390) that
	 *neither* of the PLUS components, after find_replacements
	 was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	{
	  debug_rtx (src);
	  gcc_unreachable ();
	}

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
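/* Illustrative example (not part of the original source): if reload
   presents the sum (plus (reg %f4) (reg %r7)), the float register can
   be neither base nor index, so its value is first copied into the
   scratch address register and the LA pattern is then emitted for
   (plus (reg scratch) (reg %r7)).  */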
/* Return true if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

static bool
s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
{
  struct s390_address ad;

  if (TARGET_Z10
      && larl_operand (addr, VOIDmode)
      && (mode == VOIDmode
	  || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
    return true;

  if (!s390_decompose_address (addr, &ad))
    return false;

  if (strict)
    {
      if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	return false;

      if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
	return false;
    }
  else
    {
      if (ad.base
	  && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
	return false;

      if (ad.indx
	  && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
	return false;
    }

  return true;
}
/* Return true if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

bool
legitimate_la_operand_p (rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return false;

  return (TARGET_64BIT || addr.pointer);
}
/* Return true if it is valid *and* preferable to use LA to
   compute the sum of OP1 and OP2.  */

bool
preferred_la_operand_p (rtx op1, rtx op2)
{
  struct s390_address addr;

  if (op2 != const0_rtx)
    op1 = gen_rtx_PLUS (Pmode, op1, op2);

  if (!s390_decompose_address (op1, &addr))
    return false;
  if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
    return false;
  if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
    return false;

  /* Avoid LA instructions with index register on z196; it is
     preferable to use regular add instructions when possible.  */
  if (addr.indx && s390_tune == PROCESSOR_2817_Z196)
    return false;

  if (!TARGET_64BIT && !addr.pointer)
    return false;

  if (addr.pointer)
    return true;

  if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
      || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
    return true;

  return false;
}
/* Emit a forced load-address operation to load SRC into DST.
   This will use the LOAD ADDRESS instruction even in situations
   where legitimate_la_operand_p (SRC) returns false.  */

void
s390_load_address (rtx dst, rtx src)
{
  if (TARGET_64BIT)
    emit_move_insn (dst, src);
  else
    emit_insn (gen_force_la_31 (dst, src));
}
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
   reg also appears in the address.  */

rtx
legitimize_pic_address (rtx orig, rtx reg)
{
  rtx addr = orig;
  rtx new_rtx = orig;
  rtx base;

  gcc_assert (!TLS_SYMBOLIC_CONST (addr));

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
    {
      /* This is a local symbol.  */
      if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
	{
	  /* Access local symbols PC-relative via LARL.
	     This is the same as in the non-PIC case, so it is
	     handled automatically ...  */
	}
      else
	{
	  /* Access local symbols relative to the GOT.  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  if (reg != 0)
	    {
	      s390_load_address (reg, new_rtx);
	      new_rtx = reg;
	    }
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
	{
	  /* Assume GOT offset < 4k.  This is handled the same way
	     in both 31- and 64-bit code (@GOT).  */

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else if (TARGET_CPU_ZARCH)
	{
	  /* If the GOT offset might be >= 4k, we determine the position
	     of the GOT entry via a PC-relative LARL (@GOTENT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	  emit_move_insn (temp, new_rtx);

	  new_rtx = gen_const_mem (Pmode, temp);
	  new_rtx = gen_const_mem (GET_MODE (reg), new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else
	{
	  /* If the GOT offset might be >= 4k, we have to load it
	     from the literal pool (@GOT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
    }
  else
    {
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      gcc_assert (XVECLEN (addr, 0) == 1);
	      switch (XINT (addr, 1))
		{
		/* If someone moved a GOT-relative UNSPEC
		   out of the literal pool, force them back in.  */
		case UNSPEC_GOTOFF:
		case UNSPEC_PLTOFF:
		  new_rtx = force_const_mem (Pmode, orig);
		  break;

		/* @GOT is OK as is if small.  */
		case UNSPEC_GOT:
		  if (flag_pic == 2)
		    new_rtx = force_const_mem (Pmode, orig);
		  break;

		/* @GOTENT is OK as is.  */
		case UNSPEC_GOTENT:
		  break;

		/* @PLT is OK as is on 64-bit, must be converted to
		   GOT-relative @PLTOFF on 31-bit.  */
		case UNSPEC_PLT:
		  if (!TARGET_CPU_ZARCH)
		    {
		      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		      if (reload_in_progress || reload_completed)
			df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

		      addr = XVECEXP (addr, 0, 0);
		      addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					     UNSPEC_PLTOFF);
		      addr = gen_rtx_CONST (Pmode, addr);
		      addr = force_const_mem (Pmode, addr);
		      emit_move_insn (temp, addr);

		      new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		      if (reg != 0)
			{
			  s390_load_address (reg, new_rtx);
			  new_rtx = reg;
			}
		    }
		  break;

		/* Everything else cannot happen.  */
		default:
		  gcc_unreachable ();
		}
	    }
	  else
	    gcc_assert (GET_CODE (addr) == PLUS);
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);

	  gcc_assert (!TLS_SYMBOLIC_CONST (op0));
	  gcc_assert (!TLS_SYMBOLIC_CONST (op1));

	  /* Check first to see if this is a constant offset
	     from a local symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      if (TARGET_CPU_ZARCH
		  && larl_operand (op0, VOIDmode)
		  && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
		  && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
		{
		  if (INTVAL (op1) & 1)
		    {
		      /* LARL can't handle odd offsets, so emit a
			 pair of LARL and LA.  */
		      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		      if (!DISP_IN_RANGE (INTVAL (op1)))
			{
			  HOST_WIDE_INT even = INTVAL (op1) - 1;
			  op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
			  op0 = gen_rtx_CONST (Pmode, op0);
			  op1 = const1_rtx;
			}

		      emit_move_insn (temp, op0);
		      new_rtx = gen_rtx_PLUS (Pmode, temp, op1);

		      if (reg != 0)
			{
			  s390_load_address (reg, new_rtx);
			  new_rtx = reg;
			}
		    }
		  else
		    {
		      /* If the offset is even, we can just use LARL.
			 This will happen automatically.  */
		    }
		}
	      else
		{
		  /* Access local symbols relative to the GOT.  */

		  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		  if (reload_in_progress || reload_completed)
		    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

		  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
					 UNSPEC_GOTOFF);
		  addr = gen_rtx_PLUS (Pmode, addr, op1);
		  addr = gen_rtx_CONST (Pmode, addr);
		  addr = force_const_mem (Pmode, addr);
		  emit_move_insn (temp, addr);

		  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		  if (reg != 0)
		    {
		      s390_load_address (reg, new_rtx);
		      new_rtx = reg;
		    }
		}
	    }

	  /* Now, check whether it is a GOT relative symbol plus offset
	     that was pulled out of the literal pool.  Force it back in.  */
	  else if (GET_CODE (op0) == UNSPEC
		   && GET_CODE (op1) == CONST_INT
		   && XINT (op0, 1) == UNSPEC_GOTOFF)
	    {
	      gcc_assert (XVECLEN (op0, 0) == 1);

	      new_rtx = force_const_mem (Pmode, orig);
	    }

	  /* Otherwise, compute the sum.  */
	  else
	    {
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new_rtx = legitimize_pic_address (XEXP (addr, 1),
						base == reg ? NULL_RTX : reg);
	      if (GET_CODE (new_rtx) == CONST_INT)
		new_rtx = plus_constant (Pmode, base, INTVAL (new_rtx));
	      else
		{
		  if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
		      new_rtx = XEXP (new_rtx, 1);
		    }
		  new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
		}

	      if (GET_CODE (new_rtx) == CONST)
		new_rtx = XEXP (new_rtx, 0);
	      new_rtx = force_operand (new_rtx, 0);
	    }
	}
    }
  return new_rtx;
}
/* Load the thread pointer into a register.  */

rtx
s390_get_thread_pointer (void)
{
  rtx tp = gen_reg_rtx (Pmode);

  emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
  mark_reg_pointer (tp, BITS_PER_WORD);

  return tp;
}
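/* Illustrative usage (taken from the TLS expanders below): the final
   thread-local address is formed by adding the TP-relative offset to
   the pseudo returned here, e.g.

       new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);

   so the thread pointer is read out of TP_REGNUM just once per
   address computation.  */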
/* Emit a tls call insn.  The call target is the SYMBOL_REF stored
   in s390_tls_symbol which always refers to __tls_get_offset.
   The returned offset is written to RESULT_REG and an USE rtx is
   generated for TLS_CALL.  */

static GTY(()) rtx s390_tls_symbol;

static void
s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
{
  rtx insn;

  if (!flag_pic)
    emit_insn (s390_load_got ());

  if (!s390_tls_symbol)
    s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");

  insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
			 gen_rtx_REG (Pmode, RETURN_REGNUM));

  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
  RTL_CONST_CALL_P (insn) = 1;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new_rtx, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	base = gen_reg_rtx (Pmode);
	s390_load_address (base, new_rtx);

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, base, temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_INITIAL_EXEC:
	if (flag_pic == 1)
	  {
	    /* Assume GOT offset < 4k.  This is handled the same way
	       in both 31- and 64-bit code.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (TARGET_CPU_ZARCH)
	  {
	    /* If the GOT offset might be >= 4k, we determine the position
	       of the GOT entry via a PC-relative LARL.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_const_mem (Pmode, temp);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (flag_pic)
	  {
	    /* If the GOT offset might be >= 4k, we have to load it
	       from the literal pool.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	    new_rtx = gen_const_mem (Pmode, new_rtx);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }
	else
	  {
	    /* In position-dependent code, load the absolute address of
	       the GOT entry from the literal pool.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = temp;
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_EXEC:
	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      default:
	gcc_unreachable ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (addr, 0), 1))
	{
	case UNSPEC_INDNTPOFF:
	  gcc_assert (TARGET_CPU_ZARCH);
	  new_rtx = addr;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
    {
      new_rtx = XEXP (XEXP (addr, 0), 0);
      if (GET_CODE (new_rtx) != SYMBOL_REF)
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);

      new_rtx = legitimize_tls_address (new_rtx, reg);
      new_rtx = plus_constant (Pmode, new_rtx,
			       INTVAL (XEXP (XEXP (addr, 0), 1)));
      new_rtx = force_operand (new_rtx, 0);
    }

  else
    gcc_unreachable ();  /* for now ... */

  return new_rtx;
}
/* Emit insns making the address in operands[1] valid for a standard
   move to operands[0].  operands[1] is replaced by an address which
   should be used instead of the former RTX to emit the move
   pattern.  */

void
emit_symbolic_move (rtx *operands)
{
  rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);

  if (GET_CODE (operands[0]) == MEM)
    operands[1] = force_reg (Pmode, operands[1]);
  else if (TLS_SYMBOLIC_CONST (operands[1]))
    operands[1] = legitimize_tls_address (operands[1], temp);
  else if (flag_pic)
    operands[1] = legitimize_pic_address (operands[1], temp);
}
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

static rtx
s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx constant_term = const0_rtx;

  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }
  else if (GET_CODE (x) == PLUS
	   && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
	       || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
    {
      return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
	  || (GET_CODE (x) == PLUS
	      && (SYMBOLIC_CONST (XEXP (x, 0))
		  || SYMBOLIC_CONST (XEXP (x, 1)))))
	x = legitimize_pic_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }

  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && !TARGET_LONG_DISPLACEMENT
      && !DISP_IN_RANGE (INTVAL (constant_term))
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val = force_operand (GEN_INT (upper), temp);
      if (val != temp)
	emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
	}
      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
	}
    }

  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);

  return x;
}
/* Try a machine-dependent way of reloading an illegitimate address AD
   operand.  If we find one, push the reload and return the new address.

   MODE is the mode of the enclosing MEM.  OPNUM is the operand number
   and TYPE is the reload type of the current reload.  */

rtx
legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
			   int opnum, int type)
{
  if (!optimize || TARGET_LONG_DISPLACEMENT)
    return NULL_RTX;

  if (GET_CODE (ad) == PLUS)
    {
      rtx tem = simplify_binary_operation (PLUS, Pmode,
					   XEXP (ad, 0), XEXP (ad, 1));
      if (tem)
	ad = tem;
    }

  if (GET_CODE (ad) == PLUS
      && GET_CODE (XEXP (ad, 0)) == REG
      && GET_CODE (XEXP (ad, 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
    {
      HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
      rtx cst, tem, new_rtx;

      cst = GEN_INT (upper);
      if (!legitimate_reload_constant_p (cst))
	cst = force_const_mem (Pmode, cst);

      tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
      new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));

      push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      return new_rtx;
    }

  return NULL_RTX;
}
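/* Illustrative arithmetic (values assumed): for the out-of-range
   displacement 0x12345, lower = 0x12345 & 0xfff = 0x345 and
   upper = 0x12345 ^ 0x345 = 0x12000.  The reload materializes
   base + 0x12000 in a register, leaving the in-range displacement
   0x345 for the actual memory access.  */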
/* Emit code to move LEN bytes from SRC to DST.  */

void
s390_expand_movmem (rtx dst, rtx src, rtx len)
{
  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k do we
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
				   const0_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);

	  /* Issue a write prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
				   const1_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);
	}

      emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
			 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_movmem_short (dst, src,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
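/* Illustrative shape of the emitted loop (assembly sketch only; the
   backward branch is really the compare-and-jump emitted above): for
   a variable length the copy decomposes into 256-byte MVC chunks

       loop: mvc   0(256,%r_dst),0(%r_src)
             la    %r_dst,256(%r_dst)
             la    %r_src,256(%r_src)
             ...decrement block count, branch to loop if nonzero...

   with the remainder handled by the trailing movmem_short, which is
   typically implemented via EXECUTE of a variable-length MVC.  */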
/* Emit code to set LEN bytes at DST to VAL.
   Make use of clrmem if VAL is zero.  */

void
s390_expand_setmem (rtx dst, rtx len, rtx val)
{
  if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
    return;

  gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);

  if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
    {
      if (val == const0_rtx && INTVAL (len) <= 256)
	emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
      else
	{
	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  if (INTVAL (len) > 1)
	    {
	      /* Initiate 1 byte overlap move.
		 The first byte of DST is propagated through DSTP1.
		 Prepare a movmem for:  DST+1 = DST (length = LEN - 1).
		 DST is set to size 1 so the rest of the memory location
		 does not count as source operand.  */
	      rtx dstp1 = adjust_address (dst, VOIDmode, 1);
	      set_mem_size (dst, 1);

	      emit_insn (gen_movmem_short (dstp1, dst,
					   GEN_INT (INTVAL (len) - 2)));
	    }
	}
    }

  else if (TARGET_MVCLE)
    {
      val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
      emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
    }

  else
    {
      rtx dst_addr, count, blocks, temp, dstp1 = NULL_RTX;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);

      if (val == const0_rtx)
	temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			     OPTAB_DIRECT);
      else
	{
	  dstp1 = adjust_address (dst, VOIDmode, 1);
	  set_mem_size (dst, 1);

	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  /* If count is 1 we are done.  */
	  emit_cmp_and_jump_insns (count, const1_rtx,
				   EQ, NULL_RTX, mode, 1, end_label);

	  temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1,
			       OPTAB_DIRECT);
	}
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
	{
	  /* Issue a write prefetch for the +4 cache line.  */
	  rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
						     GEN_INT (1024)),
				       const1_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
      else
	emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
      else
	emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
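/* Illustrative example (not part of the original source): the 1-byte
   overlap trick stores VAL once and then copies DST -> DST+1 with
   length LEN - 1.  Because MVC moves bytes strictly left to right,
   the stored byte propagates through the destination, e.g. setting
   five bytes to 0x42 costs one byte store plus one 4-byte
   overlapping copy.  */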
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET.  */

void
s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
{
  rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
  rtx tmp;

  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k do we
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return;

  /* As the result of CMPINT is inverted compared to what we need,
     we have to swap the operands.  */
  tmp = op0; op0 = op1; op1 = tmp;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	{
	  emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
	  emit_insn (gen_cmpint (target, ccreg));
	}
      else
	emit_move_insn (target, const0_rtx);
    }
  else if (TARGET_MVCLE)
    {
      emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
      emit_insn (gen_cmpint (target, ccreg));
    }
  else
    {
      rtx addr0, addr1, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +2 cache line of operand 1.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;

	  /* Issue a read prefetch for the +2 cache line of operand 2.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
      temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
			gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
			 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
			 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_cmpmem_short (op0, op1,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);

      emit_insn (gen_cmpint (target, ccreg));
    }
}
/* Expand conditional increment or decrement using alc/slb instructions.
   Should generate code setting DST to either SRC or SRC + INCREMENT,
   depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
   Returns true if successful, false otherwise.

   That makes it possible to implement some if-constructs without jumps e.g.:
   (borrow = CC0 | CC1 and carry = CC2 | CC3)
   unsigned int a, b, c;
   if (a < b)  c++; -> CCU  b > a  -> CC2;    c += carry;
   if (a < b)  c--; -> CCL3 a - b  -> borrow; c -= borrow;
   if (a <= b) c++; -> CCL3 b - a  -> borrow; c += carry;
   if (a <= b) c--; -> CCU  a <= b -> borrow; c -= borrow;

   Checks for EQ and NE with a nonzero value need an additional xor e.g.:
   if (a == b) c++; -> CCL3 a ^= b; 0 - a  -> borrow;    c += carry;
   if (a == b) c--; -> CCU  a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
   if (a != b) c++; -> CCU  a ^= b; a > 0  -> CC2;       c += carry;
   if (a != b) c--; -> CCL3 a ^= b; 0 - a  -> borrow;    c -= borrow; */

bool
s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
		   rtx dst, rtx src, rtx increment)
{
  enum machine_mode cmp_mode;
  enum machine_mode cc_mode;
  rtx op_res;
  rtx insn;
  rtvec p;
  int ret;

  if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
      && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = SImode;
  else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
	   && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = DImode;
  else
    return false;

  /* Try ADD LOGICAL WITH CARRY.  */
  if (increment == const1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == LTU || cmp_code == LEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	case GEU:
	  cc_mode = CCL3mode;
	  break;

	case GTU:
	  cc_mode = CCUmode;
	  break;

	default:
	  return false;
	}

      /* Emit comparison instruction pattern.  */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit ALC instruction pattern.  */
      op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
			       gen_rtx_REG (cc_mode, CC_REGNUM),
			       const0_rtx);

      if (src != const0_rtx)
	{
	  if (!register_operand (src, GET_MODE (dst)))
	    src = force_reg (GET_MODE (dst), src);

	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
	}

      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  /* Try SUBTRACT LOGICAL WITH BORROW.  */
  if (increment == constm1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == GTU || cmp_code == GEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	case LEU:
	  cc_mode = CCUmode;
	  break;

	case LTU:
	  cc_mode = CCL3mode;
	  break;

	default:
	  return false;
	}

      /* Emit comparison instruction pattern.  */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit SLB instruction pattern.  */
      if (!register_operand (src, GET_MODE (dst)))
	src = force_reg (GET_MODE (dst), src);

      op_res = gen_rtx_MINUS (GET_MODE (dst),
			      gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
			      gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
					      gen_rtx_REG (cc_mode, CC_REGNUM),
					      const0_rtx));

      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  return false;
}
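/* Illustrative sketch (mnemonics and register choice invented): for

       unsigned int a, b, c;
       if (a < b) c++;

   the expander emits a CCUmode compare of b against a followed by an
   add-logical-with-carry of zero, roughly

       clr   %r_b,%r_a         # b > a sets the carry-like CC2
       alcr  %r_c,%r_zero      # c = c + 0 + carry

   so the conditional increment needs no branch at all.  */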
/* Expand code for the insv template. Return true if successful.  */

bool
s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
{
  int bitsize = INTVAL (op1);
  int bitpos = INTVAL (op2);
  enum machine_mode mode = GET_MODE (dest);
  enum machine_mode smode;
  int smode_bsize, mode_bsize;
  rtx op, clobber;

  /* Generate INSERT IMMEDIATE (IILL et al).  */
  /* (set (ze (reg)) (const_int)).  */
  if (TARGET_ZARCH
      && register_operand (dest, word_mode)
      && (bitpos % 16) == 0
      && (bitsize % 16) == 0
      && const_int_operand (src, VOIDmode))
    {
      HOST_WIDE_INT val = INTVAL (src);
      int regpos = bitpos + bitsize;

      while (regpos > bitpos)
	{
	  enum machine_mode putmode;
	  int putsize;

	  if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
	    putmode = SImode;
	  else
	    putmode = HImode;

	  putsize = GET_MODE_BITSIZE (putmode);
	  regpos -= putsize;
	  emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						GEN_INT (putsize),
						GEN_INT (regpos)),
			  gen_int_mode (val, putmode));
	  val >>= putsize;
	}
      gcc_assert (regpos == bitpos);
      return true;
    }

  smode = smallest_mode_for_size (bitsize, MODE_INT);
  smode_bsize = GET_MODE_BITSIZE (smode);
  mode_bsize = GET_MODE_BITSIZE (mode);

  /* Generate STORE CHARACTERS UNDER MASK (STCM et al).  */
  if (bitpos == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && MEM_P (dest)
      && (register_operand (src, word_mode)
	  || const_int_operand (src, VOIDmode)))
    {
      /* Emit standard pattern if possible.  */
      if (smode_bsize == bitsize)
	{
	  emit_move_insn (adjust_address (dest, smode, 0),
			  gen_lowpart (smode, src));
	  return true;
	}

      /* (set (ze (mem)) (const_int)).  */
      else if (const_int_operand (src, VOIDmode))
	{
	  int size = bitsize / BITS_PER_UNIT;
	  rtx src_mem = adjust_address (force_const_mem (word_mode, src),
					BLKmode,
					UNITS_PER_WORD - size);

	  dest = adjust_address (dest, BLKmode, 0);
	  set_mem_size (dest, size);
	  s390_expand_movmem (dest, src_mem, GEN_INT (size));
	  return true;
	}

      /* (set (ze (mem)) (reg)).  */
      else if (register_operand (src, word_mode))
	{
	  if (bitsize <= 32)
	    emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
						  const0_rtx), src);
	  else
	    {
	      /* Emit st,stcmh sequence.  */
	      int stcmh_width = bitsize - 32;
	      int size = stcmh_width / BITS_PER_UNIT;

	      emit_move_insn (adjust_address (dest, SImode, size),
			      gen_lowpart (SImode, src));
	      set_mem_size (dest, size);
	      emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						    GEN_INT (stcmh_width),
						    const0_rtx),
			      gen_rtx_LSHIFTRT (word_mode, src, GEN_INT (32)));
	    }
	  return true;
	}
    }

  /* Generate INSERT CHARACTERS UNDER MASK (IC, ICM et al).  */
  if ((bitpos % BITS_PER_UNIT) == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && (bitpos & 32) == ((bitpos + bitsize - 1) & 32)
      && MEM_P (src)
      && (mode == DImode || mode == SImode)
      && register_operand (dest, mode))
    {
      /* Emit a strict_low_part pattern if possible.  */
      if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
	{
	  op = gen_rtx_STRICT_LOW_PART (VOIDmode, gen_lowpart (smode, dest));
	  op = gen_rtx_SET (VOIDmode, op, gen_lowpart (smode, src));
	  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
	  return true;
	}

      /* ??? There are more powerful versions of ICM that are not
	 completely represented in the md file.  */
    }

  /* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al).  */
  if (TARGET_Z10 && (mode == DImode || mode == SImode))
    {
      enum machine_mode mode_s = GET_MODE (src);

      if (mode_s == VOIDmode)
	{
	  /* Assume const_int etc already in the proper mode.  */
	  src = force_reg (mode, src);
	}
      else if (mode_s != mode)
	{
	  gcc_assert (GET_MODE_BITSIZE (mode_s) >= bitsize);
	  src = force_reg (mode_s, src);
	  src = gen_lowpart (mode, src);
	}

      op = gen_rtx_ZERO_EXTRACT (mode, dest, op1, op2);
      op = gen_rtx_SET (VOIDmode, op, src);
      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));

      return true;
    }

  return false;
}
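
/* Worked example for the INSERT IMMEDIATE path above, with
   hypothetical operands: bitpos = 0, bitsize = 32, src = 0x12345678.
   With TARGET_EXTIMM, regpos starts at 32; since 32 % 32 == 0 and
   32 >= bitpos + 32, one SImode insert covers the whole field.
   Without TARGET_EXTIMM, two HImode inserts are emitted instead:
   first the low halfword 0x5678, then, after val >>= 16, the
   halfword 0x1234, matching the IILL/IILH instruction family.  */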
/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
   register that holds VAL of mode MODE shifted by COUNT bits.  */

static inline rtx
s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
{
  val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
			     NULL_RTX, 1, OPTAB_DIRECT);
  return expand_simple_binop (SImode, ASHIFT, val, count,
			      NULL_RTX, 1, OPTAB_DIRECT);
}
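
/* Worked example, with hypothetical values: for mode == QImode,
   GET_MODE_MASK (mode) is 0xff, so val = 0x1a5 is first reduced to
   0xa5; with count == 16 the returned register then holds
   0xa5 << 16 == 0x00a50000, i.e. VAL placed at the bit offset the
   caller computed within the surrounding SImode word.  */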
/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode.  */

struct alignment_context
{
  rtx memsi;	  /* SI aligned memory location.  */
  rtx shift;	  /* Bit offset with regard to lsb.  */
  rtx modemask;	  /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
  bool aligned;	  /* True if memory is aligned, false else.  */
};
/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
   structure AC for transparent simplifying, if the memory alignment is known
   to be at least 32bit.  MEM is the memory location for the actual operation
   and MODE its mode.  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem,
			enum machine_mode mode)
{
  ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
  ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (ac->aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx byteoffset, addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
				   GEN_INT (-GET_MODE_SIZE (SImode)),
				   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      /* Calculate shiftcount.  */
      byteoffset = expand_simple_binop (Pmode, AND, addr,
					GEN_INT (GET_MODE_SIZE (SImode) - 1),
					NULL_RTX, 1, OPTAB_DIRECT);
      /* As we already have some offset, evaluate the remaining distance.  */
      ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
				       NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Shift is the byte count, but we need the bitcount.  */
  ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift, GEN_INT (3),
				   NULL_RTX, 1, OPTAB_DIRECT);

  /* Calculate masks.  */
  ac->modemask = expand_simple_binop (SImode, ASHIFT,
				      GEN_INT (GET_MODE_MASK (mode)),
				      ac->shift, NULL_RTX, 1, OPTAB_DIRECT);
  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask,
				      NULL_RTX, 1);
}
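
/* Worked example, with a hypothetical address: for an HImode MEM at
   address A = 0x1002 and unknown alignment, memsi addresses the word
   at A & -4 = 0x1000, byteoffset = A & 3 = 2, and shift becomes
   ((4 - 2) - 2) * 8 = 0 bits; for A = 0x1001 it would be
   ((4 - 2) - 1) * 8 = 8 bits.  The mode mask shifted by this amount
   then selects exactly the bytes of the HImode field inside the
   aligned SImode word.  */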
/* A subroutine of s390_expand_cs_hqi.  Insert INS into VAL.  If possible,
   use a single insv insn into SEQ2.  Otherwise, put prep insns in SEQ1 and
   perform the merge in SEQ2.  */

static rtx
s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
		    enum machine_mode mode, rtx val, rtx ins)
{
  rtx tmp;

  if (ac->aligned)
    {
      start_sequence ();
      tmp = copy_to_mode_reg (SImode, val);
      if (s390_expand_insv (tmp, GEN_INT (GET_MODE_BITSIZE (mode)),
			    const0_rtx, ins))
	{
	  *seq1 = NULL;
	  *seq2 = get_insns ();
	  end_sequence ();
	  return tmp;
	}
      end_sequence ();
    }

  /* Failed to use insv.  Generate a two part shift and mask.  */
  start_sequence ();
  tmp = s390_expand_mask_and_shift (ins, mode, ac->shift);
  *seq1 = get_insns ();
  end_sequence ();

  start_sequence ();
  tmp = expand_simple_binop (SImode, IOR, tmp, val, NULL_RTX, 1, OPTAB_DIRECT);
  *seq2 = get_insns ();
  end_sequence ();

  return tmp;
}
/* Expand an atomic compare and swap operation for HImode and QImode.  MEM is
   the memory location, CMP the old value to compare MEM with and NEW_RTX the
   value to set if CMP == MEM.  */

void
s390_expand_cs_hqi (enum machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
		    rtx cmp, rtx new_rtx, bool is_weak)
{
  struct alignment_context ac;
  rtx cmpv, newv, val, cc, seq0, seq1, seq2, seq3;
  rtx res = gen_reg_rtx (SImode);
  rtx csloop = NULL, csend = NULL;

  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Load full word.  Subsequent loads are performed by CS.  */
  val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
			     NULL_RTX, 1, OPTAB_DIRECT);

  /* Prepare insertions of cmp and new_rtx into the loaded value.  When
     possible, we try to use insv to make this happen efficiently.  If
     that fails we'll generate code both inside and outside the loop.  */
  cmpv = s390_two_part_insv (&ac, &seq0, &seq2, mode, val, cmp);
  newv = s390_two_part_insv (&ac, &seq1, &seq3, mode, val, new_rtx);

  if (seq0)
    emit_insn (seq0);
  if (seq1)
    emit_insn (seq1);

  /* Start CS loop.  */
  if (!is_weak)
    {
      /* Begin assuming success.  */
      emit_move_insn (btarget, const1_rtx);

      csloop = gen_label_rtx ();
      csend = gen_label_rtx ();
      emit_label (csloop);
    }

  /* val = "<mem>00..0<mem>"
   * cmp = "00..0<cmp>00..0"
   * new = "00..0<new>00..0"
   */

  emit_insn (seq2);
  emit_insn (seq3);

  cc = s390_emit_compare_and_swap (EQ, res, ac.memsi, cmpv, newv);
  if (is_weak)
    emit_insn (gen_cstorecc4 (btarget, cc, XEXP (cc, 0), XEXP (cc, 1)));
  else
    {
      rtx tmp;

      /* Jump to end if we're done (likely?).  */
      s390_emit_jump (csend, cc);

      /* Check for changes outside mode, and loop internal if so.
	 Arrange the moves so that the compare is adjacent to the
	 branch so that we can generate CRJ.  */
      tmp = copy_to_reg (val);
      force_expand_binop (SImode, and_optab, res, ac.modemaski, val,
			  1, OPTAB_DIRECT);
      cc = s390_emit_compare (NE, val, tmp);
      s390_emit_jump (csloop, cc);

      /* Failed.  */
      emit_move_insn (btarget, const0_rtx);
      emit_label (csend);
    }

  /* Return the correct part of the bitfield.  */
  convert_move (vtarget, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
					      NULL_RTX, 1, OPTAB_DIRECT), 1);
}
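
/* Note on the strong (!is_weak) loop above: the CS instruction
   compares the full SImode word, so it can fail even though the
   HImode/QImode field itself still holds CMP, e.g. when another CPU
   updates a neighbouring byte within the same word.  The second
   compare-and-jump therefore retries internally when only bits
   outside the field changed, and reports failure through BTARGET
   only when the field itself differed.  */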
/* Expand an atomic operation CODE of mode MODE.  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value MEM
   holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  */

void
s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
		    rtx target, rtx mem, rtx val, bool after)
{
  struct alignment_context ac;
  rtx cmp;
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = gen_reg_rtx (SImode);
  rtx csloop = gen_label_rtx ();

  gcc_assert (!target || register_operand (target, VOIDmode));
  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Shift val to the correct bit positions.
     Preserve "icm", but prevent "ex icm".  */
  if (!(ac.aligned && code == SET && MEM_P (val)))
    val = s390_expand_mask_and_shift (val, mode, ac.shift);

  /* Further preparation insns.  */
  if (code == PLUS || code == MINUS)
    emit_move_insn (orig, val);
  else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
    val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
			       NULL_RTX, 1, OPTAB_DIRECT);

  /* Load full word.  Subsequent loads are performed by CS.  */
  cmp = force_reg (SImode, ac.memsi);

  /* Start CS loop.  */
  emit_label (csloop);
  emit_move_insn (new_rtx, cmp);

  /* Patch new with val at correct position.  */
  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, new_rtx, orig,
				 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
				 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      if (ac.aligned && MEM_P (val))
	store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0,
			 0, 0, SImode, val);
      else
	{
	  new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
					 NULL_RTX, 1, OPTAB_DIRECT);
	  new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
					 NULL_RTX, 1, OPTAB_DIRECT);
	}
      break;
    case AND:
    case IOR:
    case XOR:
      new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
				     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    case MULT: /* NAND */
      new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
				     NULL_RTX, 1, OPTAB_DIRECT);
      new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
				     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    default:
      gcc_unreachable ();
    }

  s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
						      ac.memsi, cmp, new_rtx));

  /* Return the correct part of the bitfield.  */
  if (target)
    convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
					       after ? new_rtx : cmp, ac.shift,
					       NULL_RTX, 1, OPTAB_DIRECT), 1);
}
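
/* Worked example, with hypothetical values: a fetch-and-AND on a
   QImode field relies on the XOR with ac.modemaski above so that all
   bits outside the field become 1 in VAL.  With modemask 0x0000ff00
   and an operand byte of 0x0f, VAL becomes 0xffff0fff, and the single
   SImode AND inside the CS loop then clears bits only inside the
   field while the surrounding bytes of the word pass through
   unchanged.  */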
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;

static void
s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs ("\t.quad\t", file);
      break;
    default:
      gcc_unreachable ();
    }
  output_addr_const (file, x);
  fputs ("@DTPOFF", file);
}
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
/* Implement TARGET_MANGLE_TYPE.  */

static const char *
s390_mangle_type (const_tree type)
{
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_LONG_DOUBLE_128)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
#endif
/* In the name of slightly smaller debug output, and to cater to
   general assembler lossage, recognize various UNSPEC sequences
   and turn them back into a direct symbol reference.  */

static rtx
s390_delegitimize_address (rtx orig_x)
{
  rtx x, y;

  orig_x = delegitimize_mem_from_attrs (orig_x);
  x = orig_x;

  /* Extract the symbol ref from:
     (plus:SI (reg:SI 12 %r12)
	      (const:SI (unspec:SI [(symbol_ref/f:SI ("*.LC0"))]
				    UNSPEC_GOTOFF/PLTOFF)))
     or
     (plus:SI (reg:SI 12 %r12)
	      (const:SI (plus:SI (unspec:SI [(symbol_ref:SI ("L"))]
					     UNSPEC_GOTOFF/PLTOFF)
				 (const_int 4 [0x4]))))  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      HOST_WIDE_INT offset = 0;

      /* The const operand.  */
      y = XEXP (XEXP (x, 1), 0);

      if (GET_CODE (y) == PLUS
	  && GET_CODE (XEXP (y, 1)) == CONST_INT)
	{
	  offset = INTVAL (XEXP (y, 1));
	  y = XEXP (y, 0);
	}

      if (GET_CODE (y) == UNSPEC
	  && (XINT (y, 1) == UNSPEC_GOTOFF
	      || XINT (y, 1) == UNSPEC_PLTOFF))
	return plus_constant (Pmode, XVECEXP (y, 0, 0), offset);
    }

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    {
      y = XEXP (XEXP (x, 1), 0);
      if (GET_CODE (y) == UNSPEC
	  && XINT (y, 1) == UNSPEC_GOT)
	y = XVECEXP (y, 0, 0);
      else
	return orig_x;
    }
  else if (GET_CODE (x) == CONST)
    {
      /* Extract the symbol ref from:
	 (mem:QI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))]
				       UNSPEC_PLT/GOTENT)))  */

      y = XEXP (x, 0);
      if (GET_CODE (y) == UNSPEC
	  && (XINT (y, 1) == UNSPEC_GOTENT
	      || XINT (y, 1) == UNSPEC_PLT))
	y = XVECEXP (y, 0, 0);
      else
	return orig_x;
    }
  else
    return orig_x;

  if (GET_MODE (orig_x) != Pmode)
    {
      if (GET_MODE (orig_x) == BLKmode)
	return orig_x;
      y = lowpart_subreg (GET_MODE (orig_x), y, Pmode);
      if (y == NULL_RTX)
	return orig_x;
    }
  return y;
}
/* Output operand OP to stdio stream FILE.
   OP is an address (register + offset) which is not used to address data;
   instead the rightmost bits are interpreted as the value.  */

static void
print_shift_count_operand (FILE *file, rtx op)
{
  HOST_WIDE_INT offset;
  rtx base;

  /* Extract base register and offset.  */
  if (!s390_decompose_shift_count (op, &base, &offset))
    gcc_unreachable ();

  /* Sanity check.  */
  if (base)
    {
      gcc_assert (GET_CODE (base) == REG);
      gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
    }

  /* Offsets are constricted to twelve bits.  */
  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
  if (base)
    fprintf (file, "(%s)", reg_names[REGNO (base)]);
}
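
/* Illustrative output, with hypothetical operands: for
   (plus:SI (reg %r3) (const_int 70)) this prints "70(%r3)", and for
   a plain (const_int 4103) it prints "7", since only the low twelve
   bits of the offset are significant as a shift count.  */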
/* See 'get_some_local_dynamic_name'.  */

static int
get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      x = get_pool_constant (x);
      return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
    }

  if (GET_CODE (x) == SYMBOL_REF
      && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
    {
      cfun->machine->some_ld_name = XSTR (x, 0);
      return 1;
    }

  return 0;
}

/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in local-dynamic base patterns.  */

static const char *
get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  gcc_unreachable ();
}
/* Output machine-dependent UNSPECs occurring in address constant X
   in assembler syntax to stdio stream FILE.  Returns true if the
   constant X could be recognized, false otherwise.  */

static bool
s390_output_addr_const_extra (FILE *file, rtx x)
{
  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
    switch (XINT (x, 1))
      {
      case UNSPEC_GOTENT:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@GOTENT");
	return true;
      case UNSPEC_GOT:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@GOT");
	return true;
      case UNSPEC_GOTOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@GOTOFF");
	return true;
      case UNSPEC_PLT:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@PLT");
	return true;
      case UNSPEC_PLTOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@PLTOFF");
	return true;
      case UNSPEC_TLSGD:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@TLSGD");
	return true;
      case UNSPEC_TLSLDM:
	assemble_name (file, get_some_local_dynamic_name ());
	fprintf (file, "@TLSLDM");
	return true;
      case UNSPEC_DTPOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@DTPOFF");
	return true;
      case UNSPEC_NTPOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@NTPOFF");
	return true;
      case UNSPEC_GOTNTPOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@GOTNTPOFF");
	return true;
      case UNSPEC_INDNTPOFF:
	output_addr_const (file, XVECEXP (x, 0, 0));
	fprintf (file, "@INDNTPOFF");
	return true;
      }

  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
    switch (XINT (x, 1))
      {
      case UNSPEC_POOL_OFFSET:
	x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
	output_addr_const (file, x);
	return true;
      }

  return false;
}
/* Output address operand ADDR in assembler syntax to
   stdio stream FILE.  */

void
print_operand_address (FILE *file, rtx addr)
{
  struct s390_address ad;

  if (s390_loadrelative_operand_p (addr))
    {
      if (!TARGET_Z10)
	{
	  output_operand_lossage ("symbolic memory references are "
				  "only supported on z10 or later");
	  return;
	}
      output_addr_const (file, addr);
      return;
    }

  if (!s390_decompose_address (addr, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    output_operand_lossage ("cannot decompose address");

  if (ad.disp)
    output_addr_const (file, ad.disp);
  else
    fprintf (file, "0");

  if (ad.base && ad.indx)
    fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
			      reg_names[REGNO (ad.base)]);
  else if (ad.base)
    fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
}
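
/* Illustrative output, with hypothetical operands: a
   base+displacement address prints as "96(%r15)", a
   base+index+displacement address as "8(%r1,%r2)" with the index
   register first, and a zero displacement is printed explicitly,
   e.g. "0(%r15)".  */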
/* Output operand X in assembler syntax to stdio stream FILE.
   CODE specified the format flag.  The following format flags
   are recognized:

   'C': print opcode suffix for branch condition.
   'D': print opcode suffix for inverse branch condition.
   'E': print opcode suffix for branch on index instruction.
   'J': print tls_load/tls_gdcall/tls_ldcall suffix
   'G': print the size of the operand in bytes.
   'O': print only the displacement of a memory reference.
   'R': print only the base register of a memory reference.
   'S': print S-type memory reference (base+displacement).
   'N': print the second word of a DImode operand.
   'M': print the second word of a TImode operand.
   'Y': print shift count operand.

   'b': print integer X as if it's an unsigned byte.
   'c': print integer X as if it's a signed byte.
   'x': print integer X as if it's an unsigned halfword.
   'h': print integer X as if it's a signed halfword.
   'i': print the first nonzero HImode part of X.
   'j': print the first HImode part unequal to -1 of X.
   'k': print the first nonzero SImode part of X.
   'm': print the first SImode part unequal to -1 of X.
   'o': print integer X as if it's an unsigned 32bit word.  */

void
print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'C':
      fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
      return;

    case 'D':
      fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
      return;

    case 'E':
      if (GET_CODE (x) == LE)
	fprintf (file, "l");
      else if (GET_CODE (x) == GT)
	fprintf (file, "h");
      else
	output_operand_lossage ("invalid comparison operator "
				"for 'E' output modifier");
      return;

    case 'J':
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  fprintf (file, "%s", ":tls_load:");
	  output_addr_const (file, x);
	}
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
	{
	  fprintf (file, "%s", ":tls_gdcall:");
	  output_addr_const (file, XVECEXP (x, 0, 0));
	}
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
	{
	  fprintf (file, "%s", ":tls_ldcall:");
	  assemble_name (file, get_some_local_dynamic_name ());
	}
      else
	output_operand_lossage ("invalid reference for 'J' output modifier");
      return;

    case 'G':
      fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
      return;

    case 'O':
      {
	struct s390_address ad;
	int ret;

	if (!MEM_P (x))
	  {
	    output_operand_lossage ("memory reference expected for "
				    "'O' output modifier");
	    return;
	  }

	ret = s390_decompose_address (XEXP (x, 0), &ad);

	if (!ret
	    || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	    || ad.indx)
	  {
	    output_operand_lossage ("invalid address for 'O' output modifier");
	    return;
	  }

	if (ad.disp)
	  output_addr_const (file, ad.disp);
	else
	  fprintf (file, "0");
      }
      return;

    case 'R':
      {
	struct s390_address ad;
	int ret;

	if (!MEM_P (x))
	  {
	    output_operand_lossage ("memory reference expected for "
				    "'R' output modifier");
	    return;
	  }

	ret = s390_decompose_address (XEXP (x, 0), &ad);

	if (!ret
	    || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	    || ad.indx)
	  {
	    output_operand_lossage ("invalid address for 'R' output modifier");
	    return;
	  }

	if (ad.base)
	  fprintf (file, "%s", reg_names[REGNO (ad.base)]);
	else
	  fprintf (file, "0");
      }
      return;

    case 'S':
      {
	struct s390_address ad;
	int ret;

	if (!MEM_P (x))
	  {
	    output_operand_lossage ("memory reference expected for "
				    "'S' output modifier");
	    return;
	  }
	ret = s390_decompose_address (XEXP (x, 0), &ad);

	if (!ret
	    || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	    || ad.indx)
	  {
	    output_operand_lossage ("invalid address for 'S' output modifier");
	    return;
	  }

	if (ad.disp)
	  output_addr_const (file, ad.disp);
	else
	  fprintf (file, "0");

	if (ad.base)
	  fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
      }
      return;

    case 'N':
      if (GET_CODE (x) == REG)
	x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
	x = change_address (x, VOIDmode,
			    plus_constant (Pmode, XEXP (x, 0), 4));
      else
	output_operand_lossage ("register or memory expression expected "
				"for 'N' output modifier");
      break;

    case 'M':
      if (GET_CODE (x) == REG)
	x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
	x = change_address (x, VOIDmode,
			    plus_constant (Pmode, XEXP (x, 0), 8));
      else
	output_operand_lossage ("register or memory expression expected "
				"for 'M' output modifier");
      break;

    case 'Y':
      print_shift_count_operand (file, x);
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fprintf (file, "%s", reg_names[REGNO (x)]);
      break;

    case MEM:
      output_address (XEXP (x, 0));
      break;

    case CONST:
    case CODE_LABEL:
    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    case CONST_INT:
      if (code == 'b')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
      else if (code == 'c')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
      else if (code == 'x')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
      else if (code == 'h')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
      else if (code == 'i')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 s390_extract_part (x, HImode, 0));
      else if (code == 'j')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 s390_extract_part (x, HImode, -1));
      else if (code == 'k')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 s390_extract_part (x, SImode, 0));
      else if (code == 'm')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 s390_extract_part (x, SImode, -1));
      else if (code == 'o')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST_DOUBLE:
      gcc_assert (GET_MODE (x) == VOIDmode);
      if (code == 'b')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
      else if (code == 'x')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
      else if (code == 'h')
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	{
	  if (code == 0)
	    output_operand_lossage ("invalid constant - try using "
				    "an output modifier");
	  else
	    output_operand_lossage ("invalid constant for output modifier '%c'",
				    code);
	}
      break;

    default:
      if (code == 0)
	output_operand_lossage ("invalid expression - try using "
				"an output modifier");
      else
	output_operand_lossage ("invalid expression for output "
				"modifier '%c'", code);
      break;
    }
}
/* Target hook for assembling integer objects.  We need to define it
   here to work around a bug in some versions of GAS, which couldn't
   handle values smaller than INT_MIN when printed in decimal.  */

static bool
s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == 8 && aligned_p
      && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
    {
      fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
	       INTVAL (x));
      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
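
/* Illustrative output, with a hypothetical value: for the 64-bit
   constant -3000000000, which is smaller than INT_MIN, this emits
   "\t.quad\t0xffffffff4d2fa200" rather than the decimal form that
   triggers the GAS bug.  */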
/* Returns true if register REGNO is used for forming
   a memory address in expression X.  */

static bool
reg_used_in_mem_p (int regno, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == MEM)
    {
      if (refers_to_regno_p (regno, regno+1,
			     XEXP (x, 0), 0))
	return true;
    }
  else if (code == SET
	   && GET_CODE (SET_DEST (x)) == PC)
    {
      if (refers_to_regno_p (regno, regno+1,
			     SET_SRC (x), 0))
	return true;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
	  && reg_used_in_mem_p (regno, XEXP (x, i)))
	return true;

      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
	    return true;
    }
  return false;
}
/* Returns true if expression DEP_RTX sets an address register
   used by instruction INSN to address memory.  */

static bool
addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
  rtx target, pat;

  if (GET_CODE (dep_rtx) == INSN)
    dep_rtx = PATTERN (dep_rtx);

  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);
      if (GET_CODE (target) == STRICT_LOW_PART)
	target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
	{
	  int regno = REGNO (target);

	  if (s390_safe_attr_type (insn) == TYPE_LA)
	    {
	      pat = PATTERN (insn);
	      if (GET_CODE (pat) == PARALLEL)
		{
		  gcc_assert (XVECLEN (pat, 0) == 2);
		  pat = XVECEXP (pat, 0, 0);
		}
	      gcc_assert (GET_CODE (pat) == SET);
	      return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
	    }
	  else if (get_attr_atype (insn) == ATYPE_AGEN)
	    return reg_used_in_mem_p (regno, PATTERN (insn));
	}
    }
  return false;
}
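
/* Illustrative case, with hypothetical registers: for the sequence

     l  %r1,0(%r2)     <- dep_insn sets r1
     l  %r3,0(%r1)     <- insn uses r1 to form its address

   the second load needs r1 in its address-generation stage, so this
   function reports a dependency and the scheduler can insert distance
   between the two instructions.  */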
/* Return 1, if dep_insn sets register used in insn in the agen unit.  */

int
s390_agen_dep_p (rtx dep_insn, rtx insn)
{
  rtx dep_rtx = PATTERN (dep_insn);
  int i;

  if (GET_CODE (dep_rtx) == SET
      && addr_generation_dependency_p (dep_rtx, insn))
    return 1;
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
	{
	  if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
	    return 1;
	}
    }
  return 0;
}
/* A C statement (sans semicolon) to update the integer scheduling priority
   INSN_PRIORITY (INSN).  Increase the priority to execute the INSN earlier,
   reduce the priority to execute INSN later.  Do not define this macro if
   you do not need to adjust the scheduling priorities of insns.

   A STD instruction should be scheduled earlier,
   in order to use the bypass.  */
static int
s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  if (! INSN_P (insn))
    return priority;

  if (s390_tune != PROCESSOR_2084_Z990
      && s390_tune != PROCESSOR_2094_Z9_109
      && s390_tune != PROCESSOR_2097_Z10
      && s390_tune != PROCESSOR_2817_Z196)
    return priority;

  switch (s390_safe_attr_type (insn))
    {
      case TYPE_FSTOREDF:
      case TYPE_FSTORESF:
	priority = priority << 3;
	break;
      case TYPE_STORE:
      case TYPE_STM:
	priority = priority << 1;
	break;
      default:
	break;
    }
  return priority;
}
/* The number of instructions that can be issued per cycle.  */

static int
s390_issue_rate (void)
{
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
    case PROCESSOR_2094_Z9_109:
    case PROCESSOR_2817_Z196:
      return 3;
    case PROCESSOR_2097_Z10:
      return 2;
    default:
      return 1;
    }
}

static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return 4;
}
/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
   Fix up MEMs as required.  */

static void
annotate_constant_pool_refs (rtx *x)
{
  int i, j;
  const char *fmt;

  gcc_assert (GET_CODE (*x) != SYMBOL_REF
	      || !CONSTANT_POOL_ADDRESS_P (*x));

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      if (GET_CODE (memref) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (memref))
	{
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
				     UNSPEC_LTREF);

	  *x = replace_equiv_address (*x, addr);
	  return;
	}

      if (GET_CODE (memref) == CONST
	  && GET_CODE (XEXP (memref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
	  rtx sym = XEXP (XEXP (memref, 0), 0);
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
				     UNSPEC_LTREF);

	  *x = replace_equiv_address (*x, plus_constant (Pmode, addr, off));
	  return;
	}
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      if (GET_CODE (addrref) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (addrref))
	{
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
				     UNSPEC_LTREF);

	  SET_SRC (*x) = addr;
	  return;
	}

      if (GET_CODE (addrref) == CONST
	  && GET_CODE (XEXP (addrref, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
	{
	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
	  rtx sym = XEXP (XEXP (addrref, 0), 0);
	  rtx base = cfun->machine->base_reg;
	  rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
				     UNSPEC_LTREF);

	  SET_SRC (*x) = plus_constant (Pmode, addr, off);
	  return;
	}
    }

  /* Annotate LTREL_BASE as well.  */
  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      rtx base = cfun->machine->base_reg;
      *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
			   UNSPEC_LTREL_BASE);
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  annotate_constant_pool_refs (&XEXP (*x, i));
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (*x, i); j++)
	    annotate_constant_pool_refs (&XVECEXP (*x, i, j));
	}
    }
}
/* Split all branches that exceed the maximum distance.
   Returns true if this created a new literal pool entry.  */

static int
s390_split_branches (void)
{
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  int new_literal = 0, ret;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
	continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
	continue;

      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
	{
	  label = &SET_SRC (pat);
	}
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
	{
	  if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
	    label = &XEXP (SET_SRC (pat), 1);
	  else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
	    label = &XEXP (SET_SRC (pat), 2);
	  else
	    continue;
	}
      else
	continue;

      if (get_attr_length (insn) <= 4)
	continue;

      /* We are going to use the return register as scratch register,
	 make sure it will be saved/restored by the prologue/epilogue.  */
      cfun_frame_layout.save_return_addr_p = 1;

      if (!flag_pic)
	{
	  new_literal = 1;
	  tmp = force_const_mem (Pmode, *label);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);
	  annotate_constant_pool_refs (&PATTERN (tmp));

	  target = temp_reg;
	}
      else
	{
	  new_literal = 1;
	  target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
				   UNSPEC_LTREL_OFFSET);
	  target = gen_rtx_CONST (Pmode, target);
	  target = force_const_mem (Pmode, target);
	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
	  INSN_ADDRESSES_NEW (tmp, -1);
	  annotate_constant_pool_refs (&PATTERN (tmp));

	  target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
						     cfun->machine->base_reg),
				   UNSPEC_LTREL_BASE);
	  target = gen_rtx_PLUS (Pmode, temp_reg, target);
	}

      ret = validate_change (insn, label, target, 0);
      gcc_assert (ret);
    }

  return new_literal;
}
/* Find an annotated literal pool symbol referenced in RTX X,
   and store it at REF.  Will abort if X contains references to
   more than one such pool symbol; multiple references to the same
   symbol are allowed, however.

   The rtx pointed to by REF must be initialized to NULL_RTX
   by the caller before calling this routine.  */

static void
find_constant_pool_ref (rtx x, rtx *ref)
{
  int i, j;
  const char *fmt;

  /* Ignore LTREL_BASE references.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return;
  /* Likewise POOL_ENTRY insns.  */
  if (GET_CODE (x) == UNSPEC_VOLATILE
      && XINT (x, 1) == UNSPECV_POOL_ENTRY)
    return;

  gcc_assert (GET_CODE (x) != SYMBOL_REF
	      || !CONSTANT_POOL_ADDRESS_P (x));

  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
    {
      rtx sym = XVECEXP (x, 0, 0);
      gcc_assert (GET_CODE (sym) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (sym));

      if (*ref == NULL_RTX)
	*ref = sym;
      else
	gcc_assert (*ref == sym);

      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  find_constant_pool_ref (XEXP (x, i), ref);
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    find_constant_pool_ref (XVECEXP (x, i, j), ref);
	}
    }
}
/* Replace every reference to the annotated literal pool
   symbol REF in X by its base plus OFFSET.  */

static void
replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
{
  int i, j;
  const char *fmt;

  gcc_assert (*x != ref);

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREF
      && XVECEXP (*x, 0, 0) == ref)
    {
      *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
      return;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 1)) == CONST_INT
      && GET_CODE (XEXP (*x, 0)) == UNSPEC
      && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
      && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
    {
      rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
      *x = plus_constant (Pmode, addr, INTVAL (XEXP (*x, 1)));
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (*x, i); j++)
	    replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
	}
    }
}
/* Check whether X contains an UNSPEC_LTREL_BASE.
   Return its constant pool symbol if found, NULL_RTX otherwise.  */

static rtx
find_ltrel_base (rtx x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return XVECEXP (x, 0, 0);

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx fnd = find_ltrel_base (XEXP (x, i));
	  if (fnd)
	    return fnd;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
	      if (fnd)
		return fnd;
	    }
	}
    }

  return NULL_RTX;
}
/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base.  */

static void
replace_ltrel_base (rtx *x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      *x = XVECEXP (*x, 0, 1);
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  replace_ltrel_base (&XEXP (*x, i));
	}
      else if (fmt[i] == 'E')
	{
	  for (j = 0; j < XVECLEN (*x, i); j++)
	    replace_ltrel_base (&XVECEXP (*x, i, j));
	}
    }
}
/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */

#define NR_C_MODES 11
enum machine_mode constant_modes[NR_C_MODES] =
{
  TFmode, TImode, TDmode,
  DFmode, DImode, DDmode,
  SFmode, SImode, SDmode,
  HImode,
  QImode
};

struct constant
{
  struct constant *next;
  rtx value;
  rtx label;
};

struct constant_pool
{
  struct constant_pool *next;
  rtx first_insn;
  rtx pool_insn;
  bitmap insns;
  rtx emit_pool_after;

  struct constant *constants[NR_C_MODES];
  struct constant *execute;
  rtx label;
  int size;
};
/* Allocate new constant_pool structure.  */

static struct constant_pool *
s390_alloc_pool (void)
{
  struct constant_pool *pool;
  int i;

  pool = (struct constant_pool *) xmalloc (sizeof *pool);
  pool->next = NULL;
  for (i = 0; i < NR_C_MODES; i++)
    pool->constants[i] = NULL;

  pool->execute = NULL;
  pool->label = gen_label_rtx ();
  pool->first_insn = NULL_RTX;
  pool->pool_insn = NULL_RTX;
  pool->insns = BITMAP_ALLOC (NULL);
  pool->size = 0;
  pool->emit_pool_after = NULL_RTX;

  return pool;
}
/* Create new constant pool covering instructions starting at INSN
   and chain it to the end of POOL_LIST.  */

static struct constant_pool *
s390_start_pool (struct constant_pool **pool_list, rtx insn)
{
  struct constant_pool *pool, **prev;

  pool = s390_alloc_pool ();
  pool->first_insn = insn;

  for (prev = pool_list; *prev; prev = &(*prev)->next)
    ;
  *prev = pool;

  return pool;
}
/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  */

static void
s390_end_pool (struct constant_pool *pool, rtx insn)
{
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}
/* Add INSN to the list of insns covered by POOL.  */

static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}
/* Return pool out of POOL_LIST that covers INSN.  */

static struct constant_pool *
s390_find_pool (struct constant_pool *pool_list, rtx insn)
{
  struct constant_pool *pool;

  for (pool = pool_list; pool; pool = pool->next)
    if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
      break;

  return pool;
}
/* Add constant VAL of mode MODE to the constant pool POOL.  */

static void
s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = val;
      c->label = gen_label_rtx ();
      c->next = pool->constants[i];
      pool->constants[i] = c;
      pool->size += GET_MODE_SIZE (mode);
    }
}
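
/* Usage note: constants are deduplicated per mode, so adding the same
   (const_int 42) in SImode twice grows the pool by four bytes only
   once, while adding it again in DImode creates a separate eight-byte
   entry, since the lookup walks only pool->constants[i] for the
   requested mode.  */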
/* Return an rtx that represents the offset of X from the start of
   pool POOL.  */

static rtx
s390_pool_offset (struct constant_pool *pool, rtx x)
{
  rtx label;

  label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
  x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
		      UNSPEC_POOL_OFFSET);
  return gen_rtx_CONST (GET_MODE (x), x);
}
/* Find constant VAL of mode MODE in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the new constant.  */

static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
		    enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}
/* Check whether INSN is an execute.  Return the label_ref to its
   execute target template if so, NULL_RTX otherwise.  */

static rtx
s390_execute_label (rtx insn)
{
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == PARALLEL
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
      && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
    return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);

  return NULL_RTX;
}
/* Add execute target for INSN to the constant pool POOL.  */

static void
s390_add_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = insn;
      c->label = gen_label_rtx ();
      c->next = pool->execute;
      pool->execute = c;
      pool->size += 6;
    }
}
/* Find execute target for INSN in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the execute target.  */

static rtx
s390_find_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}
/* For an execute INSN, extract the execute target template.  */

static rtx
s390_execute_target (rtx insn)
{
  rtx pattern = PATTERN (insn);
  gcc_assert (s390_execute_label (insn));

  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
	RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  return pattern;
}
/* Indicate that INSN cannot be duplicated.  This is the case for
   execute insns that carry a unique label.  */

static bool
s390_cannot_copy_insn_p (rtx insn)
{
  rtx label = s390_execute_label (insn);
  return label && label != const0_rtx;
}
/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static void
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn = pool->pool_insn;
  int i;

  /* Switch to rodata section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_start (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Ensure minimum pool alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
  else
    insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit pool base label.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
	/* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
	rtx value = copy_rtx (c->value);
	if (GET_CODE (value) == CONST
	    && GET_CODE (XEXP (value, 0)) == UNSPEC
	    && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
	    && XVECLEN (XEXP (value, 0), 0) == 1)
	  value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));

	insn = emit_label_after (c->label, insn);
	INSN_ADDRESSES_NEW (insn, -1);

	value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
					 gen_rtvec (1, value),
					 UNSPECV_POOL_ENTRY);
	insn = emit_insn_after (value, insn);
	INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Ensure minimum alignment for instructions.  */
  insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Output in-pool execute template insns.  */
  for (c = pool->execute; c; c = c->next)
    {
      insn = emit_label_after (c->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = emit_insn_after (s390_execute_target (c->value), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Switch back to previous section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_end (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);
}
/* Free all memory used by POOL.  */

static void
s390_free_pool (struct constant_pool *pool)
{
  struct constant *c, *next;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = next)
      {
	next = c->next;
	free (c);
      }

  for (c = pool->execute; c; c = next)
    {
      next = c->next;
      free (c);
    }

  BITMAP_FREE (pool->insns);
  free (pool);
}
/* Collect main literal pool.  Return NULL on overflow.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
	{
	  gcc_assert (!pool->pool_insn);
	  pool->pool_insn = insn;
	}

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
	{
	  s390_add_execute (pool, insn);
	}
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      rtx constant = get_pool_constant (pool_ref);
	      enum machine_mode mode = get_pool_mode (pool_ref);
	      s390_add_constant (pool, constant, mode);
	    }
	}

      /* If hot/cold partitioning is enabled we have to make sure that
	 the literal pool is emitted in the same section where the
	 initialization of the literal pool base pointer takes place.
	 emit_pool_after is only used in the non-overflow case on non
	 Z cpus where we can emit the literal pool at the end of the
	 function body within the text section.  */
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
	  && !pool->emit_pool_after)
	pool->emit_pool_after = PREV_INSN (insn);
    }

  gcc_assert (pool->pool_insn || pool->size == 0);

  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
	 pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  /* If the functions ends with the section where the literal pool
     should be emitted set the marker to its end.  */
  if (pool && !pool->emit_pool_after)
    pool->emit_pool_after = get_last_insn ();

  return pool;
}
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  */

static void
s390_mainpool_finish (struct constant_pool *pool)
{
  rtx base_reg = cfun->machine->base_reg;
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      /* We don't actually need a base register after all.  */
      cfun->machine->base_reg = NULL_RTX;

      if (pool->pool_insn)
	remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
	   + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      /* emit_pool_after will be set by s390_mainpool_start to the
	 last insn of the section where the literal pool should be
	 emitted.  */
      insn = pool->emit_pool_after;

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_jump_insn_after (insn, pool->pool_insn);
      JUMP_LABEL (insn) = pool_end;
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	replace_ltrel_base (&PATTERN (insn));

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx addr, pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      if (s390_execute_label (insn))
		addr = s390_find_execute (pool, insn);
	      else
		addr = s390_find_constant (pool, get_pool_constant (pool_ref),
					   get_pool_mode (pool_ref));

	      replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
	      INSN_CODE (insn) = -1;
	    }
	}
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
6691 /* Chunkify the literal pool. */
6693 #define S390_POOL_CHUNK_MIN 0xc00
6694 #define S390_POOL_CHUNK_MAX 0xe00
6696 static struct constant_pool
*
6697 s390_chunkify_start (void)
6699 struct constant_pool
*curr_pool
= NULL
, *pool_list
= NULL
;
6702 rtx pending_ltrel
= NULL_RTX
;
6705 rtx (*gen_reload_base
) (rtx
, rtx
) =
6706 TARGET_CPU_ZARCH
? gen_reload_base_64
: gen_reload_base_31
;
6709 /* We need correct insn addresses. */
6711 shorten_branches (get_insns ());
6713 /* Scan all insns and move literals to pool chunks. */
6715 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
6717 bool section_switch_p
= false;
6719 /* Check for pending LTREL_BASE. */
6722 rtx ltrel_base
= find_ltrel_base (PATTERN (insn
));
6725 gcc_assert (ltrel_base
== pending_ltrel
);
6726 pending_ltrel
= NULL_RTX
;
6730 if (!TARGET_CPU_ZARCH
&& s390_execute_label (insn
))
6733 curr_pool
= s390_start_pool (&pool_list
, insn
);
6735 s390_add_execute (curr_pool
, insn
);
6736 s390_add_pool_insn (curr_pool
, insn
);
6738 else if (GET_CODE (insn
) == INSN
|| CALL_P (insn
))
6740 rtx pool_ref
= NULL_RTX
;
6741 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
6744 rtx constant
= get_pool_constant (pool_ref
);
6745 enum machine_mode mode
= get_pool_mode (pool_ref
);
6748 curr_pool
= s390_start_pool (&pool_list
, insn
);
6750 s390_add_constant (curr_pool
, constant
, mode
);
6751 s390_add_pool_insn (curr_pool
, insn
);
6753 /* Don't split the pool chunk between a LTREL_OFFSET load
6754 and the corresponding LTREL_BASE. */
6755 if (GET_CODE (constant
) == CONST
6756 && GET_CODE (XEXP (constant
, 0)) == UNSPEC
6757 && XINT (XEXP (constant
, 0), 1) == UNSPEC_LTREL_OFFSET
)
6759 gcc_assert (!pending_ltrel
);
6760 pending_ltrel
= pool_ref
;
6765 if (GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == CODE_LABEL
)
6768 s390_add_pool_insn (curr_pool
, insn
);
6769 /* An LTREL_BASE must follow within the same basic block. */
6770 gcc_assert (!pending_ltrel
);
6774 switch (NOTE_KIND (insn
))
6776 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
6777 section_switch_p
= true;
6779 case NOTE_INSN_VAR_LOCATION
:
6780 case NOTE_INSN_CALL_ARG_LOCATION
:
6787 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn
)
6788 || INSN_ADDRESSES (INSN_UID (insn
)) == -1)
6791 if (TARGET_CPU_ZARCH
)
6793 if (curr_pool
->size
< S390_POOL_CHUNK_MAX
)
6796 s390_end_pool (curr_pool
, NULL_RTX
);
6801 int chunk_size
= INSN_ADDRESSES (INSN_UID (insn
))
6802 - INSN_ADDRESSES (INSN_UID (curr_pool
->first_insn
))
6805 /* We will later have to insert base register reload insns.
6806 Those will have an effect on code size, which we need to
6807 consider here. This calculation makes rather pessimistic
6808 worst-case assumptions. */
6809 if (GET_CODE (insn
) == CODE_LABEL
)
6812 if (chunk_size
< S390_POOL_CHUNK_MIN
6813 && curr_pool
->size
< S390_POOL_CHUNK_MIN
6814 && !section_switch_p
)
6817 /* Pool chunks can only be inserted after BARRIERs ... */
6818 if (GET_CODE (insn
) == BARRIER
)
6820 s390_end_pool (curr_pool
, insn
);
6825 /* ... so if we don't find one in time, create one. */
6826 else if (chunk_size
> S390_POOL_CHUNK_MAX
6827 || curr_pool
->size
> S390_POOL_CHUNK_MAX
6828 || section_switch_p
)
6830 rtx label
, jump
, barrier
, next
, prev
;
6832 if (!section_switch_p
)
6834 /* We can insert the barrier only after a 'real' insn. */
6835 if (GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != CALL_INSN
)
6837 if (get_attr_length (insn
) == 0)
6839 /* Don't separate LTREL_BASE from the corresponding
6840 LTREL_OFFSET load. */
6847 next
= NEXT_INSN (insn
);
6851 && (NOTE_KIND (next
) == NOTE_INSN_VAR_LOCATION
6852 || NOTE_KIND (next
) == NOTE_INSN_CALL_ARG_LOCATION
));
6856 gcc_assert (!pending_ltrel
);
6858 /* The old pool has to end before the section switch
6859 note in order to make it part of the current
6861 insn
= PREV_INSN (insn
);
6864 label
= gen_label_rtx ();
6866 if (prev
&& NOTE_P (prev
))
6867 prev
= prev_nonnote_insn (prev
);
6869 jump
= emit_jump_insn_after_setloc (gen_jump (label
), insn
,
6870 INSN_LOCATOR (prev
));
6872 jump
= emit_jump_insn_after_noloc (gen_jump (label
), insn
);
6873 barrier
= emit_barrier_after (jump
);
6874 insn
= emit_label_after (label
, barrier
);
6875 JUMP_LABEL (jump
) = label
;
6876 LABEL_NUSES (label
) = 1;
6878 INSN_ADDRESSES_NEW (jump
, -1);
6879 INSN_ADDRESSES_NEW (barrier
, -1);
6880 INSN_ADDRESSES_NEW (insn
, -1);
6882 s390_end_pool (curr_pool
, barrier
);
6890 s390_end_pool (curr_pool
, NULL_RTX
);
6891 gcc_assert (!pending_ltrel
);
  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_ALLOC (NULL);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
	 of non-local jumps, so we have to mark them.
	 The same holds for named labels.

	 Don't do that, however, if it is the label before
	 a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
	  && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
	{
	  rtx vec_insn = next_real_insn (insn);
	  rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			PATTERN (vec_insn) : NULL_RTX;
	  if (!vec_pat
	      || !(GET_CODE (vec_pat) == ADDR_VEC
		   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
	    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
	}

      /* If we have a direct jump (conditional or unconditional)
	 or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
	{
	  rtx pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	    pat = XVECEXP (pat, 0, 0);

	  if (GET_CODE (pat) == SET)
	    {
	      rtx label = JUMP_LABEL (insn);
	      if (label)
		{
		  if (s390_find_pool (pool_list, label)
		      != s390_find_pool (pool_list, insn))
		    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		}
	    }
	  else if (GET_CODE (pat) == PARALLEL
		   && XVECLEN (pat, 0) == 2
		   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
		   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
		   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
	    {
	      /* Find the jump table used by this casesi jump.  */
	      rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
	      rtx vec_insn = next_real_insn (vec_label);
	      rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			    PATTERN (vec_insn) : NULL_RTX;
	      if (vec_pat
		  && (GET_CODE (vec_pat) == ADDR_VEC
		      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
		{
		  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

		  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
		    {
		      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

		      if (s390_find_pool (pool_list, label)
			  != s390_find_pool (pool_list, insn))
			bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		    }
		}
	    }
	}
    }
  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (cfun->machine->base_reg,
				      curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
	struct constant_pool *pool = s390_find_pool (pool_list, insn);
	if (pool)
	  {
	    rtx new_insn = gen_reload_base (cfun->machine->base_reg,
					    pool->label);
	    INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
	  }
      }

  BITMAP_FREE (far_labels);

  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required.  */

static void
s390_chunkify_finish (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	replace_ltrel_base (&PATTERN (insn));

      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
	continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx addr, pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      if (s390_execute_label (insn))
		addr = s390_find_execute (curr_pool, insn);
	      else
		addr = s390_find_constant (curr_pool,
					   get_pool_constant (pool_ref),
					   get_pool_mode (pool_ref));

	      replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
	      INSN_CODE (insn) = -1;
	    }
	}
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool, 0);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
	  && barrier && GET_CODE (barrier) == BARRIER
	  && label && GET_CODE (label) == CODE_LABEL
	  && GET_CODE (PATTERN (jump)) == SET
	  && SET_DEST (PATTERN (jump)) == pc_rtx
	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
	  && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
	{
	  remove_insn (jump);
	  remove_insn (barrier);
	  remove_insn (label);
	}

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns.  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
	remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN.  */

void
s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
{
  REAL_VALUE_TYPE r;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      gcc_assert (GET_CODE (exp) == CONST_DOUBLE);

      REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
      assemble_real (r, mode, align);
      break;

    case MODE_INT:
      assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
      mark_symbol_refs_as_used (exp);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Return an RTL expression representing the value of the return address
   for the frame COUNT steps up from the current frame.  FRAME is the
   frame pointer of that frame.  */

rtx
s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
{
  int offset;
  rtx addr;

  /* Without backchain, we fail for all but the current frame.  */

  if (!TARGET_BACKCHAIN && count > 0)
    return NULL_RTX;

  /* For the current frame, we need to make sure the initial
     value of RETURN_REGNUM is actually saved.  */

  if (count == 0)
    {
      /* On non-z architectures branch splitting could overwrite r14.  */
      if (TARGET_CPU_ZARCH)
	return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
      else
	{
	  cfun_frame_layout.save_return_addr_p = true;
	  return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
	}
    }

  if (TARGET_PACKED_STACK)
    offset = -2 * UNITS_PER_LONG;
  else
    offset = RETURN_REGNUM * UNITS_PER_LONG;

  addr = plus_constant (Pmode, frame, offset);
  addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}
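
/* For illustration (numbers not taken from the code above): with the
   standard 64-bit layout, UNITS_PER_LONG is 8 and RETURN_REGNUM is 14,
   so the caller's return address is fetched from frame + 14 * 8 =
   frame + 112, which is the usual r14 slot in the register save area
   of the s390x ABI.  */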
/* Return an RTL expression representing the back chain stored in
   the current stack frame.  */

rtx
s390_back_chain_rtx (void)
{
  rtx chain;

  gcc_assert (TARGET_BACKCHAIN);

  if (TARGET_PACKED_STACK)
    chain = plus_constant (Pmode, stack_pointer_rtx,
			   STACK_POINTER_OFFSET - UNITS_PER_LONG);
  else
    chain = stack_pointer_rtx;

  chain = gen_rtx_MEM (Pmode, chain);
  return chain;
}
/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */

static int
find_unused_clobbered_reg (void)
{
  int i;
  for (i = 0; i < 6; i++)
    if (!df_regs_ever_live_p (i))
      return i;
  return 0;
}
/* Helper function for s390_regs_ever_clobbered.  Sets the fields in DATA
   for all clobbered hard regs in SETREG.  */

static void
s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED,
			void *data)
{
  int *regs_ever_clobbered = (int *)data;
  unsigned int i, regno;
  enum machine_mode mode = GET_MODE (setreg);

  if (GET_CODE (setreg) == SUBREG)
    {
      rtx inner = SUBREG_REG (setreg);
      if (!GENERAL_REG_P (inner))
	return;
      regno = subreg_regno (setreg);
    }
  else if (GENERAL_REG_P (setreg))
    regno = REGNO (setreg);
  else
    return;

  for (i = regno;
       i < regno + HARD_REGNO_NREGS (regno, mode);
       i++)
    regs_ever_clobbered[i] = 1;
}
/* Walks through all basic blocks of the current function looking
   for clobbered hard regs using s390_reg_clobbered_rtx.  The fields
   of the passed integer array REGS_EVER_CLOBBERED are set to one for
   each of those regs.  */

static void
s390_regs_ever_clobbered (int *regs_ever_clobbered)
{
  basic_block cur_bb;
  rtx cur_insn;
  unsigned int i;

  memset (regs_ever_clobbered, 0, 16 * sizeof (int));

  /* For non-leaf functions we have to consider all call clobbered regs to be
     clobbered.  */
  if (!crtl->is_leaf)
    {
      for (i = 0; i < 16; i++)
	regs_ever_clobbered[i] = call_really_used_regs[i];
    }

  /* Make the "magic" eh_return registers live if necessary.  For regs_ever_live
     this work is done by liveness analysis (mark_regs_live_at_end).
     Special care is needed for functions containing landing pads.  Landing pads
     may use the eh registers, but the code which sets these registers is not
     contained in that function.  Hence s390_regs_ever_clobbered is not able to
     deal with this automatically.  */
  if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
    for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
      if (crtl->calls_eh_return
	  || (cfun->machine->has_landing_pad_p
	      && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
	regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;

  /* For nonlocal gotos all call-saved registers have to be saved.
     This flag is also set for the unwinding code in libgcc.
     See expand_builtin_unwind_init.  For regs_ever_live this is done by
     reload.  */
  if (cfun->has_nonlocal_label)
    for (i = 0; i < 16; i++)
      if (!call_really_used_regs[i])
	regs_ever_clobbered[i] = 1;

  FOR_EACH_BB (cur_bb)
    {
      FOR_BB_INSNS (cur_bb, cur_insn)
	{
	  if (INSN_P (cur_insn))
	    note_stores (PATTERN (cur_insn),
			 s390_reg_clobbered_rtx,
			 regs_ever_clobbered);
	}
    }
}
/* Determine the frame area which actually has to be accessed
   in the function epilogue.  The values are stored at the
   given pointers AREA_BOTTOM (address of the lowest used stack
   address) and AREA_TOP (address of the first item which does
   not belong to the stack frame).  */

static void
s390_frame_area (int *area_bottom, int *area_top)
{
  int b, t;
  int i;

  b = INT_MAX;
  t = INT_MIN;

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      b = (cfun_frame_layout.gprs_offset
	   + cfun_frame_layout.first_restore_gpr * UNITS_PER_LONG);
      t = b + (cfun_frame_layout.last_restore_gpr
	       - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_LONG;
    }

  if (TARGET_64BIT && cfun_save_high_fprs_p)
    {
      b = MIN (b, cfun_frame_layout.f8_offset);
      t = MAX (t, (cfun_frame_layout.f8_offset
		   + cfun_frame_layout.high_fprs * 8));
    }

  for (i = 2; i < 4; i++)
    if (cfun_fpr_bit_p (i))
      {
	b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
	t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
      }

  *area_bottom = b;
  *area_top = t;
}
/* Fill cfun->machine with info about register usage of current function.
   Return in CLOBBERED_REGS which GPRs are currently considered set.  */

static void
s390_register_info (int clobbered_regs[])
{
  int i, j;

  /* fprs 8 - 15 are call saved for 64 Bit ABI.  */
  cfun_frame_layout.fpr_bitmap = 0;
  cfun_frame_layout.high_fprs = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (df_regs_ever_live_p (i) && !global_regs[i])
	{
	  cfun_set_fpr_bit (i - 16);
	  cfun_frame_layout.high_fprs++;
	}

  /* Find first and last gpr to be saved.  We trust regs_ever_live
     data, except that we don't save and restore global registers.

     Also, all registers with special meaning to the compiler need
     to be handled extra.  */

  s390_regs_ever_clobbered (clobbered_regs);

  for (i = 0; i < 16; i++)
    clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];

  if (frame_pointer_needed)
    clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;

  if (flag_pic)
    clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
      |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);

  clobbered_regs[BASE_REGNUM]
    |= (cfun->machine->base_reg
	&& REGNO (cfun->machine->base_reg) == BASE_REGNUM);

  clobbered_regs[RETURN_REGNUM]
    |= (!crtl->is_leaf
	|| TARGET_TPF_PROFILING
	|| cfun->machine->split_branches_pending_p
	|| cfun_frame_layout.save_return_addr_p
	|| crtl->calls_eh_return
	|| cfun->stdarg);

  clobbered_regs[STACK_POINTER_REGNUM]
    |= (!crtl->is_leaf
	|| TARGET_TPF_PROFILING
	|| cfun_save_high_fprs_p
	|| get_frame_size () > 0
	|| cfun->calls_alloca
	|| cfun->stdarg);

  for (i = 6; i < 16; i++)
    if (df_regs_ever_live_p (i) || clobbered_regs[i])
      break;
  for (j = 15; j > i; j--)
    if (df_regs_ever_live_p (j) || clobbered_regs[j])
      break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      cfun_frame_layout.first_save_gpr_slot = -1;
      cfun_frame_layout.last_save_gpr_slot = -1;
      cfun_frame_layout.first_save_gpr = -1;
      cfun_frame_layout.first_restore_gpr = -1;
      cfun_frame_layout.last_save_gpr = -1;
      cfun_frame_layout.last_restore_gpr = -1;
    }
  else
    {
      /* Save slots for gprs from i to j.  */
      cfun_frame_layout.first_save_gpr_slot = i;
      cfun_frame_layout.last_save_gpr_slot = j;

      for (i = cfun_frame_layout.first_save_gpr_slot;
	   i < cfun_frame_layout.last_save_gpr_slot + 1;
	   i++)
	if (clobbered_regs[i])
	  break;

      for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
	if (clobbered_regs[j])
	  break;

      if (i == cfun_frame_layout.last_save_gpr_slot + 1)
	{
	  /* Nothing to save/restore.  */
	  cfun_frame_layout.first_save_gpr = -1;
	  cfun_frame_layout.first_restore_gpr = -1;
	  cfun_frame_layout.last_save_gpr = -1;
	  cfun_frame_layout.last_restore_gpr = -1;
	}
      else
	{
	  /* Save / Restore from gpr i to j.  */
	  cfun_frame_layout.first_save_gpr = i;
	  cfun_frame_layout.first_restore_gpr = i;
	  cfun_frame_layout.last_save_gpr = j;
	  cfun_frame_layout.last_restore_gpr = j;
	}
    }

  if (cfun->stdarg)
    {
      /* Varargs functions need to save gprs 2 to 6.  */
      if (cfun->va_list_gpr_size
	  && crtl->args.info.gprs < GP_ARG_NUM_REG)
	{
	  int min_gpr = crtl->args.info.gprs;
	  int max_gpr = min_gpr + cfun->va_list_gpr_size;
	  if (max_gpr > GP_ARG_NUM_REG)
	    max_gpr = GP_ARG_NUM_REG;

	  if (cfun_frame_layout.first_save_gpr == -1
	      || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
	    {
	      cfun_frame_layout.first_save_gpr = 2 + min_gpr;
	      cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
	    }

	  if (cfun_frame_layout.last_save_gpr == -1
	      || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
	    {
	      cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
	      cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
	    }
	}

      /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved.  */
      if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
	  && crtl->args.info.fprs < FP_ARG_NUM_REG)
	{
	  int min_fpr = crtl->args.info.fprs;
	  int max_fpr = min_fpr + cfun->va_list_fpr_size;
	  if (max_fpr > FP_ARG_NUM_REG)
	    max_fpr = FP_ARG_NUM_REG;

	  /* ??? This is currently required to ensure proper location
	     of the fpr save slots within the va_list save area.  */
	  if (TARGET_PACKED_STACK)
	    min_fpr = 0;

	  for (i = min_fpr; i < max_fpr; i++)
	    cfun_set_fpr_bit (i);
	}
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
	cfun_set_fpr_bit (i);
}
/* Fill cfun->machine with info about frame of current function.  */

static void
s390_frame_info (void)
{
  int i;

  cfun_frame_layout.frame_size = get_frame_size ();
  if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
    fatal_error ("total size of local variables exceeds architecture limit");

  if (!TARGET_PACKED_STACK)
    {
      cfun_frame_layout.backchain_offset = 0;
      cfun_frame_layout.f0_offset = 16 * UNITS_PER_LONG;
      cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
      cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
      cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
				       * UNITS_PER_LONG);
    }
  else if (TARGET_BACKCHAIN) /* kernel stack layout */
    {
      cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
					    - UNITS_PER_LONG);
      cfun_frame_layout.gprs_offset
	= (cfun_frame_layout.backchain_offset
	   - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
	   * UNITS_PER_LONG);

      if (TARGET_64BIT)
	{
	  cfun_frame_layout.f4_offset
	    = (cfun_frame_layout.gprs_offset
	       - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

	  cfun_frame_layout.f0_offset
	    = (cfun_frame_layout.f4_offset
	       - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
	}
      else
	{
	  /* On 31 bit we have to care about alignment of the
	     floating point regs to provide fastest access.  */
	  cfun_frame_layout.f0_offset
	    = ((cfun_frame_layout.gprs_offset
		& ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
	       - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

	  cfun_frame_layout.f4_offset
	    = (cfun_frame_layout.f0_offset
	       - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
	}
    }
  else /* no backchain */
    {
      cfun_frame_layout.f4_offset
	= (STACK_POINTER_OFFSET
	   - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

      cfun_frame_layout.f0_offset
	= (cfun_frame_layout.f4_offset
	   - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

      cfun_frame_layout.gprs_offset
	= cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
    }

  if (crtl->is_leaf
      && !TARGET_TPF_PROFILING
      && cfun_frame_layout.frame_size == 0
      && !cfun_save_high_fprs_p
      && !cfun->calls_alloca
      && !cfun->stdarg)
    return;

  if (!TARGET_PACKED_STACK)
    cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
				     + crtl->outgoing_args_size
				     + cfun_frame_layout.high_fprs * 8);
  else
    {
      if (TARGET_BACKCHAIN)
	cfun_frame_layout.frame_size += UNITS_PER_LONG;

      /* No alignment trouble here because f8-f15 are only saved under
	 64 bit ABI.  */
      cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
					       cfun_frame_layout.f4_offset),
					  cfun_frame_layout.gprs_offset)
				     - cfun_frame_layout.high_fprs * 8);

      cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;

      for (i = 0; i < 8; i++)
	if (cfun_fpr_bit_p (i))
	  cfun_frame_layout.frame_size += 8;

      cfun_frame_layout.frame_size += cfun_gprs_save_area_size;

      /* If under 31 bit an odd number of gprs has to be saved we have to adjust
	 the frame size to sustain 8 byte alignment of stack frames.  */
      cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
				       STACK_BOUNDARY / BITS_PER_UNIT - 1)
				      & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));

      cfun_frame_layout.frame_size += crtl->outgoing_args_size;
    }
}
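
/* Worked example of the rounding above (illustrative numbers): with
   STACK_BOUNDARY / BITS_PER_UNIT == 8 and an intermediate frame_size
   of 92 bytes, (92 + 7) & ~7 == 96, so the packed-stack frame is
   rounded up to the next multiple of 8 before the outgoing argument
   area is added.  */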
/* Generate frame layout.  Fills in register and frame data for the current
   function in cfun->machine.  This routine can be called multiple times;
   it will re-do the complete frame layout every time.  */

static void
s390_init_frame_layout (void)
{
  HOST_WIDE_INT frame_size;
  bool base_used;
  int clobbered_regs[16];

  /* On S/390 machines, we may need to perform branch splitting, which
     will require both base and return address register.  We have no
     choice but to assume we're going to need them until right at the
     end of the machine dependent reorg phase.  */
  if (!TARGET_CPU_ZARCH)
    cfun->machine->split_branches_pending_p = true;

  do
    {
      frame_size = cfun_frame_layout.frame_size;

      /* Try to predict whether we'll need the base register.  */
      base_used = cfun->machine->split_branches_pending_p
		  || crtl->uses_const_pool
		  || (!DISP_IN_RANGE (frame_size)
		      && !CONST_OK_FOR_K (frame_size));

      /* Decide which register to use as literal pool base.  In small
	 leaf functions, try to use an unused call-clobbered register
	 as base register to avoid save/restore overhead.  */
      if (!base_used)
	cfun->machine->base_reg = NULL_RTX;
      else if (crtl->is_leaf && !df_regs_ever_live_p (5))
	cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
      else
	cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);

      s390_register_info (clobbered_regs);
      s390_frame_info ();
    }
  while (frame_size != cfun_frame_layout.frame_size);
}
/* Update frame layout.  Recompute actual register save data based on
   current info and update regs_ever_live for the special registers.
   May be called multiple times, but may never cause *more* registers
   to be saved than s390_init_frame_layout allocated room for.  */

static void
s390_update_frame_layout (void)
{
  int clobbered_regs[16];

  s390_register_info (clobbered_regs);

  df_set_regs_ever_live (BASE_REGNUM,
			 clobbered_regs[BASE_REGNUM] ? true : false);
  df_set_regs_ever_live (RETURN_REGNUM,
			 clobbered_regs[RETURN_REGNUM] ? true : false);
  df_set_regs_ever_live (STACK_POINTER_REGNUM,
			 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);

  if (cfun->machine->base_reg)
    df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
}
/* Return true if it is legal to put a value with MODE into REGNO.  */

bool
s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (REGNO_REG_CLASS (regno))
    {
    case FP_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (mode == SImode || mode == DImode)
	    return true;

	  if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	    return true;
	}
      break;
    case ADDR_REGS:
      if (FRAME_REGNO_P (regno) && mode == Pmode)
	return true;

      /* fallthrough */
    case GENERAL_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (TARGET_ZARCH
	      || (mode != TFmode && mode != TCmode && mode != TDmode))
	    return true;
	}
      break;
    case CC_REGS:
      if (GET_MODE_CLASS (mode) == MODE_CC)
	return true;
      break;
    case ACCESS_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (mode == SImode || mode == Pmode)
	    return true;
	}
      break;
    default:
      return false;
    }

  return false;
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bool
s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
{
   /* Once we've decided upon a register to use as base register, it must
      no longer be used for any other purpose.  */
  if (cfun->machine->base_reg)
    if (REGNO (cfun->machine->base_reg) == old_reg
	|| REGNO (cfun->machine->base_reg) == new_reg)
      return false;

  return true;
}
/* Maximum number of registers to represent a value of mode MODE
   in a register of class RCLASS.  */

int
s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
{
  switch (rclass)
    {
    case FP_REGS:
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
      else
	return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
    case ACCESS_REGS:
      return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
    default:
      break;
    }
  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
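
/* Illustrative values (not asserted anywhere here): DFmode (8 bytes)
   in FP_REGS needs (8 + 7) / 8 = 1 register, TFmode (16 bytes) needs
   2, i.e. an FPR pair; in the default case on 31 bit
   (UNITS_PER_WORD == 4), DImode needs (8 + 3) / 4 = 2 GPRs.  */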
/* Return true if register FROM can be eliminated via register TO.  */

static bool
s390_can_eliminate (const int from, const int to)
{
  /* On zSeries machines, we have not marked the base register as fixed.
     Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
     If a function requires the base register, we say here that this
     elimination cannot be performed.  This will cause reload to free
     up the base register (as if it were fixed).  On the other hand,
     if the current function does *not* require the base register, we
     say here the elimination succeeds, which in turn allows reload
     to allocate the base register for any other purpose.  */
  if (from == BASE_REGNUM && to == BASE_REGNUM)
    {
      if (TARGET_CPU_ZARCH)
	{
	  s390_init_frame_layout ();
	  return cfun->machine->base_reg == NULL_RTX;
	}

      return false;
    }

  /* Everything else must point into the stack frame.  */
  gcc_assert (to == STACK_POINTER_REGNUM
	      || to == HARD_FRAME_POINTER_REGNUM);

  gcc_assert (from == FRAME_POINTER_REGNUM
	      || from == ARG_POINTER_REGNUM
	      || from == RETURN_ADDRESS_POINTER_REGNUM);

  /* Make sure we actually saved the return address.  */
  if (from == RETURN_ADDRESS_POINTER_REGNUM)
    if (!crtl->calls_eh_return
	&& !cfun->stdarg
	&& !cfun_frame_layout.save_return_addr_p)
      return false;

  return true;
}
/* Return offset between register FROM and TO initially after prolog.  */

HOST_WIDE_INT
s390_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset;
  int index;

  /* ??? Why are we called for non-eliminable pairs?  */
  if (!s390_can_eliminate (from, to))
    return 0;

  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      offset = (get_frame_size()
		+ STACK_POINTER_OFFSET
		+ crtl->outgoing_args_size);
      break;

    case ARG_POINTER_REGNUM:
      s390_init_frame_layout ();
      offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
      break;

    case RETURN_ADDRESS_POINTER_REGNUM:
      s390_init_frame_layout ();
      index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
      gcc_assert (index >= 0);
      offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
      offset += index * UNITS_PER_LONG;
      break;

    default:
      gcc_unreachable ();
    }

  return offset;
}
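
/* Worked example (hypothetical numbers, 64 bit): with
   frame_size == 160, gprs_offset == 48 and first_save_gpr_slot == 6,
   the return-address slot of r14 has index 14 - 6 = 8, giving an
   elimination offset of 160 + 48 + 8 * 8 = 272.  */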
/* Emit insn to save fpr REGNUM at offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));

  if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
    set_mem_alias_set (addr, get_varargs_alias_set ());
  else
    set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}
/* Emit insn to restore fpr REGNUM from offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
  set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}
/* Return true if REGNO is a global register, but not one
   of the special ones that need to be saved/restored in anyway.  */

static inline bool
global_not_special_regno_p (int regno)
{
  return (global_regs[regno]
	  /* These registers are special and need to be
	     restored in any case.  */
	  && !(regno == STACK_POINTER_REGNUM
	       || regno == RETURN_REGNUM
	       || regno == BASE_REGNUM
	       || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
}
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);

  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
	insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      if (!global_not_special_regno_p (first))
	RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }

  insn = gen_store_multiple (addr,
			     gen_rtx_REG (Pmode, first),
			     GEN_INT (last - first + 1));

  if (first <= 6 && cfun->stdarg)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
	rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);

	if (first + i <= 6)
	  set_mem_alias_set (mem, get_varargs_alias_set ());
      }

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6 && !global_not_special_regno_p (first))
    {
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) == SET
	    && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
								     0, i)))))
	  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      int start;

      for (start = first >= 6 ? first : 6; start <= last; start++)
	if (!global_not_special_regno_p (start))
	  break;

      if (start > last)
	return insn;

      addr = plus_constant (Pmode, base,
			    offset + (start - first) * UNITS_PER_LONG);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
				 gen_rtx_REG (Pmode, start),
				 GEN_INT (last - start + 1));
      note = PATTERN (note);

      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);

      for (i = 0; i < XVECLEN (note, 0); i++)
	if (GET_CODE (XVECEXP (note, 0, i)) == SET
	    && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
								     0, i)))))
	  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
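
/* For illustration: for a 64-bit function that has to save r6..r15 in
   the standard (non-packed) layout, the store-multiple built above
   assembles to something like

       stmg    %r6,%r15,48(%r15)

   since gprs_offset is first_save_gpr_slot * UNITS_PER_LONG = 6 * 8
   = 48 in that layout.  */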
/* Generate insn to restore registers FIRST to LAST from
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
restore_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
      else
	insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);

      return insn;
    }

  insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
			    addr,
			    GEN_INT (last - first + 1));
  return insn;
}
/* Return insn sequence to load the GOT register.  */

static GTY(()) rtx got_symbol;

rtx
s390_load_got (void)
{
  rtx insns;

  /* We cannot use pic_offset_table_rtx here since we use this
     function also for non-pic if __tls_get_offset is called and in
     that case PIC_OFFSET_TABLE_REGNUM as well as pic_offset_table_rtx
     aren't usable.  */
  rtx got_rtx = gen_rtx_REG (Pmode, 12);

  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  start_sequence ();

  if (TARGET_CPU_ZARCH)
    {
      emit_move_insn (got_rtx, got_symbol);
    }
  else
    {
      rtx offset;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
			       UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      emit_move_insn (got_rtx, offset);

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
			       UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, got_rtx, offset);

      emit_move_insn (got_rtx, offset);
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
s390_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;
  int offset;
  int next_fpr = 0;

  /* Complete frame layout.  */

  s390_update_frame_layout ();

  /* Annotate all constant pool references to let the scheduler know
     they implicitly use the base register.  */

  push_topmost_sequence ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	annotate_constant_pool_refs (&PATTERN (insn));
	df_insn_rescan (insn);
      }

  pop_topmost_sequence ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use the register 1.  */

  if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
      && !crtl->is_leaf
      && !TARGET_TPF_PROFILING)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */
  if (cfun_frame_layout.first_save_gpr != -1)
    {
      insn = save_gprs (stack_pointer_rtx,
			cfun_frame_layout.gprs_offset +
			UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
					  - cfun_frame_layout.first_save_gpr_slot),
			cfun_frame_layout.first_save_gpr,
			cfun_frame_layout.last_save_gpr);
      emit_insn (insn);
    }

  /* Dummy insn to mark literal pool slot.  */

  if (cfun->machine->base_reg)
    emit_insn (gen_main_pool (cfun->machine->base_reg));

  offset = cfun_frame_layout.f0_offset;

  /* Save f0 and f2.  */
  for (i = 0; i < 2; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;
	}
      else if (!TARGET_PACKED_STACK)
	offset += 8;
    }

  /* Save f4 and f6.  */
  offset = cfun_frame_layout.f4_offset;
  for (i = 2; i < 4; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  insn = save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;

	  /* If f4 and f6 are call clobbered they are saved due to stdargs and
	     therefore are not frame related.  */
	  if (!call_really_used_regs[i + 16])
	    RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (!TARGET_PACKED_STACK)
	offset += 8;
    }

  if (TARGET_PACKED_STACK
      && cfun_save_high_fprs_p
      && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
    {
      offset = (cfun_frame_layout.f8_offset
		+ (cfun_frame_layout.high_fprs - 1) * 8);

      for (i = 15; i > 7 && offset >= 0; i--)
	if (cfun_fpr_bit_p (i))
	  {
	    insn = save_fpr (stack_pointer_rtx, offset, i + 16);

	    RTX_FRAME_RELATED_P (insn) = 1;
	    offset -= 8;
	  }
      if (offset >= cfun_frame_layout.f8_offset)
	next_fpr = i + 16;
    }

  if (!TARGET_PACKED_STACK)
    next_fpr = cfun_save_high_fprs_p ? 31 : 0;

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun_frame_layout.frame_size;
  /* Decrement stack pointer.  */

  if (cfun_frame_layout.frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      rtx real_frame_off;

      if (s390_stack_size)
	{
	  HOST_WIDE_INT stack_guard;

	  if (s390_stack_guard)
	    stack_guard = s390_stack_guard;
	  else
	    {
	      /* If no value for stack guard is provided the smallest power of 2
		 larger than the current frame size is chosen.  */
	      stack_guard = 1;
	      while (stack_guard < cfun_frame_layout.frame_size)
		stack_guard <<= 1;
	    }

	  if (cfun_frame_layout.frame_size >= s390_stack_size)
	    {
	      warning (0, "frame size of function %qs is %wd"
		       " bytes exceeding user provided stack limit of "
		       "%d bytes.  "
		       "An unconditional trap is added.",
		       current_function_name(), cfun_frame_layout.frame_size,
		       s390_stack_size);
	      emit_insn (gen_trap ());
	    }
	  else
	    {
	      /* stack_guard has to be smaller than s390_stack_size.
		 Otherwise we would emit an AND with zero which would
		 not match the test under mask pattern.  */
	      if (stack_guard >= s390_stack_size)
		{
		  warning (0, "frame size of function %qs is %wd"
			   " bytes which is more than half the stack size. "
			   "The dynamic check would not be reliable. "
			   "No check emitted for this function.",
			   current_function_name(),
			   cfun_frame_layout.frame_size);
		}
	      else
		{
		  HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
						    & ~(stack_guard - 1));

		  rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
				       GEN_INT (stack_check_mask));
		  if (TARGET_64BIT)
		    emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
							 t, const0_rtx),
					     t, const0_rtx, const0_rtx));
		  else
		    emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
							 t, const0_rtx),
					     t, const0_rtx, const0_rtx));
		}
	    }
	}
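
      /* Worked example (hypothetical options -mstack-size=65536 with no
	 explicit -mstack-guard): a 5000-byte frame rounds the guard up
	 to 8192, and the emitted check masks the stack pointer with
	 (65536 - 1) & ~(8192 - 1) == 0xe000, trapping once the low end
	 of the permitted stack region is reached.  */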
      if (s390_warn_framesize > 0
	  && cfun_frame_layout.frame_size >= s390_warn_framesize)
	warning (0, "frame size of %qs is %wd bytes",
		 current_function_name (), cfun_frame_layout.frame_size);

      if (s390_warn_dynamicstack_p && cfun->calls_alloca)
	warning (0, "%qs uses dynamic stack allocation", current_function_name ());

      /* Save incoming stack pointer into temp reg.  */
      if (TARGET_BACKCHAIN || next_fpr)
	insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_K (INTVAL (frame_off)))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
	  annotate_constant_pool_refs (&PATTERN (insn));
	}

      RTX_FRAME_RELATED_P (insn) = 1;
      real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					       real_frame_off)));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
	{
	  if (cfun_frame_layout.backchain_offset)
	    addr = gen_rtx_MEM (Pmode,
				plus_constant (Pmode, stack_pointer_rtx,
					       cfun_frame_layout.backchain_offset));
	  else
	    addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, get_frame_alias_set ());
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}

      /* If we support non-call exceptions (e.g. for Java),
	 we need to make sure the backchain pointer is set up
	 before any possibly trapping memory access.  */
      if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_clobber (addr);
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (cfun_save_high_fprs_p && next_fpr)
    {
      /* If the stack might be accessed through a different register
	 we have to make sure that the stack pointer decrement is not
	 moved below the use of the stack slots.  */
      s390_emit_stack_tie ();

      insn = emit_insn (gen_add2_insn (temp_reg,
				       GEN_INT (cfun_frame_layout.f8_offset)));

      offset = 0;

      for (i = 24; i <= next_fpr; i++)
	if (cfun_fpr_bit_p (i - 16))
	  {
	    rtx addr = plus_constant (Pmode, stack_pointer_rtx,
				      cfun_frame_layout.frame_size
				      + cfun_frame_layout.f8_offset
				      + offset);

	    insn = save_fpr (temp_reg, offset, i);
	    offset += 8;
	    RTX_FRAME_RELATED_P (insn) = 1;
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       gen_rtx_MEM (DFmode, addr),
				       gen_rtx_REG (DFmode, i)));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    {
      rtx insns = s390_load_got ();

      for (insn = insns; insn; insn = NEXT_INSN (insn))
	annotate_constant_pool_refs (&PATTERN (insn));

      emit_insn (insns);
    }

  if (TARGET_TPF_PROFILING)
    {
      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.  */
      emit_insn (gen_prologue_tpf ());

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
8395 s390_emit_epilogue (bool sibcall
)
8397 rtx frame_pointer
, return_reg
, cfa_restores
= NULL_RTX
;
8398 int area_bottom
, area_top
, offset
= 0;
8403 if (TARGET_TPF_PROFILING
)
8406 /* Generate a BAS instruction to serve as a function
8407 entry intercept to facilitate the use of tracing
8408 algorithms located at the branch target. */
8410 /* Emit a blockage here so that all code
8411 lies between the profiling mechanisms. */
8412 emit_insn (gen_blockage ());
8414 emit_insn (gen_epilogue_tpf ());
8417 /* Check whether to use frame or stack pointer for restore. */
8419 frame_pointer
= (frame_pointer_needed
8420 ? hard_frame_pointer_rtx
: stack_pointer_rtx
);
8422 s390_frame_area (&area_bottom
, &area_top
);
8424 /* Check whether we can access the register save area.
8425 If not, increment the frame pointer as required. */
8427 if (area_top
<= area_bottom
)
8429 /* Nothing to restore. */
8431 else if (DISP_IN_RANGE (cfun_frame_layout
.frame_size
+ area_bottom
)
8432 && DISP_IN_RANGE (cfun_frame_layout
.frame_size
+ area_top
- 1))
8434 /* Area is in range. */
8435 offset
= cfun_frame_layout
.frame_size
;
8439 rtx insn
, frame_off
, cfa
;
8441 offset
= area_bottom
< 0 ? -area_bottom
: 0;
8442 frame_off
= GEN_INT (cfun_frame_layout
.frame_size
- offset
);
8444 cfa
= gen_rtx_SET (VOIDmode
, frame_pointer
,
8445 gen_rtx_PLUS (Pmode
, frame_pointer
, frame_off
));
8446 if (DISP_IN_RANGE (INTVAL (frame_off
)))
8448 insn
= gen_rtx_SET (VOIDmode
, frame_pointer
,
8449 gen_rtx_PLUS (Pmode
, frame_pointer
, frame_off
));
8450 insn
= emit_insn (insn
);
8454 if (!CONST_OK_FOR_K (INTVAL (frame_off
)))
8455 frame_off
= force_const_mem (Pmode
, frame_off
);
8457 insn
= emit_insn (gen_add2_insn (frame_pointer
, frame_off
));
8458 annotate_constant_pool_refs (&PATTERN (insn
));
8460 add_reg_note (insn
, REG_CFA_ADJUST_CFA
, cfa
);
8461 RTX_FRAME_RELATED_P (insn
) = 1;
8464 /* Restore call saved fprs. */
8468 if (cfun_save_high_fprs_p
)
8470 next_offset
= cfun_frame_layout
.f8_offset
;
8471 for (i
= 24; i
< 32; i
++)
8473 if (cfun_fpr_bit_p (i
- 16))
8475 restore_fpr (frame_pointer
,
8476 offset
+ next_offset
, i
);
8478 = alloc_reg_note (REG_CFA_RESTORE
,
8479 gen_rtx_REG (DFmode
, i
), cfa_restores
);
8488 next_offset
= cfun_frame_layout
.f4_offset
;
8489 for (i
= 18; i
< 20; i
++)
8491 if (cfun_fpr_bit_p (i
- 16))
8493 restore_fpr (frame_pointer
,
8494 offset
+ next_offset
, i
);
8496 = alloc_reg_note (REG_CFA_RESTORE
,
8497 gen_rtx_REG (DFmode
, i
), cfa_restores
);
8500 else if (!TARGET_PACKED_STACK
)
8506 /* Return register. */
8508 return_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
8510 /* Restore call saved gprs. */
8512 if (cfun_frame_layout
.first_restore_gpr
!= -1)
8517 /* Check for global register and save them
8518 to stack location from where they get restored. */
8520 for (i
= cfun_frame_layout
.first_restore_gpr
;
8521 i
<= cfun_frame_layout
.last_restore_gpr
;
8524 if (global_not_special_regno_p (i
))
8526 addr
= plus_constant (Pmode
, frame_pointer
,
8527 offset
+ cfun_frame_layout
.gprs_offset
8528 + (i
- cfun_frame_layout
.first_save_gpr_slot
)
8530 addr
= gen_rtx_MEM (Pmode
, addr
);
8531 set_mem_alias_set (addr
, get_frame_alias_set ());
8532 emit_move_insn (addr
, gen_rtx_REG (Pmode
, i
));
8536 = alloc_reg_note (REG_CFA_RESTORE
,
8537 gen_rtx_REG (Pmode
, i
), cfa_restores
);
8542 /* Fetch return address from stack before load multiple,
8543 this will do good for scheduling. */
8545 if (cfun_frame_layout
.save_return_addr_p
8546 || (cfun_frame_layout
.first_restore_gpr
< BASE_REGNUM
8547 && cfun_frame_layout
.last_restore_gpr
> RETURN_REGNUM
))
8549 int return_regnum
= find_unused_clobbered_reg();
8552 return_reg
= gen_rtx_REG (Pmode
, return_regnum
);
8554 addr
= plus_constant (Pmode
, frame_pointer
,
8555 offset
+ cfun_frame_layout
.gprs_offset
8557 - cfun_frame_layout
.first_save_gpr_slot
)
8559 addr
= gen_rtx_MEM (Pmode
, addr
);
8560 set_mem_alias_set (addr
, get_frame_alias_set ());
8561 emit_move_insn (return_reg
, addr
);
8565 insn
= restore_gprs (frame_pointer
,
8566 offset
+ cfun_frame_layout
.gprs_offset
8567 + (cfun_frame_layout
.first_restore_gpr
8568 - cfun_frame_layout
.first_save_gpr_slot
)
8570 cfun_frame_layout
.first_restore_gpr
,
8571 cfun_frame_layout
.last_restore_gpr
);
8572 insn
= emit_insn (insn
);
8573 REG_NOTES (insn
) = cfa_restores
;
8574 add_reg_note (insn
, REG_CFA_DEF_CFA
,
8575 plus_constant (Pmode
, stack_pointer_rtx
,
8576 STACK_POINTER_OFFSET
));
8577 RTX_FRAME_RELATED_P (insn
) = 1;
8583 /* Return to caller. */
8585 p
= rtvec_alloc (2);
8587 RTVEC_ELT (p
, 0) = ret_rtx
;
8588 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
, return_reg
);
8589 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
/* Return the size in bytes of a function argument of
   type TYPE and/or mode MODE.  At least one of TYPE or
   MODE must be specified.  */

static int
s390_function_arg_size (enum machine_mode mode, const_tree type)
{
  if (type)
    return int_size_in_bytes (type);

  /* No type info available for some library calls ...  */
  if (mode != BLKmode)
    return GET_MODE_SIZE (mode);

  /* If we have neither type nor mode, abort */
  gcc_unreachable ();
}
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in a floating-point register, if available.  */

static bool
s390_function_arg_float (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* Soft-float changes the ABI: no floating-point registers are used.  */
  if (TARGET_SOFT_FLOAT)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;

  /* The ABI says that record types with a single member are treated
     just like that member would be.  */
  while (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field, single = NULL_TREE;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (single == NULL_TREE)
	    single = TREE_TYPE (field);
	  else
	    return false;
	}

      if (single == NULL_TREE)
	return false;

      type = single;
    }

  return TREE_CODE (type) == REAL_TYPE;
}
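
/* Example of the single-member record rule (illustrative):

     struct inner { double d; };
     struct outer { struct inner i; };

   Peeling the records reduces 'struct outer' to REAL_TYPE, so it is
   passed in an FPR just like a plain double, whereas a record with a
   second field falls through to the integer/reference rules below.  */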
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in an integer register, or a pair of integer
   registers, if available.  */

static bool
s390_function_arg_integer (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return GET_MODE_CLASS (mode) == MODE_INT
	   || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));

  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == NULLPTR_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
    return true;

  /* We also accept structs of size 1, 2, 4, 8 that are not
     passed in floating-point registers.  */
  if (AGGREGATE_TYPE_P (type)
      && exact_log2 (size) >= 0
      && !s390_function_arg_float (mode, type))
    return true;

  return false;
}
/* Return 1 if a function argument of type TYPE and mode MODE
   is to be passed by reference.  The ABI specifies that only
   structures of size 1, 2, 4, or 8 bytes are passed by value,
   all other structures (and complex numbers) are passed by
   reference.  */

static bool
s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
			enum machine_mode mode, const_tree type,
			bool named ATTRIBUTE_UNUSED)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return true;

  if (type)
    {
      if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
	return 1;

      if (TREE_CODE (type) == COMPLEX_TYPE
	  || TREE_CODE (type) == VECTOR_TYPE)
	return 1;
    }

  return 0;
}
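
/* Size examples (illustrative): a struct of 3 bytes has no exact
   power-of-two size and is passed by reference; a struct of exactly
   8 bytes is passed by value in a GPR (or GPR pair on 31 bit); any
   complex or vector type is passed by reference regardless of size.  */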
/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.).  The boolean NAMED specifies whether the
   argument is a named argument (as opposed to an unnamed argument
   matching an ellipsis).  */

static void
s390_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    cum->fprs += 1;
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      cum->gprs += ((size + UNITS_PER_LONG - 1) / UNITS_PER_LONG);
    }
  else
    gcc_unreachable ();
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On S/390, we use general purpose registers 2 through 6 to
   pass integer, pointer, and certain structure arguments, and
   floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
   to pass floating point arguments.  All remaining arguments
   are pushed to the stack.  */

static rtx
s390_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    {
      if (cum->fprs + 1 > FP_ARG_NUM_REG)
	return 0;
      else
	return gen_rtx_REG (mode, cum->fprs + 16);
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      int n_gprs = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
	return 0;
      else if (n_gprs == 1 || UNITS_PER_WORD == UNITS_PER_LONG)
	return gen_rtx_REG (mode, cum->gprs + 2);
      else if (n_gprs == 2)
	{
	  rtvec p = rtvec_alloc (2);

	  RTVEC_ELT (p, 0)
	    = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 2),
				 const0_rtx);
	  RTVEC_ELT (p, 1)
	    = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 3),
				 GEN_INT (4));

	  return gen_rtx_PARALLEL (mode, p);
	}
    }

  /* After the real arguments, expand_call calls us once again
     with a void_type_node type.  Whatever we return here is
     passed as operand 2 to the call expanders.

     We don't need this feature ...  */
  else if (type == void_type_node)
    return const0_rtx;

  gcc_unreachable ();
}
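
/* Assignment example (illustrative, 31 bit, hard float): for
   f (int a, double b, long long c), A is passed in %r2, B in %f0,
   and the 8-byte C starts at %r3, occupying the %r3/%r4 pair; under
   -m31 -mzarch that pair is exactly what the PARALLEL case above
   describes.  Arguments that no longer fit in %r2-%r6 or the FP
   argument registers are pushed to the stack.  */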
/* Return true if return values of type TYPE should be returned
   in a memory buffer whose address is passed by the caller as
   hidden first argument.  */

static bool
s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
{
  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || TREE_CODE (type) == REAL_TYPE)
    return int_size_in_bytes (type) > 8;

  /* Aggregates and similar constructs are always returned
     in memory.  */
  if (AGGREGATE_TYPE_P (type)
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    return true;

  /* ??? We get called on all sorts of random stuff from
     aggregate_value_p.  We can't abort, but it's not clear
     what's safe to return.  Pretend it's a struct I guess.  */
  return true;
}
/* Function arguments and return values are promoted to word size.  */

static enum machine_mode
s390_promote_function_mode (const_tree type, enum machine_mode mode,
			    int *punsignedp,
			    const_tree fntype ATTRIBUTE_UNUSED,
			    int for_return ATTRIBUTE_UNUSED)
{
  if (INTEGRAL_MODE_P (mode)
      && GET_MODE_SIZE (mode) < UNITS_PER_LONG)
    {
      if (type != NULL_TREE && POINTER_TYPE_P (type))
	*punsignedp = POINTERS_EXTEND_UNSIGNED;
      return Pmode;
    }

  return mode;
}
/* Define where to return a (scalar) value of type RET_TYPE.
   If RET_TYPE is null, define where to return a (scalar)
   value of mode MODE from a libcall.  */

static rtx
s390_function_and_libcall_value (enum machine_mode mode,
				 const_tree ret_type,
				 const_tree fntype_or_decl,
				 bool outgoing ATTRIBUTE_UNUSED)
{
  /* For normal functions perform the promotion as
     promote_function_mode would do.  */
  if (ret_type)
    {
      int unsignedp = TYPE_UNSIGNED (ret_type);
      mode = promote_function_mode (ret_type, mode, &unsignedp,
				    fntype_or_decl, 1);
    }

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
  gcc_assert (GET_MODE_SIZE (mode) <= 8);

  if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
    return gen_rtx_REG (mode, 16);
  else if (GET_MODE_SIZE (mode) <= UNITS_PER_LONG
	   || UNITS_PER_LONG == UNITS_PER_WORD)
    return gen_rtx_REG (mode, 2);
  else if (GET_MODE_SIZE (mode) == 2 * UNITS_PER_LONG)
    {
      /* This case is triggered when returning a 64 bit value with
	 -m31 -mzarch.  Although the value would fit into a single
	 register it has to be forced into a 32 bit register pair in
	 order to match the ABI.  */
      rtvec p = rtvec_alloc (2);

      RTVEC_ELT (p, 0)
	= gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
      RTVEC_ELT (p, 1)
	= gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 3), GEN_INT (4));

      return gen_rtx_PARALLEL (mode, p);
    }

  gcc_unreachable ();
}
/* Define where to return a scalar return value of type RET_TYPE.  */

static rtx
s390_function_value (const_tree ret_type, const_tree fn_decl_or_type,
		     bool outgoing)
{
  return s390_function_and_libcall_value (TYPE_MODE (ret_type), ret_type,
					  fn_decl_or_type, outgoing);
}

/* Define where to return a scalar libcall return value of mode
   MODE.  */

static rtx
s390_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return s390_function_and_libcall_value (mode, NULL_TREE,
					  NULL_TREE, true);
}
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
	{
	    long __gpr;
	    long __fpr;
	    void *__overflow_arg_area;
	    void *__reg_save_area;
	} va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

static tree
s390_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl =
    build_decl (BUILTINS_LOCATION,
		TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  f_fpr = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  f_ovf = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  DECL_CHAIN (f_gpr) = f_fpr;
  DECL_CHAIN (f_fpr) = f_ovf;
  DECL_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     crtl->args.info:
       holds number of gprs and fprs used for named arguments.
     crtl->args.arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

static void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_simple_mem_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = crtl->args.info.gprs;
  n_fpr = crtl->args.info.fprs;

  if (cfun->va_list_gpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
		  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
		  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
      || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
    {
      t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

      off = INTVAL (crtl->args.arg_offset_rtx);
      off = off < 0 ? 0 : off;
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
		 (int)n_gpr, (int)n_fpr, off);

      t = fold_build_pointer_plus_hwi (t, off);

      t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the register save area.  */
  if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
      || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
    {
      t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
      t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);

      t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
}
9074 /* Implement va_arg by updating the va_list structure
9075 VALIST as required to retrieve an argument of type
9076 TYPE, and returning that argument.
9078 Generates code equivalent to:
9080 if (integral value) {
9081 if (size <= 4 && args.gpr < 5 ||
9082 size > 4 && args.gpr < 4 )
9083 ret = args.reg_save_area[args.gpr+8]
9085 ret = *args.overflow_arg_area++;
9086 } else if (float value) {
9088 ret = args.reg_save_area[args.fpr+64]
9090 ret = *args.overflow_arg_area++;
9091 } else if (aggregate value) {
9093 ret = *args.reg_save_area[args.gpr]
9095 ret = **args.overflow_arg_area++;
static tree
s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  tree lab_false, lab_over, addr;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* The tree for args* cannot be shared between gpr/fpr and ovf since
     both appear on a lhs.  */
  valist = unshare_expr (valist);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);

  size = int_size_in_bytes (type);

  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: aggregate type");
          debug_tree (type);
        }

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
         will be added by s390_frame_info because for va_args always an even
         number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;
      sav_scale = UNITS_PER_LONG;
      size = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: float type");
          debug_tree (type);
        }

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_LONG;
      sav_scale = 8;
      max_reg = FP_ARG_NUM_REG - n_reg;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "va_arg: other type");
          debug_tree (type);
        }

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
         will be added by s390_frame_info because for va_args always an even
         number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;

      if (size < UNITS_PER_LONG)
        sav_ofs += UNITS_PER_LONG - size;

      sav_scale = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = create_artificial_label (UNKNOWN_LOCATION);
  lab_over = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, "addr");

  t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
  t = build2 (GT_EXPR, boolean_type_node, reg, t);
  u = build1 (GOTO_EXPR, void_type_node, lab_false);
  t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
  gimplify_and_add (t, pre_p);

  t = fold_build_pointer_plus_hwi (sav, sav_ofs);
  u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
              fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
  t = fold_build_pointer_plus (t, u);

  gimplify_assign (addr, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));

  /* ... Otherwise out of the overflow area.  */

  t = ovf;
  if (size < UNITS_PER_LONG)
    t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);

  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  gimplify_assign (addr, t, pre_p);

  t = fold_build_pointer_plus_hwi (t, size);
  gimplify_assign (ovf, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));

  /* Increment register save count.  */

  u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
              fold_convert (TREE_TYPE (reg), size_int (n_reg)));
  gimplify_and_add (u, pre_p);

  if (indirect_p)
    {
      t = build_pointer_type_for_mode (build_pointer_type (type),
                                       ptr_mode, true);
      addr = fold_convert (t, addr);
      addr = build_va_arg_indirect_ref (addr);
    }
  else
    {
      t = build_pointer_type_for_mode (type, ptr_mode, true);
      addr = fold_convert (t, addr);
    }

  return build_va_arg_indirect_ref (addr);
}
/* Builtins.  */

enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};
static enum insn_code const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

static enum insn_code const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};
static void
s390_init_builtins (void)
{
  tree ftype;

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_thread_pointer", ftype,
                        S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
                        NULL, NULL_TREE);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_set_thread_pointer", ftype,
                        S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
                        NULL, NULL_TREE);
}
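
/* Usage sketch (illustrative): the two machine-specific builtins
   registered above can be called directly from C code on S/390:

       void *tp = __builtin_thread_pointer ();
       __builtin_set_thread_pointer (tp);

   Their expansion to the corresponding thread-pointer insn patterns is
   handled by s390_expand_builtin below.  */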
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2

  enum insn_code const *code_for_builtin =
    TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
  tree arg;
  call_expr_arg_iterator iter;

  if (fcode >= S390_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = code_for_builtin[fcode];
  if (icode == 0)
    internal_error ("bad builtin fcode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity > MAX_ARGS)
        return NULL_RTX;

      insn_op = &insn_data[icode].operand[arity + nonvoid];

      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[arity], insn_op->mode))
        op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
      arity++;
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
/* Output assembly code for the trampoline template to
   stdio stream FILE.

   On S/390, we use gpr 1 internally in the trampoline code;
   gpr 0 is used to hold the static chain.  */

static void
s390_asm_trampoline_template (FILE *file)
{
  rtx op[2];
  op[0] = gen_rtx_REG (Pmode, 0);
  op[1] = gen_rtx_REG (Pmode, 1);

  if (TARGET_64BIT)
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lmg\t%0,%1,14(%1)", op);  /* 6 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
    }
  else
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lm\t%0,%1,6(%1)", op);    /* 4 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
    }
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

static void
s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (2 * UNITS_PER_LONG), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, Pmode, 2 * UNITS_PER_LONG);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, Pmode, 3 * UNITS_PER_LONG);
  emit_move_insn (mem, fnaddr);
}
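
/* Resulting trampoline layout (illustrative sketch; offsets shown for
   the 64-bit case, where UNITS_PER_LONG == 8):

       0:   basr %r1,0            copied from the template above
       2:   lmg  %r0,%r1,14(%r1)  14(%r1) == trampoline address + 16
       8:   br   %r1
       16:  <static chain>        stored by the first emit_move_insn
       24:  <function address>    stored by the second emit_move_insn

   so the lmg picks up the chain into %r0 and the target address into %r1
   from the two words written at 2 and 3 * UNITS_PER_LONG.  */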
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (Pmode, op[1], UNITS_PER_LONG));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
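
/* For reference, a sketch of the sequence emitted by the 64-bit branch
   above (label name illustrative):

       stg   %r14,8(%r15)    save the return address
       larl  %r1,.LP0        address of the count label
       brasl %r14,_mcount    call the profiler (via PLT when PIC)
       lg    %r14,8(%r15)    restore the return address  */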
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (TREE_CODE (decl) == VAR_DECL)
    {
      /* If a variable has a forced alignment to < 2 bytes, mark it
         with SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL
         operand.  */
      if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
      if (!DECL_SIZE (decl)
          || !DECL_ALIGN (decl)
          || !host_integerp (DECL_SIZE (decl), 0)
          || (DECL_ALIGN (decl) <= 64
              && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
    }

  /* Literal pool references don't have a decl so they are handled
     differently here.  We rely on the information in the MEM_ALIGN
     entry to decide upon natural alignment.  */
  if (MEM_P (rtl)
      && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
      && (MEM_ALIGN (rtl) == 0
          || GET_MODE_BITSIZE (GET_MODE (rtl)) == 0
          || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
}
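
/* Example of the first check (illustrative): for a declaration like

       char c __attribute__ ((aligned (1)));

   DECL_ALIGN is 8 bits, below the 16-bit threshold, so the symbol is
   flagged SYMBOL_FLAG_ALIGN1 and will never be used as a LARL operand,
   LARL being able to produce even addresses only.  */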
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Make sure unwind info is emitted for the thunk if needed.  */
  final_start_function (emit_barrier (), file, 1);

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_K (delta)
           && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("aghi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("agfi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("lgfi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Setup base pointer if required.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (delta)
              && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("ahi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("afi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_J (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("iilf\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-setup the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  final_end_function ();
}
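
/* Illustrative output for the common 64-bit case of a small positive
   DELTA and no vcall offset (target name symbolic):

       la %r2,<delta>(%r2)   adjust the incoming this pointer
       jg <function>         tail jump to the target method  */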
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
/* Checks whether the given CALL_EXPR would use a caller
   saved register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree call_expr)
{
  CUMULATIVE_ARGS cum_v;
  cumulative_args_t cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg, i;

  INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
  cum = pack_cumulative_args (&cum_v);

  for (i = 0; i < call_expr_nargs (call_expr); i++)
    {
      parameter = CALL_EXPR_ARG (call_expr, i);
      gcc_assert (parameter);

      /* For an undeclared variable passed as parameter we will get
         an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
        return true;

      type = TREE_TYPE (parameter);
      gcc_assert (type);

      mode = TYPE_MODE (type);
      gcc_assert (mode);

      if (pass_by_reference (&cum_v, mode, type, true))
        {
          mode = Pmode;
          type = build_pointer_type (type);
        }

      parm_rtx = s390_function_arg (cum, mode, type, 0);

      s390_function_arg_advance (cum, mode, type, 0);

      if (!parm_rtx)
        continue;

      if (REG_P (parm_rtx))
        {
          for (reg = 0;
               reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
               reg++)
            if (!call_used_regs[reg + REGNO (parm_rtx)])
              return true;
        }

      if (GET_CODE (parm_rtx) == PARALLEL)
        {
          int i;

          for (i = 0; i < XVECLEN (parm_rtx, 0); i++)
            {
              rtx r = XEXP (XVECEXP (parm_rtx, 0, i), 0);

              gcc_assert (REG_P (r));

              for (reg = 0;
                   reg < HARD_REGNO_NREGS (REGNO (r), GET_MODE (r));
                   reg++)
                if (!call_used_regs[reg + REGNO (r)])
                  return true;
            }
        }
    }
  return false;
}
/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but unfortunately
     "caller saved".  This makes functions needing this register for arguments
     not suitable for sibcalls.  */
  return !s390_call_saved_register_used (exp);
}
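
/* Illustrative consequence: since integer arguments are passed in
   %r2-%r6 and %r6 is call-saved, a call passing five register arguments
   occupies %r6 and is rejected here, while an otherwise identical
   four-argument call may still become a sibcall.  */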
/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}
/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          if (retaddr_reg != NULL_RTX)
            {
              addr_location = gen_rtx_UNSPEC (Pmode,
                                              gen_rtvec (1, addr_location),
                                              UNSPEC_PLT);
              addr_location = gen_rtx_CONST (Pmode, addr_location);
              plt_call = true;
            }
          else
            /* For -fpic code the PLT entries might use r12 which is
               call-saved.  Therefore we cannot do a sibcall when
               calling directly using a symbol ref.  When reaching
               this point we decided (in s390_function_ok_for_sibcall)
               to do a sibcall for a function pointer but one of the
               optimizers was able to get rid of the function pointer
               by propagating the symbol ref into the call.  This
               optimization is illegal for S/390 so we turn the direct
               call into a indirect call again.  */
            addr_location = force_reg (Pmode, addr_location);
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);
      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, 12));
    }
  return insn;
}
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */

static void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}
/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;
void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}
/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
          || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
              && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_save_gpr != -1
              && (cfun_frame_layout.first_save_gpr < first
                  || cfun_frame_layout.last_save_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_save_gpr != -1)
            {
              new_insn = save_gprs (base,
                                    off + (cfun_frame_layout.first_save_gpr
                                           - first) * UNITS_PER_LONG,
                                    cfun_frame_layout.first_save_gpr,
                                    cfun_frame_layout.last_save_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_save_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_SRC (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_restore_gpr != -1
              && (cfun_frame_layout.first_restore_gpr < first
                  || cfun_frame_layout.last_restore_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_restore_gpr != -1)
            {
              new_insn = restore_gprs (base,
                                       off + (cfun_frame_layout.first_restore_gpr
                                              - first) * UNITS_PER_LONG,
                                       cfun_frame_layout.first_restore_gpr,
                                       cfun_frame_layout.last_restore_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_restore_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_DEST (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }
    }
}
/* On z10 and later the dynamic branch prediction must see the
   backward jump within a certain window.  If not it falls back to
   the static prediction.  This function rearranges the loop backward
   branch in a way which makes the static prediction always correct.
   The function returns true if it added an instruction.  */
static bool
s390_fix_long_loop_prediction (rtx insn)
{
  rtx set = single_set (insn);
  rtx code_label, label_ref, new_label;
  rtx uncond_jump;
  rtx cur_insn;
  rtx tmp;
  int distance;

  /* This will exclude branch on count and branch on index patterns
     since these are correctly statically predicted.  */
  if (!set
      || SET_DEST (set) != pc_rtx
      || GET_CODE (SET_SRC(set)) != IF_THEN_ELSE)
    return false;

  label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
               XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));

  gcc_assert (GET_CODE (label_ref) == LABEL_REF);

  code_label = XEXP (label_ref, 0);

  if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
      || INSN_ADDRESSES (INSN_UID (insn)) == -1
      || (INSN_ADDRESSES (INSN_UID (insn))
          - INSN_ADDRESSES (INSN_UID (code_label)) < PREDICT_DISTANCE))
    return false;

  for (distance = 0, cur_insn = PREV_INSN (insn);
       distance < PREDICT_DISTANCE - 6;
       distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
    if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
      return false;

  new_label = gen_label_rtx ();
  uncond_jump = emit_jump_insn_after (
                  gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_LABEL_REF (VOIDmode, code_label)),
                  insn);
  emit_label_after (new_label, uncond_jump);

  tmp = XEXP (SET_SRC (set), 1);
  XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
  XEXP (SET_SRC (set), 2) = tmp;
  INSN_CODE (insn) = -1;

  XEXP (label_ref, 0) = new_label;
  JUMP_LABEL (insn) = new_label;
  JUMP_LABEL (uncond_jump) = code_label;

  return true;
}
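
/* Sketch of the transformation (illustrative):

     before:                     after:
       L1: ...                     L1: ...
           ...                         ...
           jne L1                      je  L2
                                       j   L1
                                   L2:

   The conditional branch becomes a forward branch, which the static
   predictor assumes not taken, and the unconditional jump now carries
   the backward edge, so the fall-back prediction is always right.  */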
/* Returns 1 if INSN reads the value of REG for purposes not related
   to addressing of memory, and 0 otherwise.  */
static int
s390_non_addr_reg_read_p (rtx reg, rtx insn)
{
  return reg_referenced_p (reg, PATTERN (insn))
         && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
}
/* Starting from INSN find_cond_jump looks downwards in the insn
   stream for a single jump insn which is the last user of the
   condition code set in INSN.  */
static rtx
find_cond_jump (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    {
      rtx ite, cc;

      if (LABEL_P (insn))
        break;

      if (!JUMP_P (insn))
        {
          if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
            break;
          continue;
        }

      /* This will be triggered by a return.  */
      if (GET_CODE (PATTERN (insn)) != SET)
        break;

      gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
      ite = SET_SRC (PATTERN (insn));

      if (GET_CODE (ite) != IF_THEN_ELSE)
        break;

      cc = XEXP (XEXP (ite, 0), 0);
      if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
        break;

      if (find_reg_note (insn, REG_DEAD, cc))
        return insn;
      break;
    }

  return NULL_RTX;
}
/* Swap the condition in COND and the operands in OP0 and OP1 so that
   the semantics does not change.  If NULL_RTX is passed as COND the
   function tries to find the conditional jump starting with INSN.  */
static void
s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx insn)
{
  rtx tmp = *op0;

  if (cond == NULL_RTX)
    {
      rtx jump = find_cond_jump (NEXT_INSN (insn));
      jump = jump ? single_set (jump) : NULL_RTX;

      if (jump == NULL_RTX)
        return;

      cond = XEXP (XEXP (jump, 1), 0);
    }

  *op0 = *op1;
  *op1 = tmp;

  PUT_CODE (cond, swap_condition (GET_CODE (cond)));
}
/* On z10, instructions of the compare-and-branch family have the
   property to access the register occurring as second operand with
   its bits complemented.  If such a compare is grouped with a second
   instruction that accesses the same register non-complemented, and
   if that register's value is delivered via a bypass, then the
   pipeline recycles, thereby causing significant performance decline.
   This function locates such situations and exchanges the two
   operands of the compare.  The function returns true whenever it
   added an insn.  */
static bool
s390_z10_optimize_cmp (rtx insn)
{
  rtx prev_insn, next_insn;
  bool insn_added_p = false;
  rtx cond, *op0, *op1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      /* Handle compare and branch and branch on count
         instructions.  */
      rtx pattern = single_set (insn);

      if (!pattern
          || SET_DEST (pattern) != pc_rtx
          || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
        return false;

      cond = XEXP (SET_SRC (pattern), 0);
      op0 = &XEXP (cond, 0);
      op1 = &XEXP (cond, 1);
    }
  else if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx src, dest;

      /* Handle normal compare instructions.  */
      src = SET_SRC (PATTERN (insn));
      dest = SET_DEST (PATTERN (insn));

      if (!REG_P (dest)
          || !CC_REGNO_P (REGNO (dest))
          || GET_CODE (src) != COMPARE)
        return false;

      /* s390_swap_cmp will try to find the conditional
         jump when passing NULL_RTX as condition.  */
      cond = NULL_RTX;
      op0 = &XEXP (src, 0);
      op1 = &XEXP (src, 1);
    }
  else
    return false;

  if (!REG_P (*op0) || !REG_P (*op1))
    return false;

  if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
    return false;

  /* Swap the COMPARE arguments and its mask if there is a
     conflicting access in the previous insn.  */
  prev_insn = prev_active_insn (insn);
  if (prev_insn != NULL_RTX && INSN_P (prev_insn)
      && reg_referenced_p (*op1, PATTERN (prev_insn)))
    s390_swap_cmp (cond, op0, op1, insn);

  /* Check if there is a conflict with the next insn.  If there
     was no conflict with the previous insn, then swap the
     COMPARE arguments and its mask.  If we already swapped
     the operands, or if swapping them would cause a conflict
     with the previous insn, issue a NOP after the COMPARE in
     order to separate the two instructions.  */
  next_insn = next_active_insn (insn);
  if (next_insn != NULL_RTX && INSN_P (next_insn)
      && s390_non_addr_reg_read_p (*op1, next_insn))
    {
      if (prev_insn != NULL_RTX && INSN_P (prev_insn)
          && s390_non_addr_reg_read_p (*op0, prev_insn))
        {
          if (REGNO (*op1) == 0)
            emit_insn_after (gen_nop1 (), insn);
          else
            emit_insn_after (gen_nop (), insn);
          insn_added_p = true;
        }
      else
        s390_swap_cmp (cond, op0, op1, insn);
    }
  return insn_added_p;
}
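
/* Illustrative scenario (assumed register numbers): given

       cr  %r2,%r3    compare reads %r3 with complemented bits
       ar  %r5,%r3    next insn reads %r3 non-complemented

   the operands are swapped to "cr %r3,%r2" (with the branch condition
   swapped accordingly), or, when swapping would instead conflict with
   the previous insn, a nop is emitted between the two.  */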
/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if (pool_overflow)
        s390_chunkify_finish (pool);
      else
        s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          label = s390_execute_label (insn);
          if (!label)
            continue;

          gcc_assert (label != const0_rtx);

          target = emit_label (XEXP (label, 0));
          INSN_ADDRESSES_NEW (target, -1);

          target = emit_insn (s390_execute_target (insn));
          INSN_ADDRESSES_NEW (target, -1);
        }
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();

  /* Walk over the insns and do some >=z10 specific changes.  */
  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      rtx insn;
      bool insn_added_p = false;

      /* The insn lengths and addresses have to be up to date for the
         following manipulations.  */
      shorten_branches (get_insns ());

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
            continue;

          if (JUMP_P (insn))
            insn_added_p |= s390_fix_long_loop_prediction (insn);

          if ((GET_CODE (PATTERN (insn)) == PARALLEL
               || GET_CODE (PATTERN (insn)) == SET)
              && s390_tune == PROCESSOR_2097_Z10)
            insn_added_p |= s390_z10_optimize_cmp (insn);
        }

      /* Adjust branches if we added new instructions.  */
      if (insn_added_p)
        shorten_branches (get_insns ());
    }
}
/* Return true if INSN is a fp load insn writing register REGNO.  */
static inline bool
s390_fpload_toreg (rtx insn, unsigned int regno)
{
  rtx set;
  enum attr_type flag = s390_safe_attr_type (insn);

  if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
    return false;

  set = single_set (insn);

  if (set == NULL_RTX)
    return false;

  if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
    return false;

  if (REGNO (SET_DEST (set)) != regno)
    return false;

  return true;
}
/* This value describes the distance to be avoided between an
   arithmetic fp instruction and an fp load writing the same register.
   Z10_EARLYLOAD_DISTANCE - 1 as well as Z10_EARLYLOAD_DISTANCE + 1 is
   fine but the exact value has to be avoided.  Otherwise the FP
   pipeline will throw an exception causing a major penalty.  */
#define Z10_EARLYLOAD_DISTANCE 7
/* Rearrange the ready list in order to avoid the situation described
   for Z10_EARLYLOAD_DISTANCE.  A problematic load instruction is
   moved to the very end of the ready list.  */
static void
s390_z10_prevent_earlyload_conflicts (rtx *ready, int *nready_p)
{
  unsigned int regno;
  int nready = *nready_p;
  rtx tmp;
  int i;
  rtx insn;
  rtx set;
  enum attr_type flag;
  int distance;

  /* Skip DISTANCE - 1 active insns.  */
  for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
       distance > 0 && insn != NULL_RTX;
       distance--, insn = prev_active_insn (insn))
    if (CALL_P (insn) || JUMP_P (insn))
      return;

  if (insn == NULL_RTX)
    return;

  set = single_set (insn);

  if (set == NULL_RTX || !REG_P (SET_DEST (set))
      || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
    return;

  flag = s390_safe_attr_type (insn);

  if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
    return;

  regno = REGNO (SET_DEST (set));
  i = nready - 1;

  while (!s390_fpload_toreg (ready[i], regno) && i > 0)
    i--;

  if (!i)
    return;

  tmp = ready[i];
  memmove (&ready[1], &ready[0], sizeof (rtx) * i);
  ready[0] = tmp;
}
/* This function is called via hook TARGET_SCHED_REORDER before
   issuing one insn from list READY which contains *NREADYP entries.
   For target z10 it reorders load instructions to avoid early load
   conflicts in the floating point pipeline.  */
static int
s390_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
                    rtx *ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
{
  if (s390_tune == PROCESSOR_2097_Z10)
    if (reload_completed && *nreadyp > 1)
      s390_z10_prevent_earlyload_conflicts (ready, nreadyp);

  return s390_issue_rate ();
}
/* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
   the scheduler has issued INSN.  It stores the last issued insn into
   last_scheduled_insn in order to make it available for
   s390_sched_reorder.  */
static int
s390_sched_variable_issue (FILE *file ATTRIBUTE_UNUSED,
                           int verbose ATTRIBUTE_UNUSED,
                           rtx insn, int more)
{
  last_scheduled_insn = insn;

  if (GET_CODE (PATTERN (insn)) != USE
      && GET_CODE (PATTERN (insn)) != CLOBBER)
    return more - 1;
  else
    return more;
}
static void
s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
                 int verbose ATTRIBUTE_UNUSED,
                 int max_ready ATTRIBUTE_UNUSED)
{
  last_scheduled_insn = NULL_RTX;
}
/* This function checks the whole of insn X for memory references.  The
   function always returns zero because the framework it is called
   from would stop recursively analyzing the insn upon a return value
   other than zero.  The real result of this function is updating
   counter variable MEM_COUNT.  */
static int
check_dpu (rtx *x, unsigned *mem_count)
{
  if (*x != NULL_RTX && MEM_P (*x))
    (*mem_count)++;
  return 0;
}
/* This target hook implementation for TARGET_LOOP_UNROLL_ADJUST calculates
   a new number struct loop *loop should be unrolled if tuned for cpus with
   a built-in stride prefetcher.
   The loop is analyzed for memory accesses by calling check_dpu for
   each rtx of the loop.  Depending on the loop_depth and the amount of
   memory accesses a new number <= nunroll is returned to improve the
   behaviour of the hardware prefetch unit.  */
static unsigned
s390_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
{
  basic_block *bbs;
  rtx insn;
  unsigned i;
  unsigned mem_count = 0;

  if (s390_tune != PROCESSOR_2097_Z10 && s390_tune != PROCESSOR_2817_Z196)
    return nunroll;

  /* Count the number of memory references within the loop body.  */
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    {
      for (insn = BB_HEAD (bbs[i]); insn != BB_END (bbs[i]); insn = NEXT_INSN (insn))
        if (INSN_P (insn) && INSN_CODE (insn) != -1)
          for_each_rtx (&insn, (rtx_function) check_dpu, &mem_count);
    }
  free (bbs);

  /* Prevent division by zero, and we do not need to adjust nunroll in this case.  */
  if (mem_count == 0)
    return nunroll;

  switch (loop_depth(loop))
    {
    case 1:
      return MIN (nunroll, 28 / mem_count);
    case 2:
      return MIN (nunroll, 22 / mem_count);
    default:
      return MIN (nunroll, 16 / mem_count);
    }
}
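
/* Worked example (illustrative): a depth-1 loop body containing 7
   memory references with a requested unroll factor of 8 yields
   MIN (8, 28 / 7) = 4, keeping the number of concurrently active
   memory streams within reach of the stride prefetcher.  */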
/* Initialize GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE s390_option_override

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA s390_output_addr_const_extra

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER s390_sched_reorder
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT s390_sched_init

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST s390_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST s390_memory_move_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG s390_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE s390_function_arg_advance
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE s390_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE s390_libcall_value

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null

#ifdef ASM_OUTPUT_DWARF_DTPREL
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS s390_preferred_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P s390_legitimate_constant_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE s390_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE s390_conditional_register_usage

#undef TARGET_LOOP_UNROLL_ADJUST
#define TARGET_LOOP_UNROLL_ADJUST s390_loop_unroll_adjust

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT s390_trampoline_init

#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE s390_unwind_word_mode

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"