/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com) and
                  Andreas Krebbel (Andreas.Krebbel@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "diagnostic-core.h"
#include "basic-block.h"
#include "target-def.h"
#include "langhooks.h"
/* Define the specific costs for a given cpu.  */

struct processor_costs
{
  /* multiplication */
  const int m;        /* cost of an M instruction.  */
  const int mghi;     /* cost of an MGHI instruction.  */
  const int mh;       /* cost of an MH instruction.  */
  const int mhi;      /* cost of an MHI instruction.  */
  const int ml;       /* cost of an ML instruction.  */
  const int mr;       /* cost of an MR instruction.  */
  const int ms;       /* cost of an MS instruction.  */
  const int msg;      /* cost of an MSG instruction.  */
  const int msgf;     /* cost of an MSGF instruction.  */
  const int msgfr;    /* cost of an MSGFR instruction.  */
  const int msgr;     /* cost of an MSGR instruction.  */
  const int msr;      /* cost of an MSR instruction.  */
  const int mult_df;  /* cost of multiplication in DFmode.  */
  const int mxbr;     /* cost of an MXBR instruction.  */
  /* square root */
  const int sqxbr;    /* cost of square root in TFmode.  */
  const int sqdbr;    /* cost of square root in DFmode.  */
  const int sqebr;    /* cost of square root in SFmode.  */
  /* multiply and add */
  const int madbr;    /* cost of multiply and add in DFmode.  */
  const int maebr;    /* cost of multiply and add in SFmode.  */
  /* division */
  const int dxbr;
  const int ddbr;
  const int debr;
  const int dlgr;
  const int dlr;
  const int dr;
  const int dsgfr;
  const int dsgr;
};
const struct processor_costs *s390_cost;
static const
struct processor_costs z900_cost =
{
  COSTS_N_INSNS (5),     /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (4),     /* MHI   */
  COSTS_N_INSNS (5),     /* ML    */
  COSTS_N_INSNS (5),     /* MR    */
  COSTS_N_INSNS (4),     /* MS    */
  COSTS_N_INSNS (15),    /* MSG   */
  COSTS_N_INSNS (7),     /* MSGF  */
  COSTS_N_INSNS (7),     /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (7),     /* multiplication in DFmode */
  COSTS_N_INSNS (13),    /* MXBR */
  COSTS_N_INSNS (136),   /* SQXBR */
  COSTS_N_INSNS (44),    /* SQDBR */
  COSTS_N_INSNS (35),    /* SQEBR */
  COSTS_N_INSNS (18),    /* MADBR */
  COSTS_N_INSNS (13),    /* MAEBR */
  COSTS_N_INSNS (134),   /* DXBR */
  COSTS_N_INSNS (30),    /* DDBR */
  COSTS_N_INSNS (27),    /* DEBR */
  COSTS_N_INSNS (220),   /* DLGR */
  COSTS_N_INSNS (34),    /* DLR */
  COSTS_N_INSNS (34),    /* DR */
  COSTS_N_INSNS (32),    /* DSGFR */
  COSTS_N_INSNS (32),    /* DSGR */
};
static const
struct processor_costs z990_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (176),   /* DLGR */
  COSTS_N_INSNS (31),    /* DLR */
  COSTS_N_INSNS (31),    /* DR */
  COSTS_N_INSNS (31),    /* DSGFR */
  COSTS_N_INSNS (31),    /* DSGR */
};
static const
struct processor_costs z9_109_cost =
{
  COSTS_N_INSNS (4),     /* M     */
  COSTS_N_INSNS (2),     /* MGHI  */
  COSTS_N_INSNS (2),     /* MH    */
  COSTS_N_INSNS (2),     /* MHI   */
  COSTS_N_INSNS (4),     /* ML    */
  COSTS_N_INSNS (4),     /* MR    */
  COSTS_N_INSNS (5),     /* MS    */
  COSTS_N_INSNS (6),     /* MSG   */
  COSTS_N_INSNS (4),     /* MSGF  */
  COSTS_N_INSNS (4),     /* MSGFR */
  COSTS_N_INSNS (4),     /* MSGR  */
  COSTS_N_INSNS (4),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (28),    /* MXBR */
  COSTS_N_INSNS (130),   /* SQXBR */
  COSTS_N_INSNS (66),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (60),    /* DXBR */
  COSTS_N_INSNS (40),    /* DDBR */
  COSTS_N_INSNS (26),    /* DEBR */
  COSTS_N_INSNS (30),    /* DLGR */
  COSTS_N_INSNS (23),    /* DLR */
  COSTS_N_INSNS (23),    /* DR */
  COSTS_N_INSNS (24),    /* DSGFR */
  COSTS_N_INSNS (24),    /* DSGR */
};
static const
struct processor_costs z10_cost =
{
  COSTS_N_INSNS (10),    /* M     */
  COSTS_N_INSNS (10),    /* MGHI  */
  COSTS_N_INSNS (10),    /* MH    */
  COSTS_N_INSNS (10),    /* MHI   */
  COSTS_N_INSNS (10),    /* ML    */
  COSTS_N_INSNS (10),    /* MR    */
  COSTS_N_INSNS (10),    /* MS    */
  COSTS_N_INSNS (10),    /* MSG   */
  COSTS_N_INSNS (10),    /* MSGF  */
  COSTS_N_INSNS (10),    /* MSGFR */
  COSTS_N_INSNS (10),    /* MSGR  */
  COSTS_N_INSNS (10),    /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (50),    /* MXBR */
  COSTS_N_INSNS (120),   /* SQXBR */
  COSTS_N_INSNS (52),    /* SQDBR */
  COSTS_N_INSNS (38),    /* SQEBR */
  COSTS_N_INSNS (1),     /* MADBR */
  COSTS_N_INSNS (1),     /* MAEBR */
  COSTS_N_INSNS (111),   /* DXBR */
  COSTS_N_INSNS (39),    /* DDBR */
  COSTS_N_INSNS (32),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR */
  COSTS_N_INSNS (71),    /* DLR */
  COSTS_N_INSNS (71),    /* DR */
  COSTS_N_INSNS (71),    /* DSGFR */
  COSTS_N_INSNS (71),    /* DSGR */
};
static const
struct processor_costs z196_cost =
{
  COSTS_N_INSNS (7),     /* M     */
  COSTS_N_INSNS (5),     /* MGHI  */
  COSTS_N_INSNS (5),     /* MH    */
  COSTS_N_INSNS (5),     /* MHI   */
  COSTS_N_INSNS (7),     /* ML    */
  COSTS_N_INSNS (7),     /* MR    */
  COSTS_N_INSNS (6),     /* MS    */
  COSTS_N_INSNS (8),     /* MSG   */
  COSTS_N_INSNS (6),     /* MSGF  */
  COSTS_N_INSNS (6),     /* MSGFR */
  COSTS_N_INSNS (8),     /* MSGR  */
  COSTS_N_INSNS (6),     /* MSR   */
  COSTS_N_INSNS (1),     /* multiplication in DFmode */
  COSTS_N_INSNS (40),    /* MXBR B+40 */
  COSTS_N_INSNS (100),   /* SQXBR B+100 */
  COSTS_N_INSNS (42),    /* SQDBR B+42 */
  COSTS_N_INSNS (28),    /* SQEBR B+28 */
  COSTS_N_INSNS (1),     /* MADBR B */
  COSTS_N_INSNS (1),     /* MAEBR B */
  COSTS_N_INSNS (101),   /* DXBR B+101 */
  COSTS_N_INSNS (29),    /* DDBR */
  COSTS_N_INSNS (22),    /* DEBR */
  COSTS_N_INSNS (160),   /* DLGR cracked */
  COSTS_N_INSNS (160),   /* DLR cracked */
  COSTS_N_INSNS (160),   /* DR expanded */
  COSTS_N_INSNS (160),   /* DSGFR cracked */
  COSTS_N_INSNS (160),   /* DSGR cracked */
};
extern int reload_completed;

/* Kept up to date using the SCHED_VARIABLE_ISSUE hook.  */
static rtx last_scheduled_insn;
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  bool pointer;
  bool literal_pool;
};
/* The following structure is embedded in the machine
   specific part of struct function.  */

struct GTY (()) s390_frame_layout
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr where slots in the register
     save area are reserved for.  */
  int first_save_gpr_slot;
  int last_save_gpr_slot;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set, if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.
     This flag is set by s390_return_addr_rtx if it could not use
     the initial value of r14 and therefore depends on r14 saved
     to the stack.  */
  bool save_return_addr_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};
/* Define the structure for the machine field in struct function.  */

struct GTY(()) machine_function
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;

  bool has_landing_pad_p;
};
/* Few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot -     \
  cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_LONG)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |=  \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap &  \
  (1 << (BITNUM))))
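
/* Example use of the accessors, following the fpr_bitmap layout
   documented above: f4 lives in hard reg 18 and is tracked by bit 2,
   so "cfun_set_fpr_bit (2);" marks it for saving and
   "cfun_fpr_bit_p (2)" reports true from then on.  */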
/* Number of GPRs and FPRs used for argument passing.  */
#define GP_ARG_NUM_REG 5
#define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)

/* A couple of shortcuts.  */
#define CONST_OK_FOR_J(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
#define CONST_OK_FOR_K(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
#define CONST_OK_FOR_Os(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
#define CONST_OK_FOR_Op(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
#define CONST_OK_FOR_On(x) \
        CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")

#define REGNO_PAIR_OK(REGNO, MODE)                               \
  (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))

/* That's the read ahead of the dynamic branch prediction unit in
   bytes on a z10 (or higher) CPU.  */
#define PREDICT_DISTANCE (TARGET_Z10 ? 384 : 2048)
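
/* For illustration: in 64-bit mode a TImode value occupies two
   adjacent GPRs, so REGNO_PAIR_OK accepts only even-numbered GPRs for
   it, while any mode fitting in a single register is accepted for
   every REGNO.  */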
/* Return the alignment for LABEL.  We default to the -falign-labels
   value except for the literal pool base label.  */
static int
s390_label_align (rtx label)
{
  rtx prev_insn = prev_active_insn (label);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = single_set (prev_insn);

  if (prev_insn == NULL_RTX)
    goto old;

  prev_insn = SET_SRC (prev_insn);

  /* Don't align literal pool base labels.  */
  if (GET_CODE (prev_insn) == UNSPEC
      && XINT (prev_insn, 1) == UNSPEC_MAIN_BASE)
    return 0;

 old:
  return align_labels_log;
}
static enum machine_mode
s390_libgcc_cmp_return_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_libgcc_shift_count_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}

static enum machine_mode
s390_unwind_word_mode (void)
{
  return TARGET_64BIT ? DImode : SImode;
}
/* Return true if the back end supports mode MODE.  */
static bool
s390_scalar_mode_supported_p (enum machine_mode mode)
{
  /* In contrast to the default implementation reject TImode constants on 31bit
     TARGET_ZARCH for ABI compliance.  */
  if (!TARGET_64BIT && TARGET_ZARCH && mode == TImode)
    return false;

  if (DECIMAL_FLOAT_MODE_P (mode))
    return default_decimal_float_supported_p ();

  return default_scalar_mode_supported_p (mode);
}
/* Set the has_landing_pad_p flag in struct machine_function to VALUE.  */

void
s390_set_has_landing_pad_p (bool value)
{
  cfun->machine->has_landing_pad_p = value;
}
/* If two condition code modes are compatible, return a condition code
   mode which is compatible with both.  Otherwise, return
   VOIDmode.  */

static enum machine_mode
s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
{
  if (m1 == m2)
    return m1;

  switch (m1)
    {
    case CCZmode:
      if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
          || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
        return m2;
      return VOIDmode;

    case CCSmode:
    case CCUmode:
    case CCTmode:
    case CCSRmode:
    case CCURmode:
    case CCZ1mode:
      if (m2 == CCZmode)
        return m1;

      return VOIDmode;

    default:
      return VOIDmode;
    }
  return VOIDmode;
}
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static bool
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  gcc_assert (GET_CODE (set) == SET);

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      gcc_unreachable ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}
/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

bool
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return false;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return false;
      }

  return true;
}
/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.
     e.g.: int a; if ((a & (16 + 128)) == 0) */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.
     e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
     int a;
     if ((a & (16 + 128)) == 16)  -> CCT1
     if ((a & (16 + 128)) == 128) -> CCT2  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      /* The only overflow condition of NEG and ABS happens when
         -INT_MAX is used as parameter, which stays negative. So
         we have an overflow from a positive value to a negative.
         Using CCAP mode the resulting cc can be used for comparisons.  */
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;

      /* If constants are involved in an add instruction it is possible to use
         the resulting cc for comparisons with zero. Knowing the sign of the
         constant the overflow behavior gets predictable. e.g.:
           int a, b; if ((b = a + c) > 0)
         with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP  */
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      gcc_unreachable ();
    }
}
/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

static void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }

  /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }

  /* Simplify cascaded EQ, NE with const0_rtx.  */
  if ((*code == NE || *code == EQ)
      && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
      && GET_MODE (*op0) == SImode
      && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
      && REG_P (XEXP (*op0, 0))
      && XEXP (*op0, 1) == const0_rtx
      && *op1 == const0_rtx)
    {
      if ((*code == EQ && GET_CODE (*op0) == NE)
          || (*code == NE && GET_CODE (*op0) == EQ))
        *code = EQ;
      else
        *code = NE;

      *op0 = XEXP (*op0, 0);
    }

  /* Prefer register over memory as first operand.  */
  if (MEM_P (*op0) && REG_P (*op1))
    {
      rtx tem = *op0; *op0 = *op1; *op1 = tem;
      *code = swap_condition (*code);
    }
}
/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx cc;

  /* Do not output a redundant compare instruction if a compare_and_swap
     pattern already computed the result and the machine modes are compatible.  */
  if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    {
      gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
                  == GET_MODE (op0));
      cc = op0;
    }
  else
    {
      cc = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
}
/* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
   matches CMP.
   Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
   conditional branch testing the result.  */

static rtx
s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem,
                            rtx cmp, rtx new_rtx)
{
  emit_insn (gen_atomic_compare_and_swapsi_internal (old, mem, cmp, new_rtx));
  return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM),
                            const0_rtx);
}
/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}
/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
  gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
  gcc_assert (XEXP (code, 1) == const0_rtx);

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
    case CCZ1mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC1;
        case NE:  return CC0 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC2;
        case NE:  return CC0 | CC1 | CC3;
        default:  return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC3;
        case NE:  return CC0 | CC1 | CC2;
        default:  return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        default:  return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default:  return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default:  return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        case LTU: return CC1;
        case GTU: return CC3;
        case LEU: return CC1 | CC2;
        case GEU: return CC2 | CC3;
        default:  return -1;
        }

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default:  return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LT:  return CC1 | CC3;
        case GT:  return CC2;
        case LE:  return CC0 | CC1 | CC3;
        case GE:  return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LT:  return CC1;
        case GT:  return CC2 | CC3;
        case LE:  return CC0 | CC1;
        case GE:  return CC0 | CC2 | CC3;
        default:  return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}
/* Return branch condition mask to implement a compare and branch
   specified by CODE.  Return -1 for invalid comparisons.  */

int
s390_compare_and_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;

  switch (GET_CODE (code))
    {
    case EQ:  return CC0;
    case NE:  return CC1 | CC2;
    case LT:
    case LTU: return CC1;
    case GT:
    case GTU: return CC2;
    case LE:
    case LEU: return CC0 | CC1;
    case GE:
    case GEU: return CC0 | CC2;
    default:  gcc_unreachable ();
    }
  return -1;
}
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  int mask;

  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  if (GET_CODE (XEXP (code, 0)) == REG
      && REGNO (XEXP (code, 0)) == CC_REGNUM
      && XEXP (code, 1) == const0_rtx)
    mask = s390_branch_condition_mask (code);
  else
    mask = s390_compare_and_branch_condition_mask (code);

  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  gcc_assert (mask >= 1 && mask <= 14);

  return mnemonic[mask];
}
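
/* Worked example: an EQ test of the CC register in CCZmode yields the
   mask CC0 == 8, selecting mnemonic[8] == "e" (branch on equal); NE
   yields CC1 | CC2 | CC3 == 7 and thus "ne".  */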
/* Return the part of op which has a value different from def.
   The size of the part is determined by mode.
   Use this function only if you already know that op really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  gcc_unreachable ();
}
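
/* For illustration: with OP = 0xab0000, MODE = QImode and DEF = 0, the
   loop scans the bytes from least to most significant (0x00, 0x00,
   0xab, ...) and returns 0xab, the first part differing from DEF.  */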
/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask
    = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
/* Return true if IN contains a contiguous bitfield in the lower SIZE
   bits and no other bits are set in IN.  POS and LENGTH can be used
   to obtain the start position and the length of the bitfield.

   POS gives the position of the first bit of the bitfield counting
   from the lowest order bit starting with zero.  In order to use this
   value for S/390 instructions this has to be converted to "bits big
   endian" style.  */

bool
s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
                           int *pos, int *length)
{
  int tmp_pos = 0;
  int tmp_length = 0;
  int i;
  unsigned HOST_WIDE_INT mask = 1ULL;
  bool contiguous = false;

  for (i = 0; i < size; mask <<= 1, i++)
    {
      if (contiguous)
        {
          if (mask & in)
            tmp_length++;
          else
            break;
        }
      else if (mask & in)
        {
          contiguous = true;
          tmp_length++;
        }
      else
        tmp_pos++;
    }

  if (!tmp_length)
    return false;

  /* Calculate a mask for all bits beyond the contiguous bits.  */
  mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));

  if (mask & in)
    return false;

  if (tmp_length + tmp_pos - 1 > size)
    return false;

  if (length)
    *length = tmp_length;

  if (pos)
    *pos = tmp_pos;

  return true;
}
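
/* For illustration: IN = 0x0ff0 with SIZE = 16 is accepted with
   *POS = 4 and *LENGTH = 8, whereas IN = 0x0a0a is rejected because
   its set bits are not contiguous.  */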
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}
/* Return true if it can be proven that [MEM1, MEM1 + SIZE]
   and [MEM2, MEM2 + SIZE] do overlap and false
   otherwise.  */

bool
s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
{
  rtx addr1, addr2, addr_delta;
  HOST_WIDE_INT delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return true;

  if (size == 0)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);

  /* This overlapping check is used by peepholes merging memory block operations.
     Overlapping operations would otherwise be recognized by the S/390 hardware
     and would fall back to a slower implementation. Allowing overlapping
     operations would lead to slow code but not to wrong code. Therefore we are
     somewhat optimistic if we cannot prove that the memory blocks are
     overlapping.
     That's why we return false here although this may accept operations on
     overlapping memory areas.  */
  if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
    return false;

  delta = INTVAL (addr_delta);

  if (delta == 0
      || (delta > 0 && delta < size)
      || (delta < 0 && -delta < size))
    return true;

  return false;
}
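
/* For illustration: two 8-byte blocks whose addresses provably differ
   by 4 overlap (0 < delta < size); if the delta is not a compile-time
   constant, the function optimistically reports no overlap, as
   explained in the comment above.  */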
/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}
/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}
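
/* For illustration: a QImode AND with a register destination is
   rewritten above to operate in SImode on SUBREGs of the operands,
   and the narrow result is copied back via gen_lowpart; only a memory
   destination keeps the operation in QImode.  */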
/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}
/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}
/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
static void
s390_option_override (void)
{
  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Set the march default in case it hasn't been specified on
     cmdline.  */
  if (s390_arch == PROCESSOR_max)
    {
      s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
      s390_arch = TARGET_ZARCH ? PROCESSOR_2064_Z900 : PROCESSOR_9672_G5;
      s390_arch_flags = processor_flags_table[(int)s390_arch];
    }

  /* Determine processor to tune for.  */
  if (s390_tune == PROCESSOR_max)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
    error ("z/Architecture mode not supported on %s", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode");

  /* Use hardware DFP if available and not explicitly disabled by
     user. E.g. with -m31 -march=z10 -mzarch   */
  if (!(target_flags_explicit & MASK_HARD_DFP) && TARGET_DFP)
    target_flags |= MASK_HARD_DFP;

  if (TARGET_HARD_DFP && !TARGET_DFP)
    {
      if (target_flags_explicit & MASK_HARD_DFP)
        {
          if (!TARGET_CPU_DFP)
            error ("hardware decimal floating point instructions"
                   " not available on %s", s390_arch_string);
          if (!TARGET_ZARCH)
            error ("hardware decimal floating point instructions"
                   " not available in ESA/390 mode");
        }
      else
        target_flags &= ~MASK_HARD_DFP;
    }

  if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
    {
      if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
        error ("-mhard-dfp can%'t be used in conjunction with -msoft-float");

      target_flags &= ~MASK_HARD_DFP;
    }

  /* Set processor cost function.  */
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
      s390_cost = &z990_cost;
      break;
    case PROCESSOR_2094_Z9_109:
      s390_cost = &z9_109_cost;
      break;
    case PROCESSOR_2097_Z10:
      s390_cost = &z10_cost;
      break;
    case PROCESSOR_2817_Z196:
      s390_cost = &z196_cost;
      break;
    default:
      s390_cost = &z900_cost;
    }

  if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
    error ("-mbackchain -mpacked-stack -mhard-float are not supported "
           "in combination");

  if (s390_stack_size)
    {
      if (s390_stack_guard >= s390_stack_size)
        error ("stack size must be greater than the stack guard value");
      else if (s390_stack_size > 1 << 16)
        error ("stack size must not be greater than 64k");
    }
  else if (s390_stack_guard)
    error ("-mstack-guard implies use of -mstack-size");

#ifdef TARGET_DEFAULT_LONG_DOUBLE_128
  if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
    target_flags |= MASK_LONG_DOUBLE_128;
#endif

  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      maybe_set_param_value (PARAM_MAX_UNROLLED_INSNS, 100,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_UNROLL_TIMES, 32,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEELED_INSNS, 2000,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
      maybe_set_param_value (PARAM_MAX_COMPLETELY_PEEL_TIMES, 64,
                             global_options.x_param_values,
                             global_options_set.x_param_values);
    }

  maybe_set_param_value (PARAM_MAX_PENDING_LIST_LENGTH, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* values for loop prefetching */
  maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, 256,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_L1_CACHE_SIZE, 128,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  /* s390 has more than 2 levels and the size is much larger.  Since
     we are always running virtualized assume that we only get a small
     part of the caches above l1.  */
  maybe_set_param_value (PARAM_L2_CACHE_SIZE, 1500,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_PREFETCH_MIN_INSN_TO_MEM_RATIO, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);
  maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES, 6,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  /* This cannot reside in s390_option_optimization_table since HAVE_prefetch
     requires the arch flags to be evaluated already.  Since prefetching
     is beneficial on s390, we enable it if available.  */
  if (flag_prefetch_loop_arrays < 0 && HAVE_prefetch && optimize >= 3)
    flag_prefetch_loop_arrays = 1;

  /* Use the alternative scheduling-pressure algorithm by default.  */
  maybe_set_param_value (PARAM_SCHED_PRESSURE_ALGORITHM, 2,
                         global_options.x_param_values,
                         global_options_set.x_param_values);

  if (TARGET_TPF)
    {
      /* Don't emit DWARF3/4 unless specifically selected.  The TPF
         debuggers do not yet support DWARF 3/4.  */
      if (!global_options_set.x_dwarf_strict)
        dwarf_strict = 1;
      if (!global_options_set.x_dwarf_version)
        dwarf_version = 2;
    }
}
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};
/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}
/* Return true if DISP is a valid short displacement.  */

static bool
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return true;

  /* Without the long displacement facility we don't need to
     distinguish between long and short displacement.  */
  if (!TARGET_LONG_DISPLACEMENT)
    return true;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
          || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
    return false;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return true;

  return false;
}
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns false if ADDR is not a valid memory address, true
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (rtx addr, struct s390_address *out)
{
  HOST_WIDE_INT offset = 0;
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  rtx orig_disp;
  bool pointer = false;
  bool base_ptr = false;
  bool indx_ptr = false;
  bool literal_pool = false;

  /* We may need to substitute the literal pool base register into the address
     below.  However, at this point we do not know which register is going to
     be used as base, so we substitute the arg pointer register.  This is going
     to be treated as holding a pointer below -- it shouldn't be used for any
     other purpose.  */
  rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;       /* index + base */
              base = op1;
            }
          else
            {
              base = op0;       /* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0); /* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        return false;
    }

  else
    disp = addr;                /* displacement */

  /* Extract integer part of displacement.  */
  orig_disp = disp;
  if (disp)
    {
      if (GET_CODE (disp) == CONST_INT)
        {
          offset = INTVAL (disp);
          disp = NULL_RTX;
        }
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (XEXP (disp, 0), 1));
          disp = XEXP (XEXP (disp, 0), 0);
        }
    }

  /* Strip off CONST here to avoid special case tests later.  */
  if (disp && GET_CODE (disp) == CONST)
    disp = XEXP (disp, 0);

  /* We can convert literal pool addresses to
     displacements by basing them off the base register.  */
  if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
    {
      /* Either base or index must be free to hold the base register.  */
      if (!base)
        base = fake_pool_base, literal_pool = true;
      else if (!indx)
        indx = fake_pool_base, literal_pool = true;
      else
        return false;

      /* Mark up the displacement.  */
      disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
                             UNSPEC_LTREL_OFFSET);
    }

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        switch (XINT (base, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (base, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            base = XVECEXP (base, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (base, 0) == 1)
              base = fake_pool_base, literal_pool = true;
            else
              base = XVECEXP (base, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (base)
          || (GET_MODE (base) != SImode
              && GET_MODE (base) != Pmode))
        return false;

      if (REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (base) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = base_ptr = true;

      if ((reload_completed || reload_in_progress)
          && base == cfun->machine->base_reg)
        pointer = base_ptr = literal_pool = true;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        switch (XINT (indx, 1))
          {
          case UNSPEC_LTREF:
            if (!disp)
              disp = gen_rtx_UNSPEC (Pmode,
                                     gen_rtvec (1, XVECEXP (indx, 0, 0)),
                                     UNSPEC_LTREL_OFFSET);
            else
              return false;

            indx = XVECEXP (indx, 0, 1);
            break;

          case UNSPEC_LTREL_BASE:
            if (XVECLEN (indx, 0) == 1)
              indx = fake_pool_base, literal_pool = true;
            else
              indx = XVECEXP (indx, 0, 1);
            break;

          default:
            return false;
          }

      if (!REG_P (indx)
          || (GET_MODE (indx) != SImode
              && GET_MODE (indx) != Pmode))
        return false;

      if (REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || REGNO (indx) == ARG_POINTER_REGNUM
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = indx_ptr = true;

      if ((reload_completed || reload_in_progress)
          && indx == cfun->machine->base_reg)
        pointer = indx_ptr = literal_pool = true;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (!disp)
    {
      /* If virtual registers are involved, the displacement will change later
         anyway as the virtual registers get eliminated.  This could make a
         valid displacement invalid, but it is more likely to make an invalid
         displacement valid, because we sometimes access the register save area
         via negative offsets to one of those registers.
         Thus we don't check the displacement for validity here.  If after
         elimination the displacement turns out to be invalid after all,
         this is fixed up by reload in any case.  */
      if (base != arg_pointer_rtx
          && indx != arg_pointer_rtx
          && base != return_address_pointer_rtx
          && indx != return_address_pointer_rtx
          && base != frame_pointer_rtx
          && indx != frame_pointer_rtx
          && base != virtual_stack_vars_rtx
          && indx != virtual_stack_vars_rtx)
        if (!DISP_IN_RANGE (offset))
          return false;
    }
  else
    {
      /* All the special cases are pointers.  */
      pointer = true;

      /* In the small-PIC case, the linker converts @GOT
         and @GOTNTPOFF offsets to possible displacements.  */
      if (GET_CODE (disp) == UNSPEC
          && (XINT (disp, 1) == UNSPEC_GOT
              || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
          && flag_pic == 1)
        {
          ;
        }

      /* Accept pool label offsets.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
        ;

      /* Accept literal pool references.  */
      else if (GET_CODE (disp) == UNSPEC
               && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
        {
          /* In case CSE pulled a non literal pool reference out of
             the pool we have to reject the address.  This is
             especially important when loading the GOT pointer on non
             zarch CPUs.  In this case the literal pool contains an lt
             relative offset to the _GLOBAL_OFFSET_TABLE_ label which
             will most likely exceed the displacement.  */
          if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (XVECEXP (disp, 0, 0)))
            return false;

          orig_disp = gen_rtx_CONST (Pmode, disp);
          if (offset)
            {
              /* If we have an offset, make sure it does not
                 exceed the size of the constant pool entry.  */
              rtx sym = XVECEXP (disp, 0, 0);
              if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
                return false;

              orig_disp = plus_constant (Pmode, orig_disp, offset);
            }
        }

      else
        return false;
    }

  if (!base && !indx)
    pointer = true;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = orig_disp;
      out->pointer = pointer;
      out->literal_pool = literal_pool;
    }

  return true;
}
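
/* For illustration: an address like
     (plus (plus (reg 3) (reg 2)) (const_int 16)),
   i.e. the assembler operand 16(%r3,%r2), decomposes into one register
   as index, one as base, and a displacement of 16.  */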
/* Decompose a RTL expression OP for a shift count into its components,
   and return the base register in BASE and the offset in OFFSET.

   Return true if OP is a valid shift count, false if not.  */

bool
s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT off = 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  */
  if (GET_CODE (op) == CONST_INT)
    {
      off = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (op && GET_CODE (op) != REG)
    return false;

  if (offset)
    *offset = off;
  if (base)
    *base = op;

  return true;
}
2076 s390_legitimate_address_without_index_p (rtx op
)
2078 struct s390_address addr
;
2080 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
/* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
   and return these parts in SYMREF and ADDEND.  You can pass NULL in
   SYMREF and/or ADDEND if you are not interested in these values.
   Literal pool references are *not* considered symbol references.  */

static bool
s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
{
  HOST_WIDE_INT tmpaddend = 0;

  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  if (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
          && !CONSTANT_POOL_ADDRESS_P (XEXP (addr, 0))
          && CONST_INT_P (XEXP (addr, 1)))
        {
          tmpaddend = INTVAL (XEXP (addr, 1));
          addr = XEXP (addr, 0);
        }
      else
        return false;
    }
  else
    if (GET_CODE (addr) != SYMBOL_REF || CONSTANT_POOL_ADDRESS_P (addr))
      return false;

  if (symref)
    *symref = addr;
  if (addend)
    *addend = tmpaddend;

  return true;
}
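
/* For illustration: (const (plus (symbol_ref "x") (const_int 8)))
   yields *SYMREF = the symbol_ref and *ADDEND = 8; a literal pool
   symbol is rejected, as documented above.  */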
/* Return true if the address in OP is valid for constraint letter C
   if wrapped in a MEM rtx.  Set LIT_POOL_OK to true if it literal
   pool MEMs should be accepted.  Only the Q, R, S, T constraint
   letters are allowed for C.  */

static int
s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
{
  struct s390_address addr;
  bool decomposed = false;

  /* This check makes sure that no symbolic address (except literal
     pool references) are accepted by the R or T constraints.  */
  if (s390_symref_operand_p (op, NULL, NULL))
    return 0;

  /* Ensure literal pool references are only accepted if LIT_POOL_OK.  */
  if (!lit_pool_ok)
    {
      if (!s390_decompose_address (op, &addr))
        return 0;
      if (addr.literal_pool)
        return 0;
      decomposed = true;
    }

  switch (c)
    {
    case 'Q': /* no index short displacement */
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (!s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'R': /* with index short displacement */
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!decomposed && !s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      break;

    case 'S': /* no index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (!decomposed && !s390_decompose_address (op, &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T': /* with index long displacement */
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if ((decomposed || s390_decompose_address (op, &addr))
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    default:
      return 0;
    }
  return 1;
}
/* Evaluates constraint strings described by the regular expression
   ([A|B|Z](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for
   the constraint given in STR, or 0 else.  */

int
s390_mem_constraint (const char *str, rtx op)
{
  char c = str[0];

  switch (c)
    {
    case 'A':
      /* Check for offsettable variants of memory constraints.  */
      if (!MEM_P (op) || MEM_VOLATILE_P (op))
        return 0;
      if ((reload_completed || reload_in_progress)
          ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
        return 0;
      return s390_check_qrst_address (str[1], XEXP (op, 0), true);
    case 'B':
      /* Check for non-literal-pool variants of memory constraints.  */
      if (!MEM_P (op))
        return 0;
      return s390_check_qrst_address (str[1], XEXP (op, 0), false);
    case 'Q':
    case 'R':
    case 'S':
    case 'T':
      if (GET_CODE (op) != MEM)
        return 0;
      return s390_check_qrst_address (c, XEXP (op, 0), true);
    case 'U':
      return (s390_check_qrst_address ('Q', op, true)
              || s390_check_qrst_address ('R', op, true));
    case 'W':
      return (s390_check_qrst_address ('S', op, true)
              || s390_check_qrst_address ('T', op, true));
    case 'Y':
      /* Simply check for the basic form of a shift count.  Reload will
         take care of making sure we have a proper base register.  */
      if (!s390_decompose_shift_count (op, NULL, NULL))
        return 0;
      break;
    case 'Z':
      return s390_check_qrst_address (str[1], op, true);
    default:
      return 0;
    }
  return 1;
}
/* Evaluates constraint strings starting with letter O.  Input
   parameter C is the second letter following the "O" in the constraint
   string.  Returns 1 if VALUE meets the respective constraint and 0
   otherwise.  */

int
s390_O_constraint_str (const char c, HOST_WIDE_INT value)
{
  if (!TARGET_EXTIMM)
    return 0;

  switch (c)
    {
    case 's':
      return trunc_int_for_mode (value, SImode) == value;

    case 'p':
      return value == 0
        || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;

    case 'n':
      return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;

    default:
      gcc_unreachable ();
    }
}
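
/* For illustration: "Os" accepts any value equal to its own 32-bit
   sign extension; "Op" accepts zero and values whose only non-zero
   32-bit half is the low one, i.e. values fitting an unsigned 32-bit
   immediate.  */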
/* Evaluates constraint strings starting with letter N.  Parameter STR
   contains the letters following letter "N" in the constraint string.
   Returns true if VALUE matches the constraint.  */

int
s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
{
  enum machine_mode mode, part_mode;
  int def;
  int part, part_goal;

  if (str[0] == 'x')
    part_goal = -1;
  else
    part_goal = str[0] - '0';

  switch (str[1])
    {
    case 'Q': part_mode = QImode; break;
    case 'H': part_mode = HImode; break;
    case 'S': part_mode = SImode; break;
    default:  return 0;
    }

  switch (str[2])
    {
    case 'H': mode = HImode; break;
    case 'S': mode = SImode; break;
    case 'D': mode = DImode; break;
    default:  return 0;
    }

  switch (str[3])
    {
    case '0': def = 0;  break;
    case 'F': def = -1; break;
    default:  return 0;
    }

  if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
    return 0;

  part = s390_single_part (GEN_INT (value), mode, part_mode, def);
  if (part < 0)
    return 0;
  if (part_goal != -1 && part_goal != part)
    return 0;

  return 1;
}
/* Returns true if the input parameter VALUE is a float zero.  */

int
s390_float_const_zero_p (rtx value)
{
  return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
          && value == CONST0_RTX (GET_MODE (value)));
}
/* Implement TARGET_REGISTER_MOVE_COST.  */

static int
s390_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                         reg_class_t from, reg_class_t to)
{
  /* On s390, copy between fprs and gprs is expensive.  */
  if ((reg_classes_intersect_p (from, GENERAL_REGS)
       && reg_classes_intersect_p (to, FP_REGS))
      || (reg_classes_intersect_p (from, FP_REGS)
          && reg_classes_intersect_p (to, GENERAL_REGS)))
    return 10;

  return 1;
}
/* Implement TARGET_MEMORY_MOVE_COST.  */

static int
s390_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                       reg_class_t rclass ATTRIBUTE_UNUSED,
                       bool in ATTRIBUTE_UNUSED)
{
  return 1;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE contains GET_CODE (x), OUTER_CODE contains the code
   of the superexpression of x.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case MEM:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
    case AND:
    case IOR:
    case XOR:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return false;

    case PLUS:
    case MINUS:
      *total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      switch (GET_MODE (x))
	{
	case SImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (GET_CODE (right) == CONST_INT
		&& CONST_OK_FOR_K (INTVAL (right)))
	      *total = s390_cost->mhi;
	    else if (GET_CODE (left) == SIGN_EXTEND)
	      *total = s390_cost->mh;
	    else
	      *total = s390_cost->ms;  /* msr, ms, msy */
	    break;
	  }
	case DImode:
	  {
	    rtx left = XEXP (x, 0);
	    rtx right = XEXP (x, 1);
	    if (TARGET_ZARCH)
	      {
		if (GET_CODE (right) == CONST_INT
		    && CONST_OK_FOR_K (INTVAL (right)))
		  *total = s390_cost->mghi;
		else if (GET_CODE (left) == SIGN_EXTEND)
		  *total = s390_cost->msgf;
		else
		  *total = s390_cost->msg;  /* msgr, msg */
	      }
	    else /* TARGET_31BIT */
	      {
		if (GET_CODE (left) == SIGN_EXTEND
		    && GET_CODE (right) == SIGN_EXTEND)
		  /* mulsidi case: mr, m */
		  *total = s390_cost->m;
		else if (GET_CODE (left) == ZERO_EXTEND
			 && GET_CODE (right) == ZERO_EXTEND
			 && TARGET_CPU_ZARCH)
		  /* umulsidi case: ml, mlr */
		  *total = s390_cost->ml;
		else
		  /* Complex calculation is required.  */
		  *total = COSTS_N_INSNS (40);
	      }
	    break;
	  }
	case SFmode:
	case DFmode:
	  *total = s390_cost->mult_df;
	  break;
	case TFmode:
	  *total = s390_cost->mxbr;
	  break;
	default:
	  return false;
	}
      return false;

    case FMA:
      switch (GET_MODE (x))
	{
	case DFmode:
	  *total = s390_cost->madbr;
	  break;
	case SFmode:
	  *total = s390_cost->maebr;
	  break;
	default:
	  return false;
	}
      /* Negate in the third argument is free: FMSUB.  */
      if (GET_CODE (XEXP (x, 2)) == NEG)
	{
	  *total += (rtx_cost (XEXP (x, 0), FMA, 0, speed)
		     + rtx_cost (XEXP (x, 1), FMA, 1, speed)
		     + rtx_cost (XEXP (XEXP (x, 2), 0), FMA, 2, speed));
	  return true;
	}
      return false;

    case UDIV:
    case UMOD:
      if (GET_MODE (x) == TImode)	       /* 128 bit division */
	*total = s390_cost->dlgr;
      else if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    *total = s390_cost->dlr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dlgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      return false;

    case DIV:
    case MOD:
      if (GET_MODE (x) == DImode)
	{
	  rtx right = XEXP (x, 1);
	  if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
	    if (TARGET_ZARCH)
	      *total = s390_cost->dsgfr;
	    else
	      *total = s390_cost->dr;
	  else				       /* 64 by 64 bit division */
	    *total = s390_cost->dsgr;
	}
      else if (GET_MODE (x) == SImode)	       /* 32 bit division */
	*total = s390_cost->dlr;
      else if (GET_MODE (x) == SFmode)
	*total = s390_cost->debr;
      else if (GET_MODE (x) == DFmode)
	*total = s390_cost->ddbr;
      else if (GET_MODE (x) == TFmode)
	*total = s390_cost->dxbr;
      return false;

    case SQRT:
      if (GET_MODE (x) == SFmode)
	*total = s390_cost->sqebr;
      else if (GET_MODE (x) == DFmode)
	*total = s390_cost->sqdbr;
      else /* TFmode */
	*total = s390_cost->sqxbr;
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (outer_code == MULT || outer_code == DIV || outer_code == MOD
	  || outer_code == PLUS || outer_code == MINUS
	  || outer_code == COMPARE)
	*total = 0;
      return false;

    case COMPARE:
      *total = COSTS_N_INSNS (1);
      if (GET_CODE (XEXP (x, 0)) == AND
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 0);
	  rtx op1 = XEXP (XEXP (x, 0), 1);
	  rtx op2 = XEXP (x, 1);

	  if (memory_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
	    return true;
	  if (register_operand (op0, GET_MODE (op0))
	      && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
	    return true;
	}
      return false;

    default:
      return false;
    }
}
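
/* For illustration, assuming TARGET_ZARCH (an assumed example, not
   from the original sources): for

     x = (mult:DI (reg:DI 60) (const_int 3))

   the MULT case above picks s390_cost->mghi, because 3 fits the
   signed 16-bit range tested by CONST_OK_FOR_K; it still returns
   false, so the subexpressions (here just the register) are costed
   separately and added in by the caller.  */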
/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}
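
/* For illustration (assumed example): (plus (reg 2) (const_int 8))
   decomposes with no index and costs COSTS_N_INSNS (1), while a
   base+index+displacement address pays one extra unit for the index,
   mildly biasing address generation toward the simpler b+d form.  */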
/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

static int
tls_symbolic_operand (rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
/* Split DImode access register reference REG (on 64-bit) into its constituent
   low and high parts, and store them into LO and HI.  Note that gen_lowpart/
   gen_highpart cannot be used as they assume all registers are word-sized,
   while our access registers have only half that size.  */

void
s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
{
  gcc_assert (TARGET_64BIT);
  gcc_assert (ACCESS_REG_P (reg));
  gcc_assert (GET_MODE (reg) == DImode);
  gcc_assert (!(REGNO (reg) & 1));

  *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
  *hi = gen_rtx_REG (SImode, REGNO (reg));
}
/* Return true if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP contains a reference to a thread-local symbol.  */

int
tls_symbolic_reference_mentioned_p (rtx op)
{
  const char *fmt;
  int i;

  if (GET_CODE (op) == SYMBOL_REF)
    return tls_symbolic_operand (op);

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_symbolic_move.  */
  return 0;
}
/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

static bool
s390_legitimate_constant_p (enum machine_mode mode, rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return true;

  /* Accept immediate LARL operands.  */
  if (TARGET_CPU_ZARCH && larl_operand (op, mode))
    return true;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return false;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return true;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return true;
}
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).  */

static bool
s390_cannot_force_const_mem (enum machine_mode mode, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
	 non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0));
    case PLUS:
    case MINUS:
      return s390_cannot_force_const_mem (mode, XEXP (x, 0))
	     || s390_cannot_force_const_mem (mode, XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.  */
	case UNSPEC_LTREL_OFFSET:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLTOFF:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	/* If the literal pool shares the code section, execute
	   template placeholders must be put into the pool as well.  */
	case UNSPEC_INSN:
	  return TARGET_CPU_ZARCH;

	default:
	  return true;
	}
      break;

    default:
      gcc_unreachable ();
    }
}
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference from
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.
   This function accepts all constants which can be loaded directly
   into a GPR.  */

static bool
legitimate_reload_constant_p (rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return true;

  /* Accept l(g)hi/l(g)fi operands.  */
  if (GET_CODE (op) == CONST_INT
      && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
    return true;

  /* Accept lliXX operands.  */
  if (TARGET_ZARCH
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, HImode, 0) >= 0)
    return true;

  /* Accept llihf/llilf operands.  */
  if (TARGET_EXTIMM
      && GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
      && s390_single_part (op, word_mode, SImode, 0) >= 0)
    return true;

  /* Accept larl operands.  */
  if (TARGET_CPU_ZARCH
      && larl_operand (op, VOIDmode))
    return true;

  /* Accept floating-point zero operands that fit into a single GPR.  */
  if (GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op)
      && GET_MODE_SIZE (GET_MODE (op)) <= UNITS_PER_WORD)
    return true;

  /* Accept double-word operands that can be split.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
    {
      enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
      rtx hi = operand_subword (op, 0, 0, dword_mode);
      rtx lo = operand_subword (op, 1, 0, dword_mode);
      return legitimate_reload_constant_p (hi)
	     && legitimate_reload_constant_p (lo);
    }

  /* Everything else cannot be handled without reload.  */
  return false;
}
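
/* Worked example, assuming a 31-bit target where word_mode is SImode
   (an assumed example, not from the original sources): the DImode
   constant 0x100000000 fails the trunc_int_for_mode test and is split
   by operand_subword into the SImode words 1 and 0; both words are
   trivially loadable, so the whole constant is reloadable without
   touching the literal pool.  */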
/* Returns true if the constant value OP is a legitimate fp operand
   during and after reload.
   This function accepts all constants which can be loaded directly
   into an FPR.  */

static bool
legitimate_reload_fp_constant_p (rtx op)
{
  /* Accept floating-point zero operands if the load zero instruction
     is available.  */
  if (TARGET_Z196
      && GET_CODE (op) == CONST_DOUBLE
      && s390_float_const_zero_p (op))
    return true;

  return false;
}
/* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
   return the class of reg to actually use.  */

static reg_class_t
s390_preferred_reload_class (rtx op, reg_class_t rclass)
{
  switch (GET_CODE (op))
    {
      /* Constants we cannot reload into general registers
	 must be forced into the literal pool.  */
      case CONST_DOUBLE:
      case CONST_INT:
	if (reg_class_subset_p (GENERAL_REGS, rclass)
	    && legitimate_reload_constant_p (op))
	  return GENERAL_REGS;
	else if (reg_class_subset_p (ADDR_REGS, rclass)
		 && legitimate_reload_constant_p (op))
	  return ADDR_REGS;
	else if (reg_class_subset_p (FP_REGS, rclass)
		 && legitimate_reload_fp_constant_p (op))
	  return FP_REGS;
	return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (!legitimate_reload_constant_p (op))
	  return NO_REGS;
	/* fallthrough */
      case PLUS:
	/* load address will be used.  */
	if (reg_class_subset_p (ADDR_REGS, rclass))
	  return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  return rclass;
}
/* Return true if ADDR is SYMBOL_REF + addend with addend being a
   multiple of ALIGNMENT and the SYMBOL_REF being naturally
   aligned.  */

bool
s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
{
  HOST_WIDE_INT addend;
  rtx symref;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    return false;

  return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
	  && !(addend & (alignment - 1)));
}
/* ADDR is moved into REG using larl.  If ADDR isn't a valid larl
   operand SCRATCH is used to reload the even part of the address and
   the odd part is added afterwards.  */

void
s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
{
  HOST_WIDE_INT addend;
  rtx symref;

  if (!s390_symref_operand_p (addr, &symref, &addend))
    gcc_unreachable ();

  if (!(addend & 1))
    /* Easy case.  The addend is even so larl will do fine.  */
    emit_move_insn (reg, addr);
  else
    {
      /* We can leave the scratch register untouched if the target
	 register is a valid base register.  */
      if (REGNO (reg) < FIRST_PSEUDO_REGISTER
	  && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
	scratch = reg;

      gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);

      if (addend != 1)
	emit_move_insn (scratch,
			gen_rtx_CONST (Pmode,
				       gen_rtx_PLUS (Pmode, symref,
						     GEN_INT (addend - 1))));
      else
	emit_move_insn (scratch, symref);

      /* Increment the address using la in order to avoid clobbering cc.  */
      emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
    }
}
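
/* For illustration (an assumed example, not from the original
   sources): reloading ADDR = sym+5, which has an odd addend, emits
   roughly

     larl  %scratch,sym+4
     la    %reg,1(%scratch)

   since LARL can only encode even (halfword) offsets; LA is used for
   the final +1 because, unlike an add, it leaves the condition code
   untouched.  */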
/* Generate what is necessary to move between REG and MEM using
   SCRATCH.  The direction is given by TOMEM.  */

void
s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
{
  /* Reload might have pulled a constant out of the literal pool.
     Force it back in.  */
  if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
      || GET_CODE (mem) == CONST)
    mem = force_const_mem (GET_MODE (reg), mem);

  gcc_assert (MEM_P (mem));

  /* For a load from memory we can leave the scratch register
     untouched if the target register is a valid base register.  */
  if (!tomem
      && REGNO (reg) < FIRST_PSEUDO_REGISTER
      && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
      && GET_MODE (reg) == GET_MODE (scratch))
    scratch = reg;

  /* Load address into scratch register.  Since we can't have a
     secondary reload for a secondary reload we have to cover the case
     where larl would need a secondary reload here as well.  */
  s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);

  /* Now we can use a standard load/store to do the move.  */
  if (tomem)
    emit_move_insn (replace_equiv_address (mem, scratch), reg);
  else
    emit_move_insn (reg, replace_equiv_address (mem, scratch));
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   RCLASS requires an extra scratch or immediate register.  Return the class
   needed for the immediate register.  */

static reg_class_t
s390_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  enum reg_class rclass = (enum reg_class) rclass_i;

  /* Intermediate register needed.  */
  if (reg_classes_intersect_p (CC_REGS, rclass))
    return GENERAL_REGS;

  if (TARGET_Z10)
    {
      HOST_WIDE_INT offset;
      rtx symref;

      /* On z10 several optimizer steps may generate larl operands with
	 an odd addend.  */
      if (in_p
	  && s390_symref_operand_p (x, &symref, &offset)
	  && !SYMBOL_REF_ALIGN1_P (symref)
	  && (offset & 1) == 1)
	sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
		      : CODE_FOR_reloadsi_larl_odd_addend_z10);

      /* On z10 we need a scratch register when moving QI, TI or floating
	 point mode values from or to a memory location with a SYMBOL_REF
	 or if the symref addend of a SI or DI move is not aligned to the
	 width of the access.  */
      if (MEM_P (x)
	  && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
	  && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
	      || (!TARGET_ZARCH && mode == DImode)
	      || ((mode == HImode || mode == SImode || mode == DImode)
		  && (!s390_check_symref_alignment (XEXP (x, 0),
						    GET_MODE_SIZE (mode))))))
	{
#define __SECONDARY_RELOAD_CASE(M,m)					\
	  case M##mode:							\
	    if (TARGET_64BIT)						\
	      sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 :	\
				  CODE_FOR_reload##m##di_tomem_z10;	\
	    else							\
	      sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 :	\
				  CODE_FOR_reload##m##si_tomem_z10;	\
	    break;

	  switch (GET_MODE (x))
	    {
	      __SECONDARY_RELOAD_CASE (QI, qi);
	      __SECONDARY_RELOAD_CASE (HI, hi);
	      __SECONDARY_RELOAD_CASE (SI, si);
	      __SECONDARY_RELOAD_CASE (DI, di);
	      __SECONDARY_RELOAD_CASE (TI, ti);
	      __SECONDARY_RELOAD_CASE (SF, sf);
	      __SECONDARY_RELOAD_CASE (DF, df);
	      __SECONDARY_RELOAD_CASE (TF, tf);
	      __SECONDARY_RELOAD_CASE (SD, sd);
	      __SECONDARY_RELOAD_CASE (DD, dd);
	      __SECONDARY_RELOAD_CASE (TD, td);

	    default:
	      gcc_unreachable ();
	    }
#undef __SECONDARY_RELOAD_CASE
	}
    }

  /* We need a scratch register when loading a PLUS expression which
     is not a legitimate operand of the LOAD ADDRESS instruction.  */
  if (in_p && s390_plus_operand (x, mode))
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);

  /* Performing a multiword move from or to memory we have to make sure the
     second chunk in memory is addressable without causing a displacement
     overflow.  If that would be the case we calculate the address in
     a scratch register.  */
  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
			 + GET_MODE_SIZE (mode) - 1))
    {
      /* For GENERAL_REGS a displacement overflow is no problem if occurring
	 in a s_operand address since we may fall back to lm/stm.  So we only
	 have to care about overflows in the b+i+d case.  */
      if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
	   && s390_class_max_nregs (GENERAL_REGS, mode) > 1
	   && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
	  /* For FP_REGS no lm/stm is available so this check is triggered
	     for displacement overflows in b+i+d and b+d like addresses.  */
	  || (reg_classes_intersect_p (FP_REGS, rclass)
	      && s390_class_max_nregs (FP_REGS, mode) > 1))
	{
	  if (in_p)
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_in :
			  CODE_FOR_reloadsi_nonoffmem_in);
	  else
	    sri->icode = (TARGET_64BIT ?
			  CODE_FOR_reloaddi_nonoffmem_out :
			  CODE_FOR_reloadsi_nonoffmem_out);
	}
    }

  /* A scratch address register is needed when a symbolic constant is
     copied to r0 compiling with -fPIC.  In other cases the target
     register might be used as temporary (see legitimize_pic_address).  */
  if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
    sri->icode = (TARGET_64BIT ?
		  CODE_FOR_reloaddi_PIC_addr :
		  CODE_FOR_reloadsi_PIC_addr);

  /* Either scratch or no register needed.  */
  return NO_REGS;
}
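
/* For illustration (an assumed example, not from the original
   sources): an in_p reload of (mem:QI (symbol_ref)) on z10 expands
   __SECONDARY_RELOAD_CASE (QI, qi) to CODE_FOR_reloadqidi_toreg_z10
   on 64-bit targets; the associated pattern first materializes the
   symbol address in a scratch address register and then performs the
   byte access relative to that register.  */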
/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (rtx target, rtx src,
			  rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  gcc_assert (GET_CODE (src) == PLUS);
  gcc_assert (GET_MODE (src) == Pmode);

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    {
      /* Otherwise, one of the operands cannot be an address register;
	 we reload its value into the scratch register.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
	 in reload.c, it should never happen (at least on s390) that
	 *neither* of the PLUS components, after find_replacements
	 was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	gcc_unreachable ();

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
/* Return true if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

static bool
s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
{
  struct s390_address ad;

  if (TARGET_Z10
      && larl_operand (addr, VOIDmode)
      && (mode == VOIDmode
	  || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
    return true;

  if (!s390_decompose_address (addr, &ad))
    return false;

  if (strict)
    {
      if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
	return false;

      if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
	return false;
    }
  else
    {
      if (ad.base
	  && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
	return false;

      if (ad.indx
	  && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
	       || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
	return false;
    }
  return true;
}
/* Return true if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

bool
legitimate_la_operand_p (rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return false;

  return (TARGET_64BIT || addr.pointer);
}
/* Return true if it is valid *and* preferable to use LA to
   compute the sum of OP1 and OP2.  */

bool
preferred_la_operand_p (rtx op1, rtx op2)
{
  struct s390_address addr;

  if (op2 != const0_rtx)
    op1 = gen_rtx_PLUS (Pmode, op1, op2);

  if (!s390_decompose_address (op1, &addr))
    return false;
  if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
    return false;
  if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
    return false;

  /* Avoid LA instructions with index register on z196; it is
     preferable to use regular add instructions when possible.  */
  if (addr.indx && s390_tune == PROCESSOR_2817_Z196)
    return false;

  if (!TARGET_64BIT && !addr.pointer)
    return false;

  if (addr.pointer)
    return true;

  if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
      || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
    return true;

  return false;
}
/* Emit a forced load-address operation to load SRC into DST.
   This will use the LOAD ADDRESS instruction even in situations
   where legitimate_la_operand_p (SRC) returns false.  */

void
s390_load_address (rtx dst, rtx src)
{
  if (TARGET_64BIT)
    emit_move_insn (dst, src);
  else
    emit_insn (gen_force_la_31 (dst, src));
}
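
/* Usage sketch (assumed example, not from the original sources):
   callers typically do

     s390_load_address (dst, gen_rtx_PLUS (Pmode, base, GEN_INT (256)));

   which becomes a single la/lay style instruction and, unlike
   emitting an add, never clobbers the condition code.  */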
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
   reg also appears in the address.  */

rtx
legitimize_pic_address (rtx orig, rtx reg)
{
  rtx addr = orig;
  rtx new_rtx = orig;
  rtx base;

  gcc_assert (!TLS_SYMBOLIC_CONST (addr));

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
    {
      /* This is a local symbol.  */
      if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
	{
	  /* Access local symbols PC-relative via LARL.
	     This is the same as in the non-PIC case, so it is
	     handled automatically ...  */
	}
      else
	{
	  /* Access local symbols relative to the GOT.  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  if (reg != 0)
	    {
	      s390_load_address (reg, new_rtx);
	      new_rtx = reg;
	    }
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
	{
	  /* Assume GOT offset < 4k.  This is handled the same way
	     in both 31- and 64-bit code (@GOT).  */

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else if (TARGET_CPU_ZARCH)
	{
	  /* If the GOT offset might be >= 4k, we determine the position
	     of the GOT entry via a PC-relative LARL (@GOTENT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
	  new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	  emit_move_insn (temp, new_rtx);

	  new_rtx = gen_const_mem (Pmode, temp);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
      else
	{
	  /* If the GOT offset might be >= 4k, we have to load it
	     from the literal pool (@GOT).  */

	  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

	  gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
		      || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);

	  if (reload_in_progress || reload_completed)
	    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  new_rtx = gen_const_mem (Pmode, new_rtx);
	  emit_move_insn (reg, new_rtx);
	  new_rtx = reg;
	}
    }
  else
    {
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      gcc_assert (XVECLEN (addr, 0) == 1);
	      switch (XINT (addr, 1))
		{
		  /* If someone moved a GOT-relative UNSPEC
		     out of the literal pool, force them back in.  */
		  case UNSPEC_GOTOFF:
		  case UNSPEC_PLTOFF:
		    new_rtx = force_const_mem (Pmode, orig);
		    break;

		  /* @GOT is OK as is if small.  */
		  case UNSPEC_GOT:
		    if (flag_pic == 2)
		      new_rtx = force_const_mem (Pmode, orig);
		    break;

		  /* @GOTENT is OK as is.  */
		  case UNSPEC_GOTENT:
		    break;

		  /* @PLT is OK as is on 64-bit, must be converted to
		     GOT-relative @PLTOFF on 31-bit.  */
		  case UNSPEC_PLT:
		    if (!TARGET_CPU_ZARCH)
		      {
			rtx temp = reg ? reg : gen_reg_rtx (Pmode);

			if (reload_in_progress || reload_completed)
			  df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

			addr = XVECEXP (addr, 0, 0);
			addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					       UNSPEC_PLTOFF);
			addr = gen_rtx_CONST (Pmode, addr);
			addr = force_const_mem (Pmode, addr);
			emit_move_insn (temp, addr);

			new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
			if (reg != 0)
			  {
			    s390_load_address (reg, new_rtx);
			    new_rtx = reg;
			  }
		      }
		    break;

		  /* Everything else cannot happen.  */
		  default:
		    gcc_unreachable ();
		}
	    }
	  else
	    gcc_assert (GET_CODE (addr) == PLUS);
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);

	  gcc_assert (!TLS_SYMBOLIC_CONST (op0));
	  gcc_assert (!TLS_SYMBOLIC_CONST (op1));

	  /* Check first to see if this is a constant offset
	     from a local symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      if (TARGET_CPU_ZARCH
		  && larl_operand (op0, VOIDmode)
		  && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
		  && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
		{
		  if (INTVAL (op1) & 1)
		    {
		      /* LARL can't handle odd offsets, so emit a
			 pair of LARL and LA.  */
		      rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		      if (!DISP_IN_RANGE (INTVAL (op1)))
			{
			  HOST_WIDE_INT even = INTVAL (op1) - 1;
			  op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
			  op0 = gen_rtx_CONST (Pmode, op0);
			  op1 = const1_rtx;
			}

		      emit_move_insn (temp, op0);
		      new_rtx = gen_rtx_PLUS (Pmode, temp, op1);

		      if (reg != 0)
			{
			  s390_load_address (reg, new_rtx);
			  new_rtx = reg;
			}
		    }
		  else
		    {
		      /* If the offset is even, we can just use LARL.
			 This will happen automatically.  */
		    }
		}
	      else
		{
		  /* Access local symbols relative to the GOT.  */

		  rtx temp = reg ? reg : gen_reg_rtx (Pmode);

		  if (reload_in_progress || reload_completed)
		    df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

		  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
					 UNSPEC_GOTOFF);
		  addr = gen_rtx_PLUS (Pmode, addr, op1);
		  addr = gen_rtx_CONST (Pmode, addr);
		  addr = force_const_mem (Pmode, addr);
		  emit_move_insn (temp, addr);

		  new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		  if (reg != 0)
		    {
		      s390_load_address (reg, new_rtx);
		      new_rtx = reg;
		    }
		}
	    }

	  /* Now, check whether it is a GOT relative symbol plus offset
	     that was pulled out of the literal pool.  Force it back in.  */

	  else if (GET_CODE (op0) == UNSPEC
		   && GET_CODE (op1) == CONST_INT
		   && XINT (op0, 1) == UNSPEC_GOTOFF)
	    {
	      gcc_assert (XVECLEN (op0, 0) == 1);

	      new_rtx = force_const_mem (Pmode, orig);
	    }

	  /* Otherwise, compute the sum.  */
	  else
	    {
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new_rtx = legitimize_pic_address (XEXP (addr, 1),
						base == reg ? NULL_RTX : reg);
	      if (GET_CODE (new_rtx) == CONST_INT)
		new_rtx = plus_constant (Pmode, base, INTVAL (new_rtx));
	      else
		{
		  if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
		      new_rtx = XEXP (new_rtx, 1);
		    }
		  new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
		}

	      if (GET_CODE (new_rtx) == CONST)
		new_rtx = XEXP (new_rtx, 0);
	      new_rtx = force_operand (new_rtx, 0);
	    }
	}
    }
  return new_rtx;
}
/* Load the thread pointer into a register.  */

static rtx
s390_get_thread_pointer (void)
{
  rtx tp = gen_reg_rtx (Pmode);

  emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
  mark_reg_pointer (tp, BITS_PER_WORD);

  return tp;
}
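
/* Usage sketch (assumed example, not from the original sources): the
   TLS code below computes every thread-local address as

     rtx tp = s390_get_thread_pointer ();
     ... gen_rtx_PLUS (Pmode, tp, offset) ...

   where OFFSET stands for the NTPOFF/DTPOFF-style displacement
   obtained for the particular TLS model.  */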
/* Emit a tls call insn.  The call target is the SYMBOL_REF stored
   in s390_tls_symbol which always refers to __tls_get_offset.
   The returned offset is written to RESULT_REG and an USE rtx is
   generated for TLS_CALL.  */

static GTY(()) rtx s390_tls_symbol;

static void
s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
{
  rtx insn;

  if (!flag_pic)
    emit_insn (s390_load_got ());

  if (!s390_tls_symbol)
    s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");

  insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
			 gen_rtx_REG (Pmode, RETURN_REGNUM));

  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
  RTL_CONST_CALL_P (insn) = 1;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new_rtx, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
	new_rtx = gen_rtx_CONST (Pmode, tls_call);
	new_rtx = force_const_mem (Pmode, new_rtx);
	emit_move_insn (r2, new_rtx);
	s390_emit_tls_call_insn (r2, tls_call);
	insn = get_insns ();
	end_sequence ();

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	base = gen_reg_rtx (Pmode);
	s390_load_address (base, new_rtx);

	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, base, temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_INITIAL_EXEC:
	if (flag_pic == 1)
	  {
	    /* Assume GOT offset < 4k.  This is handled the same way
	       in both 31- and 64-bit code.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (TARGET_CPU_ZARCH)
	  {
	    /* If the GOT offset might be >= 4k, we determine the position
	       of the GOT entry via a PC-relative LARL.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_const_mem (Pmode, temp);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);
	  }
	else if (flag_pic)
	  {
	    /* If the GOT offset might be >= 4k, we have to load it
	       from the literal pool.  */

	    if (reload_in_progress || reload_completed)
	      df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	    new_rtx = gen_const_mem (Pmode, new_rtx);

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }
	else
	  {
	    /* In position-dependent code, load the absolute address of
	       the GOT entry from the literal pool.  */

	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	    new_rtx = force_const_mem (Pmode, new_rtx);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new_rtx);

	    new_rtx = temp;
	    new_rtx = gen_const_mem (Pmode, new_rtx);
	    new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
	  }

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_EXEC:
	new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);
	new_rtx = force_const_mem (Pmode, new_rtx);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new_rtx);

	new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new_rtx);
	    new_rtx = reg;
	  }
	break;

      default:
	gcc_unreachable ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (addr, 0), 1))
	{
	case UNSPEC_INDNTPOFF:
	  gcc_assert (TARGET_CPU_ZARCH);
	  new_rtx = addr;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
    {
      new_rtx = XEXP (XEXP (addr, 0), 0);
      if (GET_CODE (new_rtx) != SYMBOL_REF)
	new_rtx = gen_rtx_CONST (Pmode, new_rtx);

      new_rtx = legitimize_tls_address (new_rtx, reg);
      new_rtx = plus_constant (Pmode, new_rtx,
			       INTVAL (XEXP (XEXP (addr, 0), 1)));
      new_rtx = force_operand (new_rtx, 0);
    }

  else
    gcc_unreachable ();  /* for now ... */

  return new_rtx;
}
/* Emit insns making the address in operands[1] valid for a standard
   move to operands[0].  operands[1] is replaced by an address which
   should be used instead of the former RTX to emit the move
   pattern.  */

void
emit_symbolic_move (rtx *operands)
{
  rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);

  if (GET_CODE (operands[0]) == MEM)
    operands[1] = force_reg (Pmode, operands[1]);
  else if (TLS_SYMBOLIC_CONST (operands[1]))
    operands[1] = legitimize_tls_address (operands[1], temp);
  else if (flag_pic)
    operands[1] = legitimize_pic_address (operands[1], temp);
}
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

static rtx
s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			 enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx constant_term = const0_rtx;

  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }
  else if (GET_CODE (x) == PLUS
	   && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
	       || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
    {
      return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
	  || (GET_CODE (x) == PLUS
	      && (SYMBOLIC_CONST (XEXP (x, 0))
		  || SYMBOLIC_CONST (XEXP (x, 1)))))
	x = legitimize_pic_address (x, 0);

      if (s390_legitimate_address_p (mode, x, FALSE))
	return x;
    }

  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && !TARGET_LONG_DISPLACEMENT
      && !DISP_IN_RANGE (INTVAL (constant_term))
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val = force_operand (GEN_INT (upper), temp);
      if (val != temp)
	emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val = force_operand (XEXP (x, 1), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
	}

      else if (GET_CODE (XEXP (x, 1)) == REG)
	{
	  rtx temp = gen_reg_rtx (Pmode);
	  rtx val = force_operand (XEXP (x, 0), temp);
	  if (val != temp)
	    emit_move_insn (temp, val);

	  x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
	}
    }

  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);

  return x;
}
/* Try a machine-dependent way of reloading an illegitimate address AD
   operand.  If we find one, push the reload and return the new address.

   MODE is the mode of the enclosing MEM.  OPNUM is the operand number
   and TYPE is the reload type of the current reload.  */

rtx
legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
			   int opnum, int type)
{
  if (!optimize || TARGET_LONG_DISPLACEMENT)
    return NULL_RTX;

  if (GET_CODE (ad) == PLUS)
    {
      rtx tem = simplify_binary_operation (PLUS, Pmode,
					   XEXP (ad, 0), XEXP (ad, 1));
      if (tem)
	ad = tem;
    }

  if (GET_CODE (ad) == PLUS
      && GET_CODE (XEXP (ad, 0)) == REG
      && GET_CODE (XEXP (ad, 1)) == CONST_INT
      && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
    {
      HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
      rtx cst, tem, new_rtx;

      cst = GEN_INT (upper);
      if (!legitimate_reload_constant_p (cst))
	cst = force_const_mem (Pmode, cst);

      tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
      new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));

      push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      return new_rtx;
    }

  return NULL_RTX;
}
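
/* Worked example (assumed values, not from the original sources):
   without long displacements, ad = (plus (reg) (const_int 0x12345))
   yields lower = 0x345 and upper = 0x12000; the base+0x12000 sum is
   pushed as its own reload so that neighbouring accesses can CSE it,
   and only the 12-bit 0x345 remains in the address itself.  */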
/* Emit code to move LEN bytes from SRC to DST.  */

bool
s390_expand_movmem (rtx dst, rtx src, rtx len)
{
  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k do we
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return false;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
				   const0_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);

	  /* Issue a write prefetch for the +3 cache line.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
				   const1_rtx, const0_rtx);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	  emit_insn (prefetch);
	}

      emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
			 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_movmem_short (dst, src,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
  return true;
}
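
/* For illustration (assumed values, not from the original sources):
   with a variable length of 600, COUNT becomes 599 and BLOCKS 2, so
   the loop above issues two 256-byte MVCs (movmem_short with
   length-1 = 255) and the trailing movmem_short moves the remaining
   (599 & 0xff) + 1 = 88 bytes.  */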
/* Emit code to set LEN bytes at DST to VAL.
   Make use of clrmem if VAL is zero.  */

void
s390_expand_setmem (rtx dst, rtx len, rtx val)
{
  if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
    return;

  gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);

  if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
    {
      if (val == const0_rtx && INTVAL (len) <= 256)
	emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
      else
	{
	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  if (INTVAL (len) > 1)
	    {
	      /* Initiate 1 byte overlap move.
		 The first byte of DST is propagated through DSTP1.
		 Prepare a movmem for:  DST+1 = DST (length = LEN - 1).
		 DST is set to size 1 so the rest of the memory location
		 does not count as source operand.  */
	      rtx dstp1 = adjust_address (dst, VOIDmode, 1);
	      set_mem_size (dst, 1);

	      emit_insn (gen_movmem_short (dstp1, dst,
					   GEN_INT (INTVAL (len) - 2)));
	    }
	}
    }

  else if (TARGET_MVCLE)
    {
      val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
      emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
    }

  else
    {
      rtx dst_addr, count, blocks, temp, dstp1 = NULL_RTX;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      dst_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);

      if (val == const0_rtx)
	temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			     OPTAB_DIRECT);
      else
	{
	  dstp1 = adjust_address (dst, VOIDmode, 1);
	  set_mem_size (dst, 1);

	  /* Initialize memory by storing the first byte.  */
	  emit_move_insn (adjust_address (dst, QImode, 0), val);

	  /* If count is 1 we are done.  */
	  emit_cmp_and_jump_insns (count, const1_rtx,
				   EQ, NULL_RTX, mode, 1, end_label);

	  temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1,
			       OPTAB_DIRECT);
	}
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
	{
	  /* Issue a write prefetch for the +4 cache line.  */
	  rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
						     GEN_INT (1024)),
				       const1_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
      else
	emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
      s390_load_address (dst_addr,
			 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      if (val == const0_rtx)
	emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
      else
	emit_insn (gen_movmem_short (dstp1, dst,
				     convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
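
/* For illustration (assumed example, not from the original sources):
   the 1-byte-overlap trick for a nonzero VAL stores the byte once and
   then copies DST -> DST+1 with an overlapping move; MVC proceeds
   byte by byte left to right, so each destination byte re-reads the
   byte just written, smearing VAL over the whole range.  A
   memset (p, 0x42, 5) thus becomes roughly

     mvi  0(%p),0x42
     mvc  1(4,%p),0(%p)
 */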
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET.  */

bool
s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
{
  rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
  rtx tmp;

  /* When tuning for z10 or higher we rely on the Glibc functions to
     do the right thing.  Only for constant lengths below 64k do we
     generate inline code.  */
  if (s390_tune >= PROCESSOR_2097_Z10
      && (GET_CODE (len) != CONST_INT || INTVAL (len) > (1<<16)))
    return false;

  /* As the result of CMPINT is inverted compared to what we need,
     we have to swap the operands.  */
  tmp = op0; op0 = op1; op1 = tmp;

  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
	{
	  emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
	  emit_insn (gen_cmpint (target, ccreg));
	}
      else
	emit_move_insn (target, const0_rtx);
    }
  else if (TARGET_MVCLE)
    {
      emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
      emit_insn (gen_cmpint (target, ccreg));
    }
  else
    {
      rtx addr0, addr1, count, blocks, temp;
      rtx loop_start_label = gen_label_rtx ();
      rtx loop_end_label = gen_label_rtx ();
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
	mode = Pmode;

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
			       EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
			   OPTAB_DIRECT);
      if (temp != count)
	emit_move_insn (count, temp);

      temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_label (loop_start_label);

      if (TARGET_Z10
	  && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
	{
	  rtx prefetch;

	  /* Issue a read prefetch for the +2 cache line of operand 1.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;

	  /* Issue a read prefetch for the +2 cache line of operand 2.  */
	  prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
				   const0_rtx, const0_rtx);
	  emit_insn (prefetch);
	  PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
	}

      emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
      temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
				   gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
			 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
			 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
			   OPTAB_DIRECT);
      if (temp != blocks)
	emit_move_insn (blocks, temp);

      emit_cmp_and_jump_insns (blocks, const0_rtx,
			       EQ, NULL_RTX, mode, 1, loop_end_label);

      emit_jump (loop_start_label);
      emit_label (loop_end_label);

      emit_insn (gen_cmpmem_short (op0, op1,
				   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);

      emit_insn (gen_cmpint (target, ccreg));
    }
  return true;
}
/* Expand conditional increment or decrement using alc/slb instructions.
   Should generate code setting DST to either SRC or SRC + INCREMENT,
   depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
   Returns true if successful, false otherwise.

   That makes it possible to implement some if-constructs without jumps e.g.:
   (borrow = CC0 | CC1 and carry = CC2 | CC3)
   unsigned int a, b, c;
   if (a < b)  c++; -> CCU  b > a  -> CC2;    c += carry;
   if (a < b)  c--; -> CCL3 a - b  -> borrow; c -= borrow;
   if (a <= b) c++; -> CCL3 b - a  -> borrow; c += carry;
   if (a <= b) c--; -> CCU  a <= b -> borrow; c -= borrow;

   Checks for EQ and NE with a nonzero value need an additional xor e.g.:
   if (a == b) c++; -> CCL3 a ^= b; 0 - a  -> borrow;    c += carry;
   if (a == b) c--; -> CCU  a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
   if (a != b) c++; -> CCU  a ^= b; a > 0  -> CC2;       c += carry;
   if (a != b) c--; -> CCL3 a ^= b; 0 - a  -> borrow;    c -= borrow; */

bool
s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
		   rtx dst, rtx src, rtx increment)
{
  enum machine_mode cmp_mode;
  enum machine_mode cc_mode;
  rtx op_res;
  rtx insn;
  rtvec p;
  int ret;

  if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
      && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = SImode;
  else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
	   && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
    cmp_mode = DImode;
  else
    return false;

  /* Try ADD LOGICAL WITH CARRY.  */
  if (increment == const1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == LTU || cmp_code == LEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	case GTU:
	  cc_mode = CCUmode;
	  break;

	case GEU:
	  cc_mode = CCL3mode;
	  break;

	default:
	  return false;
	}

      /* Emit comparison instruction pattern.  */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit ALC instruction pattern.  */
      op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
			       gen_rtx_REG (cc_mode, CC_REGNUM),
			       const0_rtx);

      if (src != const0_rtx)
	{
	  if (!register_operand (src, GET_MODE (dst)))
	    src = force_reg (GET_MODE (dst), src);

	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
	  op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
	}

      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  /* Try SUBTRACT LOGICAL WITH BORROW.  */
  if (increment == constm1_rtx)
    {
      /* Determine CC mode to use.  */
      if (cmp_code == EQ || cmp_code == NE)
	{
	  if (cmp_op1 != const0_rtx)
	    {
	      cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
					     NULL_RTX, 0, OPTAB_WIDEN);
	      cmp_op1 = const0_rtx;
	    }

	  cmp_code = cmp_code == EQ ? LEU : GTU;
	}

      if (cmp_code == GTU || cmp_code == GEU)
	{
	  rtx tem = cmp_op0;
	  cmp_op0 = cmp_op1;
	  cmp_op1 = tem;
	  cmp_code = swap_condition (cmp_code);
	}

      switch (cmp_code)
	{
	case LEU:
	  cc_mode = CCUmode;
	  break;

	case LTU:
	  cc_mode = CCL3mode;
	  break;

	default:
	  return false;
	}

      /* Emit comparison instruction pattern.  */
      if (!register_operand (cmp_op0, cmp_mode))
	cmp_op0 = force_reg (cmp_mode, cmp_op0);

      insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
			  gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
      /* We use insn_invalid_p here to add clobbers if required.  */
      ret = insn_invalid_p (emit_insn (insn), false);
      gcc_assert (!ret);

      /* Emit SLB instruction pattern.  */
      if (!register_operand (src, GET_MODE (dst)))
	src = force_reg (GET_MODE (dst), src);

      op_res = gen_rtx_MINUS (GET_MODE (dst),
			      gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
			      gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
					      gen_rtx_REG (cc_mode, CC_REGNUM),
					      const0_rtx));
      p = rtvec_alloc (2);
      RTVEC_ELT (p, 0) =
	gen_rtx_SET (VOIDmode, dst, op_res);
      RTVEC_ELT (p, 1) =
	gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return true;
    }

  return false;
}
/* Expand code for the insv template.  Return true if successful.  */

bool
s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
{
  int bitsize = INTVAL (op1);
  int bitpos = INTVAL (op2);
  enum machine_mode mode = GET_MODE (dest);
  enum machine_mode smode;
  int smode_bsize, mode_bsize;
  rtx op, clobber;

  /* Generate INSERT IMMEDIATE (IILL et al).  */
  /* (set (ze (reg)) (const_int)).  */
  if (TARGET_ZARCH
      && register_operand (dest, word_mode)
      && (bitpos % 16) == 0
      && (bitsize % 16) == 0
      && const_int_operand (src, VOIDmode))
    {
      HOST_WIDE_INT val = INTVAL (src);
      int regpos = bitpos + bitsize;

      while (regpos > bitpos)
	{
	  enum machine_mode putmode;
	  int putsize;

	  if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
	    putmode = SImode;
	  else
	    putmode = HImode;

	  putsize = GET_MODE_BITSIZE (putmode);
	  regpos -= putsize;
	  emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						GEN_INT (putsize),
						GEN_INT (regpos)),
			  gen_int_mode (val, putmode));
	  val >>= putsize;
	}
      gcc_assert (regpos == bitpos);
      return true;
    }

  smode = smallest_mode_for_size (bitsize, MODE_INT);
  smode_bsize = GET_MODE_BITSIZE (smode);
  mode_bsize = GET_MODE_BITSIZE (mode);

  /* Generate STORE CHARACTERS UNDER MASK (STCM et al).  */
  if (bitpos == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && MEM_P (dest)
      && (register_operand (src, word_mode)
	  || const_int_operand (src, VOIDmode)))
    {
      /* Emit standard pattern if possible.  */
      if (smode_bsize == bitsize)
	{
	  emit_move_insn (adjust_address (dest, smode, 0),
			  gen_lowpart (smode, src));
	  return true;
	}

      /* (set (ze (mem)) (const_int)).  */
      else if (const_int_operand (src, VOIDmode))
	{
	  int size = bitsize / BITS_PER_UNIT;
	  rtx src_mem = adjust_address (force_const_mem (word_mode, src),
					BLKmode,
					UNITS_PER_WORD - size);

	  dest = adjust_address (dest, BLKmode, 0);
	  set_mem_size (dest, size);
	  s390_expand_movmem (dest, src_mem, GEN_INT (size));
	  return true;
	}

      /* (set (ze (mem)) (reg)).  */
      else if (register_operand (src, word_mode))
	{
	  if (bitsize <= 32)
	    emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
						  const0_rtx), src);
	  else
	    {
	      /* Emit st,stcmh sequence.  */
	      int stcmh_width = bitsize - 32;
	      int size = stcmh_width / BITS_PER_UNIT;

	      emit_move_insn (adjust_address (dest, SImode, size),
			      gen_lowpart (SImode, src));
	      set_mem_size (dest, size);
	      emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
						    GEN_INT (stcmh_width),
						    const0_rtx),
			      gen_rtx_LSHIFTRT (word_mode, src, GEN_INT (32)));
	    }
	  return true;
	}

      return false;
    }

  /* Generate INSERT CHARACTERS UNDER MASK (IC, ICM et al).  */
  if ((bitpos % BITS_PER_UNIT) == 0
      && (bitsize % BITS_PER_UNIT) == 0
      && (bitpos & 32) == ((bitpos + bitsize - 1) & 32)
      && MEM_P (src)
      && (mode == DImode || mode == SImode)
      && register_operand (dest, mode))
    {
      /* Emit a strict_low_part pattern if possible.  */
      if (smode_bsize == bitsize && bitpos == mode_bsize - smode_bsize)
	{
	  op = gen_rtx_STRICT_LOW_PART (VOIDmode, gen_lowpart (smode, dest));
	  op = gen_rtx_SET (VOIDmode, op, gen_lowpart (smode, src));
	  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
	  return true;
	}

      /* ??? There are more powerful versions of ICM that are not
	 completely represented in the md file.  */
    }

  /* For z10, generate ROTATE THEN INSERT SELECTED BITS (RISBG et al).  */
  if (TARGET_Z10 && (mode == DImode || mode == SImode))
    {
      enum machine_mode mode_s = GET_MODE (src);

      if (mode_s == VOIDmode)
	{
	  /* Assume const_int etc already in the proper mode.  */
	  src = force_reg (mode, src);
	}
      else if (mode_s != mode)
	{
	  gcc_assert (GET_MODE_BITSIZE (mode_s) >= bitsize);
	  src = force_reg (mode_s, src);
	  src = gen_lowpart (mode, src);
	}

      op = gen_rtx_SET (mode,
			gen_rtx_ZERO_EXTRACT (mode, dest, op1, op2),
			src);
      clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));

      return true;
    }

  return false;
}
/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
   register that holds VAL of mode MODE shifted by COUNT bits.  */

static inline rtx
s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
{
  val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
                             NULL_RTX, 1, OPTAB_DIRECT);
  return expand_simple_binop (SImode, ASHIFT, val, count,
                              NULL_RTX, 1, OPTAB_DIRECT);
}

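/* Example: for MODE == QImode, a VAL of 0x1234 and a COUNT of 24, the
   returned register holds (0x1234 & 0xff) << 24 == 0x34000000.  */
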
/* Structure to hold the initial parameters for a compare_and_swap operation
   in HImode and QImode.  */

struct alignment_context
{
  rtx memsi;      /* SI aligned memory location.  */
  rtx shift;      /* Bit offset with regard to lsb.  */
  rtx modemask;   /* Mask of the HQImode shifted by SHIFT bits.  */
  rtx modemaski;  /* ~modemask */
  bool aligned;   /* True if memory is aligned, false else.  */
};

/* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
   structure AC for transparent simplifying, if the memory alignment is known
   to be at least 32bit.  MEM is the memory location for the actual operation
   and MODE its mode.  */

static void
init_alignment_context (struct alignment_context *ac, rtx mem,
                        enum machine_mode mode)
{
  ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
  ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));

  if (ac->aligned)
    ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned.  */
  else
    {
      /* Alignment is unknown.  */
      rtx byteoffset, addr, align;

      /* Force the address into a register.  */
      addr = force_reg (Pmode, XEXP (mem, 0));

      /* Align it to SImode.  */
      align = expand_simple_binop (Pmode, AND, addr,
                                   GEN_INT (-GET_MODE_SIZE (SImode)),
                                   NULL_RTX, 1, OPTAB_DIRECT);
      /* Generate MEM.  */
      ac->memsi = gen_rtx_MEM (SImode, align);
      MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
      set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
      set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));

      /* Calculate shiftcount.  */
      byteoffset = expand_simple_binop (Pmode, AND, addr,
                                        GEN_INT (GET_MODE_SIZE (SImode) - 1),
                                        NULL_RTX, 1, OPTAB_DIRECT);
      /* As we already have some offset, evaluate the remaining distance.  */
      ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
                                       NULL_RTX, 1, OPTAB_DIRECT);
    }

  /* Shift is the byte count, but we need the bitcount.  */
  ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift, GEN_INT (3),
                                   NULL_RTX, 1, OPTAB_DIRECT);

  /* Calculate masks.  */
  ac->modemask = expand_simple_binop (SImode, ASHIFT,
                                      GEN_INT (GET_MODE_MASK (mode)),
                                      ac->shift, NULL_RTX, 1, OPTAB_DIRECT);
  ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask,
                                      NULL_RTX, 1);
}

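/* Worked example for the unaligned case: for an HImode MEM at address A,
   memsi covers the word at A & -4, the byte offset within that word is
   A & 3, and shift becomes ((4 - 2) - (A & 3)) * 8, i.e. the bit distance
   from the lsb of the loaded word to the lsb of the halfword within it
   (the machine is big-endian).  */
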
/* A subroutine of s390_expand_cs_hqi.  Insert INS into VAL.  If possible,
   use a single insv insn into SEQ2.  Otherwise, put prep insns in SEQ1 and
   perform the merge in SEQ2.  */

static rtx
s390_two_part_insv (struct alignment_context *ac, rtx *seq1, rtx *seq2,
                    enum machine_mode mode, rtx val, rtx ins)
{
  rtx tmp;

  /* Prefer a single insv insn if possible.  */
  start_sequence ();
  tmp = copy_to_mode_reg (SImode, val);
  if (s390_expand_insv (tmp, GEN_INT (GET_MODE_BITSIZE (mode)),
                        ac->shift, ins))
    {
      *seq1 = NULL;
      *seq2 = get_insns ();
      end_sequence ();
      return tmp;
    }
  end_sequence ();

  /* Failed to use insv.  Generate a two part shift and mask.  */
  start_sequence ();
  tmp = s390_expand_mask_and_shift (ins, mode, ac->shift);
  *seq1 = get_insns ();
  end_sequence ();

  start_sequence ();
  tmp = expand_simple_binop (SImode, IOR, tmp, val, NULL_RTX, 1, OPTAB_DIRECT);
  *seq2 = get_insns ();
  end_sequence ();

  return tmp;
}

/* Expand an atomic compare and swap operation for HImode and QImode.  MEM is
   the memory location, CMP the old value to compare MEM with and NEW_RTX the
   value to set if CMP == MEM.  */

void
s390_expand_cs_hqi (enum machine_mode mode, rtx btarget, rtx vtarget, rtx mem,
                    rtx cmp, rtx new_rtx, bool is_weak)
{
  struct alignment_context ac;
  rtx cmpv, newv, val, resv, cc, seq0, seq1, seq2, seq3;
  rtx res = gen_reg_rtx (SImode);
  rtx csloop = NULL, csend = NULL;

  gcc_assert (register_operand (vtarget, VOIDmode));
  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Load full word.  Subsequent loads are performed by CS.  */
  val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
                             NULL_RTX, 1, OPTAB_DIRECT);

  /* Prepare insertions of cmp and new_rtx into the loaded value.  When
     possible, we try to use insv to make this happen efficiently.  If
     that fails we'll generate code both inside and outside the loop.  */
  cmpv = s390_two_part_insv (&ac, &seq0, &seq2, mode, val, cmp);
  newv = s390_two_part_insv (&ac, &seq1, &seq3, mode, val, new_rtx);

  if (seq0)
    emit_insn (seq0);
  if (seq1)
    emit_insn (seq1);

  /* Start CS loop.  */
  if (!is_weak)
    {
      /* Begin assuming success.  */
      emit_move_insn (btarget, const1_rtx);

      csloop = gen_label_rtx ();
      csend = gen_label_rtx ();
      emit_label (csloop);
    }

  /* val = "<mem>00..0<mem>"
   * cmp = "00..0<cmp>00..0"
   * new = "00..0<new>00..0"
   */

  emit_insn (seq2);
  emit_insn (seq3);

  cc = s390_emit_compare_and_swap (EQ, res, ac.memsi, cmpv, newv);

  if (is_weak)
    emit_insn (gen_cstorecc4 (btarget, cc, XEXP (cc, 0), XEXP (cc, 1)));
  else
    {
      /* Jump to end if we're done (likely?).  */
      s390_emit_jump (csend, cc);

      /* Check for changes outside mode, and loop internal if so.  */
      resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
                                  NULL_RTX, 1, OPTAB_DIRECT);
      cc = s390_emit_compare (NE, resv, val);
      emit_move_insn (val, resv);
      s390_emit_jump (csloop, cc);

      /* Failed.  */
      emit_move_insn (btarget, const0_rtx);
      emit_label (csend);
    }

  /* Return the correct part of the bitfield.  */
  convert_move (vtarget, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
                                              NULL_RTX, 1, OPTAB_DIRECT), 1);
}

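/* s390_expand_cs_hqi is used for compare-and-swap on QImode and HImode
   operands, which the hardware COMPARE AND SWAP instruction cannot
   handle directly; the loop above emulates them on the containing
   SImode word.  */
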
/* Expand an atomic operation CODE of mode MODE.  MEM is the memory location
   and VAL the value to play with.  If AFTER is true then store the value MEM
   holds after the operation, if AFTER is false then store the value MEM
   holds before the operation.  If TARGET is zero then discard that value, else
   store it to TARGET.  */

void
s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
                    rtx target, rtx mem, rtx val, bool after)
{
  struct alignment_context ac;
  rtx cmp;
  rtx new_rtx = gen_reg_rtx (SImode);
  rtx orig = gen_reg_rtx (SImode);
  rtx csloop = gen_label_rtx ();

  gcc_assert (!target || register_operand (target, VOIDmode));
  gcc_assert (MEM_P (mem));

  init_alignment_context (&ac, mem, mode);

  /* Shift val to the correct bit positions.
     Preserve "icm", but prevent "ex icm".  */
  if (!(ac.aligned && code == SET && MEM_P (val)))
    val = s390_expand_mask_and_shift (val, mode, ac.shift);

  /* Further preparation insns.  */
  if (code == PLUS || code == MINUS)
    emit_move_insn (orig, val);
  else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
    val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
                               NULL_RTX, 1, OPTAB_DIRECT);

  /* Load full word.  Subsequent loads are performed by CS.  */
  cmp = force_reg (SImode, ac.memsi);

  /* Start CS loop.  */
  emit_label (csloop);
  emit_move_insn (new_rtx, cmp);

  /* Patch new with val at correct position.  */
  switch (code)
    {
    case PLUS:
    case MINUS:
      val = expand_simple_binop (SImode, code, new_rtx, orig,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      val = expand_simple_binop (SImode, AND, val, ac.modemask,
                                 NULL_RTX, 1, OPTAB_DIRECT);
      /* FALLTHRU */
    case SET:
      if (ac.aligned && MEM_P (val))
        store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0,
                         0, 0, SImode, val);
      else
        {
          new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
                                         NULL_RTX, 1, OPTAB_DIRECT);
          new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
                                         NULL_RTX, 1, OPTAB_DIRECT);
        }
      break;
    case AND:
    case IOR:
    case XOR:
      new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    case MULT: /* NAND */
      new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
                                     NULL_RTX, 1, OPTAB_DIRECT);
      break;
    default:
      gcc_unreachable ();
    }

  s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
                                                      ac.memsi, cmp, new_rtx));

  /* Return the correct part of the bitfield.  */
  if (target)
    convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
                                               after ? new_rtx : cmp, ac.shift,
                                               NULL_RTX, 1, OPTAB_DIRECT), 1);
}

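/* Note that MULT serves as a stand-in rtx_code for NAND above: the
   AND/XOR pair computes ~(cmp & val) restricted to the mode bits,
   while all bits outside the field keep their loaded value.  */
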
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;

static void
s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs ("\t.quad\t", file);
      break;
    default:
      gcc_unreachable ();
    }
  output_addr_const (file, x);
  fputs ("@DTPOFF", file);
}

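/* For example, a DTP-relative reference to symbol foo is emitted as
   "\t.long\tfoo@DTPOFF" for SIZE 4 and "\t.quad\tfoo@DTPOFF" for
   SIZE 8.  */
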
#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
/* Implement TARGET_MANGLE_TYPE.  */

static const char *
s390_mangle_type (const_tree type)
{
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_LONG_DOUBLE_128)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
#endif

/* In the name of slightly smaller debug output, and to cater to
   general assembler lossage, recognize various UNSPEC sequences
   and turn them back into a direct symbol reference.  */

static rtx
s390_delegitimize_address (rtx orig_x)
{
  rtx x, y;

  orig_x = delegitimize_mem_from_attrs (orig_x);
  x = orig_x;

  /* Extract the symbol ref from:
     (plus:SI (reg:SI 12 %r12)
              (const:SI (unspec:SI [(symbol_ref/f:SI ("*.LC0"))]
                                    UNSPEC_GOTOFF/PLTOFF)))
     and
     (plus:SI (reg:SI 12 %r12)
              (const:SI (plus:SI (unspec:SI [(symbol_ref:SI ("L"))]
                                             UNSPEC_GOTOFF/PLTOFF)
                                 (const_int 4 [0x4]))))  */
  if (GET_CODE (x) == PLUS
      && REG_P (XEXP (x, 0))
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM
      && GET_CODE (XEXP (x, 1)) == CONST)
    {
      HOST_WIDE_INT offset = 0;

      /* The const operand.  */
      y = XEXP (XEXP (x, 1), 0);

      if (GET_CODE (y) == PLUS
          && GET_CODE (XEXP (y, 1)) == CONST_INT)
        {
          offset = INTVAL (XEXP (y, 1));
          y = XEXP (y, 0);
        }

      if (GET_CODE (y) == UNSPEC
          && (XINT (y, 1) == UNSPEC_GOTOFF
              || XINT (y, 1) == UNSPEC_PLTOFF))
        return plus_constant (Pmode, XVECEXP (y, 0, 0), offset);
    }

  if (GET_CODE (x) != MEM)
    return orig_x;

  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
    {
      y = XEXP (XEXP (x, 1), 0);
      if (GET_CODE (y) == UNSPEC
          && XINT (y, 1) == UNSPEC_GOT)
        y = XVECEXP (y, 0, 0);
      else
        return orig_x;
    }
  else if (GET_CODE (x) == CONST)
    {
      /* Extract the symbol ref from:
         (mem:QI (const:DI (unspec:DI [(symbol_ref:DI ("foo"))]
                                       UNSPEC_PLT/GOTENT)))  */

      y = XEXP (x, 0);
      if (GET_CODE (y) == UNSPEC
          && (XINT (y, 1) == UNSPEC_GOTENT
              || XINT (y, 1) == UNSPEC_PLT))
        y = XVECEXP (y, 0, 0);
      else
        return orig_x;
    }
  else
    return orig_x;

  if (GET_MODE (orig_x) != Pmode)
    {
      if (GET_MODE (orig_x) == BLKmode)
        return orig_x;
      y = lowpart_subreg (GET_MODE (orig_x), y, Pmode);
      if (y == NULL_RTX)
        return orig_x;
    }
  return y;
}

/* Output operand OP to stdio stream FILE.
   OP is an address (register + offset) which is not used to address data;
   instead the rightmost bits are interpreted as the value.  */

static void
print_shift_count_operand (FILE *file, rtx op)
{
  HOST_WIDE_INT offset;
  rtx base;

  /* Extract base register and offset.  */
  if (!s390_decompose_shift_count (op, &base, &offset))
    gcc_unreachable ();

  /* Sanity check.  */
  if (base)
    {
      gcc_assert (GET_CODE (base) == REG);
      gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
      gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
    }

  /* Offsets are constricted to twelve bits.  */
  fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
  if (base)
    fprintf (file, "(%s)", reg_names[REGNO (base)]);
}

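/* Example: for OP == (plus:DI (reg %r2) (const_int 7)) this prints
   "7(%r2)"; for a plain (const_int 3) it prints just "3".  */
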
/* See 'get_some_local_dynamic_name'.  */

static int
get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      x = get_pool_constant (x);
      return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
    }

  if (GET_CODE (x) == SYMBOL_REF
      && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
    {
      cfun->machine->some_ld_name = XSTR (x, 0);
      return 1;
    }

  return 0;
}

/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in local-dynamic base patterns.  */

static const char *
get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  gcc_unreachable ();
}

/* Output machine-dependent UNSPECs occurring in address constant X
   in assembler syntax to stdio stream FILE.  Returns true if the
   constant X could be recognized, false otherwise.  */

static bool
s390_output_addr_const_extra (FILE *file, rtx x)
{
  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
    switch (XINT (x, 1))
      {
      case UNSPEC_GOTENT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTENT");
        return true;
      case UNSPEC_GOT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOT");
        return true;
      case UNSPEC_GOTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTOFF");
        return true;
      case UNSPEC_PLT:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLT");
        return true;
      case UNSPEC_PLTOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@PLTOFF");
        return true;
      case UNSPEC_TLSGD:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@TLSGD");
        return true;
      case UNSPEC_TLSLDM:
        assemble_name (file, get_some_local_dynamic_name ());
        fprintf (file, "@TLSLDM");
        return true;
      case UNSPEC_DTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@DTPOFF");
        return true;
      case UNSPEC_NTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@NTPOFF");
        return true;
      case UNSPEC_GOTNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@GOTNTPOFF");
        return true;
      case UNSPEC_INDNTPOFF:
        output_addr_const (file, XVECEXP (x, 0, 0));
        fprintf (file, "@INDNTPOFF");
        return true;
      }

  if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
    switch (XINT (x, 1))
      {
      case UNSPEC_POOL_OFFSET:
        x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
        output_addr_const (file, x);
        return true;
      }

  return false;
}

/* Output address operand ADDR in assembler syntax to
   stdio stream FILE.  */

void
print_operand_address (FILE *file, rtx addr)
{
  struct s390_address ad;

  if (s390_symref_operand_p (addr, NULL, NULL))
    {
      if (!TARGET_Z10)
        {
          output_operand_lossage ("symbolic memory references are "
                                  "only supported on z10 or later");
          return;
        }
      output_addr_const (file, addr);
      return;
    }

  if (!s390_decompose_address (addr, &ad)
      || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
      || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
    output_operand_lossage ("cannot decompose address");

  if (ad.disp)
    output_addr_const (file, ad.disp);
  else
    fprintf (file, "0");

  if (ad.base && ad.indx)
    fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
             reg_names[REGNO (ad.base)]);
  else if (ad.base)
    fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
}

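/* Example output: a base+displacement address prints as "160(%r15)",
   a base+index+displacement address as "8(%r1,%r2)" with the index
   register given first.  */
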
/* Output operand X in assembler syntax to stdio stream FILE.
   CODE specifies the format flag.  The following format flags
   are recognized:

   'C': print opcode suffix for branch condition.
   'D': print opcode suffix for inverse branch condition.
   'E': print opcode suffix for branch on index instruction.
   'J': print tls_load/tls_gdcall/tls_ldcall suffix
   'G': print the size of the operand in bytes.
   'O': print only the displacement of a memory reference.
   'R': print only the base register of a memory reference.
   'S': print S-type memory reference (base+displacement).
   'N': print the second word of a DImode operand.
   'M': print the second word of a TImode operand.
   'Y': print shift count operand.

   'b': print integer X as if it's an unsigned byte.
   'c': print integer X as if it's a signed byte.
   'x': print integer X as if it's an unsigned halfword.
   'h': print integer X as if it's a signed halfword.
   'i': print the first nonzero HImode part of X.
   'j': print the first HImode part unequal to -1 of X.
   'k': print the first nonzero SImode part of X.
   'm': print the first SImode part unequal to -1 of X.
   'o': print integer X as if it's an unsigned 32bit word.  */

void
print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'C':
      fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
      return;

    case 'D':
      fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
      return;

    case 'E':
      if (GET_CODE (x) == LE)
        fprintf (file, "l");
      else if (GET_CODE (x) == GT)
        fprintf (file, "h");
      else
        output_operand_lossage ("invalid comparison operator "
                                "for 'E' output modifier");
      return;

    case 'J':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          fprintf (file, "%s", ":tls_load:");
          output_addr_const (file, x);
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
        {
          fprintf (file, "%s", ":tls_gdcall:");
          output_addr_const (file, XVECEXP (x, 0, 0));
        }
      else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
        {
          fprintf (file, "%s", ":tls_ldcall:");
          assemble_name (file, get_some_local_dynamic_name ());
        }
      else
        output_operand_lossage ("invalid reference for 'J' output modifier");
      return;

    case 'G':
      fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
      return;

    case 'O':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'O' output modifier");
            return;
          }

        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'O' output modifier");
            return;
          }

        if (ad.disp)
          output_addr_const (file, ad.disp);
        else
          fprintf (file, "0");
      }
      return;

    case 'R':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'R' output modifier");
            return;
          }

        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'R' output modifier");
            return;
          }

        if (ad.base)
          fprintf (file, "%s", reg_names[REGNO (ad.base)]);
        else
          fprintf (file, "0");
      }
      return;

    case 'S':
      {
        struct s390_address ad;
        int ret;

        if (!MEM_P (x))
          {
            output_operand_lossage ("memory reference expected for "
                                    "'S' output modifier");
            return;
          }

        ret = s390_decompose_address (XEXP (x, 0), &ad);

        if (!ret
            || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
            || ad.indx)
          {
            output_operand_lossage ("invalid address for 'S' output modifier");
            return;
          }

        if (ad.disp)
          output_addr_const (file, ad.disp);
        else
          fprintf (file, "0");

        if (ad.base)
          fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
      }
      return;

    case 'N':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode,
                            plus_constant (Pmode, XEXP (x, 0), 4));
      else
        output_operand_lossage ("register or memory expression expected "
                                "for 'N' output modifier");
      break;

    case 'M':
      if (GET_CODE (x) == REG)
        x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
      else if (GET_CODE (x) == MEM)
        x = change_address (x, VOIDmode,
                            plus_constant (Pmode, XEXP (x, 0), 8));
      else
        output_operand_lossage ("register or memory expression expected "
                                "for 'M' output modifier");
      break;

    case 'Y':
      print_shift_count_operand (file, x);
      return;
    }

  switch (GET_CODE (x))
    {
    case REG:
      fprintf (file, "%s", reg_names[REGNO (x)]);
      break;

    case MEM:
      output_address (XEXP (x, 0));
      break;

    case CONST:
    case CODE_LABEL:
    case LABEL_REF:
    case SYMBOL_REF:
      output_addr_const (file, x);
      break;

    case CONST_INT:
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
      else if (code == 'c')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
      else if (code == 'i')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, 0));
      else if (code == 'j')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, HImode, -1));
      else if (code == 'k')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, SImode, 0));
      else if (code == 'm')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 s390_extract_part (x, SImode, -1));
      else if (code == 'o')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST_DOUBLE:
      gcc_assert (GET_MODE (x) == VOIDmode);
      if (code == 'b')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
      else if (code == 'x')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
      else if (code == 'h')
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
        {
          if (code == 0)
            output_operand_lossage ("invalid constant - try using "
                                    "an output modifier");
          else
            output_operand_lossage ("invalid constant for output modifier '%c'",
                                    code);
        }
      break;

    default:
      if (code == 0)
        output_operand_lossage ("invalid expression - try using "
                                "an output modifier");
      else
        output_operand_lossage ("invalid expression for output "
                                "modifier '%c'", code);
      break;
    }
}

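/* Example for the halfword modifiers: with X == (const_int 0x18765),
   'x' prints 34661 (0x8765 taken as unsigned) while 'h' prints -30875
   (the same 16 bits taken as a signed value).  */
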
/* Target hook for assembling integer objects.  We need to define it
   here to work around a bug in some versions of GAS, which couldn't
   handle values smaller than INT_MIN when printed in decimal.  */

static bool
s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
  if (size == 8 && aligned_p
      && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
    {
      fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
               INTVAL (x));
      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}

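/* For instance, a 64-bit constant smaller than INT_MIN such as
   -9223372036854775808 is emitted as "\t.quad\t0x8000000000000000"
   rather than in decimal.  */
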
/* Returns true if register REGNO is used for forming
   a memory address in expression X.  */

static bool
reg_used_in_mem_p (int regno, rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == MEM)
    {
      if (refers_to_regno_p (regno, regno+1,
                             XEXP (x, 0), 0))
        return true;
    }
  else if (code == SET
           && GET_CODE (SET_DEST (x)) == PC)
    {
      if (refers_to_regno_p (regno, regno+1,
                             SET_SRC (x), 0))
        return true;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && reg_used_in_mem_p (regno, XEXP (x, i)))
        return true;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
            return true;
    }
  return false;
}

/* Returns true if expression DEP_RTX sets an address register
   used by instruction INSN to address memory.  */

static bool
addr_generation_dependency_p (rtx dep_rtx, rtx insn)
{
  rtx target, pat;

  if (GET_CODE (dep_rtx) == INSN)
    dep_rtx = PATTERN (dep_rtx);

  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);
      if (GET_CODE (target) == STRICT_LOW_PART)
        target = XEXP (target, 0);
      while (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (GET_CODE (target) == REG)
        {
          int regno = REGNO (target);

          if (s390_safe_attr_type (insn) == TYPE_LA)
            {
              pat = PATTERN (insn);
              if (GET_CODE (pat) == PARALLEL)
                {
                  gcc_assert (XVECLEN (pat, 0) == 2);
                  pat = XVECEXP (pat, 0, 0);
                }
              gcc_assert (GET_CODE (pat) == SET);
              return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
            }
          else if (get_attr_atype (insn) == ATYPE_AGEN)
            return reg_used_in_mem_p (regno, PATTERN (insn));
        }
    }
  return false;
}

/* Return 1, if dep_insn sets register used in insn in the agen unit.  */

int
s390_agen_dep_p (rtx dep_insn, rtx insn)
{
  rtx dep_rtx = PATTERN (dep_insn);
  int i;

  if (GET_CODE (dep_rtx) == SET
      && addr_generation_dependency_p (dep_rtx, insn))
    return 1;
  else if (GET_CODE (dep_rtx) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
        {
          if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
            return 1;
        }
    }
  return 0;
}

/* A C statement (sans semicolon) to update the integer scheduling priority
   INSN_PRIORITY (INSN).  Increase the priority to execute the INSN earlier,
   reduce the priority to execute INSN later.  Do not define this macro if
   you do not need to adjust the scheduling priorities of insns.

   A STD instruction should be scheduled earlier,
   in order to use the bypass.  */

static int
s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  if (! INSN_P (insn))
    return priority;

  if (s390_tune != PROCESSOR_2084_Z990
      && s390_tune != PROCESSOR_2094_Z9_109
      && s390_tune != PROCESSOR_2097_Z10
      && s390_tune != PROCESSOR_2817_Z196)
    return priority;

  switch (s390_safe_attr_type (insn))
    {
      case TYPE_FSTOREDF:
      case TYPE_FSTORESF:
        priority = priority << 3;
        break;
      case TYPE_STORE:
      case TYPE_STM:
        priority = priority << 1;
        break;
      default:
        break;
    }
  return priority;
}

/* The number of instructions that can be issued per cycle.  */

static int
s390_issue_rate (void)
{
  switch (s390_tune)
    {
    case PROCESSOR_2084_Z990:
    case PROCESSOR_2094_Z9_109:
    case PROCESSOR_2817_Z196:
      return 3;
    case PROCESSOR_2097_Z10:
      return 2;
    default:
      return 1;
    }
}

static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return 4;
}

/* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
   Fix up MEMs as required.  */

static void
annotate_constant_pool_refs (rtx *x)
{
  int i, j;
  const char *fmt;

  gcc_assert (GET_CODE (*x) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (*x));

  /* Literal pool references can only occur inside a MEM ...  */
  if (GET_CODE (*x) == MEM)
    {
      rtx memref = XEXP (*x, 0);

      if (GET_CODE (memref) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (memref))
        {
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
                                     UNSPEC_LTREF);

          *x = replace_equiv_address (*x, addr);
          return;
        }

      if (GET_CODE (memref) == CONST
          && GET_CODE (XEXP (memref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
          rtx sym = XEXP (XEXP (memref, 0), 0);
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
                                     UNSPEC_LTREF);

          *x = replace_equiv_address (*x, plus_constant (Pmode, addr, off));
          return;
        }
    }

  /* ... or a load-address type pattern.  */
  if (GET_CODE (*x) == SET)
    {
      rtx addrref = SET_SRC (*x);

      if (GET_CODE (addrref) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (addrref))
        {
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
                                     UNSPEC_LTREF);

          SET_SRC (*x) = addr;
          return;
        }

      if (GET_CODE (addrref) == CONST
          && GET_CODE (XEXP (addrref, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
        {
          HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
          rtx sym = XEXP (XEXP (addrref, 0), 0);
          rtx base = cfun->machine->base_reg;
          rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
                                     UNSPEC_LTREF);

          SET_SRC (*x) = plus_constant (Pmode, addr, off);
          return;
        }
    }

  /* Annotate LTREL_BASE as well.  */
  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      rtx base = cfun->machine->base_reg;
      *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
                           UNSPEC_LTREL_BASE);
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          annotate_constant_pool_refs (&XEXP (*x, i));
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            annotate_constant_pool_refs (&XVECEXP (*x, i, j));
        }
    }
}

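/* Example of the annotation performed by annotate_constant_pool_refs:

     (mem (symbol_ref ".LC0"))

   becomes

     (mem (unspec [(symbol_ref ".LC0") (reg base)] UNSPEC_LTREF))

   making the dependence on the literal pool base register explicit.  */
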
/* Split all branches that exceed the maximum distance.
   Returns true if this created a new literal pool entry.  */

static int
s390_split_branches (void)
{
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  int new_literal = 0, ret;
  rtx insn, pat, tmp, target;
  rtx *label;

  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Find all branches that exceed 64KB, and split them.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != JUMP_INSN)
        continue;

      pat = PATTERN (insn);
      if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
        pat = XVECEXP (pat, 0, 0);
      if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
        continue;

      if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
        {
          label = &SET_SRC (pat);
        }
      else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
        {
          if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 1);
          else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
            label = &XEXP (SET_SRC (pat), 2);
          else
            continue;
        }
      else
        continue;

      if (get_attr_length (insn) <= 4)
        continue;

      /* We are going to use the return register as scratch register,
         make sure it will be saved/restored by the prologue/epilogue.  */
      cfun_frame_layout.save_return_addr_p = 1;

      if (!flag_pic)
        {
          new_literal = 1;
          tmp = force_const_mem (Pmode, *label);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
          INSN_ADDRESSES_NEW (tmp, -1);
          annotate_constant_pool_refs (&PATTERN (tmp));

          target = temp_reg;
        }
      else
        {
          new_literal = 1;
          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
                                   UNSPEC_LTREL_OFFSET);
          target = gen_rtx_CONST (Pmode, target);
          target = force_const_mem (Pmode, target);
          tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
          INSN_ADDRESSES_NEW (tmp, -1);
          annotate_constant_pool_refs (&PATTERN (tmp));

          target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
                                                     cfun->machine->base_reg),
                                   UNSPEC_LTREL_BASE);
          target = gen_rtx_PLUS (Pmode, temp_reg, target);
        }

      ret = validate_change (insn, label, target, 0);
      gcc_assert (ret);
    }

  return new_literal;
}

/* Find an annotated literal pool symbol referenced in RTX X,
   and store it at REF.  Will abort if X contains references to
   more than one such pool symbol; multiple references to the same
   symbol are allowed, however.

   The rtx pointed to by REF must be initialized to NULL_RTX
   by the caller before calling this routine.  */

static void
find_constant_pool_ref (rtx x, rtx *ref)
{
  int i, j;
  const char *fmt;

  /* Ignore LTREL_BASE references.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return;
  /* Likewise POOL_ENTRY insns.  */
  if (GET_CODE (x) == UNSPEC_VOLATILE
      && XINT (x, 1) == UNSPECV_POOL_ENTRY)
    return;

  gcc_assert (GET_CODE (x) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (x));

  if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
    {
      rtx sym = XVECEXP (x, 0, 0);
      gcc_assert (GET_CODE (sym) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (sym));

      if (*ref == NULL_RTX)
        *ref = sym;
      else
        gcc_assert (*ref == sym);

      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          find_constant_pool_ref (XEXP (x, i), ref);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            find_constant_pool_ref (XVECEXP (x, i, j), ref);
        }
    }
}

/* Replace every reference to the annotated literal pool
   symbol REF in X by its base plus OFFSET.  */

static void
replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
{
  int i, j;
  const char *fmt;

  gcc_assert (*x != ref);

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREF
      && XVECEXP (*x, 0, 0) == ref)
    {
      *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
      return;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 1)) == CONST_INT
      && GET_CODE (XEXP (*x, 0)) == UNSPEC
      && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
      && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
    {
      rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
      *x = plus_constant (Pmode, addr, INTVAL (XEXP (*x, 1)));
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
        }
    }
}

/* Check whether X contains an UNSPEC_LTREL_BASE.
   Return its constant pool symbol if found, NULL_RTX otherwise.  */

static rtx
find_ltrel_base (rtx x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNSPEC_LTREL_BASE)
    return XVECEXP (x, 0, 0);

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx fnd = find_ltrel_base (XEXP (x, i));
          if (fnd)
            return fnd;
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
              if (fnd)
                return fnd;
            }
        }
    }

  return NULL_RTX;
}

/* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base.  */

static void
replace_ltrel_base (rtx *x)
{
  int i, j;
  const char *fmt;

  if (GET_CODE (*x) == UNSPEC
      && XINT (*x, 1) == UNSPEC_LTREL_BASE)
    {
      *x = XVECEXP (*x, 0, 1);
      return;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (*x));
  for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          replace_ltrel_base (&XEXP (*x, i));
        }
      else if (fmt[i] == 'E')
        {
          for (j = 0; j < XVECLEN (*x, i); j++)
            replace_ltrel_base (&XVECEXP (*x, i, j));
        }
    }
}

/* We keep a list of constants which we have to add to internal
   constant tables in the middle of large functions.  */

#define NR_C_MODES 11
enum machine_mode constant_modes[NR_C_MODES] =
{
  TFmode, TImode, TDmode,
  DFmode, DImode, DDmode,
  SFmode, SImode, SDmode,
  HImode,
  QImode
};

struct constant
{
  struct constant *next;
  rtx value;
  rtx label;
};

struct constant_pool
{
  struct constant_pool *next;
  rtx first_insn;
  rtx pool_insn;
  bitmap insns;
  rtx emit_pool_after;

  struct constant *constants[NR_C_MODES];
  struct constant *execute;
  rtx label;
  int size;
};

6116 static struct constant_pool
*
6117 s390_alloc_pool (void)
6119 struct constant_pool
*pool
;
6122 pool
= (struct constant_pool
*) xmalloc (sizeof *pool
);
6124 for (i
= 0; i
< NR_C_MODES
; i
++)
6125 pool
->constants
[i
] = NULL
;
6127 pool
->execute
= NULL
;
6128 pool
->label
= gen_label_rtx ();
6129 pool
->first_insn
= NULL_RTX
;
6130 pool
->pool_insn
= NULL_RTX
;
6131 pool
->insns
= BITMAP_ALLOC (NULL
);
6133 pool
->emit_pool_after
= NULL_RTX
;
/* Create new constant pool covering instructions starting at INSN
   and chain it to the end of POOL_LIST.  */

static struct constant_pool *
s390_start_pool (struct constant_pool **pool_list, rtx insn)
{
  struct constant_pool *pool, **prev;

  pool = s390_alloc_pool ();
  pool->first_insn = insn;

  for (prev = pool_list; *prev; prev = &(*prev)->next)
    ;
  *prev = pool;

  return pool;
}

/* End range of instructions covered by POOL at INSN and emit
   placeholder insn representing the pool.  */

static void
s390_end_pool (struct constant_pool *pool, rtx insn)
{
  rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);

  if (!insn)
    insn = get_last_insn ();

  pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
  INSN_ADDRESSES_NEW (pool->pool_insn, -1);
}

/* Add INSN to the list of insns covered by POOL.  */

static void
s390_add_pool_insn (struct constant_pool *pool, rtx insn)
{
  bitmap_set_bit (pool->insns, INSN_UID (insn));
}

/* Return pool out of POOL_LIST that covers INSN.  */

static struct constant_pool *
s390_find_pool (struct constant_pool *pool_list, rtx insn)
{
  struct constant_pool *pool;

  for (pool = pool_list; pool; pool = pool->next)
    if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
      break;

  return pool;
}

/* Add constant VAL of mode MODE to the constant pool POOL.  */

static void
s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = val;
      c->label = gen_label_rtx ();
      c->next = pool->constants[i];
      pool->constants[i] = c;
      pool->size += GET_MODE_SIZE (mode);
    }
}

/* Return an rtx that represents the offset of X from the start of
   pool POOL.  */

static rtx
s390_pool_offset (struct constant_pool *pool, rtx x)
{
  rtx label;

  label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
  x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
                      UNSPEC_POOL_OFFSET);
  return gen_rtx_CONST (GET_MODE (x), x);
}

/* Find constant VAL of mode MODE in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the new constant.  */

static rtx
s390_find_constant (struct constant_pool *pool, rtx val,
                    enum machine_mode mode)
{
  struct constant *c;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    if (constant_modes[i] == mode)
      break;
  gcc_assert (i != NR_C_MODES);

  for (c = pool->constants[i]; c != NULL; c = c->next)
    if (rtx_equal_p (val, c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}

/* Check whether INSN is an execute.  Return the label_ref to its
   execute target template if so, NULL_RTX otherwise.  */

static rtx
s390_execute_label (rtx insn)
{
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == PARALLEL
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
      && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
    return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);

  return NULL_RTX;
}

/* Add execute target for INSN to the constant pool POOL.  */

static void
s390_add_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  if (c == NULL)
    {
      c = (struct constant *) xmalloc (sizeof *c);
      c->value = insn;
      c->label = gen_label_rtx ();
      c->next = pool->execute;
      pool->execute = c;
      pool->size += 6;
    }
}

/* Find execute target for INSN in the constant pool POOL.
   Return an RTX describing the distance from the start of
   the pool to the location of the execute target.  */

static rtx
s390_find_execute (struct constant_pool *pool, rtx insn)
{
  struct constant *c;

  for (c = pool->execute; c != NULL; c = c->next)
    if (INSN_UID (insn) == INSN_UID (c->value))
      break;

  gcc_assert (c);

  return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
}

/* For an execute INSN, extract the execute target template.  */

static rtx
s390_execute_target (rtx insn)
{
  rtx pattern = PATTERN (insn);
  gcc_assert (s390_execute_label (insn));

  if (XVECLEN (pattern, 0) == 2)
    {
      pattern = copy_rtx (XVECEXP (pattern, 0, 1));
    }
  else
    {
      rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
      int i;

      for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
        RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));

      pattern = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  return pattern;
}

/* Indicate that INSN cannot be duplicated.  This is the case for
   execute insns that carry a unique label.  */

static bool
s390_cannot_copy_insn_p (rtx insn)
{
  rtx label = s390_execute_label (insn);
  return label && label != const0_rtx;
}

/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  */

static void
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn = pool->pool_insn;
  int i;

  /* Switch to rodata section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_start (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Ensure minimum pool alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
  else
    insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Emit pool base label.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
        /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references.  */
        rtx value = copy_rtx (c->value);
        if (GET_CODE (value) == CONST
            && GET_CODE (XEXP (value, 0)) == UNSPEC
            && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
            && XVECLEN (XEXP (value, 0), 0) == 1)
          value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));

        insn = emit_label_after (c->label, insn);
        INSN_ADDRESSES_NEW (insn, -1);

        value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
                                         gen_rtvec (1, value),
                                         UNSPECV_POOL_ENTRY);
        insn = emit_insn_after (value, insn);
        INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Ensure minimum alignment for instructions.  */
  insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Output in-pool execute template insns.  */
  for (c = pool->execute; c; c = c->next)
    {
      insn = emit_label_after (c->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = emit_insn_after (s390_execute_target (c->value), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Switch back to previous section.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = emit_insn_after (gen_pool_section_end (), insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn.  */
  remove_insn (pool->pool_insn);
}

/* Free all memory used by POOL.  */

static void
s390_free_pool (struct constant_pool *pool)
{
  struct constant *c, *next;
  int i;

  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = next)
      {
        next = c->next;
        free (c);
      }

  for (c = pool->execute; c; c = next)
    {
      next = c->next;
      free (c);
    }

  BITMAP_FREE (pool->insns);
  free (pool);
}

/* Collect main literal pool.  Return NULL on overflow.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
          && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
        {
          gcc_assert (!pool->pool_insn);
          pool->pool_insn = insn;
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          s390_add_execute (pool, insn);
        }
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);
              s390_add_constant (pool, constant, mode);
            }
        }

      /* If hot/cold partitioning is enabled we have to make sure that
         the literal pool is emitted in the same section where the
         initialization of the literal pool base pointer takes place.
         emit_pool_after is only used in the non-overflow case on non
         Z cpus where we can emit the literal pool at the end of the
         function body within the text section.  */
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
          && !pool->emit_pool_after)
        pool->emit_pool_after = PREV_INSN (insn);
    }

  gcc_assert (pool->pool_insn || pool->size == 0);

  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
         pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  /* If the function ends with the section where the literal pool
     should be emitted set the marker to its end.  */
  if (pool && !pool->emit_pool_after)
    pool->emit_pool_after = get_last_insn ();

  return pool;
}

/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  */

static void
s390_mainpool_finish (struct constant_pool *pool)
{
  rtx base_reg = cfun->machine->base_reg;
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      /* We don't actually need a base register after all.  */
      cfun->machine->base_reg = NULL_RTX;

      if (pool->pool_insn)
        remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
           + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      /* emit_pool_after will be set by s390_mainpool_start to the
         last insn of the section where the literal pool should be
         emitted.  */
      insn = pool->emit_pool_after;

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_jump_insn_after (insn, pool->pool_insn);
      JUMP_LABEL (insn) = pool_end;
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }

  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        replace_ltrel_base (&PATTERN (insn));

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              if (s390_execute_label (insn))
                addr = s390_find_execute (pool, insn);
              else
                addr = s390_find_constant (pool, get_pool_constant (pool_ref),
                                           get_pool_mode (pool_ref));

              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              INSN_CODE (insn) = -1;
            }
        }
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}

/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}

/* Chunkify the literal pool.  */

#define S390_POOL_CHUNK_MIN 0xc00
#define S390_POOL_CHUNK_MAX 0xe00

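/* Both limits lie safely below the 4096-byte range addressable via a
   12-bit displacement, leaving headroom for alignment padding and for
   constants that are added after a chunk has passed the size check.  */
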
static struct constant_pool *
s390_chunkify_start (void)
{
  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx pending_ltrel = NULL_RTX;
  rtx insn;

  rtx (*gen_reload_base) (rtx, rtx) =
    TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;


  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      bool section_switch_p = false;

      /* Check for pending LTREL_BASE.  */
      if (INSN_P (insn))
        {
          rtx ltrel_base = find_ltrel_base (PATTERN (insn));
          if (ltrel_base)
            {
              gcc_assert (ltrel_base == pending_ltrel);
              pending_ltrel = NULL_RTX;
            }
        }

      if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
        {
          if (!curr_pool)
            curr_pool = s390_start_pool (&pool_list, insn);

          s390_add_execute (curr_pool, insn);
          s390_add_pool_insn (curr_pool, insn);
        }
      else if (GET_CODE (insn) == INSN || CALL_P (insn))
        {
          rtx pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              rtx constant = get_pool_constant (pool_ref);
              enum machine_mode mode = get_pool_mode (pool_ref);

              if (!curr_pool)
                curr_pool = s390_start_pool (&pool_list, insn);

              s390_add_constant (curr_pool, constant, mode);
              s390_add_pool_insn (curr_pool, insn);

              /* Don't split the pool chunk between a LTREL_OFFSET load
                 and the corresponding LTREL_BASE.  */
              if (GET_CODE (constant) == CONST
                  && GET_CODE (XEXP (constant, 0)) == UNSPEC
                  && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
                {
                  gcc_assert (!pending_ltrel);
                  pending_ltrel = pool_ref;
                }
            }
        }

      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
        {
          if (curr_pool)
            s390_add_pool_insn (curr_pool, insn);
          /* An LTREL_BASE must follow within the same basic block.  */
          gcc_assert (!pending_ltrel);
        }

      if (NOTE_P (insn))
        switch (NOTE_KIND (insn))
          {
          case NOTE_INSN_SWITCH_TEXT_SECTIONS:
            section_switch_p = true;
            break;
          case NOTE_INSN_VAR_LOCATION:
          case NOTE_INSN_CALL_ARG_LOCATION:
            continue;
          default:
            break;
          }

      if (!curr_pool
          || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
          || INSN_ADDRESSES (INSN_UID (insn)) == -1)
        continue;

      if (TARGET_CPU_ZARCH)
        {
          if (curr_pool->size < S390_POOL_CHUNK_MAX)
            continue;

          s390_end_pool (curr_pool, NULL_RTX);
          curr_pool = NULL;
        }
      else
        {
          int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
                           - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
                           + extra_size;

          /* We will later have to insert base register reload insns.
             Those will have an effect on code size, which we need to
             consider here.  This calculation makes rather pessimistic
             worst-case assumptions.  */
          if (GET_CODE (insn) == CODE_LABEL)
            extra_size += 6;

          if (chunk_size < S390_POOL_CHUNK_MIN
              && curr_pool->size < S390_POOL_CHUNK_MIN
              && !section_switch_p)
            continue;

          /* Pool chunks can only be inserted after BARRIERs ...  */
          if (GET_CODE (insn) == BARRIER)
            {
              s390_end_pool (curr_pool, insn);
              curr_pool = NULL;
              extra_size = 0;
            }

          /* ... so if we don't find one in time, create one.  */
          else if (chunk_size > S390_POOL_CHUNK_MAX
                   || curr_pool->size > S390_POOL_CHUNK_MAX
                   || section_switch_p)
            {
              rtx label, jump, barrier, next, prev;

              if (!section_switch_p)
                {
                  /* We can insert the barrier only after a 'real' insn.  */
                  if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
                    continue;
                  if (get_attr_length (insn) == 0)
                    continue;
                  /* Don't separate LTREL_BASE from the corresponding
                     LTREL_OFFSET load.  */
                  if (pending_ltrel)
                    continue;

                  next = NEXT_INSN (insn);
                  while (next
                         && NOTE_P (next)
                         && (NOTE_KIND (next) == NOTE_INSN_VAR_LOCATION
                             || NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION))
                    {
                      insn = next;
                      next = NEXT_INSN (insn);
                    }
                }
              else
                {
                  gcc_assert (!pending_ltrel);

                  /* The old pool has to end before the section switch
                     note in order to make it part of the current
                     section.  */
                  insn = PREV_INSN (insn);
                }

              label = gen_label_rtx ();
              prev = insn;
              if (prev && NOTE_P (prev))
                prev = prev_nonnote_insn (prev);
              if (prev)
                jump = emit_jump_insn_after_setloc (gen_jump (label), insn,
                                                    INSN_LOCATOR (prev));
              else
                jump = emit_jump_insn_after_noloc (gen_jump (label), insn);
              barrier = emit_barrier_after (jump);
              insn = emit_label_after (label, barrier);
              JUMP_LABEL (jump) = label;
              LABEL_NUSES (label) = 1;

              INSN_ADDRESSES_NEW (jump, -1);
              INSN_ADDRESSES_NEW (barrier, -1);
              INSN_ADDRESSES_NEW (insn, -1);

              s390_end_pool (curr_pool, barrier);
              curr_pool = NULL;
              extra_size = 0;
            }
        }
    }

  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);
  gcc_assert (!pending_ltrel);

  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_ALLOC (NULL);

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
         of non-local jumps, so we have to mark them.
         The same holds for named labels.

         Don't do that, however, if it is the label before
         a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
          && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
        {
          rtx vec_insn = next_real_insn (insn);
          rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                        PATTERN (vec_insn) : NULL_RTX;
          if (!vec_pat
              || !(GET_CODE (vec_pat) == ADDR_VEC
                   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
            bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
        }

      /* If we have a direct jump (conditional or unconditional)
         or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
        {
          rtx pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
            pat = XVECEXP (pat, 0, 0);

          if (GET_CODE (pat) == SET)
            {
              rtx label = JUMP_LABEL (insn);
              if (label)
                {
                  if (s390_find_pool (pool_list, label)
                      != s390_find_pool (pool_list, insn))
                    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                }
            }
          else if (GET_CODE (pat) == PARALLEL
                   && XVECLEN (pat, 0) == 2
                   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
                   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
                   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
            {
              /* Find the jump table used by this casesi jump.  */
              rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
              rtx vec_insn = next_real_insn (vec_label);
              rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
                            PATTERN (vec_insn) : NULL_RTX;
              if (vec_pat
                  && (GET_CODE (vec_pat) == ADDR_VEC
                      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
                {
                  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

                  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
                    {
                      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

                      if (s390_find_pool (pool_list, label)
                          != s390_find_pool (pool_list, insn))
                        bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
                    }
                }
            }
        }
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                      curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
        struct constant_pool *pool = s390_find_pool (pool_list, insn);
        if (pool)
          {
            rtx new_insn = gen_reload_base (cfun->machine->base_reg,
                                            pool->label);
            INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
          }
      }


  BITMAP_FREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}

/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   After we have decided to use this list, finish implementing
   all changes to the current function as required.  */

static void
s390_chunkify_finish (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Replace all literal pool references.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	replace_ltrel_base (&PATTERN (insn));

      curr_pool = s390_find_pool (pool_list, insn);
      if (!curr_pool)
	continue;

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx addr, pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      if (s390_execute_label (insn))
		addr = s390_find_execute (curr_pool, insn);
	      else
		addr = s390_find_constant (curr_pool,
					   get_pool_constant (pool_ref),
					   get_pool_mode (pool_ref));

	      replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
	      INSN_CODE (insn) = -1;
	    }
	}
    }

  /* Dump out all literal pools.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    s390_dump_pool (curr_pool, 0);

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
	  && barrier && GET_CODE (barrier) == BARRIER
	  && label && GET_CODE (label) == CODE_LABEL
	  && GET_CODE (PATTERN (jump)) == SET
	  && SET_DEST (PATTERN (jump)) == pc_rtx
	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
	  && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
	{
	  remove_insn (jump);
	  remove_insn (barrier);
	  remove_insn (label);
	}

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns.  */

  for (insn = get_insns (); insn; )
    {
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
	remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
/* Output the constant pool entry EXP in mode MODE with alignment ALIGN.  */

void
s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
{
  REAL_VALUE_TYPE r;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_FLOAT:
    case MODE_DECIMAL_FLOAT:
      gcc_assert (GET_CODE (exp) == CONST_DOUBLE);

      REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
      assemble_real (r, mode, align);
      break;

    case MODE_INT:
      assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
      mark_symbol_refs_as_used (exp);
      break;

    default:
      gcc_unreachable ();
    }
}
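/* Illustration (an addition, not part of the original source): for a
   DFmode pool entry holding 1.0 and an SImode entry referencing a
   symbol, the routine above would direct the assembler to emit data
   along the lines of

       .long   0x3ff00000, 0x00000000   # DFmode constant 1.0
       .long   some_symbol              # SImode address constant

   The exact directives depend on the configured assembler dialect;
   this is only a sketch of the intended output.  */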
/* Return an RTL expression representing the value of the return address
   for the frame COUNT steps up from the current frame.  FRAME is the
   frame pointer of that frame.  */

rtx
s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
{
  int offset;
  rtx addr;

  /* Without backchain, we fail for all but the current frame.  */

  if (!TARGET_BACKCHAIN && count > 0)
    return NULL_RTX;

  /* For the current frame, we need to make sure the initial
     value of RETURN_REGNUM is actually saved.  */

  if (count == 0)
    {
      /* On non-z architectures branch splitting could overwrite r14.  */
      if (TARGET_CPU_ZARCH)
	return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
      else
	{
	  cfun_frame_layout.save_return_addr_p = true;
	  return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
	}
    }

  if (TARGET_PACKED_STACK)
    offset = -2 * UNITS_PER_LONG;
  else
    offset = RETURN_REGNUM * UNITS_PER_LONG;

  addr = plus_constant (Pmode, frame, offset);
  addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}
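/* Usage sketch (an addition, not part of the original source): this
   routine backs __builtin_return_address.  Frames above the current
   one are only reachable when a back chain is maintained, e.g. when
   compiling with -mbackchain:

       void *own_ra     = __builtin_return_address (0);  // always works
       void *callers_ra = __builtin_return_address (1);  // needs backchain
*/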
/* Return an RTL expression representing the back chain stored in
   the current stack frame.  */

rtx
s390_back_chain_rtx (void)
{
  rtx chain;

  gcc_assert (TARGET_BACKCHAIN);

  if (TARGET_PACKED_STACK)
    chain = plus_constant (Pmode, stack_pointer_rtx,
			   STACK_POINTER_OFFSET - UNITS_PER_LONG);
  else
    chain = stack_pointer_rtx;

  chain = gen_rtx_MEM (Pmode, chain);
  return chain;
}
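/* Illustration only (an addition, not part of the original source):
   with the standard -mbackchain layout the back chain word sits at
   offset 0 of each frame, so user code could walk the chain roughly
   as sketched below.  The helper name is hypothetical.  */
#if 0
static void *
walk_back_chain (void *sp, int count)
{
  /* Follow COUNT back-chain links.  Assumes -mbackchain and the
     non-packed stack layout where the chain word is at offset 0.  */
  while (count-- > 0 && sp)
    sp = *(void **) sp;
  return sp;
}
#endif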
/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */

static int
find_unused_clobbered_reg (void)
{
  int i;
  for (i = 0; i < 6; i++)
    if (!df_regs_ever_live_p (i))
      return i;
  return 0;
}
/* Helper function for s390_regs_ever_clobbered.  Sets the fields in DATA
   for all clobbered hard regs in SETREG.  */

static void
s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED,
			void *data)
{
  int *regs_ever_clobbered = (int *)data;
  unsigned int i, regno;
  enum machine_mode mode = GET_MODE (setreg);

  if (GET_CODE (setreg) == SUBREG)
    {
      rtx inner = SUBREG_REG (setreg);
      if (!GENERAL_REG_P (inner))
	return;
      regno = subreg_regno (setreg);
    }
  else if (GENERAL_REG_P (setreg))
    regno = REGNO (setreg);
  else
    return;

  for (i = regno;
       i < regno + HARD_REGNO_NREGS (regno, mode);
       i++)
    regs_ever_clobbered[i] = 1;
}
/* Walks through all basic blocks of the current function looking
   for clobbered hard regs using s390_reg_clobbered_rtx.  The fields
   of the passed integer array REGS_EVER_CLOBBERED are set to one for
   each of those regs.  */

static void
s390_regs_ever_clobbered (int *regs_ever_clobbered)
{
  basic_block cur_bb;
  rtx cur_insn;
  unsigned int i;

  memset (regs_ever_clobbered, 0, 16 * sizeof (int));

  /* For non-leaf functions we have to consider all call clobbered regs to be
     clobbered.  */
  if (!crtl->is_leaf)
    {
      for (i = 0; i < 16; i++)
	regs_ever_clobbered[i] = call_really_used_regs[i];
    }

  /* Make the "magic" eh_return registers live if necessary.  For regs_ever_live
     this work is done by liveness analysis (mark_regs_live_at_end).
     Special care is needed for functions containing landing pads.  Landing pads
     may use the eh registers, but the code which sets these registers is not
     contained in that function.  Hence s390_regs_ever_clobbered is not able to
     deal with this automatically.  */
  if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
    for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
      if (crtl->calls_eh_return
	  || (cfun->machine->has_landing_pad_p
	      && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
	regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;

  /* For nonlocal gotos all call-saved registers have to be saved.
     This flag is also set for the unwinding code in libgcc.
     See expand_builtin_unwind_init.  For regs_ever_live this is done by
     reload.  */
  if (cfun->has_nonlocal_label)
    for (i = 0; i < 16; i++)
      if (!call_really_used_regs[i])
	regs_ever_clobbered[i] = 1;

  FOR_EACH_BB (cur_bb)
    {
      FOR_BB_INSNS (cur_bb, cur_insn)
	{
	  if (INSN_P (cur_insn))
	    note_stores (PATTERN (cur_insn),
			 s390_reg_clobbered_rtx,
			 regs_ever_clobbered);
	}
    }
}
/* Determine the frame area which actually has to be accessed
   in the function epilogue.  The values are stored at the
   given pointers AREA_BOTTOM (address of the lowest used stack
   address) and AREA_TOP (address of the first item which does
   not belong to the stack frame).  */

static void
s390_frame_area (int *area_bottom, int *area_top)
{
  int b, t;
  int i;

  b = INT_MAX;
  t = INT_MIN;

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      b = (cfun_frame_layout.gprs_offset
	   + cfun_frame_layout.first_restore_gpr * UNITS_PER_LONG);
      t = b + (cfun_frame_layout.last_restore_gpr
	       - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_LONG;
    }

  if (TARGET_64BIT && cfun_save_high_fprs_p)
    {
      b = MIN (b, cfun_frame_layout.f8_offset);
      t = MAX (t, (cfun_frame_layout.f8_offset
		   + cfun_frame_layout.high_fprs * 8));
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (cfun_fpr_bit_p (i))
	{
	  b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
	  t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
	}

  *area_bottom = b;
  *area_top = t;
}
/* Fill cfun->machine with info about register usage of current function.
   Return in CLOBBERED_REGS which GPRs are currently considered set.  */

static void
s390_register_info (int clobbered_regs[])
{
  int i, j;

  /* fprs 8 - 15 are call saved for 64 Bit ABI.  */
  cfun_frame_layout.fpr_bitmap = 0;
  cfun_frame_layout.high_fprs = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (df_regs_ever_live_p (i) && !global_regs[i])
	{
	  cfun_set_fpr_bit (i - 16);
	  cfun_frame_layout.high_fprs++;
	}

  /* Find first and last gpr to be saved.  We trust regs_ever_live
     data, except that we don't save and restore global registers.

     Also, all registers with special meaning to the compiler need
     to be handled extra.  */

  s390_regs_ever_clobbered (clobbered_regs);

  for (i = 0; i < 16; i++)
    clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];

  if (frame_pointer_needed)
    clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;

  if (flag_pic)
    clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
      |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);

  clobbered_regs[BASE_REGNUM]
    |= (cfun->machine->base_reg
	&& REGNO (cfun->machine->base_reg) == BASE_REGNUM);

  clobbered_regs[RETURN_REGNUM]
    |= (!crtl->is_leaf
	|| TARGET_TPF_PROFILING
	|| cfun->machine->split_branches_pending_p
	|| cfun_frame_layout.save_return_addr_p
	|| crtl->calls_eh_return
	|| cfun->stdarg);

  clobbered_regs[STACK_POINTER_REGNUM]
    |= (!crtl->is_leaf
	|| TARGET_TPF_PROFILING
	|| cfun_save_high_fprs_p
	|| get_frame_size () > 0
	|| cfun->calls_alloca
	|| cfun->stdarg);

  for (i = 6; i < 16; i++)
    if (df_regs_ever_live_p (i) || clobbered_regs[i])
      break;
  for (j = 15; j > i; j--)
    if (df_regs_ever_live_p (j) || clobbered_regs[j])
      break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      cfun_frame_layout.first_save_gpr_slot = -1;
      cfun_frame_layout.last_save_gpr_slot = -1;
      cfun_frame_layout.first_save_gpr = -1;
      cfun_frame_layout.first_restore_gpr = -1;
      cfun_frame_layout.last_save_gpr = -1;
      cfun_frame_layout.last_restore_gpr = -1;
    }
  else
    {
      /* Save slots for gprs from i to j.  */
      cfun_frame_layout.first_save_gpr_slot = i;
      cfun_frame_layout.last_save_gpr_slot = j;

      for (i = cfun_frame_layout.first_save_gpr_slot;
	   i < cfun_frame_layout.last_save_gpr_slot + 1;
	   i++)
	if (clobbered_regs[i])
	  break;

      for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
	if (clobbered_regs[j])
	  break;

      if (i == cfun_frame_layout.last_save_gpr_slot + 1)
	{
	  /* Nothing to save/restore.  */
	  cfun_frame_layout.first_save_gpr = -1;
	  cfun_frame_layout.first_restore_gpr = -1;
	  cfun_frame_layout.last_save_gpr = -1;
	  cfun_frame_layout.last_restore_gpr = -1;
	}
      else
	{
	  /* Save / Restore from gpr i to j.  */
	  cfun_frame_layout.first_save_gpr = i;
	  cfun_frame_layout.first_restore_gpr = i;
	  cfun_frame_layout.last_save_gpr = j;
	  cfun_frame_layout.last_restore_gpr = j;
	}
    }

  if (cfun->stdarg)
    {
      /* Varargs functions need to save gprs 2 to 6.  */
      if (cfun->va_list_gpr_size
	  && crtl->args.info.gprs < GP_ARG_NUM_REG)
	{
	  int min_gpr = crtl->args.info.gprs;
	  int max_gpr = min_gpr + cfun->va_list_gpr_size;
	  if (max_gpr > GP_ARG_NUM_REG)
	    max_gpr = GP_ARG_NUM_REG;

	  if (cfun_frame_layout.first_save_gpr == -1
	      || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
	    {
	      cfun_frame_layout.first_save_gpr = 2 + min_gpr;
	      cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
	    }

	  if (cfun_frame_layout.last_save_gpr == -1
	      || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
	    {
	      cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
	      cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
	    }
	}

      /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved.  */
      if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
	  && crtl->args.info.fprs < FP_ARG_NUM_REG)
	{
	  int min_fpr = crtl->args.info.fprs;
	  int max_fpr = min_fpr + cfun->va_list_fpr_size;
	  if (max_fpr > FP_ARG_NUM_REG)
	    max_fpr = FP_ARG_NUM_REG;

	  /* ??? This is currently required to ensure proper location
	     of the fpr save slots within the va_list save area.  */
	  if (TARGET_PACKED_STACK)
	    min_fpr = 0;

	  for (i = min_fpr; i < max_fpr; i++)
	    cfun_set_fpr_bit (i);
	}
    }

  if (!TARGET_64BIT)
    for (i = 2; i < 4; i++)
      if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
	cfun_set_fpr_bit (i);
}
/* Fill cfun->machine with info about frame of current function.  */

static void
s390_frame_info (void)
{
  int i;

  cfun_frame_layout.frame_size = get_frame_size ();
  if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
    fatal_error ("total size of local variables exceeds architecture limit");

  if (!TARGET_PACKED_STACK)
    {
      cfun_frame_layout.backchain_offset = 0;
      cfun_frame_layout.f0_offset = 16 * UNITS_PER_LONG;
      cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
      cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
      cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
				       * UNITS_PER_LONG);
    }
  else if (TARGET_BACKCHAIN) /* kernel stack layout */
    {
      cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
					    - UNITS_PER_LONG);
      cfun_frame_layout.gprs_offset
	= (cfun_frame_layout.backchain_offset
	   - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
	   * UNITS_PER_LONG);

      if (TARGET_64BIT)
	{
	  cfun_frame_layout.f4_offset
	    = (cfun_frame_layout.gprs_offset
	       - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

	  cfun_frame_layout.f0_offset
	    = (cfun_frame_layout.f4_offset
	       - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
	}
      else
	{
	  /* On 31 bit we have to care about alignment of the
	     floating point regs to provide fastest access.  */
	  cfun_frame_layout.f0_offset
	    = ((cfun_frame_layout.gprs_offset
		& ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
	       - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

	  cfun_frame_layout.f4_offset
	    = (cfun_frame_layout.f0_offset
	       - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
	}
    }
  else /* no backchain */
    {
      cfun_frame_layout.f4_offset
	= (STACK_POINTER_OFFSET
	   - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));

      cfun_frame_layout.f0_offset
	= (cfun_frame_layout.f4_offset
	   - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));

      cfun_frame_layout.gprs_offset
	= cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
    }

  if (crtl->is_leaf
      && !TARGET_TPF_PROFILING
      && cfun_frame_layout.frame_size == 0
      && !cfun_save_high_fprs_p
      && !cfun->calls_alloca
      && !cfun->stdarg)
    return;

  if (!TARGET_PACKED_STACK)
    cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
				     + crtl->outgoing_args_size
				     + cfun_frame_layout.high_fprs * 8);
  else
    {
      if (TARGET_BACKCHAIN)
	cfun_frame_layout.frame_size += UNITS_PER_LONG;

      /* No alignment trouble here because f8-f15 are only saved under
	 64 bit.  */
      cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
					       cfun_frame_layout.f4_offset),
					  cfun_frame_layout.gprs_offset)
				     - cfun_frame_layout.high_fprs * 8);

      cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;

      for (i = 0; i < 8; i++)
	if (cfun_fpr_bit_p (i))
	  cfun_frame_layout.frame_size += 8;

      cfun_frame_layout.frame_size += cfun_gprs_save_area_size;

      /* If under 31 bit an odd number of gprs has to be saved we have to
	 adjust the frame size to sustain 8 byte alignment of stack
	 frames.  */
      cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
				       STACK_BOUNDARY / BITS_PER_UNIT - 1)
				      & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));

      cfun_frame_layout.frame_size += crtl->outgoing_args_size;
    }
}
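/* Worked example (an addition for illustration; values assume the
   default, non-packed layout and may differ with other options): on
   31 bit, UNITS_PER_LONG is 4 and STACK_POINTER_OFFSET is 96, so the
   code above yields

       backchain_offset = 0
       f0_offset        = 16 * 4     = 64
       f4_offset        = 64 + 2 * 8 = 80
       gprs_offset      = first_save_gpr_slot * 4

   and a non-leaf function with no locals, no outgoing arguments and no
   high fprs ends up with frame_size = 96.  On 64 bit the same formulas
   use UNITS_PER_LONG = 8 and STACK_POINTER_OFFSET = 160.  */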
/* Generate frame layout.  Fills in register and frame data for the current
   function in cfun->machine.  This routine can be called multiple times;
   it will re-do the complete frame layout every time.  */

static void
s390_init_frame_layout (void)
{
  HOST_WIDE_INT frame_size;
  int base_used;
  int clobbered_regs[16];

  /* On S/390 machines, we may need to perform branch splitting, which
     will require both base and return address register.  We have no
     choice but to assume we're going to need them until right at the
     end of the machine dependent reorg phase.  */
  if (!TARGET_CPU_ZARCH)
    cfun->machine->split_branches_pending_p = true;

  do
    {
      frame_size = cfun_frame_layout.frame_size;

      /* Try to predict whether we'll need the base register.  */
      base_used = cfun->machine->split_branches_pending_p
		  || crtl->uses_const_pool
		  || (!DISP_IN_RANGE (frame_size)
		      && !CONST_OK_FOR_K (frame_size));

      /* Decide which register to use as literal pool base.  In small
	 leaf functions, try to use an unused call-clobbered register
	 as base register to avoid save/restore overhead.  */
      if (!base_used)
	cfun->machine->base_reg = NULL_RTX;
      else if (crtl->is_leaf && !df_regs_ever_live_p (5))
	cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
      else
	cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);

      s390_register_info (clobbered_regs);
      s390_frame_info ();
    }
  while (frame_size != cfun_frame_layout.frame_size);
}
/* Update frame layout.  Recompute actual register save data based on
   current info and update regs_ever_live for the special registers.
   May be called multiple times, but may never cause *more* registers
   to be saved than s390_init_frame_layout allocated room for.  */

static void
s390_update_frame_layout (void)
{
  int clobbered_regs[16];

  s390_register_info (clobbered_regs);

  df_set_regs_ever_live (BASE_REGNUM,
			 clobbered_regs[BASE_REGNUM] ? true : false);
  df_set_regs_ever_live (RETURN_REGNUM,
			 clobbered_regs[RETURN_REGNUM] ? true : false);
  df_set_regs_ever_live (STACK_POINTER_REGNUM,
			 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);

  if (cfun->machine->base_reg)
    df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
}
/* Return true if it is legal to put a value with MODE into REGNO.  */

bool
s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  switch (REGNO_REG_CLASS (regno))
    {
    case FP_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (mode == SImode || mode == DImode)
	    return true;

	  if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	    return true;
	}
      break;
    case ADDR_REGS:
      if (FRAME_REGNO_P (regno) && mode == Pmode)
	return true;

      /* fallthrough */
    case GENERAL_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (TARGET_ZARCH
	      || (mode != TFmode && mode != TCmode && mode != TDmode))
	    return true;
	}
      break;
    case CC_REGS:
      if (GET_MODE_CLASS (mode) == MODE_CC)
	return true;
      break;
    case ACCESS_REGS:
      if (REGNO_PAIR_OK (regno, mode))
	{
	  if (mode == SImode || mode == Pmode)
	    return true;
	}
      break;
    default:
      return false;
    }

  return false;
}
/* Return nonzero if register OLD_REG can be renamed to register NEW_REG.  */

bool
s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
{
  /* Once we've decided upon a register to use as base register, it must
     no longer be used for any other purpose.  */
  if (cfun->machine->base_reg)
    if (REGNO (cfun->machine->base_reg) == old_reg
	|| REGNO (cfun->machine->base_reg) == new_reg)
      return false;

  return true;
}
/* Maximum number of registers to represent a value of mode MODE
   in a register of class RCLASS.  */

int
s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
{
  switch (rclass)
    {
    case FP_REGS:
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
      else
	return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
    case ACCESS_REGS:
      return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
    default:
      break;
    }
  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
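/* Examples (an addition for illustration): with the rules above, a
   DFmode value (8 bytes) needs one FP_REGS register, TFmode (16 bytes)
   needs two, and on 31 bit a DImode value in GENERAL_REGS needs
   (8 + 4 - 1) / 4 = 2 registers, i.e. an even/odd register pair.  */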
/* Return true if register FROM can be eliminated via register TO.  */

static bool
s390_can_eliminate (const int from, const int to)
{
  /* On zSeries machines, we have not marked the base register as fixed.
     Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
     If a function requires the base register, we say here that this
     elimination cannot be performed.  This will cause reload to free
     up the base register (as if it were fixed).  On the other hand,
     if the current function does *not* require the base register, we
     say here the elimination succeeds, which in turn allows reload
     to allocate the base register for any other purpose.  */
  if (from == BASE_REGNUM && to == BASE_REGNUM)
    {
      if (TARGET_CPU_ZARCH)
	{
	  s390_init_frame_layout ();
	  return cfun->machine->base_reg == NULL_RTX;
	}

      return false;
    }

  /* Everything else must point into the stack frame.  */
  gcc_assert (to == STACK_POINTER_REGNUM
	      || to == HARD_FRAME_POINTER_REGNUM);

  gcc_assert (from == FRAME_POINTER_REGNUM
	      || from == ARG_POINTER_REGNUM
	      || from == RETURN_ADDRESS_POINTER_REGNUM);

  /* Make sure we actually saved the return address.  */
  if (from == RETURN_ADDRESS_POINTER_REGNUM)
    if (!crtl->calls_eh_return
	&& !cfun->stdarg
	&& !cfun_frame_layout.save_return_addr_p)
      return false;

  return true;
}
/* Return offset between register FROM and TO initially after prolog.  */

HOST_WIDE_INT
s390_initial_elimination_offset (int from, int to)
{
  HOST_WIDE_INT offset;
  int index;

  /* ??? Why are we called for non-eliminable pairs?  */
  if (!s390_can_eliminate (from, to))
    return 0;

  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      offset = (get_frame_size()
		+ STACK_POINTER_OFFSET
		+ crtl->outgoing_args_size);
      break;

    case ARG_POINTER_REGNUM:
      s390_init_frame_layout ();
      offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
      break;

    case RETURN_ADDRESS_POINTER_REGNUM:
      s390_init_frame_layout ();
      index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
      gcc_assert (index >= 0);
      offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
      offset += index * UNITS_PER_LONG;
      break;

    case BASE_REGNUM:
      offset = 0;
      break;

    default:
      gcc_unreachable ();
    }

  return offset;
}
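/* Example (an addition for illustration): eliminating
   ARG_POINTER_REGNUM into the stack pointer in a 64-bit function with
   a 160-byte frame yields offset = frame_size + STACK_POINTER_OFFSET
   = 160 + 160 = 320, so incoming stack arguments are addressed at
   320(%r15) after the prologue.  */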
/* Emit insn to save fpr REGNUM at offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));

  if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
    set_mem_alias_set (addr, get_varargs_alias_set ());
  else
    set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}
/* Emit insn to restore fpr REGNUM from offset OFFSET relative
   to register BASE.  Return generated insn.  */

static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (Pmode, base, offset));
  set_mem_alias_set (addr, get_frame_alias_set ());

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}
/* Return true if REGNO is a global register, but not one
   of the special ones that need to be saved/restored anyway.  */

static inline bool
global_not_special_regno_p (int regno)
{
  return (global_regs[regno]
	  /* These registers are special and need to be
	     restored in any case.  */
	  && !(regno == STACK_POINTER_REGNUM
	       || regno == RETURN_REGNUM
	       || regno == BASE_REGNUM
	       || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
}
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);

  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
	insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      if (!global_not_special_regno_p (first))
	RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }


  insn = gen_store_multiple (addr,
			     gen_rtx_REG (Pmode, first),
			     GEN_INT (last - first + 1));

  if (first <= 6 && cfun->stdarg)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
	rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);

	if (first + i <= 6)
	  set_mem_alias_set (mem, get_varargs_alias_set ());
      }

  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6 && !global_not_special_regno_p (first))
    {
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) == SET
	    && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
								     0, i)))))
	  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      int start;

      for (start = first >= 6 ? first : 6; start <= last; start++)
	if (!global_not_special_regno_p (start))
	  break;

      if (start > last)
	return insn;

      addr = plus_constant (Pmode, base,
			    offset + (start - first) * UNITS_PER_LONG);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
				 gen_rtx_REG (Pmode, start),
				 GEN_INT (last - start + 1));
      note = PATTERN (note);

      add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);

      for (i = 0; i < XVECLEN (note, 0); i++)
	if (GET_CODE (XVECEXP (note, 0, i)) == SET
	    && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
								     0, i)))))
	  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
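/* Illustration (an addition, not part of the original source): for a
   64-bit function saving %r6..%r15 at the default gprs_offset of 48,
   the insn built above assembles to a single store-multiple such as

       stmg    %r6,%r15,48(%r15)

   with DWARF CFI records attached only for the call-saved, non-global
   registers, as arranged by the REG_FRAME_RELATED_EXPR handling.  */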
/* Generate insn to restore registers FIRST to LAST from
   the register save area located at offset OFFSET
   relative to register BASE.  */

static rtx
restore_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn;

  addr = plus_constant (Pmode, base, offset);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, get_frame_alias_set ());

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
      else
	insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);

      return insn;
    }

  insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
			    addr,
			    GEN_INT (last - first + 1));
  return insn;
}
/* Return insn sequence to load the GOT register.  */

static GTY(()) rtx got_symbol;

rtx
s390_load_got (void)
{
  rtx insns;

  /* We cannot use pic_offset_table_rtx here since we use this
     function also for non-pic if __tls_get_offset is called and in
     that case PIC_OFFSET_TABLE_REGNUM as well as pic_offset_table_rtx
     aren't usable.  */
  rtx got_rtx = gen_rtx_REG (Pmode, 12);

  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  start_sequence ();

  if (TARGET_CPU_ZARCH)
    {
      emit_move_insn (got_rtx, got_symbol);
    }
  else
    {
      rtx offset;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
			       UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      emit_move_insn (got_rtx, offset);

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
			       UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, got_rtx, offset);

      emit_move_insn (got_rtx, offset);
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
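/* Illustration (an addition, not part of the original source): on
   z/Architecture CPUs the sequence built above amounts to a single
   PC-relative load of the GOT address,

       larl    %r12,_GLOBAL_OFFSET_TABLE_

   while on older 31-bit CPUs the GOT offset is instead fetched from
   the literal pool and added to the literal pool base register.  */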
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
s390_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}
/* Expand the prologue into a bunch of separate insns.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;
  int offset;
  int next_fpr = 0;

  /* Complete frame layout.  */

  s390_update_frame_layout ();

  /* Annotate all constant pool references to let the scheduler know
     they implicitly use the base register.  */

  push_topmost_sequence ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	annotate_constant_pool_refs (&PATTERN (insn));
	df_insn_rescan (insn);
      }

  pop_topmost_sequence ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use the register 1.  */

  if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
      && !crtl->is_leaf
      && !TARGET_TPF_PROFILING)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */
  if (cfun_frame_layout.first_save_gpr != -1)
    {
      insn = save_gprs (stack_pointer_rtx,
			cfun_frame_layout.gprs_offset +
			UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
					  - cfun_frame_layout.first_save_gpr_slot),
			cfun_frame_layout.first_save_gpr,
			cfun_frame_layout.last_save_gpr);
      emit_insn (insn);
    }

  /* Dummy insn to mark literal pool slot.  */

  if (cfun->machine->base_reg)
    emit_insn (gen_main_pool (cfun->machine->base_reg));

  offset = cfun_frame_layout.f0_offset;

  /* Save f0 and f2.  */
  for (i = 0; i < 2; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;
	}
      else if (!TARGET_PACKED_STACK)
	offset += 8;
    }

  /* Save f4 and f6.  */
  offset = cfun_frame_layout.f4_offset;
  for (i = 2; i < 4; i++)
    {
      if (cfun_fpr_bit_p (i))
	{
	  insn = save_fpr (stack_pointer_rtx, offset, i + 16);
	  offset += 8;

	  /* If f4 and f6 are call clobbered they are saved due to stdargs and
	     therefore are not frame related.  */
	  if (!call_really_used_regs[i + 16])
	    RTX_FRAME_RELATED_P (insn) = 1;
	}
      else if (!TARGET_PACKED_STACK)
	offset += 8;
    }

  if (TARGET_PACKED_STACK
      && cfun_save_high_fprs_p
      && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
    {
      offset = (cfun_frame_layout.f8_offset
		+ (cfun_frame_layout.high_fprs - 1) * 8);

      for (i = 15; i > 7 && offset >= 0; i--)
	if (cfun_fpr_bit_p (i))
	  {
	    insn = save_fpr (stack_pointer_rtx, offset, i + 16);

	    RTX_FRAME_RELATED_P (insn) = 1;
	    offset -= 8;
	  }
      if (offset >= cfun_frame_layout.f8_offset)
	next_fpr = i + 16;
    }

  if (!TARGET_PACKED_STACK)
    next_fpr = cfun_save_high_fprs_p ? 31 : 0;

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun_frame_layout.frame_size;

  /* Decrement stack pointer.  */

  if (cfun_frame_layout.frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      rtx real_frame_off;

      if (s390_stack_size)
	{
	  HOST_WIDE_INT stack_guard;

	  if (s390_stack_guard)
	    stack_guard = s390_stack_guard;
	  else
	    {
	      /* If no value for stack guard is provided the smallest power of 2
		 larger than the current frame size is chosen.  */
	      stack_guard = 1;
	      while (stack_guard < cfun_frame_layout.frame_size)
		stack_guard <<= 1;
	    }

	  if (cfun_frame_layout.frame_size >= s390_stack_size)
	    {
	      warning (0, "frame size of function %qs is %wd"
		       " bytes exceeding user provided stack limit of "
		       "%d bytes.  "
		       "An unconditional trap is added.",
		       current_function_name(), cfun_frame_layout.frame_size,
		       s390_stack_size);
	      emit_insn (gen_trap ());
	    }
	  else
	    {
	      /* stack_guard has to be smaller than s390_stack_size.
		 Otherwise we would emit an AND with zero which would
		 not match the test under mask pattern.  */
	      if (stack_guard >= s390_stack_size)
		{
		  warning (0, "frame size of function %qs is %wd"
			   " bytes which is more than half the stack size. "
			   "The dynamic check would not be reliable. "
			   "No check emitted for this function.",
			   current_function_name(),
			   cfun_frame_layout.frame_size);
		}
	      else
		{
		  HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
						    & ~(stack_guard - 1));

		  rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
				       GEN_INT (stack_check_mask));
		  if (TARGET_64BIT)
		    emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
							 t, const0_rtx),
					     t, const0_rtx, const0_rtx));
		  else
		    emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
							 t, const0_rtx),
					     t, const0_rtx, const0_rtx));
		}
	    }
	}

      if (s390_warn_framesize > 0
	  && cfun_frame_layout.frame_size >= s390_warn_framesize)
	warning (0, "frame size of %qs is %wd bytes",
		 current_function_name (), cfun_frame_layout.frame_size);

      if (s390_warn_dynamicstack_p && cfun->calls_alloca)
	warning (0, "%qs uses dynamic stack allocation", current_function_name ());

      /* Save incoming stack pointer into temp reg.  */
      if (TARGET_BACKCHAIN || next_fpr)
	insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_K (INTVAL (frame_off)))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
	  annotate_constant_pool_refs (&PATTERN (insn));
	}

      RTX_FRAME_RELATED_P (insn) = 1;
      real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					       real_frame_off)));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
	{
	  if (cfun_frame_layout.backchain_offset)
	    addr = gen_rtx_MEM (Pmode,
				plus_constant (Pmode, stack_pointer_rtx,
				  cfun_frame_layout.backchain_offset));
	  else
	    addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, get_frame_alias_set ());
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}

      /* If we support non-call exceptions (e.g. for Java),
	 we need to make sure the backchain pointer is set up
	 before any possibly trapping memory access.  */
      if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_clobber (addr);
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI).  */

  if (cfun_save_high_fprs_p && next_fpr)
    {
      /* If the stack might be accessed through a different register
	 we have to make sure that the stack pointer decrement is not
	 moved below the use of the stack slots.  */
      s390_emit_stack_tie ();

      insn = emit_insn (gen_add2_insn (temp_reg,
				       GEN_INT (cfun_frame_layout.f8_offset)));

      offset = 0;

      for (i = 24; i <= next_fpr; i++)
	if (cfun_fpr_bit_p (i - 16))
	  {
	    rtx addr = plus_constant (Pmode, stack_pointer_rtx,
				      cfun_frame_layout.frame_size
				      + cfun_frame_layout.f8_offset
				      + offset);

	    insn = save_fpr (temp_reg, offset, i);
	    offset += 8;
	    RTX_FRAME_RELATED_P (insn) = 1;
	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			  gen_rtx_SET (VOIDmode,
				       gen_rtx_MEM (DFmode, addr),
				       gen_rtx_REG (DFmode, i)));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    {
      rtx insns = s390_load_got ();

      for (insn = insns; insn; insn = NEXT_INSN (insn))
	annotate_constant_pool_refs (&PATTERN (insn));

      emit_insn (insns);
    }

  if (TARGET_TPF_PROFILING)
    {
      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.  */
      emit_insn (gen_prologue_tpf ());

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
/* Expand the epilogue into a bunch of separate insns.  */

void
s390_emit_epilogue (bool sibcall)
{
  rtx frame_pointer, return_reg, cfa_restores = NULL_RTX;
  int area_bottom, area_top, offset = 0;
  int next_offset;
  rtvec p;
  int i;

  if (TARGET_TPF_PROFILING)
    {

      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.  */

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());

      emit_insn (gen_epilogue_tpf ());
    }

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = (frame_pointer_needed
		   ? hard_frame_pointer_rtx : stack_pointer_rtx);

  s390_frame_area (&area_bottom, &area_top);

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
	   && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
    {
      /* Area is in range.  */
      offset = cfun_frame_layout.frame_size;
    }
  else
    {
      rtx insn, frame_off, cfa;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);

      cfa = gen_rtx_SET (VOIDmode, frame_pointer,
			 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, frame_pointer,
			      gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_K (INTVAL (frame_off)))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
	  annotate_constant_pool_refs (&PATTERN (insn));
	}
      add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      if (cfun_save_high_fprs_p)
	{
	  next_offset = cfun_frame_layout.f8_offset;
	  for (i = 24; i < 32; i++)
	    {
	      if (cfun_fpr_bit_p (i - 16))
		{
		  restore_fpr (frame_pointer,
			       offset + next_offset, i);
		  cfa_restores
		    = alloc_reg_note (REG_CFA_RESTORE,
				      gen_rtx_REG (DFmode, i), cfa_restores);
		  next_offset += 8;
		}
	    }
	}

    }
  else
    {
      next_offset = cfun_frame_layout.f4_offset;
      for (i = 18; i < 20; i++)
	{
	  if (cfun_fpr_bit_p (i - 16))
	    {
	      restore_fpr (frame_pointer,
			   offset + next_offset, i);
	      cfa_restores
		= alloc_reg_note (REG_CFA_RESTORE,
				  gen_rtx_REG (DFmode, i), cfa_restores);
	      next_offset += 8;
	    }
	  else if (!TARGET_PACKED_STACK)
	    next_offset += 8;
	}

    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun_frame_layout.first_restore_gpr != -1)
    {
      rtx insn, addr;
      int i;

      /* Check for global registers and save them
	 to the stack location from where they get restored.  */

      for (i = cfun_frame_layout.first_restore_gpr;
	   i <= cfun_frame_layout.last_restore_gpr;
	   i++)
	{
	  if (global_not_special_regno_p (i))
	    {
	      addr = plus_constant (Pmode, frame_pointer,
				    offset + cfun_frame_layout.gprs_offset
				    + (i - cfun_frame_layout.first_save_gpr_slot)
				    * UNITS_PER_LONG);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, get_frame_alias_set ());
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
	    }
	  else
	    cfa_restores
	      = alloc_reg_note (REG_CFA_RESTORE,
				gen_rtx_REG (Pmode, i), cfa_restores);
	}

      if (! sibcall)
	{
	  /* Fetch return address from stack before load multiple;
	     this helps scheduling.  */

	  if (cfun_frame_layout.save_return_addr_p
	      || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
		  && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
	    {
	      int return_regnum = find_unused_clobbered_reg();
	      if (!return_regnum)
		return_regnum = 4;
	      return_reg = gen_rtx_REG (Pmode, return_regnum);

	      addr = plus_constant (Pmode, frame_pointer,
				    offset + cfun_frame_layout.gprs_offset
				    + (RETURN_REGNUM
				       - cfun_frame_layout.first_save_gpr_slot)
				    * UNITS_PER_LONG);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, get_frame_alias_set ());
	      emit_move_insn (return_reg, addr);
	    }
	}

      insn = restore_gprs (frame_pointer,
			   offset + cfun_frame_layout.gprs_offset
			   + (cfun_frame_layout.first_restore_gpr
			      - cfun_frame_layout.first_save_gpr_slot)
			   * UNITS_PER_LONG,
			   cfun_frame_layout.first_restore_gpr,
			   cfun_frame_layout.last_restore_gpr);
      insn = emit_insn (insn);
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA,
		    plus_constant (Pmode, stack_pointer_rtx,
				   STACK_POINTER_OFFSET));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (! sibcall)
    {

      /* Return to caller.  */

      p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = ret_rtx;
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
/* Return the size in bytes of a function argument of
   type TYPE and/or mode MODE.  At least one of TYPE or
   MODE must be specified.  */

static int
s390_function_arg_size (enum machine_mode mode, const_tree type)
{
  if (type)
    return int_size_in_bytes (type);

  /* No type info available for some library calls ...  */
  if (mode != BLKmode)
    return GET_MODE_SIZE (mode);

  /* If we have neither type nor mode, abort.  */
  gcc_unreachable ();
}
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in a floating-point register, if available.  */

static bool
s390_function_arg_float (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* Soft-float changes the ABI: no floating-point registers are used.  */
  if (TARGET_SOFT_FLOAT)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;

  /* The ABI says that record types with a single member are treated
     just like that member would be.  */
  while (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field, single = NULL_TREE;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (single == NULL_TREE)
	    single = TREE_TYPE (field);
	  else
	    return false;
	}

      if (single == NULL_TREE)
	return false;
      else
	type = single;
    }

  return TREE_CODE (type) == REAL_TYPE;
}
/* Return true if a function argument of type TYPE and mode MODE
   is to be passed in an integer register, or a pair of integer
   registers, if available.  */

static bool
s390_function_arg_integer (enum machine_mode mode, const_tree type)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return false;

  /* No type info available for some library calls ...  */
  if (!type)
    return GET_MODE_CLASS (mode) == MODE_INT
	   || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));

  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == NULLPTR_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
    return true;

  /* We also accept structs of size 1, 2, 4, 8 that are not
     passed in floating-point registers.  */
  if (AGGREGATE_TYPE_P (type)
      && exact_log2 (size) >= 0
      && !s390_function_arg_float (mode, type))
    return true;

  return false;
}
/* Return 1 if a function argument of type TYPE and mode MODE
   is to be passed by reference.  The ABI specifies that only
   structures of size 1, 2, 4, or 8 bytes are passed by value,
   all other structures (and complex numbers) are passed by
   reference.  */

static bool
s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
			enum machine_mode mode, const_tree type,
			bool named ATTRIBUTE_UNUSED)
{
  int size = s390_function_arg_size (mode, type);
  if (size > 8)
    return true;

  if (type)
    {
      if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
	return 1;

      if (TREE_CODE (type) == COMPLEX_TYPE
	  || TREE_CODE (type) == VECTOR_TYPE)
	return 1;
    }

  return 0;
}
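/* Examples (an addition for illustration): struct { long l; } (size 8,
   a power of two) is passed by value; struct { char c[3]; } (size 3)
   and any struct larger than 8 bytes are passed by reference, as are
   all _Complex and vector types.  */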
/* Update the data in CUM to advance over an argument of mode MODE and
   data type TYPE.  (TYPE is null for libcalls where that information
   may not be available.)  The boolean NAMED specifies whether the
   argument is a named argument (as opposed to an unnamed argument
   matching an ellipsis).  */

static void
s390_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    {
      cum->fprs += 1;
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      cum->gprs += ((size + UNITS_PER_LONG - 1) / UNITS_PER_LONG);
    }
  else
    gcc_unreachable ();
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On S/390, we use general purpose registers 2 through 6 to
   pass integer, pointer, and certain structure arguments, and
   floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
   to pass floating point arguments.  All remaining arguments
   are pushed to the stack.  */

static rtx
s390_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
		   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  if (s390_function_arg_float (mode, type))
    {
      if (cum->fprs + 1 > FP_ARG_NUM_REG)
	return 0;
      else
	return gen_rtx_REG (mode, cum->fprs + 16);
    }
  else if (s390_function_arg_integer (mode, type))
    {
      int size = s390_function_arg_size (mode, type);
      int n_gprs = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
	return 0;
      else if (n_gprs == 1 || UNITS_PER_WORD == UNITS_PER_LONG)
	return gen_rtx_REG (mode, cum->gprs + 2);
      else if (n_gprs == 2)
	{
	  rtvec p = rtvec_alloc (2);

	  RTVEC_ELT (p, 0)
	    = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 2),
				 const0_rtx);
	  RTVEC_ELT (p, 1)
	    = gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, cum->gprs + 3),
				 GEN_INT (4));

	  return gen_rtx_PARALLEL (mode, p);
	}
    }

  /* After the real arguments, expand_call calls us once again
     with a void_type_node type.  Whatever we return here is
     passed as operand 2 to the call expanders.

     We don't need this feature ...  */
  else if (type == void_type_node)
    return const0_rtx;

  gcc_unreachable ();
}
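/* Example (an addition for illustration): for

       void f (int a, double b, int c);

   the rules above assign a -> %r2 and c -> %r3 (integer arguments
   consume GPRs 2..6 in order) and b -> %f0 with hard floats; a sixth
   integer argument would no longer fit and is passed on the stack.  */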
/* Return true if return values of type TYPE should be returned
   in a memory buffer whose address is passed by the caller as
   hidden first argument.  */

static bool
s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
{
  /* We accept small integral (and similar) types.  */
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE
      || TREE_CODE (type) == REAL_TYPE)
    return int_size_in_bytes (type) > 8;

  /* Aggregates and similar constructs are always returned
     in memory.  */
  if (AGGREGATE_TYPE_P (type)
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    return true;

  /* ??? We get called on all sorts of random stuff from
     aggregate_value_p.  We can't abort, but it's not clear
     what's safe to return.  Pretend it's a struct I guess.  */
  return true;
}
/* Function arguments and return values are promoted to word size.  */

static enum machine_mode
s390_promote_function_mode (const_tree type, enum machine_mode mode,
			    int *punsignedp,
			    const_tree fntype ATTRIBUTE_UNUSED,
			    int for_return ATTRIBUTE_UNUSED)
{
  if (INTEGRAL_MODE_P (mode)
      && GET_MODE_SIZE (mode) < UNITS_PER_LONG)
    {
      if (type != NULL_TREE && POINTER_TYPE_P (type))
	*punsignedp = POINTERS_EXTEND_UNSIGNED;
      return Pmode;
    }

  return mode;
}
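/* Example (an addition for illustration): under this hook a 'short'
   argument or return value is widened to word size (SImode on 31 bit,
   DImode on 64 bit), and a narrow pointer type is widened with
   unsigned extension as mandated by POINTERS_EXTEND_UNSIGNED.  */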
/* Define where to return a (scalar) value of type RET_TYPE.
   If RET_TYPE is null, define where to return a (scalar)
   value of mode MODE from a libcall.  */

static rtx
s390_function_and_libcall_value (enum machine_mode mode,
				 const_tree ret_type,
				 const_tree fntype_or_decl,
				 bool outgoing ATTRIBUTE_UNUSED)
{
  /* For normal functions perform the promotion as
     promote_function_mode would do.  */
  if (ret_type)
    {
      int unsignedp = TYPE_UNSIGNED (ret_type);
      mode = promote_function_mode (ret_type, mode, &unsignedp,
				    fntype_or_decl, 1);
    }

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
  gcc_assert (GET_MODE_SIZE (mode) <= 8);

  if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
    return gen_rtx_REG (mode, 16);
  else if (GET_MODE_SIZE (mode) <= UNITS_PER_LONG
	   || UNITS_PER_LONG == UNITS_PER_WORD)
    return gen_rtx_REG (mode, 2);
  else if (GET_MODE_SIZE (mode) == 2 * UNITS_PER_LONG)
    {
      /* This case is triggered when returning a 64 bit value with
	 -m31 -mzarch.  Although the value would fit into a single
	 register it has to be forced into a 32 bit register pair in
	 order to match the ABI.  */
      rtvec p = rtvec_alloc (2);

      RTVEC_ELT (p, 0)
	= gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 2), const0_rtx);
      RTVEC_ELT (p, 1)
	= gen_rtx_EXPR_LIST (SImode, gen_rtx_REG (SImode, 3), GEN_INT (4));

      return gen_rtx_PARALLEL (mode, p);
    }

  gcc_unreachable ();
}
/* Define where to return a scalar return value of type RET_TYPE.  */

static rtx
s390_function_value (const_tree ret_type, const_tree fn_decl_or_type,
		     bool outgoing)
{
  return s390_function_and_libcall_value (TYPE_MODE (ret_type), ret_type,
					  fn_decl_or_type, outgoing);
}
/* Define where to return a scalar libcall return value of mode
   MODE.  */

static rtx
s390_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
{
  return s390_function_and_libcall_value (mode, NULL_TREE,
					  NULL_TREE, true);
}
/* Create and return the va_list datatype.

   On S/390, va_list is an array type equivalent to

      typedef struct __va_list_tag
        {
            long __gpr;
            long __fpr;
            void *__overflow_arg_area;
            void *__reg_save_area;
        } va_list[1];

   where __gpr and __fpr hold the number of general purpose
   or floating point arguments used up to now, respectively,
   __overflow_arg_area points to the stack location of the
   next argument passed on the stack, and __reg_save_area
   always points to the start of the register area in the
   call frame of the current function.  The function prologue
   saves all registers used for argument passing into this
   area if the function uses variable arguments.  */

static tree
s390_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl =
    build_decl (BUILTINS_LOCATION,
		TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  f_fpr = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  f_ovf = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (BUILTINS_LOCATION,
		      FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  DECL_CHAIN (f_gpr) = f_fpr;
  DECL_CHAIN (f_fpr) = f_ovf;
  DECL_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     crtl->args.info:
       holds number of gprs and fprs used for named arguments.
     crtl->args.arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

static void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_simple_mem_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = crtl->args.info.gprs;
  n_fpr = crtl->args.info.fprs;

  if (cfun->va_list_gpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
		  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
		  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
      || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
    {
      t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

      off = INTVAL (crtl->args.arg_offset_rtx);
      off = off < 0 ? 0 : off;
      if (TARGET_DEBUG_ARG)
	fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
		 (int)n_gpr, (int)n_fpr, off);

      t = fold_build_pointer_plus_hwi (t, off);

      t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the register save area.  */
  if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
      || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
    {
      t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
      t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);

      t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
}
9044 /* Implement va_arg by updating the va_list structure
9045 VALIST as required to retrieve an argument of type
9046 TYPE, and returning that argument.
9048 Generates code equivalent to:
9050 if (integral value) {
9051 if (size <= 4 && args.gpr < 5 ||
9052 size > 4 && args.gpr < 4 )
9053 ret = args.reg_save_area[args.gpr+8]
9055 ret = *args.overflow_arg_area++;
9056 } else if (float value) {
9058 ret = args.reg_save_area[args.fpr+64]
9060 ret = *args.overflow_arg_area++;
9061 } else if (aggregate value) {
9063 ret = *args.reg_save_area[args.gpr]
9065 ret = **args.overflow_arg_area++;
static tree
s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  tree lab_false, lab_over, addr;

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = DECL_CHAIN (f_gpr);
  f_ovf = DECL_CHAIN (f_fpr);
  f_sav = DECL_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* The tree for args* cannot be shared between gpr/fpr and ovf since
     both appear on a lhs.  */
  valist = unshare_expr (valist);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);

  size = int_size_in_bytes (type);

  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "va_arg: aggregate type");

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
         will be added by s390_frame_info because for va_args always an even
         number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;
      sav_scale = UNITS_PER_LONG;
      size = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "va_arg: float type");

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_LONG;
      sav_scale = 8;
      max_reg = FP_ARG_NUM_REG - n_reg;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "va_arg: other type");

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_LONG - 1) / UNITS_PER_LONG;

      /* kernel stack layout on 31 bit: It is assumed here that no padding
         will be added by s390_frame_info because for va_args always an even
         number of gprs has to be saved r15-r2 = 14 regs.  */
      sav_ofs = 2 * UNITS_PER_LONG;

      if (size < UNITS_PER_LONG)
        sav_ofs += UNITS_PER_LONG - size;

      sav_scale = UNITS_PER_LONG;
      max_reg = GP_ARG_NUM_REG - n_reg;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = create_artificial_label (UNKNOWN_LOCATION);
  lab_over = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, "addr");

  t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
  t = build2 (GT_EXPR, boolean_type_node, reg, t);
  u = build1 (GOTO_EXPR, void_type_node, lab_false);
  t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
  gimplify_and_add (t, pre_p);

  t = fold_build_pointer_plus_hwi (sav, sav_ofs);
  u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
              fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
  t = fold_build_pointer_plus (t, u);

  gimplify_assign (addr, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));

  /* ... Otherwise out of the overflow area.  */

  t = ovf;
  if (size < UNITS_PER_LONG)
    t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);

  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  gimplify_assign (addr, t, pre_p);

  t = fold_build_pointer_plus_hwi (t, size);
  gimplify_assign (ovf, t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));

  /* Increment register save count.  */

  u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
              fold_convert (TREE_TYPE (reg), size_int (n_reg)));
  gimplify_and_add (u, pre_p);

  if (indirect_p)
    {
      t = build_pointer_type_for_mode (build_pointer_type (type),
                                       ptr_mode, true);
      addr = fold_convert (t, addr);
      addr = build_va_arg_indirect_ref (addr);
    }
  else
    {
      t = build_pointer_type_for_mode (type, ptr_mode, true);
      addr = fold_convert (t, addr);
    }

  return build_va_arg_indirect_ref (addr);
}
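/* Worked example for the code above: fetching a 32-bit int on a 64-bit
   target gives size == 4 < UNITS_PER_LONG == 8, so sav_ofs becomes
   2 * 8 + (8 - 4) == 20 and the value is loaded from
   sav + 20 + gpr * 8 as long as gpr <= max_reg (== GP_ARG_NUM_REG - 1);
   otherwise it is taken from the right-aligned overflow slot.  */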
enum s390_builtins
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

static enum insn_code const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

static enum insn_code const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};

static void
s390_init_builtins (void)
{
  tree ftype;

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_thread_pointer", ftype,
                        S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
                        NULL, NULL_TREE);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  add_builtin_function ("__builtin_set_thread_pointer", ftype,
                        S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
                        NULL, NULL_TREE);
}
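/* A note on the two builtins registered above: they simply wrap the
   get_tp/set_tp insn patterns selected through code_for_builtin_64/_31.
   On this target the thread pointer is conventionally kept in the
   access registers (%a0, plus %a1 for the low half on 64 bit).  */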
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                     enum machine_mode mode ATTRIBUTE_UNUSED,
                     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2

  enum insn_code const *code_for_builtin =
    TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
  tree arg;
  call_expr_arg_iterator iter;

  if (fcode >= S390_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = code_for_builtin[fcode];
  if (icode == 0)
    internal_error ("bad builtin fcode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  arity = 0;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
        return NULL_RTX;
      if (arity > MAX_ARGS)
        return NULL_RTX;

      insn_op = &insn_data[icode].operand[arity + nonvoid];

      op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[arity], insn_op->mode))
        op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
      arity++;
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
    }

  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
        pat = GEN_FCN (icode) (target, op[0]);
      else
        pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
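/* Example: a call to __builtin_thread_pointer () arrives here with
   arity == 0 and nonvoid true, so only case 0 above runs and the
   pattern is generated with a single target operand.  */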
/* Output assembly code for the trampoline template to
   stdio stream FILE.

   On S/390, we use gpr 1 internally in the trampoline code;
   gpr 0 is used to hold the static chain.  */

static void
s390_asm_trampoline_template (FILE *file)
{
  rtx op[2];
  op[0] = gen_rtx_REG (Pmode, 0);
  op[1] = gen_rtx_REG (Pmode, 1);

  if (TARGET_64BIT)
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lmg\t%0,%1,14(%1)", op);  /* 6 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
    }
  else
    {
      output_asm_insn ("basr\t%1,0", op);         /* 2 byte */
      output_asm_insn ("lm\t%0,%1,6(%1)", op);    /* 4 byte */
      output_asm_insn ("br\t%1", op);             /* 2 byte */
      ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
    }
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

static void
s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (2 * UNITS_PER_LONG), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, Pmode, 2 * UNITS_PER_LONG);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, Pmode, 3 * UNITS_PER_LONG);
  emit_move_insn (mem, fnaddr);
}
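/* Resulting trampoline layout on 64 bit (UNITS_PER_LONG == 8): bytes
   0-9 hold the basr/lmg/br template, byte 16 the static chain and byte
   24 the function address.  The basr leaves tramp+2 in %r1, so the
   "lmg %r0,%r1,14(%r1)" of the template loads exactly those two
   slots.  */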
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];
  char label[128];

  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (Pmode, op[1], UNITS_PER_LONG));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
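/* All three variants above follow the same protocol: save the return
   register to its stack slot, materialize the addresses of the LP
   counter label and of _mcount, call _mcount, and reload the return
   register afterwards.  */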
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (TREE_CODE (decl) == VAR_DECL)
    {
      /* If a variable has a forced alignment to < 2 bytes, mark it
         with SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL
         operand.  */
      if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
      if (!DECL_SIZE (decl)
          || !DECL_ALIGN (decl)
          || !host_integerp (DECL_SIZE (decl), 0)
          || (DECL_ALIGN (decl) <= 64
              && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
        SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
    }

  /* Literal pool references don't have a decl so they are handled
     differently here.  We rely on the information in the MEM_ALIGN
     entry to decide upon natural alignment.  */
  if (MEM_P (rtl)
      && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
      && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
      && (MEM_ALIGN (rtl) == 0
          || GET_MODE_BITSIZE (GET_MODE (rtl)) == 0
          || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
}
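/* Example: a global declared with __attribute__ ((aligned (1))) has
   DECL_USER_ALIGN set and DECL_ALIGN == 8 bits, so it receives
   SYMBOL_FLAG_ALIGN1 and will not be addressed via LARL, which can
   only produce even addresses.  */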
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Make sure unwind info is emitted for the thunk if needed.  */
  final_start_function (emit_barrier (), file, 1);

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_K (delta)
           && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("aghi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("agfi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("lgfi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Setup base pointer if required.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (delta)
              && !CONST_OK_FOR_Os (delta))
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_K (vcall_offset)
              && !CONST_OK_FOR_Os (vcall_offset)))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_J (delta))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_K (delta))
            output_asm_insn ("ahi\t%1,%2", op);
          else if (CONST_OK_FOR_Os (delta))
            output_asm_insn ("afi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_J (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_K (vcall_offset))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else if (CONST_OK_FOR_Os (vcall_offset))
            {
              output_asm_insn ("iilf\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-setup the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  final_end_function ();
}
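/* In the common 64-bit case (small DELTA, VCALL_OFFSET == 0, local
   FUNCTION) the emitted thunk is just:  la %r2,DELTA(%r2); jg FUNCTION.  */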
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
/* Checks whether the given CALL_EXPR would use a caller
   saved register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree call_expr)
{
  CUMULATIVE_ARGS cum_v;
  cumulative_args_t cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg, i;

  INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
  cum = pack_cumulative_args (&cum_v);

  for (i = 0; i < call_expr_nargs (call_expr); i++)
    {
      parameter = CALL_EXPR_ARG (call_expr, i);
      gcc_assert (parameter);

      /* For an undeclared variable passed as parameter we will get
         an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
        return true;

      type = TREE_TYPE (parameter);
      gcc_assert (type);

      mode = TYPE_MODE (type);
      gcc_assert (mode);

      if (pass_by_reference (&cum_v, mode, type, true))
        {
          mode = Pmode;
          type = build_pointer_type (type);
        }

      parm_rtx = s390_function_arg (cum, mode, type, 0);

      s390_function_arg_advance (cum, mode, type, 0);

      if (!parm_rtx)
        continue;

      if (REG_P (parm_rtx))
        {
          for (reg = 0;
               reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
               reg++)
            if (!call_used_regs[reg + REGNO (parm_rtx)])
              return true;
        }

      if (GET_CODE (parm_rtx) == PARALLEL)
        {
          int i;

          for (i = 0; i < XVECLEN (parm_rtx, 0); i++)
            {
              rtx r = XEXP (XVECEXP (parm_rtx, 0, i), 0);

              gcc_assert (REG_P (r));

              for (reg = 0;
                   reg < HARD_REGNO_NREGS (REGNO (r), GET_MODE (r));
                   reg++)
                if (!call_used_regs[reg + REGNO (r)])
                  return true;
            }
        }
    }
  return false;
}
/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but is
     call-saved.  This makes functions needing this register for
     arguments not suitable for sibcalls.  */
  return !s390_call_saved_register_used (exp);
}
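/* Example: the first five word-sized integer arguments go in r2-r6.
   A call whose fifth such argument lands in r6, a call-saved register,
   makes s390_call_saved_register_used return true, so no sibcall is
   performed for it.  */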
/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}
/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          if (retaddr_reg != NULL_RTX)
            {
              addr_location = gen_rtx_UNSPEC (Pmode,
                                              gen_rtvec (1, addr_location),
                                              UNSPEC_PLT);
              addr_location = gen_rtx_CONST (Pmode, addr_location);
              plt_call = true;
            }
          else
            /* For -fpic code the PLT entries might use r12 which is
               call-saved.  Therefore we cannot do a sibcall when
               calling directly using a symbol ref.  When reaching
               this point we decided (in s390_function_ok_for_sibcall)
               to do a sibcall for a function pointer but one of the
               optimizers was able to get rid of the function pointer
               by propagating the symbol ref into the call.  This
               optimization is illegal for S/390 so we turn the direct
               call into a indirect call again.  */
            addr_location = force_reg (Pmode, addr_location);
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else make sure the address can be found in
     register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      gcc_assert (retaddr_reg != NULL_RTX);
      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), gen_rtx_REG (Pmode, 12));
    }
  return insn;
}
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */

static void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[BASE_REGNUM] = 0;
      call_used_regs[BASE_REGNUM] = 0;
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}
/* Corresponding function to eh_return expander.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;

void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}
/* Rework the prologue/epilogue to avoid saving/restoring
   registers unnecessarily.  */

static void
s390_optimize_prologue (void)
{
  rtx insn, new_insn, next_insn;

  /* Do a final recompute of the frame-related data.  */

  s390_update_frame_layout ();

  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */

  if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
      && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
      && (TARGET_CPU_ZARCH
          || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
              && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
    return;

  /* Search for prologue/epilogue insns and replace them.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
        continue;

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && store_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_SRC (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_save_gpr != -1
              && (cfun_frame_layout.first_save_gpr < first
                  || cfun_frame_layout.last_save_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_save_gpr != -1)
            {
              new_insn = save_gprs (base,
                                    off + (cfun_frame_layout.first_save_gpr
                                           - first) * UNITS_PER_LONG,
                                    cfun_frame_layout.first_save_gpr,
                                    cfun_frame_layout.last_save_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_save_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_SRC (PATTERN (insn))) == REG
          && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_SRC (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }

      if (GET_CODE (PATTERN (insn)) == PARALLEL
          && load_multiple_operation (PATTERN (insn), VOIDmode))
        {
          set = XVECEXP (PATTERN (insn), 0, 0);
          first = REGNO (SET_DEST (set));
          last = first + XVECLEN (PATTERN (insn), 0) - 1;
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (cfun_frame_layout.first_restore_gpr != -1
              && (cfun_frame_layout.first_restore_gpr < first
                  || cfun_frame_layout.last_restore_gpr > last))
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;
          if (first > BASE_REGNUM || last < BASE_REGNUM)
            continue;

          if (cfun_frame_layout.first_restore_gpr != -1)
            {
              new_insn = restore_gprs (base,
                                       off + (cfun_frame_layout.first_restore_gpr
                                              - first) * UNITS_PER_LONG,
                                       cfun_frame_layout.first_restore_gpr,
                                       cfun_frame_layout.last_restore_gpr);
              new_insn = emit_insn_before (new_insn, insn);
              INSN_ADDRESSES_NEW (new_insn, -1);
            }

          remove_insn (insn);
          continue;
        }

      if (cfun_frame_layout.first_restore_gpr == -1
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == REG
          && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
              || (!TARGET_CPU_ZARCH
                  && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
          && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
        {
          set = PATTERN (insn);
          first = REGNO (SET_DEST (set));
          offset = const0_rtx;
          base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
          off = INTVAL (offset);

          if (GET_CODE (base) != REG || off < 0)
            continue;
          if (REGNO (base) != STACK_POINTER_REGNUM
              && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
            continue;

          remove_insn (insn);
          continue;
        }
    }
}
/* On z10 and later the dynamic branch prediction must see the
   backward jump within a certain window.  If not it falls back to
   the static prediction.  This function rearranges the loop backward
   branch in a way which makes the static prediction always correct.
   The function returns true if it added an instruction.  */
static bool
s390_fix_long_loop_prediction (rtx insn)
{
  rtx set = single_set (insn);
  rtx code_label, label_ref, new_label;
  rtx uncond_jump;
  rtx cur_insn;
  rtx tmp;
  int distance;

  /* This will exclude branch on count and branch on index patterns
     since these are correctly statically predicted.  */
  if (!set
      || SET_DEST (set) != pc_rtx
      || GET_CODE (SET_SRC(set)) != IF_THEN_ELSE)
    return false;

  label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
               XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));

  gcc_assert (GET_CODE (label_ref) == LABEL_REF);

  code_label = XEXP (label_ref, 0);

  if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
      || INSN_ADDRESSES (INSN_UID (insn)) == -1
      || (INSN_ADDRESSES (INSN_UID (insn))
          - INSN_ADDRESSES (INSN_UID (code_label)) < PREDICT_DISTANCE))
    return false;

  for (distance = 0, cur_insn = PREV_INSN (insn);
       distance < PREDICT_DISTANCE - 6;
       distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
    if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
      return false;

  new_label = gen_label_rtx ();
  uncond_jump = emit_jump_insn_after (
                  gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_LABEL_REF (VOIDmode, code_label)),
                  insn);
  emit_label_after (new_label, uncond_jump);

  tmp = XEXP (SET_SRC (set), 1);
  XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
  XEXP (SET_SRC (set), 2) = tmp;
  INSN_CODE (insn) = -1;

  XEXP (label_ref, 0) = new_label;
  JUMP_LABEL (insn) = new_label;
  JUMP_LABEL (uncond_jump) = code_label;

  return true;
}
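/* In effect, a far backward branch

     loop:  ...
            jne loop

   is rewritten as

     loop:  ...
            je  skip
            j   loop
     skip:

   so the taken backward branch is unconditional and the static
   prediction (backward == taken) is always correct for it.  */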
/* Returns 1 if INSN reads the value of REG for purposes not related
   to addressing of memory, and 0 otherwise.  */
static int
s390_non_addr_reg_read_p (rtx reg, rtx insn)
{
  return reg_referenced_p (reg, PATTERN (insn))
         && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
}
/* Starting from INSN find_cond_jump looks downwards in the insn
   stream for a single jump insn which is the last user of the
   condition code set in INSN.  */
static rtx
find_cond_jump (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    {
      rtx ite, cc;

      if (LABEL_P (insn))
        break;

      if (!JUMP_P (insn))
        {
          if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
            break;
          continue;
        }

      /* This will be triggered by a return.  */
      if (GET_CODE (PATTERN (insn)) != SET)
        break;

      gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
      ite = SET_SRC (PATTERN (insn));

      if (GET_CODE (ite) != IF_THEN_ELSE)
        break;

      cc = XEXP (XEXP (ite, 0), 0);
      if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
        break;

      if (find_reg_note (insn, REG_DEAD, cc))
        return insn;
      break;
    }

  return NULL_RTX;
}
/* Swap the condition in COND and the operands in OP0 and OP1 so that
   the semantics does not change.  If NULL_RTX is passed as COND the
   function tries to find the conditional jump starting with INSN.  */
static void
s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx insn)
{
  rtx tmp;

  if (cond == NULL_RTX)
    {
      rtx jump = find_cond_jump (NEXT_INSN (insn));
      jump = jump ? single_set (jump) : NULL_RTX;

      if (jump == NULL_RTX)
        gcc_unreachable ();

      cond = XEXP (XEXP (jump, 1), 0);
    }

  tmp = *op0;
  *op0 = *op1;
  *op1 = tmp;

  PUT_CODE (cond, swap_condition (GET_CODE (cond)));
}
/* On z10, instructions of the compare-and-branch family have the
   property to access the register occurring as second operand with
   its bits complemented.  If such a compare is grouped with a second
   instruction that accesses the same register non-complemented, and
   if that register's value is delivered via a bypass, then the
   pipeline recycles, thereby causing significant performance decline.
   This function locates such situations and exchanges the two
   operands of the compare.  The function returns true whenever it
   added an insn.  */
static bool
s390_z10_optimize_cmp (rtx insn)
{
  rtx prev_insn, next_insn;
  bool insn_added_p = false;
  rtx cond, *op0, *op1;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      /* Handle compare and branch and branch on count
         instructions.  */
      rtx pattern = single_set (insn);

      if (!pattern
          || SET_DEST (pattern) != pc_rtx
          || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
        return false;

      cond = XEXP (SET_SRC (pattern), 0);
      op0 = &XEXP (cond, 0);
      op1 = &XEXP (cond, 1);
    }
  else if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx src, dest;

      /* Handle normal compare instructions.  */
      src = SET_SRC (PATTERN (insn));
      dest = SET_DEST (PATTERN (insn));

      if (!REG_P (dest)
          || !CC_REGNO_P (REGNO (dest))
          || GET_CODE (src) != COMPARE)
        return false;

      /* s390_swap_cmp will try to find the conditional
         jump when passing NULL_RTX as condition.  */
      cond = NULL_RTX;
      op0 = &XEXP (src, 0);
      op1 = &XEXP (src, 1);
    }
  else
    return false;

  if (!REG_P (*op0) || !REG_P (*op1))
    return false;

  if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
    return false;

  /* Swap the COMPARE arguments and its mask if there is a
     conflicting access in the previous insn.  */
  prev_insn = prev_active_insn (insn);
  if (prev_insn != NULL_RTX && INSN_P (prev_insn)
      && reg_referenced_p (*op1, PATTERN (prev_insn)))
    s390_swap_cmp (cond, op0, op1, insn);

  /* Check if there is a conflict with the next insn.  If there
     was no conflict with the previous insn, then swap the
     COMPARE arguments and its mask.  If we already swapped
     the operands, or if swapping them would cause a conflict
     with the previous insn, issue a NOP after the COMPARE in
     order to separate the two instructions.  */
  next_insn = next_active_insn (insn);
  if (next_insn != NULL_RTX && INSN_P (next_insn)
      && s390_non_addr_reg_read_p (*op1, next_insn))
    {
      if (prev_insn != NULL_RTX && INSN_P (prev_insn)
          && s390_non_addr_reg_read_p (*op0, prev_insn))
        {
          if (REGNO (*op1) == 0)
            emit_insn_after (gen_nop1 (), insn);
          else
            emit_insn_after (gen_nop (), insn);
          insn_added_p = true;
        }
      else
        s390_swap_cmp (cond, op0, op1, insn);
    }
  return insn_added_p;
}
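/* In effect: if the previous insn already accesses the second compare
   operand, the operands (and the condition) are swapped so the
   complemented access hits the other register; if a conflict exists on
   both sides, a nop is emitted after the compare to break up the
   dispatch group instead.  */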
/* Perform machine-dependent processing.  */

static void
s390_reorg (void)
{
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();

  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
        {
          pool = s390_mainpool_start ();
          if (!pool)
            pool_overflow = true;
        }

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start ();

      /* Split out-of-range branches.  If this has created new
         literal pool entries, cancel current chunk list and
         recompute it.  zSeries machines have large branch
         instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
          else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
         Finish up literal pool related changes.  */
      if (pool_overflow)
        s390_chunkify_finish (pool);
      else
        s390_mainpool_finish (pool);

      /* We're done splitting branches.  */
      cfun->machine->split_branches_pending_p = false;
      break;
    }

  /* Generate out-of-pool execute target insns.  */
  if (TARGET_CPU_ZARCH)
    {
      rtx insn, label, target;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          label = s390_execute_label (insn);
          if (label)
            {
              gcc_assert (label != const0_rtx);

              target = emit_label (XEXP (label, 0));
              INSN_ADDRESSES_NEW (target, -1);

              target = emit_insn (s390_execute_target (insn));
              INSN_ADDRESSES_NEW (target, -1);
            }
        }
    }

  /* Try to optimize prologue and epilogue further.  */
  s390_optimize_prologue ();

  /* Walk over the insns and do some >=z10 specific changes.  */
  if (s390_tune == PROCESSOR_2097_Z10
      || s390_tune == PROCESSOR_2817_Z196)
    {
      rtx insn;
      bool insn_added_p = false;

      /* The insn lengths and addresses have to be up to date for the
         following manipulations.  */
      shorten_branches (get_insns ());

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
            continue;

          if (JUMP_P (insn))
            insn_added_p |= s390_fix_long_loop_prediction (insn);

          if ((GET_CODE (PATTERN (insn)) == PARALLEL
               || GET_CODE (PATTERN (insn)) == SET)
              && s390_tune == PROCESSOR_2097_Z10)
            insn_added_p |= s390_z10_optimize_cmp (insn);
        }

      /* Adjust branches if we added new instructions.  */
      if (insn_added_p)
        shorten_branches (get_insns ());
    }
}
/* Return true if INSN is a fp load insn writing register REGNO.  */
static inline bool
s390_fpload_toreg (rtx insn, unsigned int regno)
{
  rtx set;
  enum attr_type flag = s390_safe_attr_type (insn);

  if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
    return false;

  set = single_set (insn);

  if (set == NULL_RTX)
    return false;

  if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
    return false;

  if (REGNO (SET_DEST (set)) != regno)
    return false;

  return true;
}
/* This value describes the distance to be avoided between an
   arithmetic fp instruction and an fp load writing the same register.
   Z10_EARLYLOAD_DISTANCE - 1 as well as Z10_EARLYLOAD_DISTANCE + 1 is
   fine but the exact value has to be avoided.  Otherwise the FP
   pipeline will throw an exception causing a major penalty.  */
#define Z10_EARLYLOAD_DISTANCE 7

/* Rearrange the ready list in order to avoid the situation described
   for Z10_EARLYLOAD_DISTANCE.  A problematic load instruction is
   moved to the very end of the ready list.  */
static void
s390_z10_prevent_earlyload_conflicts (rtx *ready, int *nready_p)
{
  unsigned int regno;
  int nready = *nready_p;
  rtx tmp;
  int i;
  rtx insn;
  rtx set;
  enum attr_type flag;
  int distance;

  /* Skip DISTANCE - 1 active insns.  */
  for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
       distance > 0 && insn != NULL_RTX;
       distance--, insn = prev_active_insn (insn))
    if (CALL_P (insn) || JUMP_P (insn))
      return;

  if (insn == NULL_RTX)
    return;

  set = single_set (insn);

  if (set == NULL_RTX || !REG_P (SET_DEST (set))
      || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
    return;

  flag = s390_safe_attr_type (insn);

  if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
    return;

  regno = REGNO (SET_DEST (set));
  i = nready - 1;

  while (!s390_fpload_toreg (ready[i], regno) && i > 0)
    i--;

  if (!i)
    return;

  tmp = ready[i];
  memmove (&ready[1], &ready[0], sizeof (rtx) * i);
  ready[0] = tmp;
}
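/* In other words: if the insn issued about Z10_EARLYLOAD_DISTANCE - 1
   slots ago was FP arithmetic writing some register, a pending load of
   that same register is demoted to ready[0] (i.e. issued last), so the
   critical distance of exactly 7 insns is never produced.  */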
/* This function is called via hook TARGET_SCHED_REORDER before
   issuing one insn from list READY which contains *NREADYP entries.
   For target z10 it reorders load instructions to avoid early load
   conflicts in the floating point pipeline.  */
static int
s390_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
                    rtx *ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
{
  if (s390_tune == PROCESSOR_2097_Z10)
    if (reload_completed && *nreadyp > 1)
      s390_z10_prevent_earlyload_conflicts (ready, nreadyp);

  return s390_issue_rate ();
}
/* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
   the scheduler has issued INSN.  It stores the last issued insn into
   last_scheduled_insn in order to make it available for
   s390_sched_reorder.  */
static int
s390_sched_variable_issue (FILE *file ATTRIBUTE_UNUSED,
                           int verbose ATTRIBUTE_UNUSED,
                           rtx insn, int more)
{
  last_scheduled_insn = insn;

  if (GET_CODE (PATTERN (insn)) != USE
      && GET_CODE (PATTERN (insn)) != CLOBBER)
    return more - 1;
  else
    return more;
}

static void
s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
                 int verbose ATTRIBUTE_UNUSED,
                 int max_ready ATTRIBUTE_UNUSED)
{
  last_scheduled_insn = NULL_RTX;
}
/* This function checks the whole of insn X for memory references.  The
   function always returns zero because the framework it is called
   from would stop recursively analyzing the insn upon a return value
   other than zero.  The real result of this function is updating
   counter variable MEM_COUNT.  */
static int
check_dpu (rtx *x, unsigned *mem_count)
{
  if (*x != NULL_RTX && MEM_P (*x))
    (*mem_count)++;
  return 0;
}
/* This target hook implementation for TARGET_LOOP_UNROLL_ADJUST calculates
   a new number struct loop *loop should be unrolled if tuned for cpus with
   a built-in stride prefetcher.
   The loop is analyzed for memory accesses by calling check_dpu for
   each rtx of the loop.  Depending on the loop_depth and the amount of
   memory accesses a new number <= nunroll is returned to improve the
   behaviour of the hardware prefetch unit.  */
static unsigned
s390_loop_unroll_adjust (unsigned nunroll, struct loop *loop)
{
  basic_block *bbs;
  rtx insn;
  unsigned i;
  unsigned mem_count = 0;

  if (s390_tune != PROCESSOR_2097_Z10 && s390_tune != PROCESSOR_2817_Z196)
    return nunroll;

  /* Count the number of memory references within the loop body.  */
  bbs = get_loop_body (loop);
  for (i = 0; i < loop->num_nodes; i++)
    {
      for (insn = BB_HEAD (bbs[i]); insn != BB_END (bbs[i]);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && INSN_CODE (insn) != -1)
          for_each_rtx (&insn, (rtx_function) check_dpu, &mem_count);
    }
  free (bbs);

  /* Prevent division by zero, and we do not need to adjust nunroll in
     this case.  */
  if (mem_count == 0)
    return nunroll;

  switch (loop_depth (loop))
    {
    case 1:
      return MIN (nunroll, 28 / mem_count);
    case 2:
      return MIN (nunroll, 22 / mem_count);
    default:
      return MIN (nunroll, 16 / mem_count);
    }
}
/* Initialize GCC target structure.  */

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE s390_option_override

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA s390_output_addr_const_extra

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER s390_sched_reorder
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT s390_sched_init

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost
#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST s390_register_move_cost
#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST s390_memory_move_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG s390_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE s390_function_arg_advance
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE s390_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE s390_libcall_value

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
#endif

#ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE s390_mangle_type
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p

#undef TARGET_PREFERRED_RELOAD_CLASS
#define TARGET_PREFERRED_RELOAD_CLASS s390_preferred_reload_class

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD s390_secondary_reload

#undef TARGET_LIBGCC_CMP_RETURN_MODE
#define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode

#undef TARGET_LIBGCC_SHIFT_COUNT_MODE
#define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P s390_legitimate_constant_p

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE s390_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE s390_conditional_register_usage

#undef TARGET_LOOP_UNROLL_ADJUST
#define TARGET_LOOP_UNROLL_ADJUST s390_loop_unroll_adjust

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT s390_trampoline_init

#undef TARGET_UNWIND_WORD_MODE
#define TARGET_UNWIND_WORD_MODE s390_unwind_word_mode

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-s390.h"