s390.c (struct constant_pool): New field emit_pool_after added.
1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "recog.h"
41 #include "expr.h"
42 #include "reload.h"
43 #include "toplev.h"
44 #include "basic-block.h"
45 #include "integrate.h"
46 #include "ggc.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "debug.h"
50 #include "langhooks.h"
51 #include "optabs.h"
52 #include "tree-gimple.h"
53 #include "df.h"
54
55
56 /* Define the specific costs for a given cpu. */
57
58 struct processor_costs
59 {
60 /* multiplication */
61 const int m; /* cost of an M instruction. */
62 const int mghi; /* cost of an MGHI instruction. */
63 const int mh; /* cost of an MH instruction. */
64 const int mhi; /* cost of an MHI instruction. */
65 const int ml; /* cost of an ML instruction. */
66 const int mr; /* cost of an MR instruction. */
67 const int ms; /* cost of an MS instruction. */
68 const int msg; /* cost of an MSG instruction. */
69 const int msgf; /* cost of an MSGF instruction. */
70 const int msgfr; /* cost of an MSGFR instruction. */
71 const int msgr; /* cost of an MSGR instruction. */
72 const int msr; /* cost of an MSR instruction. */
73 const int mult_df; /* cost of multiplication in DFmode. */
74 const int mxbr;
75 /* square root */
76 const int sqxbr; /* cost of square root in TFmode. */
77 const int sqdbr; /* cost of square root in DFmode. */
78 const int sqebr; /* cost of square root in SFmode. */
79 /* multiply and add */
80 const int madbr; /* cost of multiply and add in DFmode. */
81 const int maebr; /* cost of multiply and add in SFmode. */
82 /* division */
83 const int dxbr;
84 const int ddbr;
85 const int debr;
86 const int dlgr;
87 const int dlr;
88 const int dr;
89 const int dsgfr;
90 const int dsgr;
91 };
92
93 const struct processor_costs *s390_cost;
94
95 static const
96 struct processor_costs z900_cost =
97 {
98 COSTS_N_INSNS (5), /* M */
99 COSTS_N_INSNS (10), /* MGHI */
100 COSTS_N_INSNS (5), /* MH */
101 COSTS_N_INSNS (4), /* MHI */
102 COSTS_N_INSNS (5), /* ML */
103 COSTS_N_INSNS (5), /* MR */
104 COSTS_N_INSNS (4), /* MS */
105 COSTS_N_INSNS (15), /* MSG */
106 COSTS_N_INSNS (7), /* MSGF */
107 COSTS_N_INSNS (7), /* MSGFR */
108 COSTS_N_INSNS (10), /* MSGR */
109 COSTS_N_INSNS (4), /* MSR */
110 COSTS_N_INSNS (7), /* multiplication in DFmode */
111 COSTS_N_INSNS (13), /* MXBR */
112 COSTS_N_INSNS (136), /* SQXBR */
113 COSTS_N_INSNS (44), /* SQDBR */
114 COSTS_N_INSNS (35), /* SQEBR */
115 COSTS_N_INSNS (18), /* MADBR */
116 COSTS_N_INSNS (13), /* MAEBR */
117 COSTS_N_INSNS (134), /* DXBR */
118 COSTS_N_INSNS (30), /* DDBR */
119 COSTS_N_INSNS (27), /* DEBR */
120 COSTS_N_INSNS (220), /* DLGR */
121 COSTS_N_INSNS (34), /* DLR */
122 COSTS_N_INSNS (34), /* DR */
123 COSTS_N_INSNS (32), /* DSGFR */
124 COSTS_N_INSNS (32), /* DSGR */
125 };
126
127 static const
128 struct processor_costs z990_cost =
129 {
130 COSTS_N_INSNS (4), /* M */
131 COSTS_N_INSNS (2), /* MGHI */
132 COSTS_N_INSNS (2), /* MH */
133 COSTS_N_INSNS (2), /* MHI */
134 COSTS_N_INSNS (4), /* ML */
135 COSTS_N_INSNS (4), /* MR */
136 COSTS_N_INSNS (5), /* MS */
137 COSTS_N_INSNS (6), /* MSG */
138 COSTS_N_INSNS (4), /* MSGF */
139 COSTS_N_INSNS (4), /* MSGFR */
140 COSTS_N_INSNS (4), /* MSGR */
141 COSTS_N_INSNS (4), /* MSR */
142 COSTS_N_INSNS (1), /* multiplication in DFmode */
143 COSTS_N_INSNS (28), /* MXBR */
144 COSTS_N_INSNS (130), /* SQXBR */
145 COSTS_N_INSNS (66), /* SQDBR */
146 COSTS_N_INSNS (38), /* SQEBR */
147 COSTS_N_INSNS (1), /* MADBR */
148 COSTS_N_INSNS (1), /* MAEBR */
149 COSTS_N_INSNS (60), /* DXBR */
150 COSTS_N_INSNS (40), /* DDBR */
151 COSTS_N_INSNS (26), /* DEBR */
152 COSTS_N_INSNS (176), /* DLGR */
153 COSTS_N_INSNS (31), /* DLR */
154 COSTS_N_INSNS (31), /* DR */
155 COSTS_N_INSNS (31), /* DSGFR */
156 COSTS_N_INSNS (31), /* DSGR */
157 };
158
159 static const
160 struct processor_costs z9_109_cost =
161 {
162 COSTS_N_INSNS (4), /* M */
163 COSTS_N_INSNS (2), /* MGHI */
164 COSTS_N_INSNS (2), /* MH */
165 COSTS_N_INSNS (2), /* MHI */
166 COSTS_N_INSNS (4), /* ML */
167 COSTS_N_INSNS (4), /* MR */
168 COSTS_N_INSNS (5), /* MS */
169 COSTS_N_INSNS (6), /* MSG */
170 COSTS_N_INSNS (4), /* MSGF */
171 COSTS_N_INSNS (4), /* MSGFR */
172 COSTS_N_INSNS (4), /* MSGR */
173 COSTS_N_INSNS (4), /* MSR */
174 COSTS_N_INSNS (1), /* multiplication in DFmode */
175 COSTS_N_INSNS (28), /* MXBR */
176 COSTS_N_INSNS (130), /* SQXBR */
177 COSTS_N_INSNS (66), /* SQDBR */
178 COSTS_N_INSNS (38), /* SQEBR */
179 COSTS_N_INSNS (1), /* MADBR */
180 COSTS_N_INSNS (1), /* MAEBR */
181 COSTS_N_INSNS (60), /* DXBR */
182 COSTS_N_INSNS (40), /* DDBR */
183 COSTS_N_INSNS (26), /* DEBR */
184 COSTS_N_INSNS (30), /* DLGR */
185 COSTS_N_INSNS (23), /* DLR */
186 COSTS_N_INSNS (23), /* DR */
187 COSTS_N_INSNS (24), /* DSGFR */
188 COSTS_N_INSNS (24), /* DSGR */
189 };
190
191 extern int reload_completed;
192
193 /* Save information from a "cmpxx" operation until the branch or scc is
194 emitted. */
195 rtx s390_compare_op0, s390_compare_op1;
196
197 /* Save the result of a compare_and_swap until the branch or scc is
198 emitted. */
199 rtx s390_compare_emitted = NULL_RTX;
200
201 /* Structure used to hold the components of a S/390 memory
202 address. A legitimate address on S/390 is of the general
203 form
204 base + index + displacement
205 where any of the components is optional.
206
207 base and index are registers of the class ADDR_REGS,
208 displacement is an unsigned 12-bit immediate constant. */
209
210 struct s390_address
211 {
212 rtx base;
213 rtx indx;
214 rtx disp;
215 bool pointer;
216 bool literal_pool;
217 };
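/* As an informal illustration of the form above: an address RTX such as
   (plus (plus (reg A) (reg B)) (const_int 40)) -- with A and B standing
   for arbitrary address registers -- decomposes into indx = A, base = B
   and disp = 40, while a lone symbol_ref from the literal pool is turned
   into a displacement based off the pool base register (see
   s390_decompose_address below).  */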
218
219 /* Which cpu are we tuning for. */
220 enum processor_type s390_tune = PROCESSOR_max;
221 enum processor_flags s390_tune_flags;
222 /* Which instruction set architecture to use. */
223 enum processor_type s390_arch;
224 enum processor_flags s390_arch_flags;
225
226 HOST_WIDE_INT s390_warn_framesize = 0;
227 HOST_WIDE_INT s390_stack_size = 0;
228 HOST_WIDE_INT s390_stack_guard = 0;
229
230 /* The following structure is embedded in the machine
231 specific part of struct function. */
232
233 struct s390_frame_layout GTY (())
234 {
235 /* Offset within stack frame. */
236 HOST_WIDE_INT gprs_offset;
237 HOST_WIDE_INT f0_offset;
238 HOST_WIDE_INT f4_offset;
239 HOST_WIDE_INT f8_offset;
240 HOST_WIDE_INT backchain_offset;
241
242 /* Register numbers of the first and last gpr for which slots in the
243 register save area are reserved. */
244 int first_save_gpr_slot;
245 int last_save_gpr_slot;
246
247 /* Number of first and last gpr to be saved, restored. */
248 int first_save_gpr;
249 int first_restore_gpr;
250 int last_save_gpr;
251 int last_restore_gpr;
252
253 /* Bits standing for floating point registers. Set, if the
254 respective register has to be saved. Starting with reg 16 (f0)
255 at the rightmost bit.
256 Bit 15 - 8 7 6 5 4 3 2 1 0
257 fpr 15 - 8 7 5 3 1 6 4 2 0
258 reg 31 - 24 23 22 21 20 19 18 17 16 */
259 unsigned int fpr_bitmap;
260
261 /* Number of floating point registers f8-f15 which must be saved. */
262 int high_fprs;
263
264 /* Set if return address needs to be saved.
265 This flag is set by s390_return_addr_rtx if it could not use
266 the initial value of r14 and therefore depends on r14 saved
267 to the stack. */
268 bool save_return_addr_p;
269
270 /* Size of stack frame. */
271 HOST_WIDE_INT frame_size;
272 };
273
274 /* Define the structure for the machine field in struct function. */
275
276 struct machine_function GTY(())
277 {
278 struct s390_frame_layout frame_layout;
279
280 /* Literal pool base register. */
281 rtx base_reg;
282
283 /* True if we may need to perform branch splitting. */
284 bool split_branches_pending_p;
285
286 /* True during final stage of literal pool processing. */
287 bool decomposed_literal_pool_addresses_ok_p;
288
289 /* Some local-dynamic TLS symbol name. */
290 const char *some_ld_name;
291
292 bool has_landing_pad_p;
293 };
294
295 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
296
297 #define cfun_frame_layout (cfun->machine->frame_layout)
298 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
299 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
300 cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
301 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
302 (1 << (BITNUM)))
303 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
304 (1 << (BITNUM))))
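/* For illustration, using the bit mapping documented in s390_frame_layout
   above: if %f4 (hard reg 18) has to be saved, the frame layout code sets
   bit 2 via cfun_set_fpr_bit (2) and later tests it with
   cfun_fpr_bit_p (2); bits 8..15 stand for %f8..%f15, whose count is
   tracked separately in high_fprs.  */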
305
306 /* Number of GPRs and FPRs used for argument passing. */
307 #define GP_ARG_NUM_REG 5
308 #define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)
309
310 /* A couple of shortcuts. */
311 #define CONST_OK_FOR_J(x) \
312 CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
313 #define CONST_OK_FOR_K(x) \
314 CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
315 #define CONST_OK_FOR_Os(x) \
316 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
317 #define CONST_OK_FOR_Op(x) \
318 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
319 #define CONST_OK_FOR_On(x) \
320 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
321
322 #define REGNO_PAIR_OK(REGNO, MODE) \
323 (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
324
325 static enum machine_mode
326 s390_libgcc_cmp_return_mode (void)
327 {
328 return TARGET_64BIT ? DImode : SImode;
329 }
330
331 static enum machine_mode
332 s390_libgcc_shift_count_mode (void)
333 {
334 return TARGET_64BIT ? DImode : SImode;
335 }
336
337 /* Return true if the back end supports mode MODE. */
338 static bool
339 s390_scalar_mode_supported_p (enum machine_mode mode)
340 {
341 if (DECIMAL_FLOAT_MODE_P (mode))
342 return true;
343 else
344 return default_scalar_mode_supported_p (mode);
345 }
346
347 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
348
349 void
350 s390_set_has_landing_pad_p (bool value)
351 {
352 cfun->machine->has_landing_pad_p = value;
353 }
354
355 /* If two condition code modes are compatible, return a condition code
356 mode which is compatible with both. Otherwise, return
357 VOIDmode. */
358
359 static enum machine_mode
360 s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
361 {
362 if (m1 == m2)
363 return m1;
364
365 switch (m1)
366 {
367 case CCZmode:
368 if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
369 || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
370 return m2;
371 return VOIDmode;
372
373 case CCSmode:
374 case CCUmode:
375 case CCTmode:
376 case CCSRmode:
377 case CCURmode:
378 case CCZ1mode:
379 if (m2 == CCZmode)
380 return m1;
381
382 return VOIDmode;
383
384 default:
385 return VOIDmode;
386 }
387 return VOIDmode;
388 }
389
390 /* Return true if SET either doesn't set the CC register, or else
391 the source and destination have matching CC modes and that
392 CC mode is at least as constrained as REQ_MODE. */
393
394 static bool
395 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
396 {
397 enum machine_mode set_mode;
398
399 gcc_assert (GET_CODE (set) == SET);
400
401 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
402 return 1;
403
404 set_mode = GET_MODE (SET_DEST (set));
405 switch (set_mode)
406 {
407 case CCSmode:
408 case CCSRmode:
409 case CCUmode:
410 case CCURmode:
411 case CCLmode:
412 case CCL1mode:
413 case CCL2mode:
414 case CCL3mode:
415 case CCT1mode:
416 case CCT2mode:
417 case CCT3mode:
418 if (req_mode != set_mode)
419 return 0;
420 break;
421
422 case CCZmode:
423 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
424 && req_mode != CCSRmode && req_mode != CCURmode)
425 return 0;
426 break;
427
428 case CCAPmode:
429 case CCANmode:
430 if (req_mode != CCAmode)
431 return 0;
432 break;
433
434 default:
435 gcc_unreachable ();
436 }
437
438 return (GET_MODE (SET_SRC (set)) == set_mode);
439 }
440
441 /* Return true if every SET in INSN that sets the CC register
442 has source and destination with matching CC modes and that
443 CC mode is at least as constrained as REQ_MODE.
444 If REQ_MODE is VOIDmode, always return false. */
445
446 bool
447 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
448 {
449 int i;
450
451 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
452 if (req_mode == VOIDmode)
453 return false;
454
455 if (GET_CODE (PATTERN (insn)) == SET)
456 return s390_match_ccmode_set (PATTERN (insn), req_mode);
457
458 if (GET_CODE (PATTERN (insn)) == PARALLEL)
459 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
460 {
461 rtx set = XVECEXP (PATTERN (insn), 0, i);
462 if (GET_CODE (set) == SET)
463 if (!s390_match_ccmode_set (set, req_mode))
464 return false;
465 }
466
467 return true;
468 }
469
470 /* If a test-under-mask instruction can be used to implement
471 (compare (and ... OP1) OP2), return the CC mode required
472 to do that. Otherwise, return VOIDmode.
473 MIXED is true if the instruction can distinguish between
474 CC1 and CC2 for mixed selected bits (TMxx); it is false
475 if the instruction cannot (TM). */
476
477 enum machine_mode
478 s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
479 {
480 int bit0, bit1;
481
482 /* ??? Fixme: should work on CONST_DOUBLE as well. */
483 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
484 return VOIDmode;
485
486 /* Selected bits all zero: CC0.
487 e.g.: int a; if ((a & (16 + 128)) == 0) */
488 if (INTVAL (op2) == 0)
489 return CCTmode;
490
491 /* Selected bits all one: CC3.
492 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
493 if (INTVAL (op2) == INTVAL (op1))
494 return CCT3mode;
495
496 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
497 int a;
498 if ((a & (16 + 128)) == 16) -> CCT1
499 if ((a & (16 + 128)) == 128) -> CCT2 */
500 if (mixed)
501 {
502 bit1 = exact_log2 (INTVAL (op2));
503 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
504 if (bit0 != -1 && bit1 != -1)
505 return bit0 > bit1 ? CCT1mode : CCT2mode;
506 }
507
508 return VOIDmode;
509 }
510
511 /* Given a comparison code OP (EQ, NE, etc.) and the operands
512 OP0 and OP1 of a COMPARE, return the mode to be used for the
513 comparison. */
514
515 enum machine_mode
516 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
517 {
518 switch (code)
519 {
520 case EQ:
521 case NE:
522 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
523 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
524 return CCAPmode;
525 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
526 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
527 return CCAPmode;
528 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
529 || GET_CODE (op1) == NEG)
530 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
531 return CCLmode;
532
533 if (GET_CODE (op0) == AND)
534 {
535 /* Check whether we can potentially do it via TM. */
536 enum machine_mode ccmode;
537 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
538 if (ccmode != VOIDmode)
539 {
540 /* Relax CCTmode to CCZmode to allow fall-back to AND
541 if that turns out to be beneficial. */
542 return ccmode == CCTmode ? CCZmode : ccmode;
543 }
544 }
545
546 if (register_operand (op0, HImode)
547 && GET_CODE (op1) == CONST_INT
548 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
549 return CCT3mode;
550 if (register_operand (op0, QImode)
551 && GET_CODE (op1) == CONST_INT
552 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
553 return CCT3mode;
554
555 return CCZmode;
556
557 case LE:
558 case LT:
559 case GE:
560 case GT:
561 /* The only overflow condition of NEG and ABS happens when
562 INT_MIN is used as parameter: the expected positive result cannot
563 be represented, so we have an overflow from a positive value to a negative.
564 Using CCAP mode the resulting cc can be used for comparisons. */
565 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
566 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
567 return CCAPmode;
568
569 /* If constants are involved in an add instruction it is possible to use
570 the resulting cc for comparisons with zero. Knowing the sign of the
571 constant the overflow behavior gets predictable. e.g.:
572 int a, b; if ((b = a + c) > 0)
573 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
574 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
575 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
576 {
577 if (INTVAL (XEXP((op0), 1)) < 0)
578 return CCANmode;
579 else
580 return CCAPmode;
581 }
582 /* Fall through. */
583 case UNORDERED:
584 case ORDERED:
585 case UNEQ:
586 case UNLE:
587 case UNLT:
588 case UNGE:
589 case UNGT:
590 case LTGT:
591 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
592 && GET_CODE (op1) != CONST_INT)
593 return CCSRmode;
594 return CCSmode;
595
596 case LTU:
597 case GEU:
598 if (GET_CODE (op0) == PLUS
599 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
600 return CCL1mode;
601
602 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
603 && GET_CODE (op1) != CONST_INT)
604 return CCURmode;
605 return CCUmode;
606
607 case LEU:
608 case GTU:
609 if (GET_CODE (op0) == MINUS
610 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
611 return CCL2mode;
612
613 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
614 && GET_CODE (op1) != CONST_INT)
615 return CCURmode;
616 return CCUmode;
617
618 default:
619 gcc_unreachable ();
620 }
621 }
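/* A few informal examples of the mode selection above, for an integer
   register X compared against zero:
     (X & 0x90) == 0   -> s390_tm_ccmode yields CCTmode, relaxed to CCZmode;
     (X + 42)  > 0     -> CCAPmode (constant summand >= 0);
     (X + -42) > 0     -> CCANmode (constant summand < 0).  */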
622
623 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
624 that we can implement more efficiently. */
625
626 void
627 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
628 {
629 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
630 if ((*code == EQ || *code == NE)
631 && *op1 == const0_rtx
632 && GET_CODE (*op0) == ZERO_EXTRACT
633 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
634 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
635 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
636 {
637 rtx inner = XEXP (*op0, 0);
638 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
639 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
640 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
641
642 if (len > 0 && len < modesize
643 && pos >= 0 && pos + len <= modesize
644 && modesize <= HOST_BITS_PER_WIDE_INT)
645 {
646 unsigned HOST_WIDE_INT block;
647 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
648 block <<= modesize - pos - len;
649
650 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
651 gen_int_mode (block, GET_MODE (inner)));
652 }
653 }
654
655 /* Narrow AND of memory against immediate to enable TM. */
656 if ((*code == EQ || *code == NE)
657 && *op1 == const0_rtx
658 && GET_CODE (*op0) == AND
659 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
660 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
661 {
662 rtx inner = XEXP (*op0, 0);
663 rtx mask = XEXP (*op0, 1);
664
665 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
666 if (GET_CODE (inner) == SUBREG
667 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
668 && (GET_MODE_SIZE (GET_MODE (inner))
669 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
670 && ((INTVAL (mask)
671 & GET_MODE_MASK (GET_MODE (inner))
672 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
673 == 0))
674 inner = SUBREG_REG (inner);
675
676 /* Do not change volatile MEMs. */
677 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
678 {
679 int part = s390_single_part (XEXP (*op0, 1),
680 GET_MODE (inner), QImode, 0);
681 if (part >= 0)
682 {
683 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
684 inner = adjust_address_nv (inner, QImode, part);
685 *op0 = gen_rtx_AND (QImode, inner, mask);
686 }
687 }
688 }
689
690 /* Narrow comparisons against 0xffff to HImode if possible. */
691 if ((*code == EQ || *code == NE)
692 && GET_CODE (*op1) == CONST_INT
693 && INTVAL (*op1) == 0xffff
694 && SCALAR_INT_MODE_P (GET_MODE (*op0))
695 && (nonzero_bits (*op0, GET_MODE (*op0))
696 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
697 {
698 *op0 = gen_lowpart (HImode, *op0);
699 *op1 = constm1_rtx;
700 }
701
702 /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible. */
703 if (GET_CODE (*op0) == UNSPEC
704 && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
705 && XVECLEN (*op0, 0) == 1
706 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
707 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
708 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
709 && *op1 == const0_rtx)
710 {
711 enum rtx_code new_code = UNKNOWN;
712 switch (*code)
713 {
714 case EQ: new_code = EQ; break;
715 case NE: new_code = NE; break;
716 case LT: new_code = GTU; break;
717 case GT: new_code = LTU; break;
718 case LE: new_code = GEU; break;
719 case GE: new_code = LEU; break;
720 default: break;
721 }
722
723 if (new_code != UNKNOWN)
724 {
725 *op0 = XVECEXP (*op0, 0, 0);
726 *code = new_code;
727 }
728 }
729
730 /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible. */
731 if (GET_CODE (*op0) == UNSPEC
732 && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
733 && XVECLEN (*op0, 0) == 1
734 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
735 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
736 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
737 && *op1 == const0_rtx)
738 {
739 enum rtx_code new_code = UNKNOWN;
740 switch (*code)
741 {
742 case EQ: new_code = EQ; break;
743 case NE: new_code = NE; break;
744 default: break;
745 }
746
747 if (new_code != UNKNOWN)
748 {
749 *op0 = XVECEXP (*op0, 0, 0);
750 *code = new_code;
751 }
752 }
753
754 /* Simplify cascaded EQ, NE with const0_rtx. */
755 if ((*code == NE || *code == EQ)
756 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
757 && GET_MODE (*op0) == SImode
758 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
759 && REG_P (XEXP (*op0, 0))
760 && XEXP (*op0, 1) == const0_rtx
761 && *op1 == const0_rtx)
762 {
763 if ((*code == EQ && GET_CODE (*op0) == NE)
764 || (*code == NE && GET_CODE (*op0) == EQ))
765 *code = EQ;
766 else
767 *code = NE;
768 *op0 = XEXP (*op0, 0);
769 }
770
771 /* Prefer register over memory as first operand. */
772 if (MEM_P (*op0) && REG_P (*op1))
773 {
774 rtx tem = *op0; *op0 = *op1; *op1 = tem;
775 *code = swap_condition (*code);
776 }
777 }
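/* For illustration, the ZERO_EXTRACT conversion above rewrites

     (zero_extract:SI (reg X) (const_int 4) (const_int 8)) == 0

   as (and:SI (reg X) (const_int 0xf00000)) == 0, since with len = 4 and
   pos = 8 the block mask is ((1 << 4) - 1) << (32 - 8 - 4); the AND form
   can then be matched by the TM patterns.  */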
778
779 /* Emit a compare instruction suitable to implement the comparison
780 OP0 CODE OP1. Return the correct condition RTL to be placed in
781 the IF_THEN_ELSE of the conditional branch testing the result. */
782
783 rtx
784 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
785 {
786 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
787 rtx ret = NULL_RTX;
788
789 /* Do not output a redundant compare instruction if a compare_and_swap
790 pattern already computed the result and the machine modes are compatible. */
791 if (s390_compare_emitted
792 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
793 == GET_MODE (s390_compare_emitted)))
794 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
795 else
796 {
797 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
798
799 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
800 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
801 }
802 s390_compare_emitted = NULL_RTX;
803 return ret;
804 }
805
806 /* Emit a SImode compare and swap instruction setting MEM to NEW if the
807 current contents of MEM (returned in OLD) match CMP.
808 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
809 conditional branch testing the result. */
810
811 static rtx
812 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new)
813 {
814 rtx ret;
815
816 emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new));
817 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
818
819 s390_compare_emitted = NULL_RTX;
820
821 return ret;
822 }
823
824 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
825 unconditional jump, else a conditional jump under condition COND. */
826
827 void
828 s390_emit_jump (rtx target, rtx cond)
829 {
830 rtx insn;
831
832 target = gen_rtx_LABEL_REF (VOIDmode, target);
833 if (cond)
834 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
835
836 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
837 emit_jump_insn (insn);
838 }
839
840 /* Return branch condition mask to implement a branch
841 specified by CODE. Return -1 for invalid comparisons. */
842
843 int
844 s390_branch_condition_mask (rtx code)
845 {
846 const int CC0 = 1 << 3;
847 const int CC1 = 1 << 2;
848 const int CC2 = 1 << 1;
849 const int CC3 = 1 << 0;
850
851 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
852 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
853 gcc_assert (XEXP (code, 1) == const0_rtx);
854
855 switch (GET_MODE (XEXP (code, 0)))
856 {
857 case CCZmode:
858 case CCZ1mode:
859 switch (GET_CODE (code))
860 {
861 case EQ: return CC0;
862 case NE: return CC1 | CC2 | CC3;
863 default: return -1;
864 }
865 break;
866
867 case CCT1mode:
868 switch (GET_CODE (code))
869 {
870 case EQ: return CC1;
871 case NE: return CC0 | CC2 | CC3;
872 default: return -1;
873 }
874 break;
875
876 case CCT2mode:
877 switch (GET_CODE (code))
878 {
879 case EQ: return CC2;
880 case NE: return CC0 | CC1 | CC3;
881 default: return -1;
882 }
883 break;
884
885 case CCT3mode:
886 switch (GET_CODE (code))
887 {
888 case EQ: return CC3;
889 case NE: return CC0 | CC1 | CC2;
890 default: return -1;
891 }
892 break;
893
894 case CCLmode:
895 switch (GET_CODE (code))
896 {
897 case EQ: return CC0 | CC2;
898 case NE: return CC1 | CC3;
899 default: return -1;
900 }
901 break;
902
903 case CCL1mode:
904 switch (GET_CODE (code))
905 {
906 case LTU: return CC2 | CC3; /* carry */
907 case GEU: return CC0 | CC1; /* no carry */
908 default: return -1;
909 }
910 break;
911
912 case CCL2mode:
913 switch (GET_CODE (code))
914 {
915 case GTU: return CC0 | CC1; /* borrow */
916 case LEU: return CC2 | CC3; /* no borrow */
917 default: return -1;
918 }
919 break;
920
921 case CCL3mode:
922 switch (GET_CODE (code))
923 {
924 case EQ: return CC0 | CC2;
925 case NE: return CC1 | CC3;
926 case LTU: return CC1;
927 case GTU: return CC3;
928 case LEU: return CC1 | CC2;
929 case GEU: return CC2 | CC3;
930 default: return -1;
931 }
932
933 case CCUmode:
934 switch (GET_CODE (code))
935 {
936 case EQ: return CC0;
937 case NE: return CC1 | CC2 | CC3;
938 case LTU: return CC1;
939 case GTU: return CC2;
940 case LEU: return CC0 | CC1;
941 case GEU: return CC0 | CC2;
942 default: return -1;
943 }
944 break;
945
946 case CCURmode:
947 switch (GET_CODE (code))
948 {
949 case EQ: return CC0;
950 case NE: return CC2 | CC1 | CC3;
951 case LTU: return CC2;
952 case GTU: return CC1;
953 case LEU: return CC0 | CC2;
954 case GEU: return CC0 | CC1;
955 default: return -1;
956 }
957 break;
958
959 case CCAPmode:
960 switch (GET_CODE (code))
961 {
962 case EQ: return CC0;
963 case NE: return CC1 | CC2 | CC3;
964 case LT: return CC1 | CC3;
965 case GT: return CC2;
966 case LE: return CC0 | CC1 | CC3;
967 case GE: return CC0 | CC2;
968 default: return -1;
969 }
970 break;
971
972 case CCANmode:
973 switch (GET_CODE (code))
974 {
975 case EQ: return CC0;
976 case NE: return CC1 | CC2 | CC3;
977 case LT: return CC1;
978 case GT: return CC2 | CC3;
979 case LE: return CC0 | CC1;
980 case GE: return CC0 | CC2 | CC3;
981 default: return -1;
982 }
983 break;
984
985 case CCSmode:
986 switch (GET_CODE (code))
987 {
988 case EQ: return CC0;
989 case NE: return CC1 | CC2 | CC3;
990 case LT: return CC1;
991 case GT: return CC2;
992 case LE: return CC0 | CC1;
993 case GE: return CC0 | CC2;
994 case UNORDERED: return CC3;
995 case ORDERED: return CC0 | CC1 | CC2;
996 case UNEQ: return CC0 | CC3;
997 case UNLT: return CC1 | CC3;
998 case UNGT: return CC2 | CC3;
999 case UNLE: return CC0 | CC1 | CC3;
1000 case UNGE: return CC0 | CC2 | CC3;
1001 case LTGT: return CC1 | CC2;
1002 default: return -1;
1003 }
1004 break;
1005
1006 case CCSRmode:
1007 switch (GET_CODE (code))
1008 {
1009 case EQ: return CC0;
1010 case NE: return CC2 | CC1 | CC3;
1011 case LT: return CC2;
1012 case GT: return CC1;
1013 case LE: return CC0 | CC2;
1014 case GE: return CC0 | CC1;
1015 case UNORDERED: return CC3;
1016 case ORDERED: return CC0 | CC2 | CC1;
1017 case UNEQ: return CC0 | CC3;
1018 case UNLT: return CC2 | CC3;
1019 case UNGT: return CC1 | CC3;
1020 case UNLE: return CC0 | CC2 | CC3;
1021 case UNGE: return CC0 | CC1 | CC3;
1022 case LTGT: return CC2 | CC1;
1023 default: return -1;
1024 }
1025 break;
1026
1027 default:
1028 return -1;
1029 }
1030 }
1031
1032 /* If INV is false, return assembler mnemonic string to implement
1033 a branch specified by CODE. If INV is true, return mnemonic
1034 for the corresponding inverted branch. */
1035
1036 static const char *
1037 s390_branch_condition_mnemonic (rtx code, int inv)
1038 {
1039 static const char *const mnemonic[16] =
1040 {
1041 NULL, "o", "h", "nle",
1042 "l", "nhe", "lh", "ne",
1043 "e", "nlh", "he", "nl",
1044 "le", "nh", "no", NULL
1045 };
1046
1047 int mask = s390_branch_condition_mask (code);
1048 gcc_assert (mask >= 0);
1049
1050 if (inv)
1051 mask ^= 15;
1052
1053 gcc_assert (mask >= 1 && mask <= 14);
1054
1055 return mnemonic[mask];
1056 }
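/* For example, for (eq (reg:CCZ CC_REGNUM) (const_int 0)) the mask
   computed by s390_branch_condition_mask is CC0 = 8, which selects the
   mnemonic "e" (branch on equal); the inverted mask 8 ^ 15 = 7 selects
   "ne".  */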
1057
1058 /* Return the part of OP which has a value different from DEF.
1059 The size of the part is determined by MODE.
1060 Use this function only if you already know that OP really
1061 contains such a part. */
1062
1063 unsigned HOST_WIDE_INT
1064 s390_extract_part (rtx op, enum machine_mode mode, int def)
1065 {
1066 unsigned HOST_WIDE_INT value = 0;
1067 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1068 int part_bits = GET_MODE_BITSIZE (mode);
1069 unsigned HOST_WIDE_INT part_mask
1070 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
1071 int i;
1072
1073 for (i = 0; i < max_parts; i++)
1074 {
1075 if (i == 0)
1076 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1077 else
1078 value >>= part_bits;
1079
1080 if ((value & part_mask) != (def & part_mask))
1081 return value & part_mask;
1082 }
1083
1084 gcc_unreachable ();
1085 }
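/* E.g. s390_extract_part (GEN_INT (0x12340000), HImode, 0) scans the
   HImode parts starting with the least significant one and returns
   0x1234, the only part differing from the default value 0.  */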
1086
1087 /* If OP is an integer constant of mode MODE with exactly one
1088 part of mode PART_MODE unequal to DEF, return the number of that
1089 part. Otherwise, return -1. */
1090
1091 int
1092 s390_single_part (rtx op,
1093 enum machine_mode mode,
1094 enum machine_mode part_mode,
1095 int def)
1096 {
1097 unsigned HOST_WIDE_INT value = 0;
1098 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1099 unsigned HOST_WIDE_INT part_mask
1100 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
1101 int i, part = -1;
1102
1103 if (GET_CODE (op) != CONST_INT)
1104 return -1;
1105
1106 for (i = 0; i < n_parts; i++)
1107 {
1108 if (i == 0)
1109 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1110 else
1111 value >>= GET_MODE_BITSIZE (part_mode);
1112
1113 if ((value & part_mask) != (def & part_mask))
1114 {
1115 if (part != -1)
1116 return -1;
1117 else
1118 part = i;
1119 }
1120 }
1121 return part == -1 ? -1 : n_parts - 1 - part;
1122 }
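/* E.g. s390_single_part (GEN_INT (0xff0000), SImode, QImode, 0) returns 1:
   only one QImode part differs from 0, and the returned part number
   (n_parts - 1 - part) counts from the most significant part downwards.  */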
1123
1124 /* Check whether we can (and want to) split a double-word
1125 move in mode MODE from SRC to DST into two single-word
1126 moves, moving the subword FIRST_SUBWORD first. */
1127
1128 bool
1129 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1130 {
1131 /* Floating point registers cannot be split. */
1132 if (FP_REG_P (src) || FP_REG_P (dst))
1133 return false;
1134
1135 /* We don't need to split if operands are directly accessible. */
1136 if (s_operand (src, mode) || s_operand (dst, mode))
1137 return false;
1138
1139 /* Non-offsettable memory references cannot be split. */
1140 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1141 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1142 return false;
1143
1144 /* Moving the first subword must not clobber a register
1145 needed to move the second subword. */
1146 if (register_operand (dst, mode))
1147 {
1148 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1149 if (reg_overlap_mentioned_p (subreg, src))
1150 return false;
1151 }
1152
1153 return true;
1154 }
1155
1156 /* Return true if it can be proven that [MEM1, MEM1 + SIZE]
1157 and [MEM2, MEM2 + SIZE] do overlap and false
1158 otherwise. */
1159
1160 bool
1161 s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1162 {
1163 rtx addr1, addr2, addr_delta;
1164 HOST_WIDE_INT delta;
1165
1166 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1167 return true;
1168
1169 if (size == 0)
1170 return false;
1171
1172 addr1 = XEXP (mem1, 0);
1173 addr2 = XEXP (mem2, 0);
1174
1175 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1176
1177 /* This overlapping check is used by peepholes merging memory block operations.
1178 Overlapping operations would otherwise be recognized by the S/390 hardware
1179 and would fall back to a slower implementation. Allowing overlapping
1180 operations would lead to slow code but not to wrong code. Therefore we are
1181 somewhat optimistic if we cannot prove that the memory blocks are
1182 overlapping.
1183 That's why we return false here although this may accept operations on
1184 overlapping memory areas. */
1185 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1186 return false;
1187
1188 delta = INTVAL (addr_delta);
1189
1190 if (delta == 0
1191 || (delta > 0 && delta < size)
1192 || (delta < 0 && -delta < size))
1193 return true;
1194
1195 return false;
1196 }
1197
1198 /* Check whether the address of memory reference MEM2 equals exactly
1199 the address of memory reference MEM1 plus DELTA. Return true if
1200 we can prove this to be the case, false otherwise. */
1201
1202 bool
1203 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1204 {
1205 rtx addr1, addr2, addr_delta;
1206
1207 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1208 return false;
1209
1210 addr1 = XEXP (mem1, 0);
1211 addr2 = XEXP (mem2, 0);
1212
1213 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1214 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1215 return false;
1216
1217 return true;
1218 }
1219
1220 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1221
1222 void
1223 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1224 rtx *operands)
1225 {
1226 enum machine_mode wmode = mode;
1227 rtx dst = operands[0];
1228 rtx src1 = operands[1];
1229 rtx src2 = operands[2];
1230 rtx op, clob, tem;
1231
1232 /* If we cannot handle the operation directly, use a temp register. */
1233 if (!s390_logical_operator_ok_p (operands))
1234 dst = gen_reg_rtx (mode);
1235
1236 /* QImode and HImode patterns make sense only if we have a destination
1237 in memory. Otherwise perform the operation in SImode. */
1238 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1239 wmode = SImode;
1240
1241 /* Widen operands if required. */
1242 if (mode != wmode)
1243 {
1244 if (GET_CODE (dst) == SUBREG
1245 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1246 dst = tem;
1247 else if (REG_P (dst))
1248 dst = gen_rtx_SUBREG (wmode, dst, 0);
1249 else
1250 dst = gen_reg_rtx (wmode);
1251
1252 if (GET_CODE (src1) == SUBREG
1253 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1254 src1 = tem;
1255 else if (GET_MODE (src1) != VOIDmode)
1256 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1257
1258 if (GET_CODE (src2) == SUBREG
1259 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1260 src2 = tem;
1261 else if (GET_MODE (src2) != VOIDmode)
1262 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1263 }
1264
1265 /* Emit the instruction. */
1266 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1267 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1268 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1269
1270 /* Fix up the destination if needed. */
1271 if (dst != operands[0])
1272 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1273 }
1274
1275 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1276
1277 bool
1278 s390_logical_operator_ok_p (rtx *operands)
1279 {
1280 /* If the destination operand is in memory, it needs to coincide
1281 with one of the source operands. After reload, it has to be
1282 the first source operand. */
1283 if (GET_CODE (operands[0]) == MEM)
1284 return rtx_equal_p (operands[0], operands[1])
1285 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1286
1287 return true;
1288 }
1289
1290 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1291 operand IMMOP to switch from SS to SI type instructions. */
1292
1293 void
1294 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1295 {
1296 int def = code == AND ? -1 : 0;
1297 HOST_WIDE_INT mask;
1298 int part;
1299
1300 gcc_assert (GET_CODE (*memop) == MEM);
1301 gcc_assert (!MEM_VOLATILE_P (*memop));
1302
1303 mask = s390_extract_part (*immop, QImode, def);
1304 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1305 gcc_assert (part >= 0);
1306
1307 *memop = adjust_address (*memop, QImode, part);
1308 *immop = gen_int_mode (mask, QImode);
1309 }
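/* A rough example: an SImode AND of a (non-volatile) memory operand with
   the immediate 0xffffff00 has exactly one QImode part differing from -1,
   so it is narrowed to a QImode AND of byte 3 of the operand (the least
   significant byte on this big-endian target) with 0x00, which can be
   emitted as an SI-type NI instead of an SS-type NC.  */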
1310
1311
1312 /* How to allocate a 'struct machine_function'. */
1313
1314 static struct machine_function *
1315 s390_init_machine_status (void)
1316 {
1317 return ggc_alloc_cleared (sizeof (struct machine_function));
1318 }
1319
1320 /* Change optimizations to be performed, depending on the
1321 optimization level.
1322
1323 LEVEL is the optimization level specified; 2 if `-O2' is
1324 specified, 1 if `-O' is specified, and 0 if neither is specified.
1325
1326 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1327
1328 void
1329 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1330 {
1331 /* ??? There are apparently still problems with -fcaller-saves. */
1332 flag_caller_saves = 0;
1333
1334 /* By default, always emit DWARF-2 unwind info. This allows debugging
1335 without maintaining a stack frame back-chain. */
1336 flag_asynchronous_unwind_tables = 1;
1337
1338 /* Use MVCLE instructions to decrease code size if requested. */
1339 if (size != 0)
1340 target_flags |= MASK_MVCLE;
1341 }
1342
1343 /* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1344 to the associated processor_type and processor_flags if so. */
1345
1346 static bool
1347 s390_handle_arch_option (const char *arg,
1348 enum processor_type *type,
1349 enum processor_flags *flags)
1350 {
1351 static struct pta
1352 {
1353 const char *const name; /* processor name or nickname. */
1354 const enum processor_type processor;
1355 const enum processor_flags flags;
1356 }
1357 const processor_alias_table[] =
1358 {
1359 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1360 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1361 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1362 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1363 | PF_LONG_DISPLACEMENT},
1364 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1365 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
1366 {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1367 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
1368 };
1369 size_t i;
1370
1371 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1372 if (strcmp (arg, processor_alias_table[i].name) == 0)
1373 {
1374 *type = processor_alias_table[i].processor;
1375 *flags = processor_alias_table[i].flags;
1376 return true;
1377 }
1378 return false;
1379 }
1380
1381 /* Implement TARGET_HANDLE_OPTION. */
1382
1383 static bool
1384 s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
1385 {
1386 switch (code)
1387 {
1388 case OPT_march_:
1389 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1390
1391 case OPT_mstack_guard_:
1392 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1393 return false;
1394 if (exact_log2 (s390_stack_guard) == -1)
1395 error ("stack guard value must be an exact power of 2");
1396 return true;
1397
1398 case OPT_mstack_size_:
1399 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1400 return false;
1401 if (exact_log2 (s390_stack_size) == -1)
1402 error ("stack size must be an exact power of 2");
1403 return true;
1404
1405 case OPT_mtune_:
1406 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1407
1408 case OPT_mwarn_framesize_:
1409 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
1410
1411 default:
1412 return true;
1413 }
1414 }
1415
1416 void
1417 override_options (void)
1418 {
1419 /* Set up function hooks. */
1420 init_machine_status = s390_init_machine_status;
1421
1422 /* Architecture mode defaults according to ABI. */
1423 if (!(target_flags_explicit & MASK_ZARCH))
1424 {
1425 if (TARGET_64BIT)
1426 target_flags |= MASK_ZARCH;
1427 else
1428 target_flags &= ~MASK_ZARCH;
1429 }
1430
1431 /* Determine processor architectural level. */
1432 if (!s390_arch_string)
1433 {
1434 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1435 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1436 }
1437
1438 /* Determine processor to tune for. */
1439 if (s390_tune == PROCESSOR_max)
1440 {
1441 s390_tune = s390_arch;
1442 s390_tune_flags = s390_arch_flags;
1443 }
1444
1445 /* Sanity checks. */
1446 if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
1447 error ("z/Architecture mode not supported on %s", s390_arch_string);
1448 if (TARGET_64BIT && !TARGET_ZARCH)
1449 error ("64-bit ABI not supported in ESA/390 mode");
1450
1451 if (TARGET_HARD_DFP && !TARGET_DFP)
1452 {
1453 if (target_flags_explicit & MASK_HARD_DFP)
1454 {
1455 if (!TARGET_CPU_DFP)
1456 error ("Hardware decimal floating point instructions"
1457 " not available on %s", s390_arch_string);
1458 if (!TARGET_ZARCH)
1459 error ("Hardware decimal floating point instructions"
1460 " not available in ESA/390 mode");
1461 }
1462 else
1463 target_flags &= ~MASK_HARD_DFP;
1464 }
1465
1466 if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
1467 {
1468 if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
1469 error ("-mhard-dfp can't be used in conjunction with -msoft-float");
1470
1471 target_flags &= ~MASK_HARD_DFP;
1472 }
1473
1474 /* Set processor cost function. */
1475 if (s390_tune == PROCESSOR_2094_Z9_109)
1476 s390_cost = &z9_109_cost;
1477 else if (s390_tune == PROCESSOR_2084_Z990)
1478 s390_cost = &z990_cost;
1479 else
1480 s390_cost = &z900_cost;
1481
1482 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1483 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
1484 "in combination");
1485
1486 if (s390_stack_size)
1487 {
1488 if (s390_stack_guard >= s390_stack_size)
1489 error ("stack size must be greater than the stack guard value");
1490 else if (s390_stack_size > 1 << 16)
1491 error ("stack size must not be greater than 64k");
1492 }
1493 else if (s390_stack_guard)
1494 error ("-mstack-guard implies use of -mstack-size");
1495
1496 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
1497 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
1498 target_flags |= MASK_LONG_DOUBLE_128;
1499 #endif
1500 }
1501
1502 /* Map for smallest class containing reg regno. */
1503
1504 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1505 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1506 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1507 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1508 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1509 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1510 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1511 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1512 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1513 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1514 ACCESS_REGS, ACCESS_REGS
1515 };
1516
1517 /* Return attribute type of insn. */
1518
1519 static enum attr_type
1520 s390_safe_attr_type (rtx insn)
1521 {
1522 if (recog_memoized (insn) >= 0)
1523 return get_attr_type (insn);
1524 else
1525 return TYPE_NONE;
1526 }
1527
1528 /* Return true if DISP is a valid short displacement. */
1529
1530 static bool
1531 s390_short_displacement (rtx disp)
1532 {
1533 /* No displacement is OK. */
1534 if (!disp)
1535 return true;
1536
1537 /* Integer displacement in range. */
1538 if (GET_CODE (disp) == CONST_INT)
1539 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1540
1541 /* GOT offset is not OK, the GOT can be large. */
1542 if (GET_CODE (disp) == CONST
1543 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1544 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1545 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
1546 return false;
1547
1548 /* All other symbolic constants are literal pool references,
1549 which are OK as the literal pool must be small. */
1550 if (GET_CODE (disp) == CONST)
1551 return true;
1552
1553 return false;
1554 }
1555
1556 /* Decompose a RTL expression ADDR for a memory address into
1557 its components, returned in OUT.
1558
1559 Returns false if ADDR is not a valid memory address, true
1560 otherwise. If OUT is NULL, don't return the components,
1561 but check for validity only.
1562
1563 Note: Only addresses in canonical form are recognized.
1564 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1565 canonical form so that they will be recognized. */
1566
1567 static int
1568 s390_decompose_address (rtx addr, struct s390_address *out)
1569 {
1570 HOST_WIDE_INT offset = 0;
1571 rtx base = NULL_RTX;
1572 rtx indx = NULL_RTX;
1573 rtx disp = NULL_RTX;
1574 rtx orig_disp;
1575 bool pointer = false;
1576 bool base_ptr = false;
1577 bool indx_ptr = false;
1578 bool literal_pool = false;
1579
1580 /* We may need to substitute the literal pool base register into the address
1581 below. However, at this point we do not know which register is going to
1582 be used as base, so we substitute the arg pointer register. This is going
1583 to be treated as holding a pointer below -- it shouldn't be used for any
1584 other purpose. */
1585 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
1586
1587 /* Decompose address into base + index + displacement. */
1588
1589 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1590 base = addr;
1591
1592 else if (GET_CODE (addr) == PLUS)
1593 {
1594 rtx op0 = XEXP (addr, 0);
1595 rtx op1 = XEXP (addr, 1);
1596 enum rtx_code code0 = GET_CODE (op0);
1597 enum rtx_code code1 = GET_CODE (op1);
1598
1599 if (code0 == REG || code0 == UNSPEC)
1600 {
1601 if (code1 == REG || code1 == UNSPEC)
1602 {
1603 indx = op0; /* index + base */
1604 base = op1;
1605 }
1606
1607 else
1608 {
1609 base = op0; /* base + displacement */
1610 disp = op1;
1611 }
1612 }
1613
1614 else if (code0 == PLUS)
1615 {
1616 indx = XEXP (op0, 0); /* index + base + disp */
1617 base = XEXP (op0, 1);
1618 disp = op1;
1619 }
1620
1621 else
1622 {
1623 return false;
1624 }
1625 }
1626
1627 else
1628 disp = addr; /* displacement */
1629
1630 /* Extract integer part of displacement. */
1631 orig_disp = disp;
1632 if (disp)
1633 {
1634 if (GET_CODE (disp) == CONST_INT)
1635 {
1636 offset = INTVAL (disp);
1637 disp = NULL_RTX;
1638 }
1639 else if (GET_CODE (disp) == CONST
1640 && GET_CODE (XEXP (disp, 0)) == PLUS
1641 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1642 {
1643 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1644 disp = XEXP (XEXP (disp, 0), 0);
1645 }
1646 }
1647
1648 /* Strip off CONST here to avoid special case tests later. */
1649 if (disp && GET_CODE (disp) == CONST)
1650 disp = XEXP (disp, 0);
1651
1652 /* We can convert literal pool addresses to
1653 displacements by basing them off the base register. */
1654 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1655 {
1656 /* Either base or index must be free to hold the base register. */
1657 if (!base)
1658 base = fake_pool_base, literal_pool = true;
1659 else if (!indx)
1660 indx = fake_pool_base, literal_pool = true;
1661 else
1662 return false;
1663
1664 /* Mark up the displacement. */
1665 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1666 UNSPEC_LTREL_OFFSET);
1667 }
1668
1669 /* Validate base register. */
1670 if (base)
1671 {
1672 if (GET_CODE (base) == UNSPEC)
1673 switch (XINT (base, 1))
1674 {
1675 case UNSPEC_LTREF:
1676 if (!disp)
1677 disp = gen_rtx_UNSPEC (Pmode,
1678 gen_rtvec (1, XVECEXP (base, 0, 0)),
1679 UNSPEC_LTREL_OFFSET);
1680 else
1681 return false;
1682
1683 base = XVECEXP (base, 0, 1);
1684 break;
1685
1686 case UNSPEC_LTREL_BASE:
1687 if (XVECLEN (base, 0) == 1)
1688 base = fake_pool_base, literal_pool = true;
1689 else
1690 base = XVECEXP (base, 0, 1);
1691 break;
1692
1693 default:
1694 return false;
1695 }
1696
1697 if (!REG_P (base)
1698 || (GET_MODE (base) != SImode
1699 && GET_MODE (base) != Pmode))
1700 return false;
1701
1702 if (REGNO (base) == STACK_POINTER_REGNUM
1703 || REGNO (base) == FRAME_POINTER_REGNUM
1704 || ((reload_completed || reload_in_progress)
1705 && frame_pointer_needed
1706 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1707 || REGNO (base) == ARG_POINTER_REGNUM
1708 || (flag_pic
1709 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1710 pointer = base_ptr = true;
1711
1712 if ((reload_completed || reload_in_progress)
1713 && base == cfun->machine->base_reg)
1714 pointer = base_ptr = literal_pool = true;
1715 }
1716
1717 /* Validate index register. */
1718 if (indx)
1719 {
1720 if (GET_CODE (indx) == UNSPEC)
1721 switch (XINT (indx, 1))
1722 {
1723 case UNSPEC_LTREF:
1724 if (!disp)
1725 disp = gen_rtx_UNSPEC (Pmode,
1726 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1727 UNSPEC_LTREL_OFFSET);
1728 else
1729 return false;
1730
1731 indx = XVECEXP (indx, 0, 1);
1732 break;
1733
1734 case UNSPEC_LTREL_BASE:
1735 if (XVECLEN (indx, 0) == 1)
1736 indx = fake_pool_base, literal_pool = true;
1737 else
1738 indx = XVECEXP (indx, 0, 1);
1739 break;
1740
1741 default:
1742 return false;
1743 }
1744
1745 if (!REG_P (indx)
1746 || (GET_MODE (indx) != SImode
1747 && GET_MODE (indx) != Pmode))
1748 return false;
1749
1750 if (REGNO (indx) == STACK_POINTER_REGNUM
1751 || REGNO (indx) == FRAME_POINTER_REGNUM
1752 || ((reload_completed || reload_in_progress)
1753 && frame_pointer_needed
1754 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1755 || REGNO (indx) == ARG_POINTER_REGNUM
1756 || (flag_pic
1757 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1758 pointer = indx_ptr = true;
1759
1760 if ((reload_completed || reload_in_progress)
1761 && indx == cfun->machine->base_reg)
1762 pointer = indx_ptr = literal_pool = true;
1763 }
1764
1765 /* Prefer to use pointer as base, not index. */
1766 if (base && indx && !base_ptr
1767 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1768 {
1769 rtx tmp = base;
1770 base = indx;
1771 indx = tmp;
1772 }
1773
1774 /* Validate displacement. */
1775 if (!disp)
1776 {
1777 /* If virtual registers are involved, the displacement will change later
1778 anyway as the virtual registers get eliminated. This could make a
1779 valid displacement invalid, but it is more likely to make an invalid
1780 displacement valid, because we sometimes access the register save area
1781 via negative offsets to one of those registers.
1782 Thus we don't check the displacement for validity here. If after
1783 elimination the displacement turns out to be invalid after all,
1784 this is fixed up by reload in any case. */
1785 if (base != arg_pointer_rtx
1786 && indx != arg_pointer_rtx
1787 && base != return_address_pointer_rtx
1788 && indx != return_address_pointer_rtx
1789 && base != frame_pointer_rtx
1790 && indx != frame_pointer_rtx
1791 && base != virtual_stack_vars_rtx
1792 && indx != virtual_stack_vars_rtx)
1793 if (!DISP_IN_RANGE (offset))
1794 return false;
1795 }
1796 else
1797 {
1798 /* All the special cases are pointers. */
1799 pointer = true;
1800
1801 /* In the small-PIC case, the linker converts @GOT
1802 and @GOTNTPOFF offsets to possible displacements. */
1803 if (GET_CODE (disp) == UNSPEC
1804 && (XINT (disp, 1) == UNSPEC_GOT
1805 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
1806 && flag_pic == 1)
1807 {
1808 ;
1809 }
1810
1811 /* Accept chunkified literal pool symbol references. */
1812 else if (cfun && cfun->machine
1813 && cfun->machine->decomposed_literal_pool_addresses_ok_p
1814 && GET_CODE (disp) == MINUS
1815 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1816 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1817 {
1818 ;
1819 }
1820
1821 /* Accept literal pool references. */
1822 else if (GET_CODE (disp) == UNSPEC
1823 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1824 {
1825 orig_disp = gen_rtx_CONST (Pmode, disp);
1826 if (offset)
1827 {
1828 /* If we have an offset, make sure it does not
1829 exceed the size of the constant pool entry. */
1830 rtx sym = XVECEXP (disp, 0, 0);
1831 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
1832 return false;
1833
1834 orig_disp = plus_constant (orig_disp, offset);
1835 }
1836 }
1837
1838 else
1839 return false;
1840 }
1841
1842 if (!base && !indx)
1843 pointer = true;
1844
1845 if (out)
1846 {
1847 out->base = base;
1848 out->indx = indx;
1849 out->disp = orig_disp;
1850 out->pointer = pointer;
1851 out->literal_pool = literal_pool;
1852 }
1853
1854 return true;
1855 }
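/* To illustrate the literal pool handling above: a symbol_ref satisfying
   CONSTANT_POOL_ADDRESS_P is accepted by installing the (fake) literal
   pool base register as base or index and wrapping the symbol itself
   into an UNSPEC_LTREL_OFFSET displacement, as done in the code above.  */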
1856
1857 /* Decompose a RTL expression OP for a shift count into its components,
1858 and return the base register in BASE and the offset in OFFSET.
1859
1860 Return true if OP is a valid shift count, false if not. */
1861
1862 bool
1863 s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
1864 {
1865 HOST_WIDE_INT off = 0;
1866
1867 /* We can have an integer constant, an address register,
1868 or a sum of the two. */
1869 if (GET_CODE (op) == CONST_INT)
1870 {
1871 off = INTVAL (op);
1872 op = NULL_RTX;
1873 }
1874 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1875 {
1876 off = INTVAL (XEXP (op, 1));
1877 op = XEXP (op, 0);
1878 }
1879 while (op && GET_CODE (op) == SUBREG)
1880 op = SUBREG_REG (op);
1881
1882 if (op && GET_CODE (op) != REG)
1883 return false;
1884
1885 if (offset)
1886 *offset = off;
1887 if (base)
1888 *base = op;
1889
1890 return true;
1891 }
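/* E.g. a shift count of (plus (reg R) (const_int 7)) decomposes into
   base = R and offset = 7, while a plain (const_int 12) yields a NULL
   base and offset = 12, matching the address-style shift counts accepted
   by the hardware.  */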
1892
1893
1894 /* Return true if OP is a valid address without index. */
1895
1896 bool
1897 s390_legitimate_address_without_index_p (rtx op)
1898 {
1899 struct s390_address addr;
1900
1901 if (!s390_decompose_address (XEXP (op, 0), &addr))
1902 return false;
1903 if (addr.indx)
1904 return false;
1905
1906 return true;
1907 }
1908
1909
1910 /* Evaluates constraint strings described by the regular expression
1911 ([AB]?[QRST])|U|W|Y and returns 1 if OP is a valid operand for the
1912 constraint given in STR, or 0 otherwise. */
1913
1914 int
1915 s390_mem_constraint (const char *str, rtx op)
1916 {
1917 struct s390_address addr;
1918 char c = str[0];
1919
1920 /* Check for offsettable variants of memory constraints. */
1921 if (c == 'A')
1922 {
1923 /* Only accept non-volatile MEMs. */
1924 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1925 return 0;
1926
1927 if ((reload_completed || reload_in_progress)
1928 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
1929 return 0;
1930
1931 c = str[1];
1932 }
1933
1934 /* Check for non-literal-pool variants of memory constraints. */
1935 else if (c == 'B')
1936 {
1937 if (GET_CODE (op) != MEM)
1938 return 0;
1939 if (!s390_decompose_address (XEXP (op, 0), &addr))
1940 return 0;
1941 if (addr.literal_pool)
1942 return 0;
1943
1944 c = str[1];
1945 }
1946
1947 switch (c)
1948 {
1949 case 'Q':
1950 if (GET_CODE (op) != MEM)
1951 return 0;
1952 if (!s390_decompose_address (XEXP (op, 0), &addr))
1953 return 0;
1954 if (addr.indx)
1955 return 0;
1956
1957 if (TARGET_LONG_DISPLACEMENT)
1958 {
1959 if (!s390_short_displacement (addr.disp))
1960 return 0;
1961 }
1962 break;
1963
1964 case 'R':
1965 if (GET_CODE (op) != MEM)
1966 return 0;
1967
1968 if (TARGET_LONG_DISPLACEMENT)
1969 {
1970 if (!s390_decompose_address (XEXP (op, 0), &addr))
1971 return 0;
1972 if (!s390_short_displacement (addr.disp))
1973 return 0;
1974 }
1975 break;
1976
1977 case 'S':
1978 if (!TARGET_LONG_DISPLACEMENT)
1979 return 0;
1980 if (GET_CODE (op) != MEM)
1981 return 0;
1982 if (!s390_decompose_address (XEXP (op, 0), &addr))
1983 return 0;
1984 if (addr.indx)
1985 return 0;
1986 if (s390_short_displacement (addr.disp))
1987 return 0;
1988 break;
1989
1990 case 'T':
1991 if (!TARGET_LONG_DISPLACEMENT)
1992 return 0;
1993 if (GET_CODE (op) != MEM)
1994 return 0;
1995 /* Any invalid address here will be fixed up by reload,
1996 so accept it for the most generic constraint. */
1997 if (s390_decompose_address (XEXP (op, 0), &addr)
1998 && s390_short_displacement (addr.disp))
1999 return 0;
2000 break;
2001
2002 case 'U':
2003 if (TARGET_LONG_DISPLACEMENT)
2004 {
2005 if (!s390_decompose_address (op, &addr))
2006 return 0;
2007 if (!s390_short_displacement (addr.disp))
2008 return 0;
2009 }
2010 break;
2011
2012 case 'W':
2013 if (!TARGET_LONG_DISPLACEMENT)
2014 return 0;
2015 /* Any invalid address here will be fixed up by reload,
2016 so accept it for the most generic constraint. */
2017 if (s390_decompose_address (op, &addr)
2018 && s390_short_displacement (addr.disp))
2019 return 0;
2020 break;
2021
2022 case 'Y':
2023 /* Simply check for the basic form of a shift count. Reload will
2024 take care of making sure we have a proper base register. */
2025 if (!s390_decompose_shift_count (op, NULL, NULL))
2026 return 0;
2027 break;
2028
2029 default:
2030 return 0;
2031 }
2032
2033 return 1;
2034 }
2035
2036
2037
2038 /* Evaluates constraint strings starting with letter O. Input
2039 parameter C is the letter following the "O" in the constraint
2040 string. Returns 1 if VALUE meets the respective constraint and 0
2041 otherwise. */
2042
2043 int
2044 s390_O_constraint_str (const char c, HOST_WIDE_INT value)
2045 {
2046 if (!TARGET_EXTIMM)
2047 return 0;
2048
2049 switch (c)
2050 {
2051 case 's':
2052 return trunc_int_for_mode (value, SImode) == value;
2053
2054 case 'p':
2055 return value == 0
2056 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2057
2058 case 'n':
2059 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
2060
2061 default:
2062 gcc_unreachable ();
2063 }
2064 }
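/* Editor's illustration (not from the original source): for the "Os"
   case above, the trunc_int_for_mode test amounts to checking that
   VALUE survives a round trip through a signed 32-bit integer:

     #include <stdint.h>

     static int fits_signed_32bit (int64_t value)
     {
       return value == (int64_t) (int32_t) value;
     }

   so 0x7fffffff satisfies "Os" while 0x80000000 does not.  The helper
   name is made up for illustration only.  */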
2065
2066
2067 /* Evaluates constraint strings starting with letter N. Parameter STR
2068 contains the letters following letter "N" in the constraint string.
2069 Returns true if VALUE matches the constraint. */
2070
2071 int
2072 s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
2073 {
2074 enum machine_mode mode, part_mode;
2075 int def;
2076 int part, part_goal;
2077
2078
2079 if (str[0] == 'x')
2080 part_goal = -1;
2081 else
2082 part_goal = str[0] - '0';
2083
2084 switch (str[1])
2085 {
2086 case 'Q':
2087 part_mode = QImode;
2088 break;
2089 case 'H':
2090 part_mode = HImode;
2091 break;
2092 case 'S':
2093 part_mode = SImode;
2094 break;
2095 default:
2096 return 0;
2097 }
2098
2099 switch (str[2])
2100 {
2101 case 'H':
2102 mode = HImode;
2103 break;
2104 case 'S':
2105 mode = SImode;
2106 break;
2107 case 'D':
2108 mode = DImode;
2109 break;
2110 default:
2111 return 0;
2112 }
2113
2114 switch (str[3])
2115 {
2116 case '0':
2117 def = 0;
2118 break;
2119 case 'F':
2120 def = -1;
2121 break;
2122 default:
2123 return 0;
2124 }
2125
2126 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2127 return 0;
2128
2129 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2130 if (part < 0)
2131 return 0;
2132 if (part_goal != -1 && part_goal != part)
2133 return 0;
2134
2135 return 1;
2136 }
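/* Editor's illustration (not from the original source): as a worked
   example of the decoding above, the constraint "NxHD0" asks for a
   DImode value in which exactly one HImode (16-bit) part differs from
   the default 0, at any position.  A standalone check with the same
   meaning:

     #include <stdint.h>

     static int matches_NxHD0 (uint64_t value)
     {
       int nonzero_parts = 0, i;

       for (i = 0; i < 4; i++)
         if ((value >> (16 * i)) & 0xffffULL)
           nonzero_parts++;
       return nonzero_parts == 1;
     }

   so 0x0000123400000000 matches while 0x0000123400005678 does not.
   The helper name is hypothetical.  */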
2137
2138
2139 /* Returns true if the input parameter VALUE is a float zero. */
2140
2141 int
2142 s390_float_const_zero_p (rtx value)
2143 {
2144 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2145 && value == CONST0_RTX (GET_MODE (value)));
2146 }
2147
2148
2149 /* Compute a (partial) cost for rtx X. Return true if the complete
2150 cost has been computed, and false if subexpressions should be
2151 scanned. In either case, *TOTAL contains the cost result.
2152 CODE contains GET_CODE (x), OUTER_CODE contains the code
2153 of the superexpression of x. */
2154
2155 static bool
2156 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
2157 {
2158 switch (code)
2159 {
2160 case CONST:
2161 case CONST_INT:
2162 case LABEL_REF:
2163 case SYMBOL_REF:
2164 case CONST_DOUBLE:
2165 case MEM:
2166 *total = 0;
2167 return true;
2168
2169 case ASHIFT:
2170 case ASHIFTRT:
2171 case LSHIFTRT:
2172 case ROTATE:
2173 case ROTATERT:
2174 case AND:
2175 case IOR:
2176 case XOR:
2177 case NEG:
2178 case NOT:
2179 *total = COSTS_N_INSNS (1);
2180 return false;
2181
2182 case PLUS:
2183 case MINUS:
2184 /* Check for multiply and add. */
2185 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2186 && GET_CODE (XEXP (x, 0)) == MULT
2187 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2188 {
2189 /* This is the multiply and add case. */
2190 if (GET_MODE (x) == DFmode)
2191 *total = s390_cost->madbr;
2192 else
2193 *total = s390_cost->maebr;
2194 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2195 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2196 + rtx_cost (XEXP (x, 1), code);
2197 return true; /* Do not do an additional recursive descent. */
2198 }
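/* Editor's note (illustrative, not from the original source): the fused
   multiply-and-add case above matches RTL typically produced for source
   such as

     double mad (double a, double b, double c) { return a * b + c; }

   when hardware floating point and fused madd are enabled; the cost of
   a single madbr/maebr is charged instead of separate multiply and add
   costs.  */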
2199 *total = COSTS_N_INSNS (1);
2200 return false;
2201
2202 case MULT:
2203 switch (GET_MODE (x))
2204 {
2205 case SImode:
2206 {
2207 rtx left = XEXP (x, 0);
2208 rtx right = XEXP (x, 1);
2209 if (GET_CODE (right) == CONST_INT
2210 && CONST_OK_FOR_K (INTVAL (right)))
2211 *total = s390_cost->mhi;
2212 else if (GET_CODE (left) == SIGN_EXTEND)
2213 *total = s390_cost->mh;
2214 else
2215 *total = s390_cost->ms; /* msr, ms, msy */
2216 break;
2217 }
2218 case DImode:
2219 {
2220 rtx left = XEXP (x, 0);
2221 rtx right = XEXP (x, 1);
2222 if (TARGET_64BIT)
2223 {
2224 if (GET_CODE (right) == CONST_INT
2225 && CONST_OK_FOR_K (INTVAL (right)))
2226 *total = s390_cost->mghi;
2227 else if (GET_CODE (left) == SIGN_EXTEND)
2228 *total = s390_cost->msgf;
2229 else
2230 *total = s390_cost->msg; /* msgr, msg */
2231 }
2232 else /* TARGET_31BIT */
2233 {
2234 if (GET_CODE (left) == SIGN_EXTEND
2235 && GET_CODE (right) == SIGN_EXTEND)
2236 /* mulsidi case: mr, m */
2237 *total = s390_cost->m;
2238 else if (GET_CODE (left) == ZERO_EXTEND
2239 && GET_CODE (right) == ZERO_EXTEND
2240 && TARGET_CPU_ZARCH)
2241 /* umulsidi case: ml, mlr */
2242 *total = s390_cost->ml;
2243 else
2244 /* Complex calculation is required. */
2245 *total = COSTS_N_INSNS (40);
2246 }
2247 break;
2248 }
2249 case SFmode:
2250 case DFmode:
2251 *total = s390_cost->mult_df;
2252 break;
2253 case TFmode:
2254 *total = s390_cost->mxbr;
2255 break;
2256 default:
2257 return false;
2258 }
2259 return false;
2260
2261 case UDIV:
2262 case UMOD:
2263 if (GET_MODE (x) == TImode) /* 128 bit division */
2264 *total = s390_cost->dlgr;
2265 else if (GET_MODE (x) == DImode)
2266 {
2267 rtx right = XEXP (x, 1);
2268 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2269 *total = s390_cost->dlr;
2270 else /* 64 by 64 bit division */
2271 *total = s390_cost->dlgr;
2272 }
2273 else if (GET_MODE (x) == SImode) /* 32 bit division */
2274 *total = s390_cost->dlr;
2275 return false;
2276
2277 case DIV:
2278 case MOD:
2279 if (GET_MODE (x) == DImode)
2280 {
2281 rtx right = XEXP (x, 1);
2282 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2283 if (TARGET_64BIT)
2284 *total = s390_cost->dsgfr;
2285 else
2286 *total = s390_cost->dr;
2287 else /* 64 by 64 bit division */
2288 *total = s390_cost->dsgr;
2289 }
2290 else if (GET_MODE (x) == SImode) /* 32 bit division */
2291 *total = s390_cost->dlr;
2292 else if (GET_MODE (x) == SFmode)
2293 {
2294 *total = s390_cost->debr;
2295 }
2296 else if (GET_MODE (x) == DFmode)
2297 {
2298 *total = s390_cost->ddbr;
2299 }
2300 else if (GET_MODE (x) == TFmode)
2301 {
2302 *total = s390_cost->dxbr;
2303 }
2304 return false;
2305
2306 case SQRT:
2307 if (GET_MODE (x) == SFmode)
2308 *total = s390_cost->sqebr;
2309 else if (GET_MODE (x) == DFmode)
2310 *total = s390_cost->sqdbr;
2311 else /* TFmode */
2312 *total = s390_cost->sqxbr;
2313 return false;
2314
2315 case SIGN_EXTEND:
2316 case ZERO_EXTEND:
2317 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2318 || outer_code == PLUS || outer_code == MINUS
2319 || outer_code == COMPARE)
2320 *total = 0;
2321 return false;
2322
2323 case COMPARE:
2324 *total = COSTS_N_INSNS (1);
2325 if (GET_CODE (XEXP (x, 0)) == AND
2326 && GET_CODE (XEXP (x, 1)) == CONST_INT
2327 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2328 {
2329 rtx op0 = XEXP (XEXP (x, 0), 0);
2330 rtx op1 = XEXP (XEXP (x, 0), 1);
2331 rtx op2 = XEXP (x, 1);
2332
2333 if (memory_operand (op0, GET_MODE (op0))
2334 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2335 return true;
2336 if (register_operand (op0, GET_MODE (op0))
2337 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2338 return true;
2339 }
2340 return false;
2341
2342 default:
2343 return false;
2344 }
2345 }
2346
2347 /* Return the cost of an address rtx ADDR. */
2348
2349 static int
2350 s390_address_cost (rtx addr)
2351 {
2352 struct s390_address ad;
2353 if (!s390_decompose_address (addr, &ad))
2354 return 1000;
2355
2356 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2357 }
2358
2359 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2360 otherwise return 0. */
2361
2362 int
2363 tls_symbolic_operand (rtx op)
2364 {
2365 if (GET_CODE (op) != SYMBOL_REF)
2366 return 0;
2367 return SYMBOL_REF_TLS_MODEL (op);
2368 }
2369 \f
2370 /* Split DImode access register reference REG (on 64-bit) into its constituent
2371 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2372 gen_highpart cannot be used as they assume all registers are word-sized,
2373 while our access registers have only half that size. */
2374
2375 void
2376 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2377 {
2378 gcc_assert (TARGET_64BIT);
2379 gcc_assert (ACCESS_REG_P (reg));
2380 gcc_assert (GET_MODE (reg) == DImode);
2381 gcc_assert (!(REGNO (reg) & 1));
2382
2383 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2384 *hi = gen_rtx_REG (SImode, REGNO (reg));
2385 }
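/* Editor's sketch (not from the original source): at the value level the
   split above corresponds to taking the two 32-bit halves of a 64-bit
   quantity, with the high half living in the even register of the pair:

     #include <stdint.h>

     static void split_di (uint64_t val, uint32_t *hi, uint32_t *lo)
     {
       *hi = (uint32_t) (val >> 32);   /* even access register  */
       *lo = (uint32_t) val;           /* odd access register   */
     }
 */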
2386
2387 /* Return true if OP contains a symbol reference. */
2388
2389 bool
2390 symbolic_reference_mentioned_p (rtx op)
2391 {
2392 const char *fmt;
2393 int i;
2394
2395 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2396 return 1;
2397
2398 fmt = GET_RTX_FORMAT (GET_CODE (op));
2399 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2400 {
2401 if (fmt[i] == 'E')
2402 {
2403 int j;
2404
2405 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2406 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2407 return 1;
2408 }
2409
2410 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2411 return 1;
2412 }
2413
2414 return 0;
2415 }
2416
2417 /* Return true if OP contains a reference to a thread-local symbol. */
2418
2419 bool
2420 tls_symbolic_reference_mentioned_p (rtx op)
2421 {
2422 const char *fmt;
2423 int i;
2424
2425 if (GET_CODE (op) == SYMBOL_REF)
2426 return tls_symbolic_operand (op);
2427
2428 fmt = GET_RTX_FORMAT (GET_CODE (op));
2429 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2430 {
2431 if (fmt[i] == 'E')
2432 {
2433 int j;
2434
2435 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2436 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2437 return true;
2438 }
2439
2440 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2441 return true;
2442 }
2443
2444 return false;
2445 }
2446
2447
2448 /* Return true if OP is a legitimate general operand when
2449 generating PIC code. It is given that flag_pic is on
2450 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2451
2452 int
2453 legitimate_pic_operand_p (rtx op)
2454 {
2455 /* Accept all non-symbolic constants. */
2456 if (!SYMBOLIC_CONST (op))
2457 return 1;
2458
2459 /* Reject everything else; must be handled
2460 via emit_symbolic_move. */
2461 return 0;
2462 }
2463
2464 /* Returns true if the constant value OP is a legitimate general operand.
2465 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2466
2467 int
2468 legitimate_constant_p (rtx op)
2469 {
2470 /* Accept all non-symbolic constants. */
2471 if (!SYMBOLIC_CONST (op))
2472 return 1;
2473
2474 /* Accept immediate LARL operands. */
2475 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2476 return 1;
2477
2478 /* Thread-local symbols are never legal constants. This is
2479 so that emit_call knows that computing such addresses
2480 might require a function call. */
2481 if (TLS_SYMBOLIC_CONST (op))
2482 return 0;
2483
2484 /* In the PIC case, symbolic constants must *not* be
2485 forced into the literal pool. We accept them here,
2486 so that they will be handled by emit_symbolic_move. */
2487 if (flag_pic)
2488 return 1;
2489
2490 /* All remaining non-PIC symbolic constants are
2491 forced into the literal pool. */
2492 return 0;
2493 }
2494
2495 /* Determine if it's legal to put X into the constant pool. This
2496 is not possible if X contains the address of a symbol that is
2497 not constant (TLS) or not known at final link time (PIC). */
2498
2499 static bool
2500 s390_cannot_force_const_mem (rtx x)
2501 {
2502 switch (GET_CODE (x))
2503 {
2504 case CONST_INT:
2505 case CONST_DOUBLE:
2506 /* Accept all non-symbolic constants. */
2507 return false;
2508
2509 case LABEL_REF:
2510 /* Labels are OK iff we are non-PIC. */
2511 return flag_pic != 0;
2512
2513 case SYMBOL_REF:
2514 /* 'Naked' TLS symbol references are never OK,
2515 non-TLS symbols are OK iff we are non-PIC. */
2516 if (tls_symbolic_operand (x))
2517 return true;
2518 else
2519 return flag_pic != 0;
2520
2521 case CONST:
2522 return s390_cannot_force_const_mem (XEXP (x, 0));
2523 case PLUS:
2524 case MINUS:
2525 return s390_cannot_force_const_mem (XEXP (x, 0))
2526 || s390_cannot_force_const_mem (XEXP (x, 1));
2527
2528 case UNSPEC:
2529 switch (XINT (x, 1))
2530 {
2531 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2532 case UNSPEC_LTREL_OFFSET:
2533 case UNSPEC_GOT:
2534 case UNSPEC_GOTOFF:
2535 case UNSPEC_PLTOFF:
2536 case UNSPEC_TLSGD:
2537 case UNSPEC_TLSLDM:
2538 case UNSPEC_NTPOFF:
2539 case UNSPEC_DTPOFF:
2540 case UNSPEC_GOTNTPOFF:
2541 case UNSPEC_INDNTPOFF:
2542 return false;
2543
2544 /* If the literal pool shares the code section, execute template
2545 placeholders must be put into the pool as well. */
2546 case UNSPEC_INSN:
2547 return TARGET_CPU_ZARCH;
2548
2549 default:
2550 return true;
2551 }
2552 break;
2553
2554 default:
2555 gcc_unreachable ();
2556 }
2557 }
2558
2559 /* Returns true if the constant value OP is a legitimate general
2560 operand during and after reload. The difference to
2561 legitimate_constant_p is that this function will not accept
2562 a constant that would need to be forced to the literal pool
2563 before it can be used as operand. */
2564
2565 bool
2566 legitimate_reload_constant_p (rtx op)
2567 {
2568 /* Accept la(y) operands. */
2569 if (GET_CODE (op) == CONST_INT
2570 && DISP_IN_RANGE (INTVAL (op)))
2571 return true;
2572
2573 /* Accept l(g)hi/l(g)fi operands. */
2574 if (GET_CODE (op) == CONST_INT
2575 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2576 return true;
2577
2578 /* Accept lliXX operands. */
2579 if (TARGET_ZARCH
2580 && GET_CODE (op) == CONST_INT
2581 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2582 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2583 return true;
2584
2585 if (TARGET_EXTIMM
2586 && GET_CODE (op) == CONST_INT
2587 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2588 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2589 return true;
2590
2591 /* Accept larl operands. */
2592 if (TARGET_CPU_ZARCH
2593 && larl_operand (op, VOIDmode))
2594 return true;
2595
2596 /* Accept lzXX operands. */
2597 if (GET_CODE (op) == CONST_DOUBLE
2598 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2599 return true;
2600
2601 /* Accept double-word operands that can be split. */
2602 if (GET_CODE (op) == CONST_INT
2603 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2604 {
2605 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2606 rtx hi = operand_subword (op, 0, 0, dword_mode);
2607 rtx lo = operand_subword (op, 1, 0, dword_mode);
2608 return legitimate_reload_constant_p (hi)
2609 && legitimate_reload_constant_p (lo);
2610 }
2611
2612 /* Everything else cannot be handled without reload. */
2613 return false;
2614 }
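/* Editor's illustration (not from the original source): the double-word
   split above accepts, e.g., the 64-bit constant 0x0000002000000030 on a
   31-bit target, because each 32-bit half (0x20 and 0x30) can be loaded
   with a single immediate instruction:

     #include <stdint.h>

     static void split_double_word (uint64_t val, uint32_t *hi, uint32_t *lo)
     {
       *hi = (uint32_t) (val >> 32);   /* 0x20 */
       *lo = (uint32_t) val;           /* 0x30 */
     }
 */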
2615
2616 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2617 return the class of reg to actually use. */
2618
2619 enum reg_class
2620 s390_preferred_reload_class (rtx op, enum reg_class class)
2621 {
2622 switch (GET_CODE (op))
2623 {
2624 /* Constants we cannot reload must be forced into the
2625 literal pool. */
2626
2627 case CONST_DOUBLE:
2628 case CONST_INT:
2629 if (legitimate_reload_constant_p (op))
2630 return class;
2631 else
2632 return NO_REGS;
2633
2634 /* If a symbolic constant or a PLUS is reloaded,
2635 it is most likely being used as an address, so
2636 prefer ADDR_REGS. If 'class' is not a superset
2637 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2638 case PLUS:
2639 case LABEL_REF:
2640 case SYMBOL_REF:
2641 case CONST:
2642 if (reg_class_subset_p (ADDR_REGS, class))
2643 return ADDR_REGS;
2644 else
2645 return NO_REGS;
2646
2647 default:
2648 break;
2649 }
2650
2651 return class;
2652 }
2653
2654 /* Inform reload about cases where moving X with a mode MODE to a register in
2655 CLASS requires an extra scratch or immediate register. Return the class
2656 needed for the immediate register. */
2657
2658 static enum reg_class
2659 s390_secondary_reload (bool in_p, rtx x, enum reg_class class,
2660 enum machine_mode mode, secondary_reload_info *sri)
2661 {
2662 /* Intermediate register needed. */
2663 if (reg_classes_intersect_p (CC_REGS, class))
2664 return GENERAL_REGS;
2665
2666 /* We need a scratch register when loading a PLUS expression which
2667 is not a legitimate operand of the LOAD ADDRESS instruction. */
2668 if (in_p && s390_plus_operand (x, mode))
2669 sri->icode = (TARGET_64BIT ?
2670 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2671
2672 /* When performing a multiword move from or to memory, we have to make
2673 sure the second chunk in memory is addressable without causing a
2674 displacement overflow; if it would, we calculate the address in a
2675 scratch register (a worked example follows after this function). */
2676 if (MEM_P (x)
2677 && GET_CODE (XEXP (x, 0)) == PLUS
2678 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2679 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
2680 + GET_MODE_SIZE (mode) - 1))
2681 {
2682 /* For GENERAL_REGS a displacement overflow is no problem if it occurs
2683 in an s_operand address, since we may fall back to lm/stm. So we only
2684 have to care about overflows in the b+i+d case. */
2685 if ((reg_classes_intersect_p (GENERAL_REGS, class)
2686 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
2687 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
2688 /* For FP_REGS no lm/stm is available, so this check is triggered
2689 for displacement overflows in b+i+d and b+d-like addresses. */
2690 || (reg_classes_intersect_p (FP_REGS, class)
2691 && s390_class_max_nregs (FP_REGS, mode) > 1))
2692 {
2693 if (in_p)
2694 sri->icode = (TARGET_64BIT ?
2695 CODE_FOR_reloaddi_nonoffmem_in :
2696 CODE_FOR_reloadsi_nonoffmem_in);
2697 else
2698 sri->icode = (TARGET_64BIT ?
2699 CODE_FOR_reloaddi_nonoffmem_out :
2700 CODE_FOR_reloadsi_nonoffmem_out);
2701 }
2702 }
2703
2704 /* Either scratch or no register needed. */
2705 return NO_REGS;
2706 }
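/* Editor's illustration (not from the original source): the overflow test
   in s390_secondary_reload above, written out for the short 12-bit
   displacement case (0..4095) and a 16-byte access:

     static int needs_scratch_for_second_chunk (long disp)
     {
       long last_byte = disp + 16 - 1;

       return !(last_byte >= 0 && last_byte <= 4095);
     }

   e.g. needs_scratch_for_second_chunk (4080) == 0 but
   needs_scratch_for_second_chunk (4088) == 1, because the second 8-byte
   chunk would start at offset 4096.  The helper name and the 4095 bound
   (the non-long-displacement case) are illustrative assumptions.  */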
2707
2708 /* Generate code to load SRC, which is a PLUS that is not a
2709 legitimate operand for the LA instruction, into TARGET.
2710 SCRATCH may be used as a scratch register. */
2711
2712 void
2713 s390_expand_plus_operand (rtx target, rtx src,
2714 rtx scratch)
2715 {
2716 rtx sum1, sum2;
2717 struct s390_address ad;
2718
2719 /* src must be a PLUS; get its two operands. */
2720 gcc_assert (GET_CODE (src) == PLUS);
2721 gcc_assert (GET_MODE (src) == Pmode);
2722
2723 /* Check if any of the two operands is already scheduled
2724 for replacement by reload. This can happen e.g. when
2725 float registers occur in an address. */
2726 sum1 = find_replacement (&XEXP (src, 0));
2727 sum2 = find_replacement (&XEXP (src, 1));
2728 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2729
2730 /* If the address is already strictly valid, there's nothing to do. */
2731 if (!s390_decompose_address (src, &ad)
2732 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2733 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
2734 {
2735 /* Otherwise, one of the operands cannot be an address register;
2736 we reload its value into the scratch register. */
2737 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2738 {
2739 emit_move_insn (scratch, sum1);
2740 sum1 = scratch;
2741 }
2742 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2743 {
2744 emit_move_insn (scratch, sum2);
2745 sum2 = scratch;
2746 }
2747
2748 /* According to the way these invalid addresses are generated
2749 in reload.c, it should never happen (at least on s390) that
2750 *neither* of the PLUS components, after find_replacements
2751 was applied, is an address register. */
2752 if (sum1 == scratch && sum2 == scratch)
2753 {
2754 debug_rtx (src);
2755 gcc_unreachable ();
2756 }
2757
2758 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2759 }
2760
2761 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2762 is only ever performed on addresses, so we can mark the
2763 sum as legitimate for LA in any case. */
2764 s390_load_address (target, src);
2765 }
2766
2767
2768 /* Return true if ADDR is a valid memory address.
2769 STRICT specifies whether strict register checking applies. */
2770
2771 bool
2772 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2773 rtx addr, int strict)
2774 {
2775 struct s390_address ad;
2776 if (!s390_decompose_address (addr, &ad))
2777 return false;
2778
2779 if (strict)
2780 {
2781 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
2782 return false;
2783
2784 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
2785 return false;
2786 }
2787 else
2788 {
2789 if (ad.base
2790 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
2791 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
2792 return false;
2793
2794 if (ad.indx
2795 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
2796 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
2797 return false;
2798 }
2799 return true;
2800 }
2801
2802 /* Return true if OP is a valid operand for the LA instruction.
2803 In 31-bit mode, we need to prove that the result is used as an
2804 address, as LA performs only a 31-bit addition. */
2805
2806 bool
2807 legitimate_la_operand_p (rtx op)
2808 {
2809 struct s390_address addr;
2810 if (!s390_decompose_address (op, &addr))
2811 return false;
2812
2813 return (TARGET_64BIT || addr.pointer);
2814 }
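/* Editor's note (illustrative, not from the original source): in 31-bit
   mode LA keeps only a 31-bit result, roughly

     static unsigned int la_31bit_sum (unsigned int a, unsigned int b)
     {
       return (a + b) & 0x7fffffffu;
     }

   which is harmless when the sum is only ever used as an address, but
   not for general integer arithmetic -- hence the pointer check above. */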
2815
2816 /* Return true if it is valid *and* preferable to use LA to
2817 compute the sum of OP1 and OP2. */
2818
2819 bool
2820 preferred_la_operand_p (rtx op1, rtx op2)
2821 {
2822 struct s390_address addr;
2823
2824 if (op2 != const0_rtx)
2825 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2826
2827 if (!s390_decompose_address (op1, &addr))
2828 return false;
2829 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
2830 return false;
2831 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
2832 return false;
2833
2834 if (!TARGET_64BIT && !addr.pointer)
2835 return false;
2836
2837 if (addr.pointer)
2838 return true;
2839
2840 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2841 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2842 return true;
2843
2844 return false;
2845 }
2846
2847 /* Emit a forced load-address operation to load SRC into DST.
2848 This will use the LOAD ADDRESS instruction even in situations
2849 where legitimate_la_operand_p (SRC) returns false. */
2850
2851 void
2852 s390_load_address (rtx dst, rtx src)
2853 {
2854 if (TARGET_64BIT)
2855 emit_move_insn (dst, src);
2856 else
2857 emit_insn (gen_force_la_31 (dst, src));
2858 }
2859
2860 /* Return a legitimate reference for ORIG (an address) using the
2861 register REG. If REG is 0, a new pseudo is generated.
2862
2863 There are two types of references that must be handled:
2864
2865 1. Global data references must load the address from the GOT, via
2866 the PIC reg. An insn is emitted to do this load, and the reg is
2867 returned.
2868
2869 2. Static data references, constant pool addresses, and code labels
2870 compute the address as an offset from the GOT, whose base is in
2871 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2872 differentiate them from global data objects. The returned
2873 address is the PIC reg + an unspec constant.
2874
2875 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2876 reg also appears in the address. */
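/* Editor's sketch (not from the original source): the two reference kinds
   described above, expressed as plain C against a hypothetical GOT layout;
   got_base stands for the PIC register, and both offsets are link-time
   constants:

     extern char got_base[];
     extern long got_slot_offset;    /* case 1: offset of the symbol's GOT slot */
     extern long gotoff_of_sym;      /* case 2: symbol's offset from the GOT    */

     static void *global_data_addr (void)
     {
       return *(void **) (got_base + got_slot_offset);   /* load from GOT  */
     }

     static void *local_data_addr (void)
     {
       return (void *) (got_base + gotoff_of_sym);       /* GOT-relative   */
     }
 */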
2877
2878 rtx
2879 legitimize_pic_address (rtx orig, rtx reg)
2880 {
2881 rtx addr = orig;
2882 rtx new = orig;
2883 rtx base;
2884
2885 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
2886
2887 if (GET_CODE (addr) == LABEL_REF
2888 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2889 {
2890 /* This is a local symbol. */
2891 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2892 {
2893 /* Access local symbols PC-relative via LARL.
2894 This is the same as in the non-PIC case, so it is
2895 handled automatically ... */
2896 }
2897 else
2898 {
2899 /* Access local symbols relative to the GOT. */
2900
2901 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2902
2903 if (reload_in_progress || reload_completed)
2904 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2905
2906 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2907 addr = gen_rtx_CONST (Pmode, addr);
2908 addr = force_const_mem (Pmode, addr);
2909 emit_move_insn (temp, addr);
2910
2911 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2912 if (reg != 0)
2913 {
2914 s390_load_address (reg, new);
2915 new = reg;
2916 }
2917 }
2918 }
2919 else if (GET_CODE (addr) == SYMBOL_REF)
2920 {
2921 if (reg == 0)
2922 reg = gen_reg_rtx (Pmode);
2923
2924 if (flag_pic == 1)
2925 {
2926 /* Assume GOT offset < 4k. This is handled the same way
2927 in both 31- and 64-bit code (@GOT). */
2928
2929 if (reload_in_progress || reload_completed)
2930 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2931
2932 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2933 new = gen_rtx_CONST (Pmode, new);
2934 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2935 new = gen_const_mem (Pmode, new);
2936 emit_move_insn (reg, new);
2937 new = reg;
2938 }
2939 else if (TARGET_CPU_ZARCH)
2940 {
2941 /* If the GOT offset might be >= 4k, we determine the position
2942 of the GOT entry via a PC-relative LARL (@GOTENT). */
2943
2944 rtx temp = gen_reg_rtx (Pmode);
2945
2946 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2947 new = gen_rtx_CONST (Pmode, new);
2948 emit_move_insn (temp, new);
2949
2950 new = gen_const_mem (Pmode, temp);
2951 emit_move_insn (reg, new);
2952 new = reg;
2953 }
2954 else
2955 {
2956 /* If the GOT offset might be >= 4k, we have to load it
2957 from the literal pool (@GOT). */
2958
2959 rtx temp = gen_reg_rtx (Pmode);
2960
2961 if (reload_in_progress || reload_completed)
2962 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
2963
2964 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2965 addr = gen_rtx_CONST (Pmode, addr);
2966 addr = force_const_mem (Pmode, addr);
2967 emit_move_insn (temp, addr);
2968
2969 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2970 new = gen_const_mem (Pmode, new);
2971 emit_move_insn (reg, new);
2972 new = reg;
2973 }
2974 }
2975 else
2976 {
2977 if (GET_CODE (addr) == CONST)
2978 {
2979 addr = XEXP (addr, 0);
2980 if (GET_CODE (addr) == UNSPEC)
2981 {
2982 gcc_assert (XVECLEN (addr, 0) == 1);
2983 switch (XINT (addr, 1))
2984 {
2985 /* If someone moved a GOT-relative UNSPEC
2986 out of the literal pool, force them back in. */
2987 case UNSPEC_GOTOFF:
2988 case UNSPEC_PLTOFF:
2989 new = force_const_mem (Pmode, orig);
2990 break;
2991
2992 /* @GOT is OK as is if small. */
2993 case UNSPEC_GOT:
2994 if (flag_pic == 2)
2995 new = force_const_mem (Pmode, orig);
2996 break;
2997
2998 /* @GOTENT is OK as is. */
2999 case UNSPEC_GOTENT:
3000 break;
3001
3002 /* @PLT is OK as is on 64-bit, must be converted to
3003 GOT-relative @PLTOFF on 31-bit. */
3004 case UNSPEC_PLT:
3005 if (!TARGET_CPU_ZARCH)
3006 {
3007 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3008
3009 if (reload_in_progress || reload_completed)
3010 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3011
3012 addr = XVECEXP (addr, 0, 0);
3013 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
3014 UNSPEC_PLTOFF);
3015 addr = gen_rtx_CONST (Pmode, addr);
3016 addr = force_const_mem (Pmode, addr);
3017 emit_move_insn (temp, addr);
3018
3019 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3020 if (reg != 0)
3021 {
3022 s390_load_address (reg, new);
3023 new = reg;
3024 }
3025 }
3026 break;
3027
3028 /* Everything else cannot happen. */
3029 default:
3030 gcc_unreachable ();
3031 }
3032 }
3033 else
3034 gcc_assert (GET_CODE (addr) == PLUS);
3035 }
3036 if (GET_CODE (addr) == PLUS)
3037 {
3038 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3039
3040 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3041 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3042
3043 /* Check first to see if this is a constant offset
3044 from a local symbol reference. */
3045 if ((GET_CODE (op0) == LABEL_REF
3046 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3047 && GET_CODE (op1) == CONST_INT)
3048 {
3049 if (TARGET_CPU_ZARCH
3050 && larl_operand (op0, VOIDmode)
3051 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3052 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3053 {
3054 if (INTVAL (op1) & 1)
3055 {
3056 /* LARL can't handle odd offsets, so emit a
3057 pair of LARL and LA. */
3058 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3059
3060 if (!DISP_IN_RANGE (INTVAL (op1)))
3061 {
3062 HOST_WIDE_INT even = INTVAL (op1) - 1;
3063 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3064 op0 = gen_rtx_CONST (Pmode, op0);
3065 op1 = const1_rtx;
3066 }
3067
3068 emit_move_insn (temp, op0);
3069 new = gen_rtx_PLUS (Pmode, temp, op1);
3070
3071 if (reg != 0)
3072 {
3073 s390_load_address (reg, new);
3074 new = reg;
3075 }
3076 }
3077 else
3078 {
3079 /* If the offset is even, we can just use LARL.
3080 This will happen automatically. */
3081 }
3082 }
3083 else
3084 {
3085 /* Access local symbols relative to the GOT. */
3086
3087 rtx temp = reg? reg : gen_reg_rtx (Pmode);
3088
3089 if (reload_in_progress || reload_completed)
3090 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3091
3092 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3093 UNSPEC_GOTOFF);
3094 addr = gen_rtx_PLUS (Pmode, addr, op1);
3095 addr = gen_rtx_CONST (Pmode, addr);
3096 addr = force_const_mem (Pmode, addr);
3097 emit_move_insn (temp, addr);
3098
3099 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3100 if (reg != 0)
3101 {
3102 s390_load_address (reg, new);
3103 new = reg;
3104 }
3105 }
3106 }
3107
3108 /* Now, check whether it is a GOT relative symbol plus offset
3109 that was pulled out of the literal pool. Force it back in. */
3110
3111 else if (GET_CODE (op0) == UNSPEC
3112 && GET_CODE (op1) == CONST_INT
3113 && XINT (op0, 1) == UNSPEC_GOTOFF)
3114 {
3115 gcc_assert (XVECLEN (op0, 0) == 1);
3116
3117 new = force_const_mem (Pmode, orig);
3118 }
3119
3120 /* Otherwise, compute the sum. */
3121 else
3122 {
3123 base = legitimize_pic_address (XEXP (addr, 0), reg);
3124 new = legitimize_pic_address (XEXP (addr, 1),
3125 base == reg ? NULL_RTX : reg);
3126 if (GET_CODE (new) == CONST_INT)
3127 new = plus_constant (base, INTVAL (new));
3128 else
3129 {
3130 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3131 {
3132 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3133 new = XEXP (new, 1);
3134 }
3135 new = gen_rtx_PLUS (Pmode, base, new);
3136 }
3137
3138 if (GET_CODE (new) == CONST)
3139 new = XEXP (new, 0);
3140 new = force_operand (new, 0);
3141 }
3142 }
3143 }
3144 return new;
3145 }
3146
3147 /* Load the thread pointer into a register. */
3148
3149 rtx
3150 s390_get_thread_pointer (void)
3151 {
3152 rtx tp = gen_reg_rtx (Pmode);
3153
3154 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3155 mark_reg_pointer (tp, BITS_PER_WORD);
3156
3157 return tp;
3158 }
3159
3160 /* Emit a TLS call insn. The call target is the SYMBOL_REF stored
3161 in s390_tls_symbol, which always refers to __tls_get_offset.
3162 The returned offset is written to RESULT_REG and a USE rtx is
3163 generated for TLS_CALL. */
3164
3165 static GTY(()) rtx s390_tls_symbol;
3166
3167 static void
3168 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3169 {
3170 rtx insn;
3171
3172 gcc_assert (flag_pic);
3173
3174 if (!s390_tls_symbol)
3175 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3176
3177 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3178 gen_rtx_REG (Pmode, RETURN_REGNUM));
3179
3180 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3181 CONST_OR_PURE_CALL_P (insn) = 1;
3182 }
3183
3184 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3185 this (thread-local) address. REG may be used as temporary. */
3186
3187 static rtx
3188 legitimize_tls_address (rtx addr, rtx reg)
3189 {
3190 rtx new, tls_call, temp, base, r2, insn;
3191
3192 if (GET_CODE (addr) == SYMBOL_REF)
3193 switch (tls_symbolic_operand (addr))
3194 {
3195 case TLS_MODEL_GLOBAL_DYNAMIC:
3196 start_sequence ();
3197 r2 = gen_rtx_REG (Pmode, 2);
3198 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3199 new = gen_rtx_CONST (Pmode, tls_call);
3200 new = force_const_mem (Pmode, new);
3201 emit_move_insn (r2, new);
3202 s390_emit_tls_call_insn (r2, tls_call);
3203 insn = get_insns ();
3204 end_sequence ();
3205
3206 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3207 temp = gen_reg_rtx (Pmode);
3208 emit_libcall_block (insn, temp, r2, new);
3209
3210 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3211 if (reg != 0)
3212 {
3213 s390_load_address (reg, new);
3214 new = reg;
3215 }
3216 break;
3217
3218 case TLS_MODEL_LOCAL_DYNAMIC:
3219 start_sequence ();
3220 r2 = gen_rtx_REG (Pmode, 2);
3221 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3222 new = gen_rtx_CONST (Pmode, tls_call);
3223 new = force_const_mem (Pmode, new);
3224 emit_move_insn (r2, new);
3225 s390_emit_tls_call_insn (r2, tls_call);
3226 insn = get_insns ();
3227 end_sequence ();
3228
3229 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3230 temp = gen_reg_rtx (Pmode);
3231 emit_libcall_block (insn, temp, r2, new);
3232
3233 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3234 base = gen_reg_rtx (Pmode);
3235 s390_load_address (base, new);
3236
3237 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3238 new = gen_rtx_CONST (Pmode, new);
3239 new = force_const_mem (Pmode, new);
3240 temp = gen_reg_rtx (Pmode);
3241 emit_move_insn (temp, new);
3242
3243 new = gen_rtx_PLUS (Pmode, base, temp);
3244 if (reg != 0)
3245 {
3246 s390_load_address (reg, new);
3247 new = reg;
3248 }
3249 break;
3250
3251 case TLS_MODEL_INITIAL_EXEC:
3252 if (flag_pic == 1)
3253 {
3254 /* Assume GOT offset < 4k. This is handled the same way
3255 in both 31- and 64-bit code. */
3256
3257 if (reload_in_progress || reload_completed)
3258 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3259
3260 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3261 new = gen_rtx_CONST (Pmode, new);
3262 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3263 new = gen_const_mem (Pmode, new);
3264 temp = gen_reg_rtx (Pmode);
3265 emit_move_insn (temp, new);
3266 }
3267 else if (TARGET_CPU_ZARCH)
3268 {
3269 /* If the GOT offset might be >= 4k, we determine the position
3270 of the GOT entry via a PC-relative LARL. */
3271
3272 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3273 new = gen_rtx_CONST (Pmode, new);
3274 temp = gen_reg_rtx (Pmode);
3275 emit_move_insn (temp, new);
3276
3277 new = gen_const_mem (Pmode, temp);
3278 temp = gen_reg_rtx (Pmode);
3279 emit_move_insn (temp, new);
3280 }
3281 else if (flag_pic)
3282 {
3283 /* If the GOT offset might be >= 4k, we have to load it
3284 from the literal pool. */
3285
3286 if (reload_in_progress || reload_completed)
3287 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3288
3289 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3290 new = gen_rtx_CONST (Pmode, new);
3291 new = force_const_mem (Pmode, new);
3292 temp = gen_reg_rtx (Pmode);
3293 emit_move_insn (temp, new);
3294
3295 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3296 new = gen_const_mem (Pmode, new);
3297
3298 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3299 temp = gen_reg_rtx (Pmode);
3300 emit_insn (gen_rtx_SET (Pmode, temp, new));
3301 }
3302 else
3303 {
3304 /* In position-dependent code, load the absolute address of
3305 the GOT entry from the literal pool. */
3306
3307 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3308 new = gen_rtx_CONST (Pmode, new);
3309 new = force_const_mem (Pmode, new);
3310 temp = gen_reg_rtx (Pmode);
3311 emit_move_insn (temp, new);
3312
3313 new = temp;
3314 new = gen_const_mem (Pmode, new);
3315 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3316 temp = gen_reg_rtx (Pmode);
3317 emit_insn (gen_rtx_SET (Pmode, temp, new));
3318 }
3319
3320 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3321 if (reg != 0)
3322 {
3323 s390_load_address (reg, new);
3324 new = reg;
3325 }
3326 break;
3327
3328 case TLS_MODEL_LOCAL_EXEC:
3329 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3330 new = gen_rtx_CONST (Pmode, new);
3331 new = force_const_mem (Pmode, new);
3332 temp = gen_reg_rtx (Pmode);
3333 emit_move_insn (temp, new);
3334
3335 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3336 if (reg != 0)
3337 {
3338 s390_load_address (reg, new);
3339 new = reg;
3340 }
3341 break;
3342
3343 default:
3344 gcc_unreachable ();
3345 }
3346
3347 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3348 {
3349 switch (XINT (XEXP (addr, 0), 1))
3350 {
3351 case UNSPEC_INDNTPOFF:
3352 gcc_assert (TARGET_CPU_ZARCH);
3353 new = addr;
3354 break;
3355
3356 default:
3357 gcc_unreachable ();
3358 }
3359 }
3360
3361 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3362 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3363 {
3364 new = XEXP (XEXP (addr, 0), 0);
3365 if (GET_CODE (new) != SYMBOL_REF)
3366 new = gen_rtx_CONST (Pmode, new);
3367
3368 new = legitimize_tls_address (new, reg);
3369 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3370 new = force_operand (new, 0);
3371 }
3372
3373 else
3374 gcc_unreachable (); /* for now ... */
3375
3376 return new;
3377 }
3378
3379 /* Emit insns to move operands[1] into operands[0]. */
3380
3381 void
3382 emit_symbolic_move (rtx *operands)
3383 {
3384 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3385
3386 if (GET_CODE (operands[0]) == MEM)
3387 operands[1] = force_reg (Pmode, operands[1]);
3388 else if (TLS_SYMBOLIC_CONST (operands[1]))
3389 operands[1] = legitimize_tls_address (operands[1], temp);
3390 else if (flag_pic)
3391 operands[1] = legitimize_pic_address (operands[1], temp);
3392 }
3393
3394 /* Try machine-dependent ways of modifying an illegitimate address X
3395 to be legitimate. If we find one, return the new, valid address.
3396
3397 OLDX is the address as it was before break_out_memory_refs was called.
3398 In some cases it is useful to look at this to decide what needs to be done.
3399
3400 MODE is the mode of the operand pointed to by X.
3401
3402 When -fpic is used, special handling is needed for symbolic references.
3403 See comments by legitimize_pic_address for details. */
3404
3405 rtx
3406 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3407 enum machine_mode mode ATTRIBUTE_UNUSED)
3408 {
3409 rtx constant_term = const0_rtx;
3410
3411 if (TLS_SYMBOLIC_CONST (x))
3412 {
3413 x = legitimize_tls_address (x, 0);
3414
3415 if (legitimate_address_p (mode, x, FALSE))
3416 return x;
3417 }
3418 else if (GET_CODE (x) == PLUS
3419 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3420 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3421 {
3422 return x;
3423 }
3424 else if (flag_pic)
3425 {
3426 if (SYMBOLIC_CONST (x)
3427 || (GET_CODE (x) == PLUS
3428 && (SYMBOLIC_CONST (XEXP (x, 0))
3429 || SYMBOLIC_CONST (XEXP (x, 1)))))
3430 x = legitimize_pic_address (x, 0);
3431
3432 if (legitimate_address_p (mode, x, FALSE))
3433 return x;
3434 }
3435
3436 x = eliminate_constant_term (x, &constant_term);
3437
3438 /* Optimize loading of large displacements by splitting them
3439 into the multiple of 4K and the rest; this allows the
3440 former to be CSE'd if possible.
3441
3442 Don't do this if the displacement is added to a register
3443 pointing into the stack frame, as the offsets will
3444 change later anyway. */
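/* Editor's illustration (not from the original source): for a 12-bit
   displacement field the split below turns, e.g., 0x12345 into
   0x12000 + 0x345:

     long disp  = 0x12345;
     long lower = disp & 0xfff;     /* 0x345                     */
     long upper = disp ^ lower;     /* 0x12000, a multiple of 4K */

   UPPER is loaded into a register (and can be CSE'd), while LOWER stays
   within the displacement range.  */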
3445
3446 if (GET_CODE (constant_term) == CONST_INT
3447 && !TARGET_LONG_DISPLACEMENT
3448 && !DISP_IN_RANGE (INTVAL (constant_term))
3449 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3450 {
3451 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3452 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3453
3454 rtx temp = gen_reg_rtx (Pmode);
3455 rtx val = force_operand (GEN_INT (upper), temp);
3456 if (val != temp)
3457 emit_move_insn (temp, val);
3458
3459 x = gen_rtx_PLUS (Pmode, x, temp);
3460 constant_term = GEN_INT (lower);
3461 }
3462
3463 if (GET_CODE (x) == PLUS)
3464 {
3465 if (GET_CODE (XEXP (x, 0)) == REG)
3466 {
3467 rtx temp = gen_reg_rtx (Pmode);
3468 rtx val = force_operand (XEXP (x, 1), temp);
3469 if (val != temp)
3470 emit_move_insn (temp, val);
3471
3472 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3473 }
3474
3475 else if (GET_CODE (XEXP (x, 1)) == REG)
3476 {
3477 rtx temp = gen_reg_rtx (Pmode);
3478 rtx val = force_operand (XEXP (x, 0), temp);
3479 if (val != temp)
3480 emit_move_insn (temp, val);
3481
3482 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3483 }
3484 }
3485
3486 if (constant_term != const0_rtx)
3487 x = gen_rtx_PLUS (Pmode, x, constant_term);
3488
3489 return x;
3490 }
3491
3492 /* Try a machine-dependent way of reloading an illegitimate address AD
3493 operand. If we find one, push the reload and return the new address.
3494
3495 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3496 and TYPE is the reload type of the current reload. */
3497
3498 rtx
3499 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3500 int opnum, int type)
3501 {
3502 if (!optimize || TARGET_LONG_DISPLACEMENT)
3503 return NULL_RTX;
3504
3505 if (GET_CODE (ad) == PLUS)
3506 {
3507 rtx tem = simplify_binary_operation (PLUS, Pmode,
3508 XEXP (ad, 0), XEXP (ad, 1));
3509 if (tem)
3510 ad = tem;
3511 }
3512
3513 if (GET_CODE (ad) == PLUS
3514 && GET_CODE (XEXP (ad, 0)) == REG
3515 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3516 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3517 {
3518 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3519 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3520 rtx cst, tem, new;
3521
3522 cst = GEN_INT (upper);
3523 if (!legitimate_reload_constant_p (cst))
3524 cst = force_const_mem (Pmode, cst);
3525
3526 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3527 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3528
3529 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3530 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3531 opnum, (enum reload_type) type);
3532 return new;
3533 }
3534
3535 return NULL_RTX;
3536 }
3537
3538 /* Emit code to move LEN bytes from SRC to DST. */
3539
3540 void
3541 s390_expand_movmem (rtx dst, rtx src, rtx len)
3542 {
3543 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3544 {
3545 if (INTVAL (len) > 0)
3546 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3547 }
3548
3549 else if (TARGET_MVCLE)
3550 {
3551 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3552 }
3553
3554 else
3555 {
3556 rtx dst_addr, src_addr, count, blocks, temp;
3557 rtx loop_start_label = gen_label_rtx ();
3558 rtx loop_end_label = gen_label_rtx ();
3559 rtx end_label = gen_label_rtx ();
3560 enum machine_mode mode;
3561
3562 mode = GET_MODE (len);
3563 if (mode == VOIDmode)
3564 mode = Pmode;
3565
3566 dst_addr = gen_reg_rtx (Pmode);
3567 src_addr = gen_reg_rtx (Pmode);
3568 count = gen_reg_rtx (mode);
3569 blocks = gen_reg_rtx (mode);
3570
3571 convert_move (count, len, 1);
3572 emit_cmp_and_jump_insns (count, const0_rtx,
3573 EQ, NULL_RTX, mode, 1, end_label);
3574
3575 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3576 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3577 dst = change_address (dst, VOIDmode, dst_addr);
3578 src = change_address (src, VOIDmode, src_addr);
3579
3580 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3581 if (temp != count)
3582 emit_move_insn (count, temp);
3583
3584 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3585 if (temp != blocks)
3586 emit_move_insn (blocks, temp);
3587
3588 emit_cmp_and_jump_insns (blocks, const0_rtx,
3589 EQ, NULL_RTX, mode, 1, loop_end_label);
3590
3591 emit_label (loop_start_label);
3592
3593 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3594 s390_load_address (dst_addr,
3595 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3596 s390_load_address (src_addr,
3597 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3598
3599 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3600 if (temp != blocks)
3601 emit_move_insn (blocks, temp);
3602
3603 emit_cmp_and_jump_insns (blocks, const0_rtx,
3604 EQ, NULL_RTX, mode, 1, loop_end_label);
3605
3606 emit_jump (loop_start_label);
3607 emit_label (loop_end_label);
3608
3609 emit_insn (gen_movmem_short (dst, src,
3610 convert_to_mode (Pmode, count, 1)));
3611 emit_label (end_label);
3612 }
3613 }
3614
3615 /* Emit code to set LEN bytes at DST to VAL.
3616 Make use of clrmem if VAL is zero. */
3617
3618 void
3619 s390_expand_setmem (rtx dst, rtx len, rtx val)
3620 {
3621 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3622 return;
3623
3624 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3625
3626 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3627 {
3628 if (val == const0_rtx && INTVAL (len) <= 256)
3629 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3630 else
3631 {
3632 /* Initialize memory by storing the first byte. */
3633 emit_move_insn (adjust_address (dst, QImode, 0), val);
3634
3635 if (INTVAL (len) > 1)
3636 {
3637 /* Initiate a 1-byte overlap move (see the illustrative sketch after
3638 this function). The first byte of DST is propagated through DSTP1.
3639 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3640 DST is set to size 1 so the rest of the memory location
3641 does not count as a source operand. */
3642 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3643 set_mem_size (dst, const1_rtx);
3644
3645 emit_insn (gen_movmem_short (dstp1, dst,
3646 GEN_INT (INTVAL (len) - 2)));
3647 }
3648 }
3649 }
3650
3651 else if (TARGET_MVCLE)
3652 {
3653 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3654 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
3655 }
3656
3657 else
3658 {
3659 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
3660 rtx loop_start_label = gen_label_rtx ();
3661 rtx loop_end_label = gen_label_rtx ();
3662 rtx end_label = gen_label_rtx ();
3663 enum machine_mode mode;
3664
3665 mode = GET_MODE (len);
3666 if (mode == VOIDmode)
3667 mode = Pmode;
3668
3669 dst_addr = gen_reg_rtx (Pmode);
3670 src_addr = gen_reg_rtx (Pmode);
3671 count = gen_reg_rtx (mode);
3672 blocks = gen_reg_rtx (mode);
3673
3674 convert_move (count, len, 1);
3675 emit_cmp_and_jump_insns (count, const0_rtx,
3676 EQ, NULL_RTX, mode, 1, end_label);
3677
3678 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3679 dst = change_address (dst, VOIDmode, dst_addr);
3680
3681 if (val == const0_rtx)
3682 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3683 else
3684 {
3685 dstp1 = adjust_address (dst, VOIDmode, 1);
3686 set_mem_size (dst, const1_rtx);
3687
3688 /* Initialize memory by storing the first byte. */
3689 emit_move_insn (adjust_address (dst, QImode, 0), val);
3690
3691 /* If count is 1 we are done. */
3692 emit_cmp_and_jump_insns (count, const1_rtx,
3693 EQ, NULL_RTX, mode, 1, end_label);
3694
3695 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3696 }
3697 if (temp != count)
3698 emit_move_insn (count, temp);
3699
3700 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3701 if (temp != blocks)
3702 emit_move_insn (blocks, temp);
3703
3704 emit_cmp_and_jump_insns (blocks, const0_rtx,
3705 EQ, NULL_RTX, mode, 1, loop_end_label);
3706
3707 emit_label (loop_start_label);
3708
3709 if (val == const0_rtx)
3710 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3711 else
3712 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
3713 s390_load_address (dst_addr,
3714 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3715
3716 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3717 if (temp != blocks)
3718 emit_move_insn (blocks, temp);
3719
3720 emit_cmp_and_jump_insns (blocks, const0_rtx,
3721 EQ, NULL_RTX, mode, 1, loop_end_label);
3722
3723 emit_jump (loop_start_label);
3724 emit_label (loop_end_label);
3725
3726 if (val == const0_rtx)
3727 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3728 else
3729 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
3730 emit_label (end_label);
3731 }
3732 }
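/* Editor's sketch (not from the original source): why the 1-byte overlap
   move used in s390_expand_setmem fills memory with VAL -- a strictly
   left-to-right byte copy from DST to DST+1 keeps re-reading the byte it
   has just written, which is exactly how MVC behaves:

     #include <stddef.h>

     static void fill_by_overlap (unsigned char *dst, unsigned char val,
                                  size_t len)
     {
       size_t i;

       dst[0] = val;                  /* seed the first byte     */
       for (i = 0; i + 1 < len; i++)
         dst[i + 1] = dst[i];         /* propagate it, MVC-style */
     }
 */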
3733
3734 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3735 and return the result in TARGET. */
3736
3737 void
3738 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3739 {
3740 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3741 rtx tmp;
3742
3743 /* As the result of CMPINT is inverted compared to what we need,
3744 we have to swap the operands. */
3745 tmp = op0; op0 = op1; op1 = tmp;
3746
3747 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3748 {
3749 if (INTVAL (len) > 0)
3750 {
3751 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3752 emit_insn (gen_cmpint (target, ccreg));
3753 }
3754 else
3755 emit_move_insn (target, const0_rtx);
3756 }
3757 else if (TARGET_MVCLE)
3758 {
3759 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3760 emit_insn (gen_cmpint (target, ccreg));
3761 }
3762 else
3763 {
3764 rtx addr0, addr1, count, blocks, temp;
3765 rtx loop_start_label = gen_label_rtx ();
3766 rtx loop_end_label = gen_label_rtx ();
3767 rtx end_label = gen_label_rtx ();
3768 enum machine_mode mode;
3769
3770 mode = GET_MODE (len);
3771 if (mode == VOIDmode)
3772 mode = Pmode;
3773
3774 addr0 = gen_reg_rtx (Pmode);
3775 addr1 = gen_reg_rtx (Pmode);
3776 count = gen_reg_rtx (mode);
3777 blocks = gen_reg_rtx (mode);
3778
3779 convert_move (count, len, 1);
3780 emit_cmp_and_jump_insns (count, const0_rtx,
3781 EQ, NULL_RTX, mode, 1, end_label);
3782
3783 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3784 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3785 op0 = change_address (op0, VOIDmode, addr0);
3786 op1 = change_address (op1, VOIDmode, addr1);
3787
3788 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3789 if (temp != count)
3790 emit_move_insn (count, temp);
3791
3792 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0);
3793 if (temp != blocks)
3794 emit_move_insn (blocks, temp);
3795
3796 emit_cmp_and_jump_insns (blocks, const0_rtx,
3797 EQ, NULL_RTX, mode, 1, loop_end_label);
3798
3799 emit_label (loop_start_label);
3800
3801 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3802 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3803 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3804 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3805 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3806 emit_jump_insn (temp);
3807
3808 s390_load_address (addr0,
3809 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3810 s390_load_address (addr1,
3811 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3812
3813 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3814 if (temp != blocks)
3815 emit_move_insn (blocks, temp);
3816
3817 emit_cmp_and_jump_insns (blocks, const0_rtx,
3818 EQ, NULL_RTX, mode, 1, loop_end_label);
3819
3820 emit_jump (loop_start_label);
3821 emit_label (loop_end_label);
3822
3823 emit_insn (gen_cmpmem_short (op0, op1,
3824 convert_to_mode (Pmode, count, 1)));
3825 emit_label (end_label);
3826
3827 emit_insn (gen_cmpint (target, ccreg));
3828 }
3829 }
3830
3831
3832 /* Expand conditional increment or decrement using alc/slb instructions.
3833 Should generate code setting DST to either SRC or SRC + INCREMENT,
3834 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3835 Returns true if successful, false otherwise.
3836
3837 That makes it possible to implement some if-constructs without jumps e.g.:
3838 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3839 unsigned int a, b, c;
3840 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3841 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3842 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3843 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3844
3845 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3846 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3847 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3848 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3849 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
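/* Editor's illustration (not from the original source): the patterns
   above written branch-free in plain C; the comparison result plays the
   role of the carry/borrow that alc/slb materialize:

     #include <stdint.h>

     static uint32_t cond_inc (uint32_t a, uint32_t b, uint32_t c)
     {
       return c + (a < b);      /* if (a < b) c++;  */
     }

     static uint32_t cond_dec (uint32_t a, uint32_t b, uint32_t c)
     {
       return c - (a < b);      /* if (a < b) c--;  */
     }
 */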
3850
3851 bool
3852 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3853 rtx dst, rtx src, rtx increment)
3854 {
3855 enum machine_mode cmp_mode;
3856 enum machine_mode cc_mode;
3857 rtx op_res;
3858 rtx insn;
3859 rtvec p;
3860 int ret;
3861
3862 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3863 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3864 cmp_mode = SImode;
3865 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3866 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3867 cmp_mode = DImode;
3868 else
3869 return false;
3870
3871 /* Try ADD LOGICAL WITH CARRY. */
3872 if (increment == const1_rtx)
3873 {
3874 /* Determine CC mode to use. */
3875 if (cmp_code == EQ || cmp_code == NE)
3876 {
3877 if (cmp_op1 != const0_rtx)
3878 {
3879 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3880 NULL_RTX, 0, OPTAB_WIDEN);
3881 cmp_op1 = const0_rtx;
3882 }
3883
3884 cmp_code = cmp_code == EQ ? LEU : GTU;
3885 }
3886
3887 if (cmp_code == LTU || cmp_code == LEU)
3888 {
3889 rtx tem = cmp_op0;
3890 cmp_op0 = cmp_op1;
3891 cmp_op1 = tem;
3892 cmp_code = swap_condition (cmp_code);
3893 }
3894
3895 switch (cmp_code)
3896 {
3897 case GTU:
3898 cc_mode = CCUmode;
3899 break;
3900
3901 case GEU:
3902 cc_mode = CCL3mode;
3903 break;
3904
3905 default:
3906 return false;
3907 }
3908
3909 /* Emit comparison instruction pattern. */
3910 if (!register_operand (cmp_op0, cmp_mode))
3911 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3912
3913 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3914 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3915 /* We use insn_invalid_p here to add clobbers if required. */
3916 ret = insn_invalid_p (emit_insn (insn));
3917 gcc_assert (!ret);
3918
3919 /* Emit ALC instruction pattern. */
3920 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3921 gen_rtx_REG (cc_mode, CC_REGNUM),
3922 const0_rtx);
3923
3924 if (src != const0_rtx)
3925 {
3926 if (!register_operand (src, GET_MODE (dst)))
3927 src = force_reg (GET_MODE (dst), src);
3928
3929 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
3930 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
3931 }
3932
3933 p = rtvec_alloc (2);
3934 RTVEC_ELT (p, 0) =
3935 gen_rtx_SET (VOIDmode, dst, op_res);
3936 RTVEC_ELT (p, 1) =
3937 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3938 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3939
3940 return true;
3941 }
3942
3943 /* Try SUBTRACT LOGICAL WITH BORROW. */
3944 if (increment == constm1_rtx)
3945 {
3946 /* Determine CC mode to use. */
3947 if (cmp_code == EQ || cmp_code == NE)
3948 {
3949 if (cmp_op1 != const0_rtx)
3950 {
3951 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3952 NULL_RTX, 0, OPTAB_WIDEN);
3953 cmp_op1 = const0_rtx;
3954 }
3955
3956 cmp_code = cmp_code == EQ ? LEU : GTU;
3957 }
3958
3959 if (cmp_code == GTU || cmp_code == GEU)
3960 {
3961 rtx tem = cmp_op0;
3962 cmp_op0 = cmp_op1;
3963 cmp_op1 = tem;
3964 cmp_code = swap_condition (cmp_code);
3965 }
3966
3967 switch (cmp_code)
3968 {
3969 case LEU:
3970 cc_mode = CCUmode;
3971 break;
3972
3973 case LTU:
3974 cc_mode = CCL3mode;
3975 break;
3976
3977 default:
3978 return false;
3979 }
3980
3981 /* Emit comparison instruction pattern. */
3982 if (!register_operand (cmp_op0, cmp_mode))
3983 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3984
3985 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3986 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3987 /* We use insn_invalid_p here to add clobbers if required. */
3988 ret = insn_invalid_p (emit_insn (insn));
3989 gcc_assert (!ret);
3990
3991 /* Emit SLB instruction pattern. */
3992 if (!register_operand (src, GET_MODE (dst)))
3993 src = force_reg (GET_MODE (dst), src);
3994
3995 op_res = gen_rtx_MINUS (GET_MODE (dst),
3996 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3997 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3998 gen_rtx_REG (cc_mode, CC_REGNUM),
3999 const0_rtx));
4000 p = rtvec_alloc (2);
4001 RTVEC_ELT (p, 0) =
4002 gen_rtx_SET (VOIDmode, dst, op_res);
4003 RTVEC_ELT (p, 1) =
4004 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4005 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4006
4007 return true;
4008 }
4009
4010 return false;
4011 }
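
/* Purely illustrative sketch of the if-conversion implemented by
   s390_expand_addcc above; it is not part of the backend and the function
   names below are hypothetical.  The actual instruction selection
   (ALC/SLB and friends) is done by the matching patterns in s390.md.  */
#if 0
static unsigned int
cond_inc_sketch (unsigned int a, unsigned int b, unsigned int c)
{
  /* "if (a < b) c++;" becomes the branch-free form below; on s390 the
     (a < b) term is never materialized in a GPR but is taken straight
     from the carry produced by an unsigned compare and consumed by an
     add-logical-with-carry instruction.  */
  return c + (a < b);
}

static unsigned int
cond_dec_sketch (unsigned int a, unsigned int b, unsigned int c)
{
  /* Likewise, "if (a < b) c--;" uses the borrow of an unsigned subtract
     consumed by subtract-logical-with-borrow.  */
  return c - (a < b);
}
#endif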
4012
4013 /* Expand code for the insv template. Return true if successful, false otherwise. */
4014
4015 bool
4016 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4017 {
4018 int bitsize = INTVAL (op1);
4019 int bitpos = INTVAL (op2);
4020
4021 /* We need byte alignment. */
4022 if (bitsize % BITS_PER_UNIT)
4023 return false;
4024
4025 if (bitpos == 0
4026 && memory_operand (dest, VOIDmode)
4027 && (register_operand (src, word_mode)
4028 || const_int_operand (src, VOIDmode)))
4029 {
4030 /* Emit standard pattern if possible. */
4031 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4032 if (GET_MODE_BITSIZE (mode) == bitsize)
4033 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4034
4035 /* (set (ze (mem)) (const_int)). */
4036 else if (const_int_operand (src, VOIDmode))
4037 {
4038 int size = bitsize / BITS_PER_UNIT;
4039 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4040 GET_MODE_SIZE (word_mode) - size);
4041
4042 dest = adjust_address (dest, BLKmode, 0);
4043 set_mem_size (dest, GEN_INT (size));
4044 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4045 }
4046
4047 /* (set (ze (mem)) (reg)). */
4048 else if (register_operand (src, word_mode))
4049 {
4050 if (bitsize <= GET_MODE_BITSIZE (SImode))
4051 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4052 const0_rtx), src);
4053 else
4054 {
4055 /* Emit st,stcmh sequence. */
4056 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4057 int size = stcmh_width / BITS_PER_UNIT;
4058
4059 emit_move_insn (adjust_address (dest, SImode, size),
4060 gen_lowpart (SImode, src));
4061 set_mem_size (dest, GEN_INT (size));
4062 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4063 (stcmh_width), const0_rtx),
4064 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4065 (GET_MODE_BITSIZE (SImode))));
4066 }
4067 }
4068 else
4069 return false;
4070
4071 return true;
4072 }
4073
4074 /* (set (ze (reg)) (const_int)). */
4075 if (TARGET_ZARCH
4076 && register_operand (dest, word_mode)
4077 && (bitpos % 16) == 0
4078 && (bitsize % 16) == 0
4079 && const_int_operand (src, VOIDmode))
4080 {
4081 HOST_WIDE_INT val = INTVAL (src);
4082 int regpos = bitpos + bitsize;
4083
4084 while (regpos > bitpos)
4085 {
4086 enum machine_mode putmode;
4087 int putsize;
4088
4089 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4090 putmode = SImode;
4091 else
4092 putmode = HImode;
4093
4094 putsize = GET_MODE_BITSIZE (putmode);
4095 regpos -= putsize;
4096 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4097 GEN_INT (putsize),
4098 GEN_INT (regpos)),
4099 gen_int_mode (val, putmode));
4100 val >>= putsize;
4101 }
4102 gcc_assert (regpos == bitpos);
4103 return true;
4104 }
4105
4106 return false;
4107 }
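
/* A worked example for the register case above (hedged, since the exact
   insn patterns chosen depend on the target flags): with TARGET_ZARCH and
   TARGET_EXTIMM, inserting a 48-bit constant at a 16-bit aligned position
   is emitted as one 32-bit insert-immediate chunk followed by one 16-bit
   chunk -- the loop peels SImode-sized pieces while the remaining width
   allows it and falls back to HImode pieces otherwise.  */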
4108
4109 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4110 register that holds VAL of mode MODE shifted by COUNT bits. */
4111
4112 static inline rtx
4113 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4114 {
4115 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4116 NULL_RTX, 1, OPTAB_DIRECT);
4117 return expand_simple_binop (SImode, ASHIFT, val, count,
4118 NULL_RTX, 1, OPTAB_DIRECT);
4119 }
4120
4121 /* Structure to hold the initial parameters for a compare_and_swap operation
4122 in HImode and QImode. */
4123
4124 struct alignment_context
4125 {
4126 rtx memsi; /* SI aligned memory location. */
4127 rtx shift; /* Bit offset with regard to lsb. */
4128 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4129 rtx modemaski; /* ~modemask */
4130 bool aligned; /* True if memory is aligned, false otherwise. */
4131 };
4132
4133 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4134 the structure AC so that the operation can be simplified transparently when
4135 the memory alignment is known to be at least 32 bits. MEM is the memory
4136 location for the actual operation and MODE its mode. */
4137
4138 static void
4139 init_alignment_context (struct alignment_context *ac, rtx mem,
4140 enum machine_mode mode)
4141 {
4142 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4143 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4144
4145 if (ac->aligned)
4146 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4147 else
4148 {
4149 /* Alignment is unknown. */
4150 rtx byteoffset, addr, align;
4151
4152 /* Force the address into a register. */
4153 addr = force_reg (Pmode, XEXP (mem, 0));
4154
4155 /* Align it to SImode. */
4156 align = expand_simple_binop (Pmode, AND, addr,
4157 GEN_INT (-GET_MODE_SIZE (SImode)),
4158 NULL_RTX, 1, OPTAB_DIRECT);
4159 /* Generate MEM. */
4160 ac->memsi = gen_rtx_MEM (SImode, align);
4161 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4162 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4163 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4164
4165 /* Calculate shiftcount. */
4166 byteoffset = expand_simple_binop (Pmode, AND, addr,
4167 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4168 NULL_RTX, 1, OPTAB_DIRECT);
4169 /* As we already have some offset, evaluate the remaining distance. */
4170 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4171 NULL_RTX, 1, OPTAB_DIRECT);
4172
4173 }
4174 /* Shift is the byte count, but we need the bitcount. */
4175 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4176 NULL_RTX, 1, OPTAB_DIRECT);
4177 /* Calculate masks. */
4178 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4179 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4180 NULL_RTX, 1, OPTAB_DIRECT);
4181 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4182 }
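
/* Worked example for the unaligned path above (illustrative only): for a
   HImode access whose address ends up at byte offset 2 within its
   containing word, the byte distance is (4 - 2) - 2 = 0, so SHIFT becomes
   0 bits and MODEMASK is 0x0000ffff; at byte offset 0 the distance is
   (4 - 2) - 0 = 2 bytes, i.e. SHIFT = 16 and MODEMASK = 0xffff0000,
   matching the big-endian layout of the halfword within the SImode
   word.  */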
4183
4184 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4185 the memory location, CMP the old value to compare MEM with and NEW the value
4186 to set if CMP == MEM.
4187 CMP is never in memory for compare_and_swap_cc because
4188 expand_bool_compare_and_swap puts it into a register for later compare. */
4189
4190 void
4191 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new)
4192 {
4193 struct alignment_context ac;
4194 rtx cmpv, newv, val, resv, cc;
4195 rtx res = gen_reg_rtx (SImode);
4196 rtx csloop = gen_label_rtx ();
4197 rtx csend = gen_label_rtx ();
4198
4199 gcc_assert (register_operand (target, VOIDmode));
4200 gcc_assert (MEM_P (mem));
4201
4202 init_alignment_context (&ac, mem, mode);
4203
4204 /* Shift the values to the correct bit positions. */
4205 if (!(ac.aligned && MEM_P (cmp)))
4206 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4207 if (!(ac.aligned && MEM_P (new)))
4208 new = s390_expand_mask_and_shift (new, mode, ac.shift);
4209
4210 /* Load full word. Subsequent loads are performed by CS. */
4211 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4212 NULL_RTX, 1, OPTAB_DIRECT);
4213
4214 /* Start CS loop. */
4215 emit_label (csloop);
4216 /* val = "<mem>00..0<mem>"
4217 * cmp = "00..0<cmp>00..0"
4218 * new = "00..0<new>00..0"
4219 */
4220
4221 /* Patch cmp and new with val at correct position. */
4222 if (ac.aligned && MEM_P (cmp))
4223 {
4224 cmpv = force_reg (SImode, val);
4225 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4226 }
4227 else
4228 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4229 NULL_RTX, 1, OPTAB_DIRECT));
4230 if (ac.aligned && MEM_P (new))
4231 {
4232 newv = force_reg (SImode, val);
4233 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new);
4234 }
4235 else
4236 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new, val,
4237 NULL_RTX, 1, OPTAB_DIRECT));
4238
4239 /* Jump to end if we're done (likely?). */
4240 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4241 cmpv, newv));
4242
4243 /* Check for changes outside mode. */
4244 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4245 NULL_RTX, 1, OPTAB_DIRECT);
4246 cc = s390_emit_compare (NE, resv, val);
4247 emit_move_insn (val, resv);
4248 /* If so, loop internally and retry. */
4249 s390_emit_jump (csloop, cc);
4250
4251 emit_label (csend);
4252
4253 /* Return the correct part of the bitfield. */
4254 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4255 NULL_RTX, 1, OPTAB_DIRECT), 1);
4256 }
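
/* Purely illustrative C-level sketch of the loop expanded above; it is not
   part of the backend, compare_and_swap_word is a hypothetical wrapper for
   the CS instruction, and MASK/SHIFT correspond to the alignment_context
   computed by init_alignment_context.  */
#if 0
static unsigned short
cs_hqi_sketch (unsigned int *word, unsigned int shift, unsigned int mask,
               unsigned short cmp, unsigned short new_val)
{
  unsigned int val = *word & ~mask;     /* bytes outside MODE */
  unsigned int cmpv, newv, res;

  for (;;)
    {
      cmpv = val | ((unsigned int) cmp << shift);
      newv = val | ((unsigned int) new_val << shift);
      res = compare_and_swap_word (word, cmpv, newv);
      if (res == cmpv)
        break;                          /* swap succeeded */
      if ((res & ~mask) == val)
        break;                          /* the field itself differed */
      val = res & ~mask;                /* only surrounding bytes changed;
                                           retry with the new contents */
    }
  return (unsigned short) ((res & mask) >> shift);
}
#endif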
4257
4258 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4259 and VAL the value to play with. If AFTER is true then store the value
4260 MEM holds after the operation, if AFTER is false then store the value MEM
4261 holds before the operation. If TARGET is zero then discard that value, else
4262 store it to TARGET. */
4263
4264 void
4265 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4266 rtx target, rtx mem, rtx val, bool after)
4267 {
4268 struct alignment_context ac;
4269 rtx cmp;
4270 rtx new = gen_reg_rtx (SImode);
4271 rtx orig = gen_reg_rtx (SImode);
4272 rtx csloop = gen_label_rtx ();
4273
4274 gcc_assert (!target || register_operand (target, VOIDmode));
4275 gcc_assert (MEM_P (mem));
4276
4277 init_alignment_context (&ac, mem, mode);
4278
4279 /* Shift val to the correct bit positions.
4280 Preserve "icm", but prevent "ex icm". */
4281 if (!(ac.aligned && code == SET && MEM_P (val)))
4282 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4283
4284 /* Further preparation insns. */
4285 if (code == PLUS || code == MINUS)
4286 emit_move_insn (orig, val);
4287 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4288 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4289 NULL_RTX, 1, OPTAB_DIRECT);
4290
4291 /* Load full word. Subsequent loads are performed by CS. */
4292 cmp = force_reg (SImode, ac.memsi);
4293
4294 /* Start CS loop. */
4295 emit_label (csloop);
4296 emit_move_insn (new, cmp);
4297
4298 /* Patch new with val at correct position. */
4299 switch (code)
4300 {
4301 case PLUS:
4302 case MINUS:
4303 val = expand_simple_binop (SImode, code, new, orig,
4304 NULL_RTX, 1, OPTAB_DIRECT);
4305 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4306 NULL_RTX, 1, OPTAB_DIRECT);
4307 /* FALLTHRU */
4308 case SET:
4309 if (ac.aligned && MEM_P (val))
4310 store_bit_field (new, GET_MODE_BITSIZE (mode), 0, SImode, val);
4311 else
4312 {
4313 new = expand_simple_binop (SImode, AND, new, ac.modemaski,
4314 NULL_RTX, 1, OPTAB_DIRECT);
4315 new = expand_simple_binop (SImode, IOR, new, val,
4316 NULL_RTX, 1, OPTAB_DIRECT);
4317 }
4318 break;
4319 case AND:
4320 case IOR:
4321 case XOR:
4322 new = expand_simple_binop (SImode, code, new, val,
4323 NULL_RTX, 1, OPTAB_DIRECT);
4324 break;
4325 case MULT: /* NAND */
4326 new = expand_simple_binop (SImode, XOR, new, ac.modemask,
4327 NULL_RTX, 1, OPTAB_DIRECT);
4328 new = expand_simple_binop (SImode, AND, new, val,
4329 NULL_RTX, 1, OPTAB_DIRECT);
4330 break;
4331 default:
4332 gcc_unreachable ();
4333 }
4334
4335 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4336 ac.memsi, cmp, new));
4337
4338 /* Return the correct part of the bitfield. */
4339 if (target)
4340 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4341 after ? new : cmp, ac.shift,
4342 NULL_RTX, 1, OPTAB_DIRECT), 1);
4343 }
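
/* Illustrative note (not authoritative): for the common cases this routine
   implements the usual __sync_* semantics on a sub-word field, e.g.
   CODE == PLUS with AFTER == false roughly corresponds to
   __sync_fetch_and_add on a HImode/QImode object, while AFTER == true
   corresponds to the *_and_fetch variant; MULT is used here purely as an
   encoding for NAND.  */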
4344
4345 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4346 We need to emit DTP-relative relocations. */
4347
4348 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4349
4350 static void
4351 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4352 {
4353 switch (size)
4354 {
4355 case 4:
4356 fputs ("\t.long\t", file);
4357 break;
4358 case 8:
4359 fputs ("\t.quad\t", file);
4360 break;
4361 default:
4362 gcc_unreachable ();
4363 }
4364 output_addr_const (file, x);
4365 fputs ("@DTPOFF", file);
4366 }
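
/* For example, with SIZE == 8 and X referring to a symbol `foo', the
   routine above emits "\t.quad\tfoo@DTPOFF".  */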
4367
4368 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4369 /* Implement TARGET_MANGLE_TYPE. */
4370
4371 static const char *
4372 s390_mangle_type (const_tree type)
4373 {
4374 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4375 && TARGET_LONG_DOUBLE_128)
4376 return "g";
4377
4378 /* For all other types, use normal C++ mangling. */
4379 return NULL;
4380 }
4381 #endif
4382
4383 /* In the name of slightly smaller debug output, and to cater to
4384 general assembler lossage, recognize various UNSPEC sequences
4385 and turn them back into a direct symbol reference. */
4386
4387 static rtx
4388 s390_delegitimize_address (rtx orig_x)
4389 {
4390 rtx x = orig_x, y;
4391
4392 if (GET_CODE (x) != MEM)
4393 return orig_x;
4394
4395 x = XEXP (x, 0);
4396 if (GET_CODE (x) == PLUS
4397 && GET_CODE (XEXP (x, 1)) == CONST
4398 && GET_CODE (XEXP (x, 0)) == REG
4399 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4400 {
4401 y = XEXP (XEXP (x, 1), 0);
4402 if (GET_CODE (y) == UNSPEC
4403 && XINT (y, 1) == UNSPEC_GOT)
4404 return XVECEXP (y, 0, 0);
4405 return orig_x;
4406 }
4407
4408 if (GET_CODE (x) == CONST)
4409 {
4410 y = XEXP (x, 0);
4411 if (GET_CODE (y) == UNSPEC
4412 && XINT (y, 1) == UNSPEC_GOTENT)
4413 return XVECEXP (y, 0, 0);
4414 return orig_x;
4415 }
4416
4417 return orig_x;
4418 }
4419
4420 /* Output operand OP to stdio stream FILE.
4421 OP is an address (register + offset) which is not used to address data;
4422 instead the rightmost bits are interpreted as the value. */
4423
4424 static void
4425 print_shift_count_operand (FILE *file, rtx op)
4426 {
4427 HOST_WIDE_INT offset;
4428 rtx base;
4429
4430 /* Extract base register and offset. */
4431 if (!s390_decompose_shift_count (op, &base, &offset))
4432 gcc_unreachable ();
4433
4434 /* Sanity check. */
4435 if (base)
4436 {
4437 gcc_assert (GET_CODE (base) == REG);
4438 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4439 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4440 }
4441
4442 /* Offsets are restricted to twelve bits. */
4443 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4444 if (base)
4445 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4446 }
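
/* For example (illustrative), an operand of the form
   (plus (reg %r3) (const_int 4100)) is printed as "4(%r3)", since only
   the low twelve bits of the offset are significant here.  */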
4447
4448 /* See 'get_some_local_dynamic_name'. */
4449
4450 static int
4451 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4452 {
4453 rtx x = *px;
4454
4455 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4456 {
4457 x = get_pool_constant (x);
4458 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4459 }
4460
4461 if (GET_CODE (x) == SYMBOL_REF
4462 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4463 {
4464 cfun->machine->some_ld_name = XSTR (x, 0);
4465 return 1;
4466 }
4467
4468 return 0;
4469 }
4470
4471 /* Locate some local-dynamic symbol still in use by this function
4472 so that we can print its name in local-dynamic base patterns. */
4473
4474 static const char *
4475 get_some_local_dynamic_name (void)
4476 {
4477 rtx insn;
4478
4479 if (cfun->machine->some_ld_name)
4480 return cfun->machine->some_ld_name;
4481
4482 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4483 if (INSN_P (insn)
4484 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4485 return cfun->machine->some_ld_name;
4486
4487 gcc_unreachable ();
4488 }
4489
4490 /* Output machine-dependent UNSPECs occurring in address constant X
4491 in assembler syntax to stdio stream FILE. Returns true if the
4492 constant X could be recognized, false otherwise. */
4493
4494 bool
4495 s390_output_addr_const_extra (FILE *file, rtx x)
4496 {
4497 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4498 switch (XINT (x, 1))
4499 {
4500 case UNSPEC_GOTENT:
4501 output_addr_const (file, XVECEXP (x, 0, 0));
4502 fprintf (file, "@GOTENT");
4503 return true;
4504 case UNSPEC_GOT:
4505 output_addr_const (file, XVECEXP (x, 0, 0));
4506 fprintf (file, "@GOT");
4507 return true;
4508 case UNSPEC_GOTOFF:
4509 output_addr_const (file, XVECEXP (x, 0, 0));
4510 fprintf (file, "@GOTOFF");
4511 return true;
4512 case UNSPEC_PLT:
4513 output_addr_const (file, XVECEXP (x, 0, 0));
4514 fprintf (file, "@PLT");
4515 return true;
4516 case UNSPEC_PLTOFF:
4517 output_addr_const (file, XVECEXP (x, 0, 0));
4518 fprintf (file, "@PLTOFF");
4519 return true;
4520 case UNSPEC_TLSGD:
4521 output_addr_const (file, XVECEXP (x, 0, 0));
4522 fprintf (file, "@TLSGD");
4523 return true;
4524 case UNSPEC_TLSLDM:
4525 assemble_name (file, get_some_local_dynamic_name ());
4526 fprintf (file, "@TLSLDM");
4527 return true;
4528 case UNSPEC_DTPOFF:
4529 output_addr_const (file, XVECEXP (x, 0, 0));
4530 fprintf (file, "@DTPOFF");
4531 return true;
4532 case UNSPEC_NTPOFF:
4533 output_addr_const (file, XVECEXP (x, 0, 0));
4534 fprintf (file, "@NTPOFF");
4535 return true;
4536 case UNSPEC_GOTNTPOFF:
4537 output_addr_const (file, XVECEXP (x, 0, 0));
4538 fprintf (file, "@GOTNTPOFF");
4539 return true;
4540 case UNSPEC_INDNTPOFF:
4541 output_addr_const (file, XVECEXP (x, 0, 0));
4542 fprintf (file, "@INDNTPOFF");
4543 return true;
4544 }
4545
4546 return false;
4547 }
4548
4549 /* Output address operand ADDR in assembler syntax to
4550 stdio stream FILE. */
4551
4552 void
4553 print_operand_address (FILE *file, rtx addr)
4554 {
4555 struct s390_address ad;
4556
4557 if (!s390_decompose_address (addr, &ad)
4558 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4559 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4560 output_operand_lossage ("cannot decompose address");
4561
4562 if (ad.disp)
4563 output_addr_const (file, ad.disp);
4564 else
4565 fprintf (file, "0");
4566
4567 if (ad.base && ad.indx)
4568 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4569 reg_names[REGNO (ad.base)]);
4570 else if (ad.base)
4571 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4572 }
4573
4574 /* Output operand X in assembler syntax to stdio stream FILE.
4575 CODE specifies the format flag. The following format flags
4576 are recognized:
4577
4578 'C': print opcode suffix for branch condition.
4579 'D': print opcode suffix for inverse branch condition.
4580 'J': print tls_load/tls_gdcall/tls_ldcall suffix.
4581 'G': print the size of the operand in bytes.
4582 'O': print only the displacement of a memory reference.
4583 'R': print only the base register of a memory reference.
4584 'S': print S-type memory reference (base+displacement).
4585 'N': print the second word of a DImode operand.
4586 'M': print the second word of a TImode operand.
4587 'Y': print shift count operand.
4588
4589 'b': print integer X as if it's an unsigned byte.
4590 'x': print integer X as if it's an unsigned halfword.
4591 'h': print integer X as if it's a signed halfword.
4592 'i': print the first nonzero HImode part of X.
4593 'j': print the first HImode part unequal to -1 of X.
4594 'k': print the first nonzero SImode part of X.
4595 'm': print the first SImode part unequal to -1 of X.
4596 'o': print integer X as if it's an unsigned 32-bit word. */
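
/* As a worked example of the flags above: for a CONST_INT X with value
   0xfffe, 'h' prints ((0xfffe & 0xffff) ^ 0x8000) - 0x8000 = -2, while
   'x' prints the unsigned value 65534.  */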
4597
4598 void
4599 print_operand (FILE *file, rtx x, int code)
4600 {
4601 switch (code)
4602 {
4603 case 'C':
4604 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4605 return;
4606
4607 case 'D':
4608 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4609 return;
4610
4611 case 'J':
4612 if (GET_CODE (x) == SYMBOL_REF)
4613 {
4614 fprintf (file, "%s", ":tls_load:");
4615 output_addr_const (file, x);
4616 }
4617 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4618 {
4619 fprintf (file, "%s", ":tls_gdcall:");
4620 output_addr_const (file, XVECEXP (x, 0, 0));
4621 }
4622 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4623 {
4624 fprintf (file, "%s", ":tls_ldcall:");
4625 assemble_name (file, get_some_local_dynamic_name ());
4626 }
4627 else
4628 gcc_unreachable ();
4629 return;
4630
4631 case 'G':
4632 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
4633 return;
4634
4635 case 'O':
4636 {
4637 struct s390_address ad;
4638 int ret;
4639
4640 gcc_assert (GET_CODE (x) == MEM);
4641 ret = s390_decompose_address (XEXP (x, 0), &ad);
4642 gcc_assert (ret);
4643 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4644 gcc_assert (!ad.indx);
4645
4646 if (ad.disp)
4647 output_addr_const (file, ad.disp);
4648 else
4649 fprintf (file, "0");
4650 }
4651 return;
4652
4653 case 'R':
4654 {
4655 struct s390_address ad;
4656 int ret;
4657
4658 gcc_assert (GET_CODE (x) == MEM);
4659 ret = s390_decompose_address (XEXP (x, 0), &ad);
4660 gcc_assert (ret);
4661 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4662 gcc_assert (!ad.indx);
4663
4664 if (ad.base)
4665 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4666 else
4667 fprintf (file, "0");
4668 }
4669 return;
4670
4671 case 'S':
4672 {
4673 struct s390_address ad;
4674 int ret;
4675
4676 gcc_assert (GET_CODE (x) == MEM);
4677 ret = s390_decompose_address (XEXP (x, 0), &ad);
4678 gcc_assert (ret);
4679 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
4680 gcc_assert (!ad.indx);
4681
4682 if (ad.disp)
4683 output_addr_const (file, ad.disp);
4684 else
4685 fprintf (file, "0");
4686
4687 if (ad.base)
4688 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4689 }
4690 return;
4691
4692 case 'N':
4693 if (GET_CODE (x) == REG)
4694 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4695 else if (GET_CODE (x) == MEM)
4696 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4697 else
4698 gcc_unreachable ();
4699 break;
4700
4701 case 'M':
4702 if (GET_CODE (x) == REG)
4703 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4704 else if (GET_CODE (x) == MEM)
4705 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4706 else
4707 gcc_unreachable ();
4708 break;
4709
4710 case 'Y':
4711 print_shift_count_operand (file, x);
4712 return;
4713 }
4714
4715 switch (GET_CODE (x))
4716 {
4717 case REG:
4718 fprintf (file, "%s", reg_names[REGNO (x)]);
4719 break;
4720
4721 case MEM:
4722 output_address (XEXP (x, 0));
4723 break;
4724
4725 case CONST:
4726 case CODE_LABEL:
4727 case LABEL_REF:
4728 case SYMBOL_REF:
4729 output_addr_const (file, x);
4730 break;
4731
4732 case CONST_INT:
4733 if (code == 'b')
4734 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4735 else if (code == 'x')
4736 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4737 else if (code == 'h')
4738 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4739 else if (code == 'i')
4740 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4741 s390_extract_part (x, HImode, 0));
4742 else if (code == 'j')
4743 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4744 s390_extract_part (x, HImode, -1));
4745 else if (code == 'k')
4746 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4747 s390_extract_part (x, SImode, 0));
4748 else if (code == 'm')
4749 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4750 s390_extract_part (x, SImode, -1));
4751 else if (code == 'o')
4752 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
4753 else
4754 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4755 break;
4756
4757 case CONST_DOUBLE:
4758 gcc_assert (GET_MODE (x) == VOIDmode);
4759 if (code == 'b')
4760 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4761 else if (code == 'x')
4762 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4763 else if (code == 'h')
4764 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4765 else
4766 gcc_unreachable ();
4767 break;
4768
4769 default:
4770 fatal_insn ("UNKNOWN in print_operand !?", x);
4771 break;
4772 }
4773 }
4774
4775 /* Target hook for assembling integer objects. We need to define it
4776 here to work around a bug in some versions of GAS, which couldn't
4777 handle values smaller than INT_MIN when printed in decimal. */
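
/* For instance, an aligned 8-byte CONST_INT with value -2147483649
   (just below INT_MIN) is emitted by the workaround below as
   "\t.quad\t0xffffffff7fffffff" instead of its decimal form.  */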
4778
4779 static bool
4780 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4781 {
4782 if (size == 8 && aligned_p
4783 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4784 {
4785 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4786 INTVAL (x));
4787 return true;
4788 }
4789 return default_assemble_integer (x, size, aligned_p);
4790 }
4791
4792 /* Returns true if register REGNO is used for forming
4793 a memory address in expression X. */
4794
4795 static bool
4796 reg_used_in_mem_p (int regno, rtx x)
4797 {
4798 enum rtx_code code = GET_CODE (x);
4799 int i, j;
4800 const char *fmt;
4801
4802 if (code == MEM)
4803 {
4804 if (refers_to_regno_p (regno, regno+1,
4805 XEXP (x, 0), 0))
4806 return true;
4807 }
4808 else if (code == SET
4809 && GET_CODE (SET_DEST (x)) == PC)
4810 {
4811 if (refers_to_regno_p (regno, regno+1,
4812 SET_SRC (x), 0))
4813 return true;
4814 }
4815
4816 fmt = GET_RTX_FORMAT (code);
4817 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4818 {
4819 if (fmt[i] == 'e'
4820 && reg_used_in_mem_p (regno, XEXP (x, i)))
4821 return true;
4822
4823 else if (fmt[i] == 'E')
4824 for (j = 0; j < XVECLEN (x, i); j++)
4825 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4826 return true;
4827 }
4828 return false;
4829 }
4830
4831 /* Returns true if expression DEP_RTX sets an address register
4832 used by instruction INSN to address memory. */
4833
4834 static bool
4835 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4836 {
4837 rtx target, pat;
4838
4839 if (GET_CODE (dep_rtx) == INSN)
4840 dep_rtx = PATTERN (dep_rtx);
4841
4842 if (GET_CODE (dep_rtx) == SET)
4843 {
4844 target = SET_DEST (dep_rtx);
4845 if (GET_CODE (target) == STRICT_LOW_PART)
4846 target = XEXP (target, 0);
4847 while (GET_CODE (target) == SUBREG)
4848 target = SUBREG_REG (target);
4849
4850 if (GET_CODE (target) == REG)
4851 {
4852 int regno = REGNO (target);
4853
4854 if (s390_safe_attr_type (insn) == TYPE_LA)
4855 {
4856 pat = PATTERN (insn);
4857 if (GET_CODE (pat) == PARALLEL)
4858 {
4859 gcc_assert (XVECLEN (pat, 0) == 2);
4860 pat = XVECEXP (pat, 0, 0);
4861 }
4862 gcc_assert (GET_CODE (pat) == SET);
4863 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4864 }
4865 else if (get_attr_atype (insn) == ATYPE_AGEN)
4866 return reg_used_in_mem_p (regno, PATTERN (insn));
4867 }
4868 }
4869 return false;
4870 }
4871
4872 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
4873
4874 int
4875 s390_agen_dep_p (rtx dep_insn, rtx insn)
4876 {
4877 rtx dep_rtx = PATTERN (dep_insn);
4878 int i;
4879
4880 if (GET_CODE (dep_rtx) == SET
4881 && addr_generation_dependency_p (dep_rtx, insn))
4882 return 1;
4883 else if (GET_CODE (dep_rtx) == PARALLEL)
4884 {
4885 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4886 {
4887 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4888 return 1;
4889 }
4890 }
4891 return 0;
4892 }
4893
4894 /* A C statement (sans semicolon) to update the integer scheduling priority
4895 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4896 reduce the priority to execute INSN later. Do not define this macro if
4897 you do not need to adjust the scheduling priorities of insns.
4898
4899 A STD instruction should be scheduled earlier,
4900 in order to use the bypass. */
4901
4902 static int
4903 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4904 {
4905 if (! INSN_P (insn))
4906 return priority;
4907
4908 if (s390_tune != PROCESSOR_2084_Z990
4909 && s390_tune != PROCESSOR_2094_Z9_109)
4910 return priority;
4911
4912 switch (s390_safe_attr_type (insn))
4913 {
4914 case TYPE_FSTOREDF:
4915 case TYPE_FSTORESF:
4916 priority = priority << 3;
4917 break;
4918 case TYPE_STORE:
4919 case TYPE_STM:
4920 priority = priority << 1;
4921 break;
4922 default:
4923 break;
4924 }
4925 return priority;
4926 }
4927
4928 /* The number of instructions that can be issued per cycle. */
4929
4930 static int
4931 s390_issue_rate (void)
4932 {
4933 if (s390_tune == PROCESSOR_2084_Z990
4934 || s390_tune == PROCESSOR_2094_Z9_109)
4935 return 3;
4936 return 1;
4937 }
4938
4939 static int
4940 s390_first_cycle_multipass_dfa_lookahead (void)
4941 {
4942 return 4;
4943 }
4944
4945
4946 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4947 Fix up MEMs as required. */
4948
4949 static void
4950 annotate_constant_pool_refs (rtx *x)
4951 {
4952 int i, j;
4953 const char *fmt;
4954
4955 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4956 || !CONSTANT_POOL_ADDRESS_P (*x));
4957
4958 /* Literal pool references can only occur inside a MEM ... */
4959 if (GET_CODE (*x) == MEM)
4960 {
4961 rtx memref = XEXP (*x, 0);
4962
4963 if (GET_CODE (memref) == SYMBOL_REF
4964 && CONSTANT_POOL_ADDRESS_P (memref))
4965 {
4966 rtx base = cfun->machine->base_reg;
4967 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4968 UNSPEC_LTREF);
4969
4970 *x = replace_equiv_address (*x, addr);
4971 return;
4972 }
4973
4974 if (GET_CODE (memref) == CONST
4975 && GET_CODE (XEXP (memref, 0)) == PLUS
4976 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4977 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4978 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4979 {
4980 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4981 rtx sym = XEXP (XEXP (memref, 0), 0);
4982 rtx base = cfun->machine->base_reg;
4983 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4984 UNSPEC_LTREF);
4985
4986 *x = replace_equiv_address (*x, plus_constant (addr, off));
4987 return;
4988 }
4989 }
4990
4991 /* ... or a load-address type pattern. */
4992 if (GET_CODE (*x) == SET)
4993 {
4994 rtx addrref = SET_SRC (*x);
4995
4996 if (GET_CODE (addrref) == SYMBOL_REF
4997 && CONSTANT_POOL_ADDRESS_P (addrref))
4998 {
4999 rtx base = cfun->machine->base_reg;
5000 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5001 UNSPEC_LTREF);
5002
5003 SET_SRC (*x) = addr;
5004 return;
5005 }
5006
5007 if (GET_CODE (addrref) == CONST
5008 && GET_CODE (XEXP (addrref, 0)) == PLUS
5009 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5010 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5011 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5012 {
5013 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5014 rtx sym = XEXP (XEXP (addrref, 0), 0);
5015 rtx base = cfun->machine->base_reg;
5016 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5017 UNSPEC_LTREF);
5018
5019 SET_SRC (*x) = plus_constant (addr, off);
5020 return;
5021 }
5022 }
5023
5024 /* Annotate LTREL_BASE as well. */
5025 if (GET_CODE (*x) == UNSPEC
5026 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5027 {
5028 rtx base = cfun->machine->base_reg;
5029 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5030 UNSPEC_LTREL_BASE);
5031 return;
5032 }
5033
5034 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5035 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5036 {
5037 if (fmt[i] == 'e')
5038 {
5039 annotate_constant_pool_refs (&XEXP (*x, i));
5040 }
5041 else if (fmt[i] == 'E')
5042 {
5043 for (j = 0; j < XVECLEN (*x, i); j++)
5044 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5045 }
5046 }
5047 }
5048
5049 /* Split all branches that exceed the maximum distance.
5050 Returns true if this created a new literal pool entry. */
5051
5052 static int
5053 s390_split_branches (void)
5054 {
5055 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5056 int new_literal = 0, ret;
5057 rtx insn, pat, tmp, target;
5058 rtx *label;
5059
5060 /* We need correct insn addresses. */
5061
5062 shorten_branches (get_insns ());
5063
5064 /* Find all branches that exceed 64KB, and split them. */
5065
5066 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5067 {
5068 if (GET_CODE (insn) != JUMP_INSN)
5069 continue;
5070
5071 pat = PATTERN (insn);
5072 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5073 pat = XVECEXP (pat, 0, 0);
5074 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5075 continue;
5076
5077 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5078 {
5079 label = &SET_SRC (pat);
5080 }
5081 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5082 {
5083 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5084 label = &XEXP (SET_SRC (pat), 1);
5085 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5086 label = &XEXP (SET_SRC (pat), 2);
5087 else
5088 continue;
5089 }
5090 else
5091 continue;
5092
5093 if (get_attr_length (insn) <= 4)
5094 continue;
5095
5096 /* We are going to use the return register as a scratch register;
5097 make sure it will be saved/restored by the prologue/epilogue. */
5098 cfun_frame_layout.save_return_addr_p = 1;
5099
5100 if (!flag_pic)
5101 {
5102 new_literal = 1;
5103 tmp = force_const_mem (Pmode, *label);
5104 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5105 INSN_ADDRESSES_NEW (tmp, -1);
5106 annotate_constant_pool_refs (&PATTERN (tmp));
5107
5108 target = temp_reg;
5109 }
5110 else
5111 {
5112 new_literal = 1;
5113 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5114 UNSPEC_LTREL_OFFSET);
5115 target = gen_rtx_CONST (Pmode, target);
5116 target = force_const_mem (Pmode, target);
5117 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5118 INSN_ADDRESSES_NEW (tmp, -1);
5119 annotate_constant_pool_refs (&PATTERN (tmp));
5120
5121 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5122 cfun->machine->base_reg),
5123 UNSPEC_LTREL_BASE);
5124 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5125 }
5126
5127 ret = validate_change (insn, label, target, 0);
5128 gcc_assert (ret);
5129 }
5130
5131 return new_literal;
5132 }
5133
5134
5135 /* Find an annotated literal pool symbol referenced in RTX X,
5136 and store it at REF. Will abort if X contains references to
5137 more than one such pool symbol; multiple references to the same
5138 symbol are allowed, however.
5139
5140 The rtx pointed to by REF must be initialized to NULL_RTX
5141 by the caller before calling this routine. */
5142
5143 static void
5144 find_constant_pool_ref (rtx x, rtx *ref)
5145 {
5146 int i, j;
5147 const char *fmt;
5148
5149 /* Ignore LTREL_BASE references. */
5150 if (GET_CODE (x) == UNSPEC
5151 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5152 return;
5153 /* Likewise POOL_ENTRY insns. */
5154 if (GET_CODE (x) == UNSPEC_VOLATILE
5155 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5156 return;
5157
5158 gcc_assert (GET_CODE (x) != SYMBOL_REF
5159 || !CONSTANT_POOL_ADDRESS_P (x));
5160
5161 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5162 {
5163 rtx sym = XVECEXP (x, 0, 0);
5164 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5165 && CONSTANT_POOL_ADDRESS_P (sym));
5166
5167 if (*ref == NULL_RTX)
5168 *ref = sym;
5169 else
5170 gcc_assert (*ref == sym);
5171
5172 return;
5173 }
5174
5175 fmt = GET_RTX_FORMAT (GET_CODE (x));
5176 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5177 {
5178 if (fmt[i] == 'e')
5179 {
5180 find_constant_pool_ref (XEXP (x, i), ref);
5181 }
5182 else if (fmt[i] == 'E')
5183 {
5184 for (j = 0; j < XVECLEN (x, i); j++)
5185 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5186 }
5187 }
5188 }
5189
5190 /* Replace every reference to the annotated literal pool
5191 symbol REF in X by its base plus OFFSET. */
5192
5193 static void
5194 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5195 {
5196 int i, j;
5197 const char *fmt;
5198
5199 gcc_assert (*x != ref);
5200
5201 if (GET_CODE (*x) == UNSPEC
5202 && XINT (*x, 1) == UNSPEC_LTREF
5203 && XVECEXP (*x, 0, 0) == ref)
5204 {
5205 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5206 return;
5207 }
5208
5209 if (GET_CODE (*x) == PLUS
5210 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5211 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5212 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5213 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5214 {
5215 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5216 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5217 return;
5218 }
5219
5220 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5221 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5222 {
5223 if (fmt[i] == 'e')
5224 {
5225 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5226 }
5227 else if (fmt[i] == 'E')
5228 {
5229 for (j = 0; j < XVECLEN (*x, i); j++)
5230 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5231 }
5232 }
5233 }
5234
5235 /* Check whether X contains an UNSPEC_LTREL_BASE.
5236 Return its constant pool symbol if found, NULL_RTX otherwise. */
5237
5238 static rtx
5239 find_ltrel_base (rtx x)
5240 {
5241 int i, j;
5242 const char *fmt;
5243
5244 if (GET_CODE (x) == UNSPEC
5245 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5246 return XVECEXP (x, 0, 0);
5247
5248 fmt = GET_RTX_FORMAT (GET_CODE (x));
5249 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5250 {
5251 if (fmt[i] == 'e')
5252 {
5253 rtx fnd = find_ltrel_base (XEXP (x, i));
5254 if (fnd)
5255 return fnd;
5256 }
5257 else if (fmt[i] == 'E')
5258 {
5259 for (j = 0; j < XVECLEN (x, i); j++)
5260 {
5261 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5262 if (fnd)
5263 return fnd;
5264 }
5265 }
5266 }
5267
5268 return NULL_RTX;
5269 }
5270
5271 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5272
5273 static void
5274 replace_ltrel_base (rtx *x)
5275 {
5276 int i, j;
5277 const char *fmt;
5278
5279 if (GET_CODE (*x) == UNSPEC
5280 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5281 {
5282 *x = XVECEXP (*x, 0, 1);
5283 return;
5284 }
5285
5286 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5287 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5288 {
5289 if (fmt[i] == 'e')
5290 {
5291 replace_ltrel_base (&XEXP (*x, i));
5292 }
5293 else if (fmt[i] == 'E')
5294 {
5295 for (j = 0; j < XVECLEN (*x, i); j++)
5296 replace_ltrel_base (&XVECEXP (*x, i, j));
5297 }
5298 }
5299 }
5300
5301
5302 /* We keep a list of constants which we have to add to internal
5303 constant tables in the middle of large functions. */
5304
5305 #define NR_C_MODES 11
5306 enum machine_mode constant_modes[NR_C_MODES] =
5307 {
5308 TFmode, TImode, TDmode,
5309 DFmode, DImode, DDmode,
5310 SFmode, SImode, SDmode,
5311 HImode,
5312 QImode
5313 };
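
/* Note: the modes above are listed in order of decreasing size and
   alignment requirement; s390_dump_pool below relies on this ordering to
   emit every constant with sufficient alignment.  */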
5314
5315 struct constant
5316 {
5317 struct constant *next;
5318 rtx value;
5319 rtx label;
5320 };
5321
5322 struct constant_pool
5323 {
5324 struct constant_pool *next;
5325 rtx first_insn;
5326 rtx pool_insn;
5327 bitmap insns;
5328 rtx emit_pool_after;
5329
5330 struct constant *constants[NR_C_MODES];
5331 struct constant *execute;
5332 rtx label;
5333 int size;
5334 };
5335
5336 /* Allocate new constant_pool structure. */
5337
5338 static struct constant_pool *
5339 s390_alloc_pool (void)
5340 {
5341 struct constant_pool *pool;
5342 int i;
5343
5344 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5345 pool->next = NULL;
5346 for (i = 0; i < NR_C_MODES; i++)
5347 pool->constants[i] = NULL;
5348
5349 pool->execute = NULL;
5350 pool->label = gen_label_rtx ();
5351 pool->first_insn = NULL_RTX;
5352 pool->pool_insn = NULL_RTX;
5353 pool->insns = BITMAP_ALLOC (NULL);
5354 pool->size = 0;
5355 pool->emit_pool_after = NULL_RTX;
5356
5357 return pool;
5358 }
5359
5360 /* Create new constant pool covering instructions starting at INSN
5361 and chain it to the end of POOL_LIST. */
5362
5363 static struct constant_pool *
5364 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5365 {
5366 struct constant_pool *pool, **prev;
5367
5368 pool = s390_alloc_pool ();
5369 pool->first_insn = insn;
5370
5371 for (prev = pool_list; *prev; prev = &(*prev)->next)
5372 ;
5373 *prev = pool;
5374
5375 return pool;
5376 }
5377
5378 /* End range of instructions covered by POOL at INSN and emit
5379 placeholder insn representing the pool. */
5380
5381 static void
5382 s390_end_pool (struct constant_pool *pool, rtx insn)
5383 {
5384 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5385
5386 if (!insn)
5387 insn = get_last_insn ();
5388
5389 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5390 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5391 }
5392
5393 /* Add INSN to the list of insns covered by POOL. */
5394
5395 static void
5396 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5397 {
5398 bitmap_set_bit (pool->insns, INSN_UID (insn));
5399 }
5400
5401 /* Return pool out of POOL_LIST that covers INSN. */
5402
5403 static struct constant_pool *
5404 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5405 {
5406 struct constant_pool *pool;
5407
5408 for (pool = pool_list; pool; pool = pool->next)
5409 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5410 break;
5411
5412 return pool;
5413 }
5414
5415 /* Add constant VAL of mode MODE to the constant pool POOL. */
5416
5417 static void
5418 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5419 {
5420 struct constant *c;
5421 int i;
5422
5423 for (i = 0; i < NR_C_MODES; i++)
5424 if (constant_modes[i] == mode)
5425 break;
5426 gcc_assert (i != NR_C_MODES);
5427
5428 for (c = pool->constants[i]; c != NULL; c = c->next)
5429 if (rtx_equal_p (val, c->value))
5430 break;
5431
5432 if (c == NULL)
5433 {
5434 c = (struct constant *) xmalloc (sizeof *c);
5435 c->value = val;
5436 c->label = gen_label_rtx ();
5437 c->next = pool->constants[i];
5438 pool->constants[i] = c;
5439 pool->size += GET_MODE_SIZE (mode);
5440 }
5441 }
5442
5443 /* Find constant VAL of mode MODE in the constant pool POOL.
5444 Return an RTX describing the distance from the start of
5445 the pool to the location of the new constant. */
5446
5447 static rtx
5448 s390_find_constant (struct constant_pool *pool, rtx val,
5449 enum machine_mode mode)
5450 {
5451 struct constant *c;
5452 rtx offset;
5453 int i;
5454
5455 for (i = 0; i < NR_C_MODES; i++)
5456 if (constant_modes[i] == mode)
5457 break;
5458 gcc_assert (i != NR_C_MODES);
5459
5460 for (c = pool->constants[i]; c != NULL; c = c->next)
5461 if (rtx_equal_p (val, c->value))
5462 break;
5463
5464 gcc_assert (c);
5465
5466 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5467 gen_rtx_LABEL_REF (Pmode, pool->label));
5468 offset = gen_rtx_CONST (Pmode, offset);
5469 return offset;
5470 }
5471
5472 /* Check whether INSN is an execute. Return the label_ref to its
5473 execute target template if so, NULL_RTX otherwise. */
5474
5475 static rtx
5476 s390_execute_label (rtx insn)
5477 {
5478 if (GET_CODE (insn) == INSN
5479 && GET_CODE (PATTERN (insn)) == PARALLEL
5480 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5481 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5482 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5483
5484 return NULL_RTX;
5485 }
5486
5487 /* Add execute target for INSN to the constant pool POOL. */
5488
5489 static void
5490 s390_add_execute (struct constant_pool *pool, rtx insn)
5491 {
5492 struct constant *c;
5493
5494 for (c = pool->execute; c != NULL; c = c->next)
5495 if (INSN_UID (insn) == INSN_UID (c->value))
5496 break;
5497
5498 if (c == NULL)
5499 {
5500 c = (struct constant *) xmalloc (sizeof *c);
5501 c->value = insn;
5502 c->label = gen_label_rtx ();
5503 c->next = pool->execute;
5504 pool->execute = c;
5505 pool->size += 6;
5506 }
5507 }
5508
5509 /* Find execute target for INSN in the constant pool POOL.
5510 Return an RTX describing the distance from the start of
5511 the pool to the location of the execute target. */
5512
5513 static rtx
5514 s390_find_execute (struct constant_pool *pool, rtx insn)
5515 {
5516 struct constant *c;
5517 rtx offset;
5518
5519 for (c = pool->execute; c != NULL; c = c->next)
5520 if (INSN_UID (insn) == INSN_UID (c->value))
5521 break;
5522
5523 gcc_assert (c);
5524
5525 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5526 gen_rtx_LABEL_REF (Pmode, pool->label));
5527 offset = gen_rtx_CONST (Pmode, offset);
5528 return offset;
5529 }
5530
5531 /* For an execute INSN, extract the execute target template. */
5532
5533 static rtx
5534 s390_execute_target (rtx insn)
5535 {
5536 rtx pattern = PATTERN (insn);
5537 gcc_assert (s390_execute_label (insn));
5538
5539 if (XVECLEN (pattern, 0) == 2)
5540 {
5541 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5542 }
5543 else
5544 {
5545 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5546 int i;
5547
5548 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5549 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5550
5551 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5552 }
5553
5554 return pattern;
5555 }
5556
5557 /* Indicate that INSN cannot be duplicated. This is the case for
5558 execute insns that carry a unique label. */
5559
5560 static bool
5561 s390_cannot_copy_insn_p (rtx insn)
5562 {
5563 rtx label = s390_execute_label (insn);
5564 return label && label != const0_rtx;
5565 }
5566
5567 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5568 do not emit the pool base label. */
5569
5570 static void
5571 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5572 {
5573 struct constant *c;
5574 rtx insn = pool->pool_insn;
5575 int i;
5576
5577 /* Switch to rodata section. */
5578 if (TARGET_CPU_ZARCH)
5579 {
5580 insn = emit_insn_after (gen_pool_section_start (), insn);
5581 INSN_ADDRESSES_NEW (insn, -1);
5582 }
5583
5584 /* Ensure minimum pool alignment. */
5585 if (TARGET_CPU_ZARCH)
5586 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5587 else
5588 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5589 INSN_ADDRESSES_NEW (insn, -1);
5590
5591 /* Emit pool base label. */
5592 if (!remote_label)
5593 {
5594 insn = emit_label_after (pool->label, insn);
5595 INSN_ADDRESSES_NEW (insn, -1);
5596 }
5597
5598 /* Dump constants in descending alignment requirement order,
5599 ensuring proper alignment for every constant. */
5600 for (i = 0; i < NR_C_MODES; i++)
5601 for (c = pool->constants[i]; c; c = c->next)
5602 {
5603 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5604 rtx value = copy_rtx (c->value);
5605 if (GET_CODE (value) == CONST
5606 && GET_CODE (XEXP (value, 0)) == UNSPEC
5607 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5608 && XVECLEN (XEXP (value, 0), 0) == 1)
5609 {
5610 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5611 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5612 value = gen_rtx_CONST (VOIDmode, value);
5613 }
5614
5615 insn = emit_label_after (c->label, insn);
5616 INSN_ADDRESSES_NEW (insn, -1);
5617
5618 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5619 gen_rtvec (1, value),
5620 UNSPECV_POOL_ENTRY);
5621 insn = emit_insn_after (value, insn);
5622 INSN_ADDRESSES_NEW (insn, -1);
5623 }
5624
5625 /* Ensure minimum alignment for instructions. */
5626 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5627 INSN_ADDRESSES_NEW (insn, -1);
5628
5629 /* Output in-pool execute template insns. */
5630 for (c = pool->execute; c; c = c->next)
5631 {
5632 insn = emit_label_after (c->label, insn);
5633 INSN_ADDRESSES_NEW (insn, -1);
5634
5635 insn = emit_insn_after (s390_execute_target (c->value), insn);
5636 INSN_ADDRESSES_NEW (insn, -1);
5637 }
5638
5639 /* Switch back to previous section. */
5640 if (TARGET_CPU_ZARCH)
5641 {
5642 insn = emit_insn_after (gen_pool_section_end (), insn);
5643 INSN_ADDRESSES_NEW (insn, -1);
5644 }
5645
5646 insn = emit_barrier_after (insn);
5647 INSN_ADDRESSES_NEW (insn, -1);
5648
5649 /* Remove placeholder insn. */
5650 remove_insn (pool->pool_insn);
5651 }
5652
5653 /* Free all memory used by POOL. */
5654
5655 static void
5656 s390_free_pool (struct constant_pool *pool)
5657 {
5658 struct constant *c, *next;
5659 int i;
5660
5661 for (i = 0; i < NR_C_MODES; i++)
5662 for (c = pool->constants[i]; c; c = next)
5663 {
5664 next = c->next;
5665 free (c);
5666 }
5667
5668 for (c = pool->execute; c; c = next)
5669 {
5670 next = c->next;
5671 free (c);
5672 }
5673
5674 BITMAP_FREE (pool->insns);
5675 free (pool);
5676 }
5677
5678
5679 /* Collect main literal pool. Return NULL on overflow. */
5680
5681 static struct constant_pool *
5682 s390_mainpool_start (void)
5683 {
5684 struct constant_pool *pool;
5685 rtx insn;
5686 bool in_pool_section_p = false;
5687
5688 pool = s390_alloc_pool ();
5689
5690 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5691 {
5692 if (GET_CODE (insn) == INSN
5693 && GET_CODE (PATTERN (insn)) == SET
5694 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5695 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5696 {
5697 gcc_assert (!pool->pool_insn);
5698 pool->pool_insn = insn;
5699 in_pool_section_p = true;
5700 }
5701
5702 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5703 {
5704 s390_add_execute (pool, insn);
5705 }
5706 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5707 {
5708 rtx pool_ref = NULL_RTX;
5709 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5710 if (pool_ref)
5711 {
5712 rtx constant = get_pool_constant (pool_ref);
5713 enum machine_mode mode = get_pool_mode (pool_ref);
5714 s390_add_constant (pool, constant, mode);
5715 }
5716 }
5717
5718 /* If hot/cold partitioning is enabled we have to make sure that
5719 the literal pool is emitted in the same section where the
5720 initialization of the literal pool base pointer takes place.
5721 emit_pool_after is only used in the non-overflow case on
5722 non-Z CPUs where we can emit the literal pool at the end of the
5723 function body within the text section. */
5724 if (NOTE_P (insn)
5725 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
5726 {
5727 if (in_pool_section_p)
5728 pool->emit_pool_after = PREV_INSN (insn);
5729 in_pool_section_p = !in_pool_section_p;
5730 }
5731 }
5732
5733 gcc_assert (pool->pool_insn || pool->size == 0);
5734
5735 if (pool->size >= 4096)
5736 {
5737 /* We're going to chunkify the pool, so remove the main
5738 pool placeholder insn. */
5739 remove_insn (pool->pool_insn);
5740
5741 s390_free_pool (pool);
5742 pool = NULL;
5743 }
5744
5745 /* If the function ends with the section where the literal pool
5746 should be emitted, set the marker to its end. */
5747 if (pool && in_pool_section_p)
5748 pool->emit_pool_after = get_last_insn ();
5749
5750 return pool;
5751 }
5752
5753 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5754 Modify the current function to output the pool constants as well as
5755 the pool register setup instruction. */
5756
5757 static void
5758 s390_mainpool_finish (struct constant_pool *pool)
5759 {
5760 rtx base_reg = cfun->machine->base_reg;
5761 rtx insn;
5762
5763 /* If the pool is empty, we're done. */
5764 if (pool->size == 0)
5765 {
5766 /* We don't actually need a base register after all. */
5767 cfun->machine->base_reg = NULL_RTX;
5768
5769 if (pool->pool_insn)
5770 remove_insn (pool->pool_insn);
5771 s390_free_pool (pool);
5772 return;
5773 }
5774
5775 /* We need correct insn addresses. */
5776 shorten_branches (get_insns ());
5777
5778 /* On zSeries, we use a LARL to load the pool register. The pool is
5779 located in the .rodata section, so we emit it after the function. */
5780 if (TARGET_CPU_ZARCH)
5781 {
5782 insn = gen_main_base_64 (base_reg, pool->label);
5783 insn = emit_insn_after (insn, pool->pool_insn);
5784 INSN_ADDRESSES_NEW (insn, -1);
5785 remove_insn (pool->pool_insn);
5786
5787 insn = get_last_insn ();
5788 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5789 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5790
5791 s390_dump_pool (pool, 0);
5792 }
5793
5794 /* On S/390, if the total size of the function's code plus literal pool
5795 does not exceed 4096 bytes, we use BASR to set up a function base
5796 pointer, and emit the literal pool at the end of the function. */
5797 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
5798 + pool->size + 8 /* alignment slop */ < 4096)
5799 {
5800 insn = gen_main_base_31_small (base_reg, pool->label);
5801 insn = emit_insn_after (insn, pool->pool_insn);
5802 INSN_ADDRESSES_NEW (insn, -1);
5803 remove_insn (pool->pool_insn);
5804
5805 insn = emit_label_after (pool->label, insn);
5806 INSN_ADDRESSES_NEW (insn, -1);
5807
5808 /* emit_pool_after will be set by s390_mainpool_start to the
5809 last insn of the section where the literal pool should be
5810 emitted. */
5811 insn = pool->emit_pool_after;
5812
5813 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5814 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5815
5816 s390_dump_pool (pool, 1);
5817 }
5818
5819 /* Otherwise, we emit an inline literal pool and use BASR to branch
5820 over it, setting up the pool register at the same time. */
5821 else
5822 {
5823 rtx pool_end = gen_label_rtx ();
5824
5825 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5826 insn = emit_insn_after (insn, pool->pool_insn);
5827 INSN_ADDRESSES_NEW (insn, -1);
5828 remove_insn (pool->pool_insn);
5829
5830 insn = emit_label_after (pool->label, insn);
5831 INSN_ADDRESSES_NEW (insn, -1);
5832
5833 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5834 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5835
5836 insn = emit_label_after (pool_end, pool->pool_insn);
5837 INSN_ADDRESSES_NEW (insn, -1);
5838
5839 s390_dump_pool (pool, 1);
5840 }
5841
5842
5843 /* Replace all literal pool references. */
5844
5845 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5846 {
5847 if (INSN_P (insn))
5848 replace_ltrel_base (&PATTERN (insn));
5849
5850 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5851 {
5852 rtx addr, pool_ref = NULL_RTX;
5853 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5854 if (pool_ref)
5855 {
5856 if (s390_execute_label (insn))
5857 addr = s390_find_execute (pool, insn);
5858 else
5859 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5860 get_pool_mode (pool_ref));
5861
5862 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5863 INSN_CODE (insn) = -1;
5864 }
5865 }
5866 }
5867
5868
5869 /* Free the pool. */
5870 s390_free_pool (pool);
5871 }
5872
5873 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5874 We have decided we cannot use this pool, so revert all changes
5875 to the current function that were done by s390_mainpool_start. */
5876 static void
5877 s390_mainpool_cancel (struct constant_pool *pool)
5878 {
5879 /* We didn't actually change the instruction stream, so simply
5880 free the pool memory. */
5881 s390_free_pool (pool);
5882 }
5883
5884
5885 /* Chunkify the literal pool. */
5886
5887 #define S390_POOL_CHUNK_MIN 0xc00
5888 #define S390_POOL_CHUNK_MAX 0xe00
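
/* A note on the thresholds above (assumption stated for clarity):
   base + displacement addressing on s390 provides only a 12-bit unsigned
   displacement, i.e. 4096 bytes, so pool chunks are closed well below
   that limit (0xe00 = 3584 bytes), leaving slack for alignment padding
   and for the base-register reload insns accounted for via extra_size
   below.  */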
5889
5890 static struct constant_pool *
5891 s390_chunkify_start (void)
5892 {
5893 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5894 int extra_size = 0;
5895 bitmap far_labels;
5896 rtx pending_ltrel = NULL_RTX;
5897 rtx insn;
5898
5899 rtx (*gen_reload_base) (rtx, rtx) =
5900 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5901
5902
5903 /* We need correct insn addresses. */
5904
5905 shorten_branches (get_insns ());
5906
5907 /* Scan all insns and move literals to pool chunks. */
5908
5909 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5910 {
5911 bool section_switch_p = false;
5912
5913 /* Check for pending LTREL_BASE. */
5914 if (INSN_P (insn))
5915 {
5916 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5917 if (ltrel_base)
5918 {
5919 gcc_assert (ltrel_base == pending_ltrel);
5920 pending_ltrel = NULL_RTX;
5921 }
5922 }
5923
5924 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5925 {
5926 if (!curr_pool)
5927 curr_pool = s390_start_pool (&pool_list, insn);
5928
5929 s390_add_execute (curr_pool, insn);
5930 s390_add_pool_insn (curr_pool, insn);
5931 }
5932 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5933 {
5934 rtx pool_ref = NULL_RTX;
5935 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5936 if (pool_ref)
5937 {
5938 rtx constant = get_pool_constant (pool_ref);
5939 enum machine_mode mode = get_pool_mode (pool_ref);
5940
5941 if (!curr_pool)
5942 curr_pool = s390_start_pool (&pool_list, insn);
5943
5944 s390_add_constant (curr_pool, constant, mode);
5945 s390_add_pool_insn (curr_pool, insn);
5946
5947 /* Don't split the pool chunk between a LTREL_OFFSET load
5948 and the corresponding LTREL_BASE. */
5949 if (GET_CODE (constant) == CONST
5950 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5951 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5952 {
5953 gcc_assert (!pending_ltrel);
5954 pending_ltrel = pool_ref;
5955 }
5956 }
5957 }
5958
5959 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5960 {
5961 if (curr_pool)
5962 s390_add_pool_insn (curr_pool, insn);
5963 /* An LTREL_BASE must follow within the same basic block. */
5964 gcc_assert (!pending_ltrel);
5965 }
5966
5967 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
5968 section_switch_p = true;
5969
5970 if (!curr_pool
5971 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5972 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5973 continue;
5974
5975 if (TARGET_CPU_ZARCH)
5976 {
5977 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5978 continue;
5979
5980 s390_end_pool (curr_pool, NULL_RTX);
5981 curr_pool = NULL;
5982 }
5983 else
5984 {
5985 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5986 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5987 + extra_size;
5988
5989 /* We will later have to insert base register reload insns.
5990 Those will have an effect on code size, which we need to
5991 consider here. This calculation makes rather pessimistic
5992 worst-case assumptions. */
5993 if (GET_CODE (insn) == CODE_LABEL)
5994 extra_size += 6;
5995
5996 if (chunk_size < S390_POOL_CHUNK_MIN
5997 && curr_pool->size < S390_POOL_CHUNK_MIN
5998 && !section_switch_p)
5999 continue;
6000
6001 /* Pool chunks can only be inserted after BARRIERs ... */
6002 if (GET_CODE (insn) == BARRIER)
6003 {
6004 s390_end_pool (curr_pool, insn);
6005 curr_pool = NULL;
6006 extra_size = 0;
6007 }
6008
6009 /* ... so if we don't find one in time, create one. */
6010 else if (chunk_size > S390_POOL_CHUNK_MAX
6011 || curr_pool->size > S390_POOL_CHUNK_MAX
6012 || section_switch_p)
6013 {
6014 rtx label, jump, barrier;
6015
6016 if (!section_switch_p)
6017 {
6018 /* We can insert the barrier only after a 'real' insn. */
6019 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
6020 continue;
6021 if (get_attr_length (insn) == 0)
6022 continue;
6023 /* Don't separate LTREL_BASE from the corresponding
6024 LTREL_OFFSET load. */
6025 if (pending_ltrel)
6026 continue;
6027 }
6028 else
6029 {
6030 gcc_assert (!pending_ltrel);
6031
6032 /* The old pool has to end before the section switch
6033 note in order to make it part of the current
6034 section. */
6035 insn = PREV_INSN (insn);
6036 }
6037
6038 label = gen_label_rtx ();
6039 jump = emit_jump_insn_after (gen_jump (label), insn);
6040 barrier = emit_barrier_after (jump);
6041 insn = emit_label_after (label, barrier);
6042 JUMP_LABEL (jump) = label;
6043 LABEL_NUSES (label) = 1;
6044
6045 INSN_ADDRESSES_NEW (jump, -1);
6046 INSN_ADDRESSES_NEW (barrier, -1);
6047 INSN_ADDRESSES_NEW (insn, -1);
6048
6049 s390_end_pool (curr_pool, barrier);
6050 curr_pool = NULL;
6051 extra_size = 0;
6052 }
6053 }
6054 }
6055
6056 if (curr_pool)
6057 s390_end_pool (curr_pool, NULL_RTX);
6058 gcc_assert (!pending_ltrel);
6059
6060 /* Find all labels that are branched into
6061 from an insn belonging to a different chunk. */
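  /* At such a label the pool base register may still point to the pool
     chunk of the jumping insn, so a base register reload has to be
     emitted there (done below, once all far labels are collected).  */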
6062
6063 far_labels = BITMAP_ALLOC (NULL);
6064
6065 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6066 {
6067       /* Labels marked with LABEL_PRESERVE_P can be the target
6068 of non-local jumps, so we have to mark them.
6069 The same holds for named labels.
6070
6071 Don't do that, however, if it is the label before
6072 a jump table. */
6073
6074 if (GET_CODE (insn) == CODE_LABEL
6075 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6076 {
6077 rtx vec_insn = next_real_insn (insn);
6078 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6079 PATTERN (vec_insn) : NULL_RTX;
6080 if (!vec_pat
6081 || !(GET_CODE (vec_pat) == ADDR_VEC
6082 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6083 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6084 }
6085
6086 /* If we have a direct jump (conditional or unconditional)
6087 or a casesi jump, check all potential targets. */
6088 else if (GET_CODE (insn) == JUMP_INSN)
6089 {
6090 rtx pat = PATTERN (insn);
6091 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6092 pat = XVECEXP (pat, 0, 0);
6093
6094 if (GET_CODE (pat) == SET)
6095 {
6096 rtx label = JUMP_LABEL (insn);
6097 if (label)
6098 {
6099 if (s390_find_pool (pool_list, label)
6100 != s390_find_pool (pool_list, insn))
6101 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6102 }
6103 }
6104 else if (GET_CODE (pat) == PARALLEL
6105 && XVECLEN (pat, 0) == 2
6106 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6107 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6108 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6109 {
6110 /* Find the jump table used by this casesi jump. */
6111 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6112 rtx vec_insn = next_real_insn (vec_label);
6113 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6114 PATTERN (vec_insn) : NULL_RTX;
6115 if (vec_pat
6116 && (GET_CODE (vec_pat) == ADDR_VEC
6117 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6118 {
6119 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6120
6121 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6122 {
6123 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6124
6125 if (s390_find_pool (pool_list, label)
6126 != s390_find_pool (pool_list, insn))
6127 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6128 }
6129 }
6130 }
6131 }
6132 }
6133
6134 /* Insert base register reload insns before every pool. */
6135
6136 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6137 {
6138 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6139 curr_pool->label);
6140 rtx insn = curr_pool->first_insn;
6141 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6142 }
6143
6144 /* Insert base register reload insns at every far label. */
6145
6146 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6147 if (GET_CODE (insn) == CODE_LABEL
6148 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6149 {
6150 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6151 if (pool)
6152 {
6153 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6154 pool->label);
6155 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6156 }
6157 }
6158
6159
6160 BITMAP_FREE (far_labels);
6161
6162
6163 /* Recompute insn addresses. */
6164
6165 init_insn_lengths ();
6166 shorten_branches (get_insns ());
6167
6168 return pool_list;
6169 }
6170
6171 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6172 After we have decided to use this list, finish implementing
6173 all changes to the current function as required. */
6174
6175 static void
6176 s390_chunkify_finish (struct constant_pool *pool_list)
6177 {
6178 struct constant_pool *curr_pool = NULL;
6179 rtx insn;
6180
6181
6182 /* Replace all literal pool references. */
6183
6184 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6185 {
6186 if (INSN_P (insn))
6187 replace_ltrel_base (&PATTERN (insn));
6188
6189 curr_pool = s390_find_pool (pool_list, insn);
6190 if (!curr_pool)
6191 continue;
6192
6193 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6194 {
6195 rtx addr, pool_ref = NULL_RTX;
6196 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6197 if (pool_ref)
6198 {
6199 if (s390_execute_label (insn))
6200 addr = s390_find_execute (curr_pool, insn);
6201 else
6202 addr = s390_find_constant (curr_pool,
6203 get_pool_constant (pool_ref),
6204 get_pool_mode (pool_ref));
6205
6206 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6207 INSN_CODE (insn) = -1;
6208 }
6209 }
6210 }
6211
6212 /* Dump out all literal pools. */
6213
6214 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6215 s390_dump_pool (curr_pool, 0);
6216
6217 /* Free pool list. */
6218
6219 while (pool_list)
6220 {
6221 struct constant_pool *next = pool_list->next;
6222 s390_free_pool (pool_list);
6223 pool_list = next;
6224 }
6225 }
6226
6227 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6228 We have decided we cannot use this list, so revert all changes
6229 to the current function that were done by s390_chunkify_start. */
6230
6231 static void
6232 s390_chunkify_cancel (struct constant_pool *pool_list)
6233 {
6234 struct constant_pool *curr_pool = NULL;
6235 rtx insn;
6236
6237 /* Remove all pool placeholder insns. */
6238
6239 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6240 {
6241 /* Did we insert an extra barrier? Remove it. */
6242 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6243       rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
6244 rtx label = NEXT_INSN (curr_pool->pool_insn);
6245
6246 if (jump && GET_CODE (jump) == JUMP_INSN
6247 && barrier && GET_CODE (barrier) == BARRIER
6248 && label && GET_CODE (label) == CODE_LABEL
6249 && GET_CODE (PATTERN (jump)) == SET
6250 && SET_DEST (PATTERN (jump)) == pc_rtx
6251 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6252 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6253 {
6254 remove_insn (jump);
6255 remove_insn (barrier);
6256 remove_insn (label);
6257 }
6258
6259 remove_insn (curr_pool->pool_insn);
6260 }
6261
6262 /* Remove all base register reload insns. */
6263
6264 for (insn = get_insns (); insn; )
6265 {
6266 rtx next_insn = NEXT_INSN (insn);
6267
6268 if (GET_CODE (insn) == INSN
6269 && GET_CODE (PATTERN (insn)) == SET
6270 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6271 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6272 remove_insn (insn);
6273
6274 insn = next_insn;
6275 }
6276
6277 /* Free pool list. */
6278
6279 while (pool_list)
6280 {
6281 struct constant_pool *next = pool_list->next;
6282 s390_free_pool (pool_list);
6283 pool_list = next;
6284 }
6285 }
6286
6287
6288 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6289
6290 void
6291 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6292 {
6293 REAL_VALUE_TYPE r;
6294
6295 switch (GET_MODE_CLASS (mode))
6296 {
6297 case MODE_FLOAT:
6298 case MODE_DECIMAL_FLOAT:
6299 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6300
6301 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6302 assemble_real (r, mode, align);
6303 break;
6304
6305 case MODE_INT:
6306 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6307 break;
6308
6309 default:
6310 gcc_unreachable ();
6311 }
6312 }
6313
6314
6315 /* Return an RTL expression representing the value of the return address
6316 for the frame COUNT steps up from the current frame. FRAME is the
6317 frame pointer of that frame. */
6318
6319 rtx
6320 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6321 {
6322 int offset;
6323 rtx addr;
6324
6325 /* Without backchain, we fail for all but the current frame. */
6326
6327 if (!TARGET_BACKCHAIN && count > 0)
6328 return NULL_RTX;
6329
6330 /* For the current frame, we need to make sure the initial
6331 value of RETURN_REGNUM is actually saved. */
6332
6333 if (count == 0)
6334 {
6335 /* On non-z architectures branch splitting could overwrite r14. */
6336 if (TARGET_CPU_ZARCH)
6337 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6338 else
6339 {
6340 cfun_frame_layout.save_return_addr_p = true;
6341 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6342 }
6343 }
6344
6345 if (TARGET_PACKED_STACK)
6346 offset = -2 * UNITS_PER_WORD;
6347 else
6348 offset = RETURN_REGNUM * UNITS_PER_WORD;
6349
6350 addr = plus_constant (frame, offset);
6351 addr = memory_address (Pmode, addr);
6352 return gen_rtx_MEM (Pmode, addr);
6353 }
6354
6355 /* Return an RTL expression representing the back chain stored in
6356 the current stack frame. */
6357
6358 rtx
6359 s390_back_chain_rtx (void)
6360 {
6361 rtx chain;
6362
6363 gcc_assert (TARGET_BACKCHAIN);
6364
6365 if (TARGET_PACKED_STACK)
6366 chain = plus_constant (stack_pointer_rtx,
6367 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6368 else
6369 chain = stack_pointer_rtx;
6370
6371 chain = gen_rtx_MEM (Pmode, chain);
6372 return chain;
6373 }
6374
6375 /* Find first call clobbered register unused in a function.
6376 This could be used as base register in a leaf function
6377 or for holding the return address before epilogue. */
6378
6379 static int
6380 find_unused_clobbered_reg (void)
6381 {
6382 int i;
6383 for (i = 0; i < 6; i++)
6384 if (!df_regs_ever_live_p (i))
6385 return i;
6386 return 0;
6387 }
6388
6389
6390 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6391 clobbered hard regs in SETREG. */
6392
6393 static void
6394 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
6395 {
6396 int *regs_ever_clobbered = (int *)data;
6397 unsigned int i, regno;
6398 enum machine_mode mode = GET_MODE (setreg);
6399
6400 if (GET_CODE (setreg) == SUBREG)
6401 {
6402 rtx inner = SUBREG_REG (setreg);
6403 if (!GENERAL_REG_P (inner))
6404 return;
6405 regno = subreg_regno (setreg);
6406 }
6407 else if (GENERAL_REG_P (setreg))
6408 regno = REGNO (setreg);
6409 else
6410 return;
6411
6412 for (i = regno;
6413 i < regno + HARD_REGNO_NREGS (regno, mode);
6414 i++)
6415 regs_ever_clobbered[i] = 1;
6416 }
6417
6418 /* Walks through all basic blocks of the current function looking
6419 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6420 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6421 each of those regs. */
6422
6423 static void
6424 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6425 {
6426 basic_block cur_bb;
6427 rtx cur_insn;
6428 unsigned int i;
6429
6430 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6431
6432 /* For non-leaf functions we have to consider all call clobbered regs to be
6433 clobbered. */
6434 if (!current_function_is_leaf)
6435 {
6436 for (i = 0; i < 16; i++)
6437 regs_ever_clobbered[i] = call_really_used_regs[i];
6438 }
6439
6440 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6441 this work is done by liveness analysis (mark_regs_live_at_end).
6442 Special care is needed for functions containing landing pads. Landing pads
6443 may use the eh registers, but the code which sets these registers is not
6444 contained in that function. Hence s390_regs_ever_clobbered is not able to
6445 deal with this automatically. */
6446 if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
6447 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6448 if (current_function_calls_eh_return
6449 || (cfun->machine->has_landing_pad_p
6450 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
6451 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6452
6453 /* For nonlocal gotos all call-saved registers have to be saved.
6454 This flag is also set for the unwinding code in libgcc.
6455 See expand_builtin_unwind_init. For regs_ever_live this is done by
6456 reload. */
6457 if (current_function_has_nonlocal_label)
6458 for (i = 0; i < 16; i++)
6459 if (!call_really_used_regs[i])
6460 regs_ever_clobbered[i] = 1;
6461
6462 FOR_EACH_BB (cur_bb)
6463 {
6464 FOR_BB_INSNS (cur_bb, cur_insn)
6465 {
6466 if (INSN_P (cur_insn))
6467 note_stores (PATTERN (cur_insn),
6468 s390_reg_clobbered_rtx,
6469 regs_ever_clobbered);
6470 }
6471 }
6472 }
6473
6474 /* Determine the frame area which actually has to be accessed
6475 in the function epilogue. The values are stored at the
6476 given pointers AREA_BOTTOM (address of the lowest used stack
6477 address) and AREA_TOP (address of the first item which does
6478 not belong to the stack frame). */
6479
6480 static void
6481 s390_frame_area (int *area_bottom, int *area_top)
6482 {
6483 int b, t;
6484 int i;
6485
6486 b = INT_MAX;
6487 t = INT_MIN;
6488
6489 if (cfun_frame_layout.first_restore_gpr != -1)
6490 {
6491 b = (cfun_frame_layout.gprs_offset
6492 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6493 t = b + (cfun_frame_layout.last_restore_gpr
6494 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6495 }
6496
6497 if (TARGET_64BIT && cfun_save_high_fprs_p)
6498 {
6499 b = MIN (b, cfun_frame_layout.f8_offset);
6500 t = MAX (t, (cfun_frame_layout.f8_offset
6501 + cfun_frame_layout.high_fprs * 8));
6502 }
6503
6504 if (!TARGET_64BIT)
6505 for (i = 2; i < 4; i++)
6506 if (cfun_fpr_bit_p (i))
6507 {
6508 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6509 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6510 }
6511
6512 *area_bottom = b;
6513 *area_top = t;
6514 }
6515
6516 /* Fill cfun->machine with info about register usage of current function.
6517 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6518
6519 static void
6520 s390_register_info (int clobbered_regs[])
6521 {
6522 int i, j;
6523
6524 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6525 cfun_frame_layout.fpr_bitmap = 0;
6526 cfun_frame_layout.high_fprs = 0;
6527 if (TARGET_64BIT)
6528 for (i = 24; i < 32; i++)
6529 if (df_regs_ever_live_p (i) && !global_regs[i])
6530 {
6531 cfun_set_fpr_bit (i - 16);
6532 cfun_frame_layout.high_fprs++;
6533 }
6534
6535 /* Find first and last gpr to be saved. We trust regs_ever_live
6536 data, except that we don't save and restore global registers.
6537
6538 Also, all registers with special meaning to the compiler need
6539      to be handled specially.  */
6540
6541 s390_regs_ever_clobbered (clobbered_regs);
6542
6543 for (i = 0; i < 16; i++)
6544 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6545
6546 if (frame_pointer_needed)
6547 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6548
6549 if (flag_pic)
6550 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6551 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
6552
6553 clobbered_regs[BASE_REGNUM]
6554 |= (cfun->machine->base_reg
6555 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
6556
6557 clobbered_regs[RETURN_REGNUM]
6558 |= (!current_function_is_leaf
6559 || TARGET_TPF_PROFILING
6560 || cfun->machine->split_branches_pending_p
6561 || cfun_frame_layout.save_return_addr_p
6562 || current_function_calls_eh_return
6563 || current_function_stdarg);
6564
6565 clobbered_regs[STACK_POINTER_REGNUM]
6566 |= (!current_function_is_leaf
6567 || TARGET_TPF_PROFILING
6568 || cfun_save_high_fprs_p
6569 || get_frame_size () > 0
6570 || current_function_calls_alloca
6571 || current_function_stdarg);
6572
6573 for (i = 6; i < 16; i++)
6574 if (df_regs_ever_live_p (i) || clobbered_regs[i])
6575 break;
6576 for (j = 15; j > i; j--)
6577 if (df_regs_ever_live_p (j) || clobbered_regs[j])
6578 break;
6579
6580 if (i == 16)
6581 {
6582 /* Nothing to save/restore. */
6583 cfun_frame_layout.first_save_gpr_slot = -1;
6584 cfun_frame_layout.last_save_gpr_slot = -1;
6585 cfun_frame_layout.first_save_gpr = -1;
6586 cfun_frame_layout.first_restore_gpr = -1;
6587 cfun_frame_layout.last_save_gpr = -1;
6588 cfun_frame_layout.last_restore_gpr = -1;
6589 }
6590 else
6591 {
6592 /* Save slots for gprs from i to j. */
6593 cfun_frame_layout.first_save_gpr_slot = i;
6594 cfun_frame_layout.last_save_gpr_slot = j;
6595
6596 for (i = cfun_frame_layout.first_save_gpr_slot;
6597 i < cfun_frame_layout.last_save_gpr_slot + 1;
6598 i++)
6599 if (clobbered_regs[i])
6600 break;
6601
6602 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
6603 if (clobbered_regs[j])
6604 break;
6605
6606 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
6607 {
6608 /* Nothing to save/restore. */
6609 cfun_frame_layout.first_save_gpr = -1;
6610 cfun_frame_layout.first_restore_gpr = -1;
6611 cfun_frame_layout.last_save_gpr = -1;
6612 cfun_frame_layout.last_restore_gpr = -1;
6613 }
6614 else
6615 {
6616 /* Save / Restore from gpr i to j. */
6617 cfun_frame_layout.first_save_gpr = i;
6618 cfun_frame_layout.first_restore_gpr = i;
6619 cfun_frame_layout.last_save_gpr = j;
6620 cfun_frame_layout.last_restore_gpr = j;
6621 }
6622 }
6623
6624 if (current_function_stdarg)
6625 {
6626 /* Varargs functions need to save gprs 2 to 6. */
6627 if (cfun->va_list_gpr_size
6628 && current_function_args_info.gprs < GP_ARG_NUM_REG)
6629 {
6630 int min_gpr = current_function_args_info.gprs;
6631 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6632 if (max_gpr > GP_ARG_NUM_REG)
6633 max_gpr = GP_ARG_NUM_REG;
6634
6635 if (cfun_frame_layout.first_save_gpr == -1
6636 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
6637 {
6638 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6639 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
6640 }
6641
6642 if (cfun_frame_layout.last_save_gpr == -1
6643 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
6644 {
6645 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
6646 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
6647 }
6648 }
6649
6650       /* Mark f0, f2 for 31 bit and f0, f2, f4 and f6 for 64 bit to be saved.  */
6651 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
6652 && current_function_args_info.fprs < FP_ARG_NUM_REG)
6653 {
6654 int min_fpr = current_function_args_info.fprs;
6655 int max_fpr = min_fpr + cfun->va_list_fpr_size;
6656 if (max_fpr > FP_ARG_NUM_REG)
6657 max_fpr = FP_ARG_NUM_REG;
6658
6659 /* ??? This is currently required to ensure proper location
6660 of the fpr save slots within the va_list save area. */
6661 if (TARGET_PACKED_STACK)
6662 min_fpr = 0;
6663
6664 for (i = min_fpr; i < max_fpr; i++)
6665 cfun_set_fpr_bit (i);
6666 }
6667 }
6668
6669 if (!TARGET_64BIT)
6670 for (i = 2; i < 4; i++)
6671 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
6672 cfun_set_fpr_bit (i);
6673 }
6674
6675 /* Fill cfun->machine with info about frame of current function. */
6676
6677 static void
6678 s390_frame_info (void)
6679 {
6680 int i;
6681
6682 cfun_frame_layout.frame_size = get_frame_size ();
6683 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6684 fatal_error ("total size of local variables exceeds architecture limit");
6685
6686 if (!TARGET_PACKED_STACK)
6687 {
6688 cfun_frame_layout.backchain_offset = 0;
6689 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6690 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6691 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6692 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
6693 * UNITS_PER_WORD);
6694 }
6695 else if (TARGET_BACKCHAIN) /* kernel stack layout */
6696 {
6697 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6698 - UNITS_PER_WORD);
6699 cfun_frame_layout.gprs_offset
6700 = (cfun_frame_layout.backchain_offset
6701 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
6702 * UNITS_PER_WORD);
6703
6704 if (TARGET_64BIT)
6705 {
6706 cfun_frame_layout.f4_offset
6707 = (cfun_frame_layout.gprs_offset
6708 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6709
6710 cfun_frame_layout.f0_offset
6711 = (cfun_frame_layout.f4_offset
6712 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6713 }
6714 else
6715 {
6716 	  /* On 31 bit we have to take care of the alignment of the
6717 	     floating point register save slots to provide fastest access.  */
6718 cfun_frame_layout.f0_offset
6719 = ((cfun_frame_layout.gprs_offset
6720 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6721 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6722
6723 cfun_frame_layout.f4_offset
6724 = (cfun_frame_layout.f0_offset
6725 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6726 }
6727 }
6728 else /* no backchain */
6729 {
6730 cfun_frame_layout.f4_offset
6731 = (STACK_POINTER_OFFSET
6732 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6733
6734 cfun_frame_layout.f0_offset
6735 = (cfun_frame_layout.f4_offset
6736 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6737
6738 cfun_frame_layout.gprs_offset
6739 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6740 }
6741
6742 if (current_function_is_leaf
6743 && !TARGET_TPF_PROFILING
6744 && cfun_frame_layout.frame_size == 0
6745 && !cfun_save_high_fprs_p
6746 && !current_function_calls_alloca
6747 && !current_function_stdarg)
6748 return;
6749
6750 if (!TARGET_PACKED_STACK)
6751 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
6752 + current_function_outgoing_args_size
6753 + cfun_frame_layout.high_fprs * 8);
6754 else
6755 {
6756 if (TARGET_BACKCHAIN)
6757 cfun_frame_layout.frame_size += UNITS_PER_WORD;
6758
6759 /* No alignment trouble here because f8-f15 are only saved under
6760 64 bit. */
6761 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6762 cfun_frame_layout.f4_offset),
6763 cfun_frame_layout.gprs_offset)
6764 - cfun_frame_layout.high_fprs * 8);
6765
6766 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6767
6768 for (i = 0; i < 8; i++)
6769 if (cfun_fpr_bit_p (i))
6770 cfun_frame_layout.frame_size += 8;
6771
6772 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6773
6774       /* On 31 bit, if an odd number of gprs has to be saved, we have to adjust
6775 	 the frame size to maintain the 8 byte alignment of stack frames.  */
6776 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6777 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6778 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6779
6780 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6781 }
6782 }
6783
6784 /* Generate frame layout. Fills in register and frame data for the current
6785 function in cfun->machine. This routine can be called multiple times;
6786 it will re-do the complete frame layout every time. */
6787
6788 static void
6789 s390_init_frame_layout (void)
6790 {
6791 HOST_WIDE_INT frame_size;
6792 int base_used;
6793 int clobbered_regs[16];
6794
6795 /* On S/390 machines, we may need to perform branch splitting, which
6796 will require both base and return address register. We have no
6797 choice but to assume we're going to need them until right at the
6798 end of the machine dependent reorg phase. */
6799 if (!TARGET_CPU_ZARCH)
6800 cfun->machine->split_branches_pending_p = true;
6801
6802 do
6803 {
6804 frame_size = cfun_frame_layout.frame_size;
6805
6806 /* Try to predict whether we'll need the base register. */
6807 base_used = cfun->machine->split_branches_pending_p
6808 || current_function_uses_const_pool
6809 || (!DISP_IN_RANGE (frame_size)
6810 && !CONST_OK_FOR_K (frame_size));
6811
6812 /* Decide which register to use as literal pool base. In small
6813 leaf functions, try to use an unused call-clobbered register
6814 as base register to avoid save/restore overhead. */
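      /* GPR 5 is the last call-clobbered argument register; using it in a
	 leaf function needs no save slot, whereas BASE_REGNUM (r13) is
	 call-saved and would have to be spilled.  (Explanatory note; the
	 actual choice is made by the code below.)  */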
6815 if (!base_used)
6816 cfun->machine->base_reg = NULL_RTX;
6817 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
6818 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6819 else
6820 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6821
6822 s390_register_info (clobbered_regs);
6823 s390_frame_info ();
6824 }
6825 while (frame_size != cfun_frame_layout.frame_size);
6826 }
6827
6828 /* Update frame layout. Recompute actual register save data based on
6829 current info and update regs_ever_live for the special registers.
6830 May be called multiple times, but may never cause *more* registers
6831 to be saved than s390_init_frame_layout allocated room for. */
6832
6833 static void
6834 s390_update_frame_layout (void)
6835 {
6836 int clobbered_regs[16];
6837
6838 s390_register_info (clobbered_regs);
6839
6840 df_set_regs_ever_live (BASE_REGNUM,
6841 clobbered_regs[BASE_REGNUM] ? true : false);
6842 df_set_regs_ever_live (RETURN_REGNUM,
6843 clobbered_regs[RETURN_REGNUM] ? true : false);
6844 df_set_regs_ever_live (STACK_POINTER_REGNUM,
6845 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
6846
6847 if (cfun->machine->base_reg)
6848 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
6849 }
6850
6851 /* Return true if it is legal to put a value with MODE into REGNO. */
6852
6853 bool
6854 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
6855 {
6856 switch (REGNO_REG_CLASS (regno))
6857 {
6858 case FP_REGS:
6859 if (REGNO_PAIR_OK (regno, mode))
6860 {
6861 if (mode == SImode || mode == DImode)
6862 return true;
6863
6864 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
6865 return true;
6866 }
6867 break;
6868 case ADDR_REGS:
6869 if (FRAME_REGNO_P (regno) && mode == Pmode)
6870 return true;
6871
6872 /* fallthrough */
6873 case GENERAL_REGS:
6874 if (REGNO_PAIR_OK (regno, mode))
6875 {
6876 if (TARGET_64BIT
6877 || (mode != TFmode && mode != TCmode && mode != TDmode))
6878 return true;
6879 }
6880 break;
6881 case CC_REGS:
6882 if (GET_MODE_CLASS (mode) == MODE_CC)
6883 return true;
6884 break;
6885 case ACCESS_REGS:
6886 if (REGNO_PAIR_OK (regno, mode))
6887 {
6888 if (mode == SImode || mode == Pmode)
6889 return true;
6890 }
6891 break;
6892 default:
6893 return false;
6894 }
6895
6896 return false;
6897 }
6898
6899 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6900
6901 bool
6902 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
6903 {
6904 /* Once we've decided upon a register to use as base register, it must
6905 no longer be used for any other purpose. */
6906 if (cfun->machine->base_reg)
6907 if (REGNO (cfun->machine->base_reg) == old_reg
6908 || REGNO (cfun->machine->base_reg) == new_reg)
6909 return false;
6910
6911 return true;
6912 }
6913
6914 /* Maximum number of registers to represent a value of mode MODE
6915 in a register of class CLASS. */
6916
6917 int
6918 s390_class_max_nregs (enum reg_class class, enum machine_mode mode)
6919 {
6920 switch (class)
6921 {
6922 case FP_REGS:
6923 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6924 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
6925 else
6926 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
6927 case ACCESS_REGS:
6928 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
6929 default:
6930 break;
6931 }
6932 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6933 }
6934
6935 /* Return true if register FROM can be eliminated via register TO. */
6936
6937 bool
6938 s390_can_eliminate (int from, int to)
6939 {
6940 /* On zSeries machines, we have not marked the base register as fixed.
6941 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
6942 If a function requires the base register, we say here that this
6943 elimination cannot be performed. This will cause reload to free
6944 up the base register (as if it were fixed). On the other hand,
6945 if the current function does *not* require the base register, we
6946 say here the elimination succeeds, which in turn allows reload
6947 to allocate the base register for any other purpose. */
6948 if (from == BASE_REGNUM && to == BASE_REGNUM)
6949 {
6950 if (TARGET_CPU_ZARCH)
6951 {
6952 s390_init_frame_layout ();
6953 return cfun->machine->base_reg == NULL_RTX;
6954 }
6955
6956 return false;
6957 }
6958
6959 /* Everything else must point into the stack frame. */
6960 gcc_assert (to == STACK_POINTER_REGNUM
6961 || to == HARD_FRAME_POINTER_REGNUM);
6962
6963 gcc_assert (from == FRAME_POINTER_REGNUM
6964 || from == ARG_POINTER_REGNUM
6965 || from == RETURN_ADDRESS_POINTER_REGNUM);
6966
6967 /* Make sure we actually saved the return address. */
6968 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6969 if (!current_function_calls_eh_return
6970 && !current_function_stdarg
6971 && !cfun_frame_layout.save_return_addr_p)
6972 return false;
6973
6974 return true;
6975 }
6976
6977 /* Return offset between register FROM and TO initially after prolog. */
6978
6979 HOST_WIDE_INT
6980 s390_initial_elimination_offset (int from, int to)
6981 {
6982 HOST_WIDE_INT offset;
6983 int index;
6984
6985 /* ??? Why are we called for non-eliminable pairs? */
6986 if (!s390_can_eliminate (from, to))
6987 return 0;
6988
6989 switch (from)
6990 {
6991 case FRAME_POINTER_REGNUM:
6992       offset = (get_frame_size ()
6993 + STACK_POINTER_OFFSET
6994 + current_function_outgoing_args_size);
6995 break;
6996
6997 case ARG_POINTER_REGNUM:
6998 s390_init_frame_layout ();
6999 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
7000 break;
7001
7002 case RETURN_ADDRESS_POINTER_REGNUM:
7003 s390_init_frame_layout ();
7004 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
7005 gcc_assert (index >= 0);
7006 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
7007 offset += index * UNITS_PER_WORD;
7008 break;
7009
7010 case BASE_REGNUM:
7011 offset = 0;
7012 break;
7013
7014 default:
7015 gcc_unreachable ();
7016 }
7017
7018 return offset;
7019 }
7020
7021 /* Emit insn to save fpr REGNUM at offset OFFSET relative
7022 to register BASE. Return generated insn. */
7023
7024 static rtx
7025 save_fpr (rtx base, int offset, int regnum)
7026 {
7027 rtx addr;
7028 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7029
7030 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
7031 set_mem_alias_set (addr, get_varargs_alias_set ());
7032 else
7033 set_mem_alias_set (addr, get_frame_alias_set ());
7034
7035 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
7036 }
7037
7038 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
7039 to register BASE. Return generated insn. */
7040
7041 static rtx
7042 restore_fpr (rtx base, int offset, int regnum)
7043 {
7044 rtx addr;
7045 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7046 set_mem_alias_set (addr, get_frame_alias_set ());
7047
7048 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
7049 }
7050
7051 /* Generate insn to save registers FIRST to LAST into
7052 the register save area located at offset OFFSET
7053 relative to register BASE. */
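/* Note: for FIRST == LAST this emits a single store; otherwise a
   store-multiple pattern is used, which the assembler output turns into
   STM (31 bit) or STMG (64 bit).  */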
7054
7055 static rtx
7056 save_gprs (rtx base, int offset, int first, int last)
7057 {
7058 rtx addr, insn, note;
7059 int i;
7060
7061 addr = plus_constant (base, offset);
7062 addr = gen_rtx_MEM (Pmode, addr);
7063
7064 set_mem_alias_set (addr, get_frame_alias_set ());
7065
7066 /* Special-case single register. */
7067 if (first == last)
7068 {
7069 if (TARGET_64BIT)
7070 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7071 else
7072 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7073
7074 RTX_FRAME_RELATED_P (insn) = 1;
7075 return insn;
7076 }
7077
7078
7079 insn = gen_store_multiple (addr,
7080 gen_rtx_REG (Pmode, first),
7081 GEN_INT (last - first + 1));
7082
7083 if (first <= 6 && current_function_stdarg)
7084 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7085 {
7086 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7087
7088 if (first + i <= 6)
7089 set_mem_alias_set (mem, get_varargs_alias_set ());
7090 }
7091
7092 /* We need to set the FRAME_RELATED flag on all SETs
7093 inside the store-multiple pattern.
7094
7095 However, we must not emit DWARF records for registers 2..5
7096 if they are stored for use by variable arguments ...
7097
7098      ??? Unfortunately, it is not enough to simply not set the
7099 FRAME_RELATED flags for those SETs, because the first SET
7100 of the PARALLEL is always treated as if it had the flag
7101 set, even if it does not. Therefore we emit a new pattern
7102 without those registers as REG_FRAME_RELATED_EXPR note. */
7103
7104 if (first >= 6)
7105 {
7106 rtx pat = PATTERN (insn);
7107
7108 for (i = 0; i < XVECLEN (pat, 0); i++)
7109 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
7110 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7111
7112 RTX_FRAME_RELATED_P (insn) = 1;
7113 }
7114 else if (last >= 6)
7115 {
7116 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
7117 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7118 gen_rtx_REG (Pmode, 6),
7119 GEN_INT (last - 6 + 1));
7120 note = PATTERN (note);
7121
7122 REG_NOTES (insn) =
7123 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7124 note, REG_NOTES (insn));
7125
7126 for (i = 0; i < XVECLEN (note, 0); i++)
7127 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
7128 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7129
7130 RTX_FRAME_RELATED_P (insn) = 1;
7131 }
7132
7133 return insn;
7134 }
7135
7136 /* Generate insn to restore registers FIRST to LAST from
7137 the register save area located at offset OFFSET
7138 relative to register BASE. */
7139
7140 static rtx
7141 restore_gprs (rtx base, int offset, int first, int last)
7142 {
7143 rtx addr, insn;
7144
7145 addr = plus_constant (base, offset);
7146 addr = gen_rtx_MEM (Pmode, addr);
7147 set_mem_alias_set (addr, get_frame_alias_set ());
7148
7149 /* Special-case single register. */
7150 if (first == last)
7151 {
7152 if (TARGET_64BIT)
7153 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7154 else
7155 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7156
7157 return insn;
7158 }
7159
7160 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7161 addr,
7162 GEN_INT (last - first + 1));
7163 return insn;
7164 }
7165
7166 /* Return insn sequence to load the GOT register. */
7167
7168 static GTY(()) rtx got_symbol;
7169 rtx
7170 s390_load_got (void)
7171 {
7172 rtx insns;
7173
7174 if (!got_symbol)
7175 {
7176 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7177 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7178 }
7179
7180 start_sequence ();
7181
7182 if (TARGET_CPU_ZARCH)
7183 {
7184 emit_move_insn (pic_offset_table_rtx, got_symbol);
7185 }
7186 else
7187 {
7188 rtx offset;
7189
7190 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7191 UNSPEC_LTREL_OFFSET);
7192 offset = gen_rtx_CONST (Pmode, offset);
7193 offset = force_const_mem (Pmode, offset);
7194
7195 emit_move_insn (pic_offset_table_rtx, offset);
7196
7197 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7198 UNSPEC_LTREL_BASE);
7199 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7200
7201 emit_move_insn (pic_offset_table_rtx, offset);
7202 }
7203
7204 insns = get_insns ();
7205 end_sequence ();
7206 return insns;
7207 }
7208
7209 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
7210 and the change to the stack pointer. */
7211
7212 static void
7213 s390_emit_stack_tie (void)
7214 {
7215 rtx mem = gen_frame_mem (BLKmode,
7216 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7217
7218 emit_insn (gen_stack_tie (mem));
7219 }
7220
7221 /* Expand the prologue into a bunch of separate insns. */
7222
7223 void
7224 s390_emit_prologue (void)
7225 {
7226 rtx insn, addr;
7227 rtx temp_reg;
7228 int i;
7229 int offset;
7230 int next_fpr = 0;
7231
7232 /* Complete frame layout. */
7233
7234 s390_update_frame_layout ();
7235
7236 /* Annotate all constant pool references to let the scheduler know
7237 they implicitly use the base register. */
7238
7239 push_topmost_sequence ();
7240
7241 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7242 if (INSN_P (insn))
7243 {
7244 annotate_constant_pool_refs (&PATTERN (insn));
7245 df_insn_rescan (insn);
7246 }
7247
7248 pop_topmost_sequence ();
7249
7250   /* Choose the best register to use as a temporary within the prologue.
7251      See below for why TPF must use register 1.  */
7252
7253 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7254 && !current_function_is_leaf
7255 && !TARGET_TPF_PROFILING)
7256 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7257 else
7258 temp_reg = gen_rtx_REG (Pmode, 1);
7259
7260 /* Save call saved gprs. */
7261 if (cfun_frame_layout.first_save_gpr != -1)
7262 {
7263 insn = save_gprs (stack_pointer_rtx,
7264 cfun_frame_layout.gprs_offset +
7265 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7266 - cfun_frame_layout.first_save_gpr_slot),
7267 cfun_frame_layout.first_save_gpr,
7268 cfun_frame_layout.last_save_gpr);
7269 emit_insn (insn);
7270 }
7271
7272 /* Dummy insn to mark literal pool slot. */
7273
7274 if (cfun->machine->base_reg)
7275 emit_insn (gen_main_pool (cfun->machine->base_reg));
7276
7277 offset = cfun_frame_layout.f0_offset;
7278
7279 /* Save f0 and f2. */
7280 for (i = 0; i < 2; i++)
7281 {
7282 if (cfun_fpr_bit_p (i))
7283 {
7284 save_fpr (stack_pointer_rtx, offset, i + 16);
7285 offset += 8;
7286 }
7287 else if (!TARGET_PACKED_STACK)
7288 offset += 8;
7289 }
7290
7291 /* Save f4 and f6. */
7292 offset = cfun_frame_layout.f4_offset;
7293 for (i = 2; i < 4; i++)
7294 {
7295 if (cfun_fpr_bit_p (i))
7296 {
7297 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7298 offset += 8;
7299
7300 /* If f4 and f6 are call clobbered they are saved due to stdargs and
7301 therefore are not frame related. */
7302 if (!call_really_used_regs[i + 16])
7303 RTX_FRAME_RELATED_P (insn) = 1;
7304 }
7305 else if (!TARGET_PACKED_STACK)
7306 offset += 8;
7307 }
7308
7309 if (TARGET_PACKED_STACK
7310 && cfun_save_high_fprs_p
7311 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7312 {
7313 offset = (cfun_frame_layout.f8_offset
7314 + (cfun_frame_layout.high_fprs - 1) * 8);
7315
7316 for (i = 15; i > 7 && offset >= 0; i--)
7317 if (cfun_fpr_bit_p (i))
7318 {
7319 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7320
7321 RTX_FRAME_RELATED_P (insn) = 1;
7322 offset -= 8;
7323 }
7324 if (offset >= cfun_frame_layout.f8_offset)
7325 next_fpr = i + 16;
7326 }
7327
7328 if (!TARGET_PACKED_STACK)
7329 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7330
7331 /* Decrement stack pointer. */
7332
7333 if (cfun_frame_layout.frame_size > 0)
7334 {
7335 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7336
7337 if (s390_stack_size)
7338 {
7339 HOST_WIDE_INT stack_guard;
7340
7341 if (s390_stack_guard)
7342 stack_guard = s390_stack_guard;
7343 else
7344 {
7345 	      /* If no value for the stack guard is provided, the smallest power of 2
7346 		 that is at least as large as the current frame size is chosen.  */
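	      /* E.g. a (hypothetical) frame size of 0x1900 bytes would
		 yield a stack guard of 0x2000.  */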
7347 stack_guard = 1;
7348 while (stack_guard < cfun_frame_layout.frame_size)
7349 stack_guard <<= 1;
7350 }
7351
7352 if (cfun_frame_layout.frame_size >= s390_stack_size)
7353 {
7354 warning (0, "frame size of function %qs is "
7355 HOST_WIDE_INT_PRINT_DEC
7356 " bytes exceeding user provided stack limit of "
7357 HOST_WIDE_INT_PRINT_DEC " bytes. "
7358 "An unconditional trap is added.",
7359 		     current_function_name (), cfun_frame_layout.frame_size,
7360 s390_stack_size);
7361 emit_insn (gen_trap ());
7362 }
7363 else
7364 {
7365 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7366 & ~(stack_guard - 1));
7367 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7368 GEN_INT (stack_check_mask));
7369 if (TARGET_64BIT)
7370 gen_cmpdi (t, const0_rtx);
7371 else
7372 gen_cmpsi (t, const0_rtx);
7373
7374 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
7375 gen_rtx_REG (CCmode,
7376 CC_REGNUM),
7377 const0_rtx),
7378 const0_rtx));
7379 }
7380 }
7381
7382 if (s390_warn_framesize > 0
7383 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7384 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7385 current_function_name (), cfun_frame_layout.frame_size);
7386
7387 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7388 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7389
7390 /* Save incoming stack pointer into temp reg. */
7391 if (TARGET_BACKCHAIN || next_fpr)
7392 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7393
7394 /* Subtract frame size from stack pointer. */
7395
7396 if (DISP_IN_RANGE (INTVAL (frame_off)))
7397 {
7398 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7399 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7400 frame_off));
7401 insn = emit_insn (insn);
7402 }
7403 else
7404 {
7405 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7406 frame_off = force_const_mem (Pmode, frame_off);
7407
7408 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7409 annotate_constant_pool_refs (&PATTERN (insn));
7410 }
7411
7412 RTX_FRAME_RELATED_P (insn) = 1;
7413 REG_NOTES (insn) =
7414 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7415 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7416 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7417 GEN_INT (-cfun_frame_layout.frame_size))),
7418 REG_NOTES (insn));
7419
7420 /* Set backchain. */
7421
7422 if (TARGET_BACKCHAIN)
7423 {
7424 if (cfun_frame_layout.backchain_offset)
7425 addr = gen_rtx_MEM (Pmode,
7426 plus_constant (stack_pointer_rtx,
7427 cfun_frame_layout.backchain_offset));
7428 else
7429 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7430 set_mem_alias_set (addr, get_frame_alias_set ());
7431 insn = emit_insn (gen_move_insn (addr, temp_reg));
7432 }
7433
7434 /* If we support asynchronous exceptions (e.g. for Java),
7435 we need to make sure the backchain pointer is set up
7436 before any possibly trapping memory access. */
7437
7438 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7439 {
7440 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7441 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
7442 }
7443 }
7444
7445 /* Save fprs 8 - 15 (64 bit ABI). */
7446
7447 if (cfun_save_high_fprs_p && next_fpr)
7448 {
7449 /* If the stack might be accessed through a different register
7450 we have to make sure that the stack pointer decrement is not
7451 moved below the use of the stack slots. */
7452 s390_emit_stack_tie ();
7453
7454 insn = emit_insn (gen_add2_insn (temp_reg,
7455 GEN_INT (cfun_frame_layout.f8_offset)));
7456
7457 offset = 0;
7458
7459 for (i = 24; i <= next_fpr; i++)
7460 if (cfun_fpr_bit_p (i - 16))
7461 {
7462 rtx addr = plus_constant (stack_pointer_rtx,
7463 cfun_frame_layout.frame_size
7464 + cfun_frame_layout.f8_offset
7465 + offset);
7466
7467 insn = save_fpr (temp_reg, offset, i);
7468 offset += 8;
7469 RTX_FRAME_RELATED_P (insn) = 1;
7470 REG_NOTES (insn) =
7471 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7472 gen_rtx_SET (VOIDmode,
7473 gen_rtx_MEM (DFmode, addr),
7474 gen_rtx_REG (DFmode, i)),
7475 REG_NOTES (insn));
7476 }
7477 }
7478
7479 /* Set frame pointer, if needed. */
7480
7481 if (frame_pointer_needed)
7482 {
7483 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7484 RTX_FRAME_RELATED_P (insn) = 1;
7485 }
7486
7487 /* Set up got pointer, if needed. */
7488
7489 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
7490 {
7491 rtx insns = s390_load_got ();
7492
7493 for (insn = insns; insn; insn = NEXT_INSN (insn))
7494 annotate_constant_pool_refs (&PATTERN (insn));
7495
7496 emit_insn (insns);
7497 }
7498
7499 if (TARGET_TPF_PROFILING)
7500 {
7501 /* Generate a BAS instruction to serve as a function
7502 entry intercept to facilitate the use of tracing
7503 algorithms located at the branch target. */
7504 emit_insn (gen_prologue_tpf ());
7505
7506 /* Emit a blockage here so that all code
7507 lies between the profiling mechanisms. */
7508 emit_insn (gen_blockage ());
7509 }
7510 }
7511
7512 /* Expand the epilogue into a bunch of separate insns. */
7513
7514 void
7515 s390_emit_epilogue (bool sibcall)
7516 {
7517 rtx frame_pointer, return_reg;
7518 int area_bottom, area_top, offset = 0;
7519 int next_offset;
7520 rtvec p;
7521 int i;
7522
7523 if (TARGET_TPF_PROFILING)
7524 {
7525
7526 /* Generate a BAS instruction to serve as a function
7527 entry intercept to facilitate the use of tracing
7528 algorithms located at the branch target. */
7529
7530 /* Emit a blockage here so that all code
7531 lies between the profiling mechanisms. */
7532 emit_insn (gen_blockage ());
7533
7534 emit_insn (gen_epilogue_tpf ());
7535 }
7536
7537 /* Check whether to use frame or stack pointer for restore. */
7538
7539 frame_pointer = (frame_pointer_needed
7540 ? hard_frame_pointer_rtx : stack_pointer_rtx);
7541
7542 s390_frame_area (&area_bottom, &area_top);
7543
7544 /* Check whether we can access the register save area.
7545 If not, increment the frame pointer as required. */
7546
7547 if (area_top <= area_bottom)
7548 {
7549 /* Nothing to restore. */
7550 }
7551 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7552 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7553 {
7554 /* Area is in range. */
7555 offset = cfun_frame_layout.frame_size;
7556 }
7557 else
7558 {
7559 rtx insn, frame_off;
7560
7561 offset = area_bottom < 0 ? -area_bottom : 0;
7562 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7563
7564 if (DISP_IN_RANGE (INTVAL (frame_off)))
7565 {
7566 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7567 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7568 insn = emit_insn (insn);
7569 }
7570 else
7571 {
7572 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7573 frame_off = force_const_mem (Pmode, frame_off);
7574
7575 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7576 annotate_constant_pool_refs (&PATTERN (insn));
7577 }
7578 }
7579
7580 /* Restore call saved fprs. */
7581
7582 if (TARGET_64BIT)
7583 {
7584 if (cfun_save_high_fprs_p)
7585 {
7586 next_offset = cfun_frame_layout.f8_offset;
7587 for (i = 24; i < 32; i++)
7588 {
7589 if (cfun_fpr_bit_p (i - 16))
7590 {
7591 restore_fpr (frame_pointer,
7592 offset + next_offset, i);
7593 next_offset += 8;
7594 }
7595 }
7596 }
7597
7598 }
7599 else
7600 {
7601 next_offset = cfun_frame_layout.f4_offset;
7602 for (i = 18; i < 20; i++)
7603 {
7604 if (cfun_fpr_bit_p (i - 16))
7605 {
7606 restore_fpr (frame_pointer,
7607 offset + next_offset, i);
7608 next_offset += 8;
7609 }
7610 else if (!TARGET_PACKED_STACK)
7611 next_offset += 8;
7612 }
7613
7614 }
7615
7616 /* Return register. */
7617
7618 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7619
7620 /* Restore call saved gprs. */
7621
7622 if (cfun_frame_layout.first_restore_gpr != -1)
7623 {
7624 rtx insn, addr;
7625 int i;
7626
7627       /* Check for global registers and save them
7628 	 to the stack locations from where they get restored.  */
7629
7630 for (i = cfun_frame_layout.first_restore_gpr;
7631 i <= cfun_frame_layout.last_restore_gpr;
7632 i++)
7633 {
7634 /* These registers are special and need to be
7635 restored in any case. */
7636 if (i == STACK_POINTER_REGNUM
7637 || i == RETURN_REGNUM
7638 || i == BASE_REGNUM
7639 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7640 continue;
7641
7642 if (global_regs[i])
7643 {
7644 addr = plus_constant (frame_pointer,
7645 offset + cfun_frame_layout.gprs_offset
7646 + (i - cfun_frame_layout.first_save_gpr_slot)
7647 * UNITS_PER_WORD);
7648 addr = gen_rtx_MEM (Pmode, addr);
7649 set_mem_alias_set (addr, get_frame_alias_set ());
7650 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7651 }
7652 }
7653
7654 if (! sibcall)
7655 {
7656 	  /* Fetch return address from stack before load multiple;
7657 	     this helps scheduling.  */
7658
7659 if (cfun_frame_layout.save_return_addr_p
7660 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7661 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7662 {
7663 	      int return_regnum = find_unused_clobbered_reg ();
7664 if (!return_regnum)
7665 return_regnum = 4;
7666 return_reg = gen_rtx_REG (Pmode, return_regnum);
7667
7668 addr = plus_constant (frame_pointer,
7669 offset + cfun_frame_layout.gprs_offset
7670 + (RETURN_REGNUM
7671 - cfun_frame_layout.first_save_gpr_slot)
7672 * UNITS_PER_WORD);
7673 addr = gen_rtx_MEM (Pmode, addr);
7674 set_mem_alias_set (addr, get_frame_alias_set ());
7675 emit_move_insn (return_reg, addr);
7676 }
7677 }
7678
7679 insn = restore_gprs (frame_pointer,
7680 offset + cfun_frame_layout.gprs_offset
7681 + (cfun_frame_layout.first_restore_gpr
7682 - cfun_frame_layout.first_save_gpr_slot)
7683 * UNITS_PER_WORD,
7684 cfun_frame_layout.first_restore_gpr,
7685 cfun_frame_layout.last_restore_gpr);
7686 emit_insn (insn);
7687 }
7688
7689 if (! sibcall)
7690 {
7691
7692 /* Return to caller. */
7693
7694 p = rtvec_alloc (2);
7695
7696 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7697 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7698 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7699 }
7700 }
7701
7702
7703 /* Return the size in bytes of a function argument of
7704 type TYPE and/or mode MODE. At least one of TYPE or
7705 MODE must be specified. */
7706
7707 static int
7708 s390_function_arg_size (enum machine_mode mode, const_tree type)
7709 {
7710 if (type)
7711 return int_size_in_bytes (type);
7712
7713 /* No type info available for some library calls ... */
7714 if (mode != BLKmode)
7715 return GET_MODE_SIZE (mode);
7716
7717 /* If we have neither type nor mode, abort */
7718 gcc_unreachable ();
7719 }
7720
7721 /* Return true if a function argument of type TYPE and mode MODE
7722 is to be passed in a floating-point register, if available. */
7723
7724 static bool
7725 s390_function_arg_float (enum machine_mode mode, tree type)
7726 {
7727 int size = s390_function_arg_size (mode, type);
7728 if (size > 8)
7729 return false;
7730
7731 /* Soft-float changes the ABI: no floating-point registers are used. */
7732 if (TARGET_SOFT_FLOAT)
7733 return false;
7734
7735 /* No type info available for some library calls ... */
7736 if (!type)
7737 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
7738
7739 /* The ABI says that record types with a single member are treated
7740 just like that member would be. */
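  /* E.g. a struct whose only member is a double is passed in a
     floating-point register just like a plain double would be
     (an illustrative consequence of the rule above, given hardware
     floating point).  */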
7741 while (TREE_CODE (type) == RECORD_TYPE)
7742 {
7743 tree field, single = NULL_TREE;
7744
7745 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7746 {
7747 if (TREE_CODE (field) != FIELD_DECL)
7748 continue;
7749
7750 if (single == NULL_TREE)
7751 single = TREE_TYPE (field);
7752 else
7753 return false;
7754 }
7755
7756 if (single == NULL_TREE)
7757 return false;
7758 else
7759 type = single;
7760 }
7761
7762 return TREE_CODE (type) == REAL_TYPE;
7763 }
7764
7765 /* Return true if a function argument of type TYPE and mode MODE
7766 is to be passed in an integer register, or a pair of integer
7767 registers, if available. */
7768
7769 static bool
7770 s390_function_arg_integer (enum machine_mode mode, tree type)
7771 {
7772 int size = s390_function_arg_size (mode, type);
7773 if (size > 8)
7774 return false;
7775
7776 /* No type info available for some library calls ... */
7777 if (!type)
7778 return GET_MODE_CLASS (mode) == MODE_INT
7779 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
7780
7781 /* We accept small integral (and similar) types. */
7782 if (INTEGRAL_TYPE_P (type)
7783 || POINTER_TYPE_P (type)
7784 || TREE_CODE (type) == OFFSET_TYPE
7785 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7786 return true;
7787
7788 /* We also accept structs of size 1, 2, 4, 8 that are not
7789 passed in floating-point registers. */
7790 if (AGGREGATE_TYPE_P (type)
7791 && exact_log2 (size) >= 0
7792 && !s390_function_arg_float (mode, type))
7793 return true;
7794
7795 return false;
7796 }
7797
7798 /* Return 1 if a function argument of type TYPE and mode MODE
7799 is to be passed by reference. The ABI specifies that only
7800 structures of size 1, 2, 4, or 8 bytes are passed by value,
7801 all other structures (and complex numbers) are passed by
7802 reference. */
7803
7804 static bool
7805 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7806 enum machine_mode mode, const_tree type,
7807 bool named ATTRIBUTE_UNUSED)
7808 {
7809 int size = s390_function_arg_size (mode, type);
7810 if (size > 8)
7811 return true;
7812
7813 if (type)
7814 {
7815 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7816 return 1;
7817
7818 if (TREE_CODE (type) == COMPLEX_TYPE
7819 || TREE_CODE (type) == VECTOR_TYPE)
7820 return 1;
7821 }
7822
7823 return 0;
7824 }
7825
7826 /* Update the data in CUM to advance over an argument of mode MODE and
7827 data type TYPE. (TYPE is null for libcalls where that information
7828 may not be available.). The boolean NAMED specifies whether the
7829 argument is a named argument (as opposed to an unnamed argument
7830 matching an ellipsis). */
7831
7832 void
7833 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7834 tree type, int named ATTRIBUTE_UNUSED)
7835 {
7836 if (s390_function_arg_float (mode, type))
7837 {
7838 cum->fprs += 1;
7839 }
7840 else if (s390_function_arg_integer (mode, type))
7841 {
7842 int size = s390_function_arg_size (mode, type);
7843 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7844 }
7845 else
7846 gcc_unreachable ();
7847 }
7848
7849 /* Define where to put the arguments to a function.
7850 Value is zero to push the argument on the stack,
7851 or a hard register in which to store the argument.
7852
7853 MODE is the argument's machine mode.
7854 TYPE is the data type of the argument (as a tree).
7855 This is null for libcalls where that information may
7856 not be available.
7857 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7858 the preceding args and about the function being called.
7859 NAMED is nonzero if this argument is a named parameter
7860 (otherwise it is an extra parameter matching an ellipsis).
7861
7862 On S/390, we use general purpose registers 2 through 6 to
7863 pass integer, pointer, and certain structure arguments, and
7864 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7865 to pass floating point arguments. All remaining arguments
7866 are pushed to the stack. */
7867
7868 rtx
7869 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7870 int named ATTRIBUTE_UNUSED)
7871 {
7872 if (s390_function_arg_float (mode, type))
7873 {
7874 if (cum->fprs + 1 > FP_ARG_NUM_REG)
7875 return 0;
7876 else
7877 return gen_rtx_REG (mode, cum->fprs + 16);
7878 }
7879 else if (s390_function_arg_integer (mode, type))
7880 {
7881 int size = s390_function_arg_size (mode, type);
7882 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7883
7884 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
7885 return 0;
7886 else
7887 return gen_rtx_REG (mode, cum->gprs + 2);
7888 }
7889
7890 /* After the real arguments, expand_call calls us once again
7891 with a void_type_node type. Whatever we return here is
7892 passed as operand 2 to the call expanders.
7893
7894 We don't need this feature ... */
7895 else if (type == void_type_node)
7896 return const0_rtx;
7897
7898 gcc_unreachable ();
7899 }
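/* Illustrative example (not part of the implementation): under the 31-bit
   hard-float ABI described above, a call to

     void f (int a, double b, long long c, void *d);

   passes a in %r2, b in %f0, c in the register pair %r3/%r4 and d in %r5;
   one further integer argument would still fit in %r6, anything beyond
   that goes to the stack.  The prototype is made up for illustration.  */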
7900
7901 /* Return true if return values of type TYPE should be returned
7902 in a memory buffer whose address is passed by the caller as
7903 hidden first argument. */
7904
7905 static bool
7906 s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
7907 {
7908 /* We accept small integral (and similar) types. */
7909 if (INTEGRAL_TYPE_P (type)
7910 || POINTER_TYPE_P (type)
7911 || TREE_CODE (type) == OFFSET_TYPE
7912 || TREE_CODE (type) == REAL_TYPE)
7913 return int_size_in_bytes (type) > 8;
7914
7915 /* Aggregates and similar constructs are always returned
7916 in memory. */
7917 if (AGGREGATE_TYPE_P (type)
7918 || TREE_CODE (type) == COMPLEX_TYPE
7919 || TREE_CODE (type) == VECTOR_TYPE)
7920 return true;
7921
7922 /* ??? We get called on all sorts of random stuff from
7923 aggregate_value_p. We can't abort, but it's not clear
7924 what's safe to return. Pretend it's a struct I guess. */
7925 return true;
7926 }
7927
7928 /* Define where to return a (scalar) value of type TYPE.
7929 If TYPE is null, define where to return a (scalar)
7930 value of mode MODE from a libcall. */
7931
7932 rtx
7933 s390_function_value (const_tree type, enum machine_mode mode)
7934 {
7935 if (type)
7936 {
7937 int unsignedp = TYPE_UNSIGNED (type);
7938 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7939 }
7940
7941 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
7942 gcc_assert (GET_MODE_SIZE (mode) <= 8);
7943
7944 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
7945 return gen_rtx_REG (mode, 16);
7946 else
7947 return gen_rtx_REG (mode, 2);
7948 }
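/* For instance (illustrative only): an 'int', 'long' or pointer result is
   returned in %r2, a 'float' or 'double' result in %f0 (hard register 16);
   with soft-float the floating-point result comes back in %r2 instead.  */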
7949
7950
7951 /* Create and return the va_list datatype.
7952
7953 On S/390, va_list is an array type equivalent to
7954
7955 typedef struct __va_list_tag
7956 {
7957 long __gpr;
7958 long __fpr;
7959 void *__overflow_arg_area;
7960 void *__reg_save_area;
7961 } va_list[1];
7962
7963 where __gpr and __fpr hold the number of general purpose
7964 or floating point arguments used up to now, respectively,
7965 __overflow_arg_area points to the stack location of the
7966 next argument passed on the stack, and __reg_save_area
7967 always points to the start of the register area in the
7968 call frame of the current function. The function prologue
7969 saves all registers used for argument passing into this
7970 area if the function uses variable arguments. */
7971
7972 static tree
7973 s390_build_builtin_va_list (void)
7974 {
7975 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7976
7977 record = lang_hooks.types.make_type (RECORD_TYPE);
7978
7979 type_decl =
7980 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7981
7982 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7983 long_integer_type_node);
7984 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7985 long_integer_type_node);
7986 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7987 ptr_type_node);
7988 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7989 ptr_type_node);
7990
7991 va_list_gpr_counter_field = f_gpr;
7992 va_list_fpr_counter_field = f_fpr;
7993
7994 DECL_FIELD_CONTEXT (f_gpr) = record;
7995 DECL_FIELD_CONTEXT (f_fpr) = record;
7996 DECL_FIELD_CONTEXT (f_ovf) = record;
7997 DECL_FIELD_CONTEXT (f_sav) = record;
7998
7999 TREE_CHAIN (record) = type_decl;
8000 TYPE_NAME (record) = type_decl;
8001 TYPE_FIELDS (record) = f_gpr;
8002 TREE_CHAIN (f_gpr) = f_fpr;
8003 TREE_CHAIN (f_fpr) = f_ovf;
8004 TREE_CHAIN (f_ovf) = f_sav;
8005
8006 layout_type (record);
8007
8008 /* The correct type is an array type of one element. */
8009 return build_array_type (record, build_index_type (size_zero_node));
8010 }
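/* A minimal user-level sketch (illustrative, not part of GCC) of code that
   exercises this layout; the function name "sum" is made up:

     int sum (int n, ...)
     {
       va_list ap;               // one-element array of __va_list_tag
       int i, s = 0;
       va_start (ap, n);         // handled by s390_va_start below
       for (i = 0; i < n; i++)
         s += va_arg (ap, int);  // handled by s390_gimplify_va_arg below
       va_end (ap);
       return s;
     }  */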
8011
8012 /* Implement va_start by filling the va_list structure VALIST.
8013 STDARG_P is always true, and ignored.
8014 NEXTARG points to the first anonymous stack argument.
8015
8016 The following global variables are used to initialize
8017 the va_list structure:
8018
8019 current_function_args_info:
8020 holds number of gprs and fprs used for named arguments.
8021 current_function_arg_offset_rtx:
8022 holds the offset of the first anonymous stack argument
8023 (relative to the virtual arg pointer). */
8024
8025 static void
8026 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
8027 {
8028 HOST_WIDE_INT n_gpr, n_fpr;
8029 int off;
8030 tree f_gpr, f_fpr, f_ovf, f_sav;
8031 tree gpr, fpr, ovf, sav, t;
8032
8033 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8034 f_fpr = TREE_CHAIN (f_gpr);
8035 f_ovf = TREE_CHAIN (f_fpr);
8036 f_sav = TREE_CHAIN (f_ovf);
8037
8038 valist = build_va_arg_indirect_ref (valist);
8039 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8040 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8041 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8042 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8043
8044 /* Count number of gp and fp argument registers used. */
8045
8046 n_gpr = current_function_args_info.gprs;
8047 n_fpr = current_function_args_info.fprs;
8048
8049 if (cfun->va_list_gpr_size)
8050 {
8051 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
8052 build_int_cst (NULL_TREE, n_gpr));
8053 TREE_SIDE_EFFECTS (t) = 1;
8054 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8055 }
8056
8057 if (cfun->va_list_fpr_size)
8058 {
8059 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
8060 build_int_cst (NULL_TREE, n_fpr));
8061 TREE_SIDE_EFFECTS (t) = 1;
8062 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8063 }
8064
8065 /* Find the overflow area. */
8066 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8067 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8068 {
8069 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8070
8071 off = INTVAL (current_function_arg_offset_rtx);
8072 off = off < 0 ? 0 : off;
8073 if (TARGET_DEBUG_ARG)
8074 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8075 (int)n_gpr, (int)n_fpr, off);
8076
8077 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
8078
8079 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
8080 TREE_SIDE_EFFECTS (t) = 1;
8081 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8082 }
8083
8084 /* Find the register save area. */
8085 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8086 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8087 {
8088 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
8089 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8090 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
8091
8092 t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
8093 TREE_SIDE_EFFECTS (t) = 1;
8094 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8095 }
8096 }
8097
8098 /* Implement va_arg by updating the va_list structure
8099 VALIST as required to retrieve an argument of type
8100 TYPE, and returning that argument.
8101
8102 Generates code equivalent to:
8103
8104 if (integral value) {
8105 if (size <= 4 && args.gpr < 5 ||
8106 size > 4 && args.gpr < 4 )
8107 ret = args.reg_save_area[args.gpr+8]
8108 else
8109 ret = *args.overflow_arg_area++;
8110 } else if (float value) {
8111 if (args.fpr < 2)
8112 ret = args.reg_save_area[args.fpr+64]
8113 else
8114 ret = *args.overflow_arg_area++;
8115 } else if (aggregate value) {
8116 if (args.gpr < 5)
8117 ret = *args.reg_save_area[args.gpr]
8118 else
8119 ret = **args.overflow_arg_area++;
8120 } */
8121
8122 static tree
8123 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
8124 tree *post_p ATTRIBUTE_UNUSED)
8125 {
8126 tree f_gpr, f_fpr, f_ovf, f_sav;
8127 tree gpr, fpr, ovf, sav, reg, t, u;
8128 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8129 tree lab_false, lab_over, addr;
8130
8131 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8132 f_fpr = TREE_CHAIN (f_gpr);
8133 f_ovf = TREE_CHAIN (f_fpr);
8134 f_sav = TREE_CHAIN (f_ovf);
8135
8136 valist = build_va_arg_indirect_ref (valist);
8137 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8138 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8139 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8140 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8141
8142 size = int_size_in_bytes (type);
8143
8144 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8145 {
8146 if (TARGET_DEBUG_ARG)
8147 {
8148 fprintf (stderr, "va_arg: aggregate type");
8149 debug_tree (type);
8150 }
8151
8152 /* Aggregates are passed by reference. */
8153 indirect_p = 1;
8154 reg = gpr;
8155 n_reg = 1;
8156
8157 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8158 will be added by s390_frame_info, because for va_args an even number
8159 of gprs always has to be saved (r15-r2 = 14 regs). */
8160 sav_ofs = 2 * UNITS_PER_WORD;
8161 sav_scale = UNITS_PER_WORD;
8162 size = UNITS_PER_WORD;
8163 max_reg = GP_ARG_NUM_REG - n_reg;
8164 }
8165 else if (s390_function_arg_float (TYPE_MODE (type), type))
8166 {
8167 if (TARGET_DEBUG_ARG)
8168 {
8169 fprintf (stderr, "va_arg: float type");
8170 debug_tree (type);
8171 }
8172
8173 /* FP args go in FP registers, if present. */
8174 indirect_p = 0;
8175 reg = fpr;
8176 n_reg = 1;
8177 sav_ofs = 16 * UNITS_PER_WORD;
8178 sav_scale = 8;
8179 max_reg = FP_ARG_NUM_REG - n_reg;
8180 }
8181 else
8182 {
8183 if (TARGET_DEBUG_ARG)
8184 {
8185 fprintf (stderr, "va_arg: other type");
8186 debug_tree (type);
8187 }
8188
8189 /* Otherwise into GP registers. */
8190 indirect_p = 0;
8191 reg = gpr;
8192 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8193
8194 /* Kernel stack layout on 31 bit: it is assumed here that no padding
8195 will be added by s390_frame_info, because for va_args an even number
8196 of gprs always has to be saved (r15-r2 = 14 regs). */
8197 sav_ofs = 2 * UNITS_PER_WORD;
8198
8199 if (size < UNITS_PER_WORD)
8200 sav_ofs += UNITS_PER_WORD - size;
8201
8202 sav_scale = UNITS_PER_WORD;
8203 max_reg = GP_ARG_NUM_REG - n_reg;
8204 }
8205
8206 /* Pull the value out of the saved registers ... */
8207
8208 lab_false = create_artificial_label ();
8209 lab_over = create_artificial_label ();
8210 addr = create_tmp_var (ptr_type_node, "addr");
8211 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
8212
8213 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8214 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8215 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8216 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8217 gimplify_and_add (t, pre_p);
8218
8219 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8220 size_int (sav_ofs));
8221 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8222 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8223 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
8224
8225 t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8226 gimplify_and_add (t, pre_p);
8227
8228 t = build1 (GOTO_EXPR, void_type_node, lab_over);
8229 gimplify_and_add (t, pre_p);
8230
8231 t = build1 (LABEL_EXPR, void_type_node, lab_false);
8232 append_to_statement_list (t, pre_p);
8233
8234
8235 /* ... Otherwise out of the overflow area. */
8236
8237 t = ovf;
8238 if (size < UNITS_PER_WORD)
8239 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8240 size_int (UNITS_PER_WORD - size));
8241
8242 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8243
8244 u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
8245 gimplify_and_add (u, pre_p);
8246
8247 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8248 size_int (size));
8249 t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
8250 gimplify_and_add (t, pre_p);
8251
8252 t = build1 (LABEL_EXPR, void_type_node, lab_over);
8253 append_to_statement_list (t, pre_p);
8254
8255
8256 /* Increment register save count. */
8257
8258 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8259 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8260 gimplify_and_add (u, pre_p);
8261
8262 if (indirect_p)
8263 {
8264 t = build_pointer_type (build_pointer_type (type));
8265 addr = fold_convert (t, addr);
8266 addr = build_va_arg_indirect_ref (addr);
8267 }
8268 else
8269 {
8270 t = build_pointer_type (type);
8271 addr = fold_convert (t, addr);
8272 }
8273
8274 return build_va_arg_indirect_ref (addr);
8275 }
8276
8277
8278 /* Builtins. */
8279
8280 enum s390_builtin
8281 {
8282 S390_BUILTIN_THREAD_POINTER,
8283 S390_BUILTIN_SET_THREAD_POINTER,
8284
8285 S390_BUILTIN_max
8286 };
8287
8288 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
8289 CODE_FOR_get_tp_64,
8290 CODE_FOR_set_tp_64
8291 };
8292
8293 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
8294 CODE_FOR_get_tp_31,
8295 CODE_FOR_set_tp_31
8296 };
8297
8298 static void
8299 s390_init_builtins (void)
8300 {
8301 tree ftype;
8302
8303 ftype = build_function_type (ptr_type_node, void_list_node);
8304 add_builtin_function ("__builtin_thread_pointer", ftype,
8305 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8306 NULL, NULL_TREE);
8307
8308 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8309 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8310 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8311 NULL, NULL_TREE);
8312 }
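/* Usage sketch (illustrative): in user code these builtins look like

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   and expand via s390_expand_builtin below into the get_tp/set_tp
   patterns selected in the code_for_builtin tables above.  */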
8313
8314 /* Expand an expression EXP that calls a built-in function,
8315 with result going to TARGET if that's convenient
8316 (and in mode MODE if that's convenient).
8317 SUBTARGET may be used as the target for computing one of EXP's operands.
8318 IGNORE is nonzero if the value is to be ignored. */
8319
8320 static rtx
8321 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8322 enum machine_mode mode ATTRIBUTE_UNUSED,
8323 int ignore ATTRIBUTE_UNUSED)
8324 {
8325 #define MAX_ARGS 2
8326
8327 unsigned int const *code_for_builtin =
8328 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8329
8330 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8331 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8332 enum insn_code icode;
8333 rtx op[MAX_ARGS], pat;
8334 int arity;
8335 bool nonvoid;
8336 tree arg;
8337 call_expr_arg_iterator iter;
8338
8339 if (fcode >= S390_BUILTIN_max)
8340 internal_error ("bad builtin fcode");
8341 icode = code_for_builtin[fcode];
8342 if (icode == 0)
8343 internal_error ("bad builtin fcode");
8344
8345 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8346
8347 arity = 0;
8348 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8349 {
8350 const struct insn_operand_data *insn_op;
8351
8352 if (arg == error_mark_node)
8353 return NULL_RTX;
8354 if (arity >= MAX_ARGS)
8355 return NULL_RTX;
8356
8357 insn_op = &insn_data[icode].operand[arity + nonvoid];
8358
8359 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
8360
8361 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8362 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8363 arity++;
8364 }
8365
8366 if (nonvoid)
8367 {
8368 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8369 if (!target
8370 || GET_MODE (target) != tmode
8371 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8372 target = gen_reg_rtx (tmode);
8373 }
8374
8375 switch (arity)
8376 {
8377 case 0:
8378 pat = GEN_FCN (icode) (target);
8379 break;
8380 case 1:
8381 if (nonvoid)
8382 pat = GEN_FCN (icode) (target, op[0]);
8383 else
8384 pat = GEN_FCN (icode) (op[0]);
8385 break;
8386 case 2:
8387 pat = GEN_FCN (icode) (target, op[0], op[1]);
8388 break;
8389 default:
8390 gcc_unreachable ();
8391 }
8392 if (!pat)
8393 return NULL_RTX;
8394 emit_insn (pat);
8395
8396 if (nonvoid)
8397 return target;
8398 else
8399 return const0_rtx;
8400 }
8401
8402
8403 /* Output assembly code for the trampoline template to
8404 stdio stream FILE.
8405
8406 On S/390, we use gpr 1 internally in the trampoline code;
8407 gpr 0 is used to hold the static chain. */
8408
8409 void
8410 s390_trampoline_template (FILE *file)
8411 {
8412 rtx op[2];
8413 op[0] = gen_rtx_REG (Pmode, 0);
8414 op[1] = gen_rtx_REG (Pmode, 1);
8415
8416 if (TARGET_64BIT)
8417 {
8418 output_asm_insn ("basr\t%1,0", op);
8419 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8420 output_asm_insn ("br\t%1", op);
8421 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8422 }
8423 else
8424 {
8425 output_asm_insn ("basr\t%1,0", op);
8426 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8427 output_asm_insn ("br\t%1", op);
8428 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8429 }
8430 }
8431
8432 /* Emit RTL insns to initialize the variable parts of a trampoline.
8433 FNADDR is an RTX for the address of the function's pure code.
8434 CXT is an RTX for the static chain value for the function. */
8435
8436 void
8437 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
8438 {
8439 emit_move_insn (gen_rtx_MEM (Pmode,
8440 memory_address (Pmode,
8441 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
8442 emit_move_insn (gen_rtx_MEM (Pmode,
8443 memory_address (Pmode,
8444 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
8445 }
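/* Resulting trampoline layout (illustrative byte offsets):

       31-bit                          64-bit
     0: basr %r1,0                   0: basr %r1,0
     2: lm   %r0,%r1,6(%r1)          2: lmg  %r0,%r1,14(%r1)
     6: br   %r1                     8: br   %r1
     8: <static chain>              16: <static chain>
    12: <function address>          24: <function address>

   The lm/lmg displacement is taken relative to %r1, which basr has just
   set to the address following itself (offset 2), so the loads pick up
   the two words stored by s390_initialize_trampoline above.  */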
8446
8447 /* Output assembler code to FILE to increment profiler label # LABELNO
8448 for profiling a function entry. */
8449
8450 void
8451 s390_function_profiler (FILE *file, int labelno)
8452 {
8453 rtx op[7];
8454
8455 char label[128];
8456 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8457
8458 fprintf (file, "# function profiler \n");
8459
8460 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8461 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8462 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8463
8464 op[2] = gen_rtx_REG (Pmode, 1);
8465 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8466 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
8467
8468 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
8469 if (flag_pic)
8470 {
8471 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
8472 op[4] = gen_rtx_CONST (Pmode, op[4]);
8473 }
8474
8475 if (TARGET_64BIT)
8476 {
8477 output_asm_insn ("stg\t%0,%1", op);
8478 output_asm_insn ("larl\t%2,%3", op);
8479 output_asm_insn ("brasl\t%0,%4", op);
8480 output_asm_insn ("lg\t%0,%1", op);
8481 }
8482 else if (!flag_pic)
8483 {
8484 op[6] = gen_label_rtx ();
8485
8486 output_asm_insn ("st\t%0,%1", op);
8487 output_asm_insn ("bras\t%2,%l6", op);
8488 output_asm_insn (".long\t%4", op);
8489 output_asm_insn (".long\t%3", op);
8490 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8491 output_asm_insn ("l\t%0,0(%2)", op);
8492 output_asm_insn ("l\t%2,4(%2)", op);
8493 output_asm_insn ("basr\t%0,%0", op);
8494 output_asm_insn ("l\t%0,%1", op);
8495 }
8496 else
8497 {
8498 op[5] = gen_label_rtx ();
8499 op[6] = gen_label_rtx ();
8500
8501 output_asm_insn ("st\t%0,%1", op);
8502 output_asm_insn ("bras\t%2,%l6", op);
8503 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
8504 output_asm_insn (".long\t%4-%l5", op);
8505 output_asm_insn (".long\t%3-%l5", op);
8506 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
8507 output_asm_insn ("lr\t%0,%2", op);
8508 output_asm_insn ("a\t%0,0(%2)", op);
8509 output_asm_insn ("a\t%2,4(%2)", op);
8510 output_asm_insn ("basr\t%0,%0", op);
8511 output_asm_insn ("l\t%0,%1", op);
8512 }
8513 }
8514
8515 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
8516 into its SYMBOL_REF_FLAGS. */
8517
8518 static void
8519 s390_encode_section_info (tree decl, rtx rtl, int first)
8520 {
8521 default_encode_section_info (decl, rtl, first);
8522
8523 /* If a variable has a forced alignment to < 2 bytes, mark it with
8524 SYMBOL_FLAG_ALIGN1 to prevent it from being used as a LARL operand. */
8525 if (TREE_CODE (decl) == VAR_DECL
8526 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
8527 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
8528 }
8529
8530 /* Output thunk to FILE that implements a C++ virtual function call (with
8531 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8532 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8533 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8534 relative to the resulting this pointer. */
8535
8536 static void
8537 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8538 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8539 tree function)
8540 {
8541 rtx op[10];
8542 int nonlocal = 0;
8543
8544 /* Operand 0 is the target function. */
8545 op[0] = XEXP (DECL_RTL (function), 0);
8546 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8547 {
8548 nonlocal = 1;
8549 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8550 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8551 op[0] = gen_rtx_CONST (Pmode, op[0]);
8552 }
8553
8554 /* Operand 1 is the 'this' pointer. */
8555 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8556 op[1] = gen_rtx_REG (Pmode, 3);
8557 else
8558 op[1] = gen_rtx_REG (Pmode, 2);
8559
8560 /* Operand 2 is the delta. */
8561 op[2] = GEN_INT (delta);
8562
8563 /* Operand 3 is the vcall_offset. */
8564 op[3] = GEN_INT (vcall_offset);
8565
8566 /* Operand 4 is the temporary register. */
8567 op[4] = gen_rtx_REG (Pmode, 1);
8568
8569 /* Operands 5 to 8 can be used as labels. */
8570 op[5] = NULL_RTX;
8571 op[6] = NULL_RTX;
8572 op[7] = NULL_RTX;
8573 op[8] = NULL_RTX;
8574
8575 /* Operand 9 can be used as a temporary register. */
8576 op[9] = NULL_RTX;
8577
8578 /* Generate code. */
8579 if (TARGET_64BIT)
8580 {
8581 /* Setup literal pool pointer if required. */
8582 if ((!DISP_IN_RANGE (delta)
8583 && !CONST_OK_FOR_K (delta)
8584 && !CONST_OK_FOR_Os (delta))
8585 || (!DISP_IN_RANGE (vcall_offset)
8586 && !CONST_OK_FOR_K (vcall_offset)
8587 && !CONST_OK_FOR_Os (vcall_offset)))
8588 {
8589 op[5] = gen_label_rtx ();
8590 output_asm_insn ("larl\t%4,%5", op);
8591 }
8592
8593 /* Add DELTA to this pointer. */
8594 if (delta)
8595 {
8596 if (CONST_OK_FOR_J (delta))
8597 output_asm_insn ("la\t%1,%2(%1)", op);
8598 else if (DISP_IN_RANGE (delta))
8599 output_asm_insn ("lay\t%1,%2(%1)", op);
8600 else if (CONST_OK_FOR_K (delta))
8601 output_asm_insn ("aghi\t%1,%2", op);
8602 else if (CONST_OK_FOR_Os (delta))
8603 output_asm_insn ("agfi\t%1,%2", op);
8604 else
8605 {
8606 op[6] = gen_label_rtx ();
8607 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8608 }
8609 }
8610
8611 /* Perform vcall adjustment. */
8612 if (vcall_offset)
8613 {
8614 if (DISP_IN_RANGE (vcall_offset))
8615 {
8616 output_asm_insn ("lg\t%4,0(%1)", op);
8617 output_asm_insn ("ag\t%1,%3(%4)", op);
8618 }
8619 else if (CONST_OK_FOR_K (vcall_offset))
8620 {
8621 output_asm_insn ("lghi\t%4,%3", op);
8622 output_asm_insn ("ag\t%4,0(%1)", op);
8623 output_asm_insn ("ag\t%1,0(%4)", op);
8624 }
8625 else if (CONST_OK_FOR_Os (vcall_offset))
8626 {
8627 output_asm_insn ("lgfi\t%4,%3", op);
8628 output_asm_insn ("ag\t%4,0(%1)", op);
8629 output_asm_insn ("ag\t%1,0(%4)", op);
8630 }
8631 else
8632 {
8633 op[7] = gen_label_rtx ();
8634 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8635 output_asm_insn ("ag\t%4,0(%1)", op);
8636 output_asm_insn ("ag\t%1,0(%4)", op);
8637 }
8638 }
8639
8640 /* Jump to target. */
8641 output_asm_insn ("jg\t%0", op);
8642
8643 /* Output literal pool if required. */
8644 if (op[5])
8645 {
8646 output_asm_insn (".align\t4", op);
8647 targetm.asm_out.internal_label (file, "L",
8648 CODE_LABEL_NUMBER (op[5]));
8649 }
8650 if (op[6])
8651 {
8652 targetm.asm_out.internal_label (file, "L",
8653 CODE_LABEL_NUMBER (op[6]));
8654 output_asm_insn (".long\t%2", op);
8655 }
8656 if (op[7])
8657 {
8658 targetm.asm_out.internal_label (file, "L",
8659 CODE_LABEL_NUMBER (op[7]));
8660 output_asm_insn (".long\t%3", op);
8661 }
8662 }
8663 else
8664 {
8665 /* Setup base pointer if required. */
8666 if (!vcall_offset
8667 || (!DISP_IN_RANGE (delta)
8668 && !CONST_OK_FOR_K (delta)
8669 && !CONST_OK_FOR_Os (delta))
8670 || (!DISP_IN_RANGE (vcall_offset)
8671 && !CONST_OK_FOR_K (vcall_offset)
8672 && !CONST_OK_FOR_Os (vcall_offset)))
8673 {
8674 op[5] = gen_label_rtx ();
8675 output_asm_insn ("basr\t%4,0", op);
8676 targetm.asm_out.internal_label (file, "L",
8677 CODE_LABEL_NUMBER (op[5]));
8678 }
8679
8680 /* Add DELTA to this pointer. */
8681 if (delta)
8682 {
8683 if (CONST_OK_FOR_J (delta))
8684 output_asm_insn ("la\t%1,%2(%1)", op);
8685 else if (DISP_IN_RANGE (delta))
8686 output_asm_insn ("lay\t%1,%2(%1)", op);
8687 else if (CONST_OK_FOR_K (delta))
8688 output_asm_insn ("ahi\t%1,%2", op);
8689 else if (CONST_OK_FOR_Os (delta))
8690 output_asm_insn ("afi\t%1,%2", op);
8691 else
8692 {
8693 op[6] = gen_label_rtx ();
8694 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8695 }
8696 }
8697
8698 /* Perform vcall adjustment. */
8699 if (vcall_offset)
8700 {
8701 if (CONST_OK_FOR_J (vcall_offset))
8702 {
8703 output_asm_insn ("l\t%4,0(%1)", op);
8704 output_asm_insn ("a\t%1,%3(%4)", op);
8705 }
8706 else if (DISP_IN_RANGE (vcall_offset))
8707 {
8708 output_asm_insn ("l\t%4,0(%1)", op);
8709 output_asm_insn ("ay\t%1,%3(%4)", op);
8710 }
8711 else if (CONST_OK_FOR_K (vcall_offset))
8712 {
8713 output_asm_insn ("lhi\t%4,%3", op);
8714 output_asm_insn ("a\t%4,0(%1)", op);
8715 output_asm_insn ("a\t%1,0(%4)", op);
8716 }
8717 else if (CONST_OK_FOR_Os (vcall_offset))
8718 {
8719 output_asm_insn ("iilf\t%4,%3", op);
8720 output_asm_insn ("a\t%4,0(%1)", op);
8721 output_asm_insn ("a\t%1,0(%4)", op);
8722 }
8723 else
8724 {
8725 op[7] = gen_label_rtx ();
8726 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8727 output_asm_insn ("a\t%4,0(%1)", op);
8728 output_asm_insn ("a\t%1,0(%4)", op);
8729 }
8730
8731 /* We had to clobber the base pointer register.
8732 Re-setup the base pointer (with a different base). */
8733 op[5] = gen_label_rtx ();
8734 output_asm_insn ("basr\t%4,0", op);
8735 targetm.asm_out.internal_label (file, "L",
8736 CODE_LABEL_NUMBER (op[5]));
8737 }
8738
8739 /* Jump to target. */
8740 op[8] = gen_label_rtx ();
8741
8742 if (!flag_pic)
8743 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8744 else if (!nonlocal)
8745 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8746 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8747 else if (flag_pic == 1)
8748 {
8749 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8750 output_asm_insn ("l\t%4,%0(%4)", op);
8751 }
8752 else if (flag_pic == 2)
8753 {
8754 op[9] = gen_rtx_REG (Pmode, 0);
8755 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8756 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8757 output_asm_insn ("ar\t%4,%9", op);
8758 output_asm_insn ("l\t%4,0(%4)", op);
8759 }
8760
8761 output_asm_insn ("br\t%4", op);
8762
8763 /* Output literal pool. */
8764 output_asm_insn (".align\t4", op);
8765
8766 if (nonlocal && flag_pic == 2)
8767 output_asm_insn (".long\t%0", op);
8768 if (nonlocal)
8769 {
8770 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8771 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8772 }
8773
8774 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8775 if (!flag_pic)
8776 output_asm_insn (".long\t%0", op);
8777 else
8778 output_asm_insn (".long\t%0-%5", op);
8779
8780 if (op[6])
8781 {
8782 targetm.asm_out.internal_label (file, "L",
8783 CODE_LABEL_NUMBER (op[6]));
8784 output_asm_insn (".long\t%2", op);
8785 }
8786 if (op[7])
8787 {
8788 targetm.asm_out.internal_label (file, "L",
8789 CODE_LABEL_NUMBER (op[7]));
8790 output_asm_insn (".long\t%3", op);
8791 }
8792 }
8793 }
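/* For example (illustrative): on 64-bit, with a small DELTA and a zero
   VCALL_OFFSET, the emitted thunk degenerates to just

     la  %r2,DELTA(%r2)
     jg  function

   (with %r3 instead of %r2 when the target returns its value in memory).  */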
8794
8795 static bool
8796 s390_valid_pointer_mode (enum machine_mode mode)
8797 {
8798 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8799 }
8800
8801 /* Checks whether the given CALL_EXPR would use a call-saved
8802 register. This is used to decide whether sibling call
8803 optimization could be performed on the respective function
8804 call. */
8805
8806 static bool
8807 s390_call_saved_register_used (tree call_expr)
8808 {
8809 CUMULATIVE_ARGS cum;
8810 tree parameter;
8811 enum machine_mode mode;
8812 tree type;
8813 rtx parm_rtx;
8814 int reg, i;
8815
8816 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8817
8818 for (i = 0; i < call_expr_nargs (call_expr); i++)
8819 {
8820 parameter = CALL_EXPR_ARG (call_expr, i);
8821 gcc_assert (parameter);
8822
8823 /* For an undeclared variable passed as a parameter we will get
8824 an ERROR_MARK node here. */
8825 if (TREE_CODE (parameter) == ERROR_MARK)
8826 return true;
8827
8828 type = TREE_TYPE (parameter);
8829 gcc_assert (type);
8830
8831 mode = TYPE_MODE (type);
8832 gcc_assert (mode);
8833
8834 if (pass_by_reference (&cum, mode, type, true))
8835 {
8836 mode = Pmode;
8837 type = build_pointer_type (type);
8838 }
8839
8840 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8841
8842 s390_function_arg_advance (&cum, mode, type, 0);
8843
8844 if (parm_rtx && REG_P (parm_rtx))
8845 {
8846 for (reg = 0;
8847 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8848 reg++)
8849 if (! call_used_regs[reg + REGNO (parm_rtx)])
8850 return true;
8851 }
8852 }
8853 return false;
8854 }
8855
8856 /* Return true if the given call expression can be
8857 turned into a sibling call.
8858 DECL holds the declaration of the function to be called whereas
8859 EXP is the call expression itself. */
8860
8861 static bool
8862 s390_function_ok_for_sibcall (tree decl, tree exp)
8863 {
8864 /* The TPF epilogue uses register 1. */
8865 if (TARGET_TPF_PROFILING)
8866 return false;
8867
8868 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8869 which would have to be restored before the sibcall. */
8870 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
8871 return false;
8872
8873 /* Register 6 on s390 is available as an argument register but unfortunately
8874 "caller saved". This makes functions needing this register for arguments
8875 not suitable for sibcalls. */
8876 return !s390_call_saved_register_used (exp);
8877 }
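/* Example (illustrative): a tail call to

     void callee (int, int, int, int, int);

   cannot become a sibcall, because the fifth integer argument is passed
   in %r6, which is call-saved and would have to be restored first.  */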
8878
8879 /* Return the fixed registers used for condition codes. */
8880
8881 static bool
8882 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
8883 {
8884 *p1 = CC_REGNUM;
8885 *p2 = INVALID_REGNUM;
8886
8887 return true;
8888 }
8889
8890 /* This function is used by the call expanders of the machine description.
8891 It emits the call insn itself together with the necessary operations
8892 to adjust the target address and returns the emitted insn.
8893 ADDR_LOCATION is the target address rtx
8894 TLS_CALL the location of the thread-local symbol
8895 RESULT_REG the register where the result of the call should be stored
8896 RETADDR_REG the register where the return address should be stored
8897 If this parameter is NULL_RTX the call is considered
8898 to be a sibling call. */
8899
8900 rtx
8901 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8902 rtx retaddr_reg)
8903 {
8904 bool plt_call = false;
8905 rtx insn;
8906 rtx call;
8907 rtx clobber;
8908 rtvec vec;
8909
8910 /* Direct function calls need special treatment. */
8911 if (GET_CODE (addr_location) == SYMBOL_REF)
8912 {
8913 /* When calling a global routine in PIC mode, we must
8914 replace the symbol itself with the PLT stub. */
8915 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8916 {
8917 addr_location = gen_rtx_UNSPEC (Pmode,
8918 gen_rtvec (1, addr_location),
8919 UNSPEC_PLT);
8920 addr_location = gen_rtx_CONST (Pmode, addr_location);
8921 plt_call = true;
8922 }
8923
8924 /* Unless we can use the bras(l) insn, force the
8925 routine address into a register. */
8926 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8927 {
8928 if (flag_pic)
8929 addr_location = legitimize_pic_address (addr_location, 0);
8930 else
8931 addr_location = force_reg (Pmode, addr_location);
8932 }
8933 }
8934
8935 /* If it is already an indirect call or the code above moved the
8936 SYMBOL_REF to somewhere else, make sure the address can be found in
8937 register 1. */
8938 if (retaddr_reg == NULL_RTX
8939 && GET_CODE (addr_location) != SYMBOL_REF
8940 && !plt_call)
8941 {
8942 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8943 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
8944 }
8945
8946 addr_location = gen_rtx_MEM (QImode, addr_location);
8947 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8948
8949 if (result_reg != NULL_RTX)
8950 call = gen_rtx_SET (VOIDmode, result_reg, call);
8951
8952 if (retaddr_reg != NULL_RTX)
8953 {
8954 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8955
8956 if (tls_call != NULL_RTX)
8957 vec = gen_rtvec (3, call, clobber,
8958 gen_rtx_USE (VOIDmode, tls_call));
8959 else
8960 vec = gen_rtvec (2, call, clobber);
8961
8962 call = gen_rtx_PARALLEL (VOIDmode, vec);
8963 }
8964
8965 insn = emit_call_insn (call);
8966
8967 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8968 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8969 {
8970 /* s390_function_ok_for_sibcall should
8971 have denied sibcalls in this case. */
8972 gcc_assert (retaddr_reg != NULL_RTX);
8973
8974 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8975 }
8976 return insn;
8977 }
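/* The emitted call insn has one of the following shapes (illustrative):

     (call (mem:QI addr) (const_int 0))                       ; sibcall
     (set result (call (mem:QI addr) (const_int 0)))          ; sibcall with result
     (parallel [(set result (call ...)) (clobber retaddr)])   ; normal call

   with an additional (use tls_call) element for TLS calls.  */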
8978
8979 /* Implement CONDITIONAL_REGISTER_USAGE. */
8980
8981 void
8982 s390_conditional_register_usage (void)
8983 {
8984 int i;
8985
8986 if (flag_pic)
8987 {
8988 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8989 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8990 }
8991 if (TARGET_CPU_ZARCH)
8992 {
8993 fixed_regs[BASE_REGNUM] = 0;
8994 call_used_regs[BASE_REGNUM] = 0;
8995 fixed_regs[RETURN_REGNUM] = 0;
8996 call_used_regs[RETURN_REGNUM] = 0;
8997 }
8998 if (TARGET_64BIT)
8999 {
9000 for (i = 24; i < 32; i++)
9001 call_used_regs[i] = call_really_used_regs[i] = 0;
9002 }
9003 else
9004 {
9005 for (i = 18; i < 20; i++)
9006 call_used_regs[i] = call_really_used_regs[i] = 0;
9007 }
9008
9009 if (TARGET_SOFT_FLOAT)
9010 {
9011 for (i = 16; i < 32; i++)
9012 call_used_regs[i] = fixed_regs[i] = 1;
9013 }
9014 }
9015
9016 /* Corresponding function to eh_return expander. */
9017
9018 static GTY(()) rtx s390_tpf_eh_return_symbol;
9019 void
9020 s390_emit_tpf_eh_return (rtx target)
9021 {
9022 rtx insn, reg;
9023
9024 if (!s390_tpf_eh_return_symbol)
9025 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
9026
9027 reg = gen_rtx_REG (Pmode, 2);
9028
9029 emit_move_insn (reg, target);
9030 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
9031 gen_rtx_REG (Pmode, RETURN_REGNUM));
9032 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
9033
9034 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
9035 }
9036
9037 /* Rework the prologue/epilogue to avoid saving/restoring
9038 registers unnecessarily. */
9039
9040 static void
9041 s390_optimize_prologue (void)
9042 {
9043 rtx insn, new_insn, next_insn;
9044
9045 /* Do a final recompute of the frame-related data. */
9046
9047 s390_update_frame_layout ();
9048
9049 /* If all special registers are in fact used, there's nothing we
9050 can do, so no point in walking the insn list. */
9051
9052 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
9053 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
9054 && (TARGET_CPU_ZARCH
9055 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
9056 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
9057 return;
9058
9059 /* Search for prologue/epilogue insns and replace them. */
9060
9061 for (insn = get_insns (); insn; insn = next_insn)
9062 {
9063 int first, last, off;
9064 rtx set, base, offset;
9065
9066 next_insn = NEXT_INSN (insn);
9067
9068 if (GET_CODE (insn) != INSN)
9069 continue;
9070
9071 if (GET_CODE (PATTERN (insn)) == PARALLEL
9072 && store_multiple_operation (PATTERN (insn), VOIDmode))
9073 {
9074 set = XVECEXP (PATTERN (insn), 0, 0);
9075 first = REGNO (SET_SRC (set));
9076 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9077 offset = const0_rtx;
9078 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9079 off = INTVAL (offset);
9080
9081 if (GET_CODE (base) != REG || off < 0)
9082 continue;
9083 if (cfun_frame_layout.first_save_gpr != -1
9084 && (cfun_frame_layout.first_save_gpr < first
9085 || cfun_frame_layout.last_save_gpr > last))
9086 continue;
9087 if (REGNO (base) != STACK_POINTER_REGNUM
9088 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9089 continue;
9090 if (first > BASE_REGNUM || last < BASE_REGNUM)
9091 continue;
9092
9093 if (cfun_frame_layout.first_save_gpr != -1)
9094 {
9095 new_insn = save_gprs (base,
9096 off + (cfun_frame_layout.first_save_gpr
9097 - first) * UNITS_PER_WORD,
9098 cfun_frame_layout.first_save_gpr,
9099 cfun_frame_layout.last_save_gpr);
9100 new_insn = emit_insn_before (new_insn, insn);
9101 INSN_ADDRESSES_NEW (new_insn, -1);
9102 }
9103
9104 remove_insn (insn);
9105 continue;
9106 }
9107
9108 if (cfun_frame_layout.first_save_gpr == -1
9109 && GET_CODE (PATTERN (insn)) == SET
9110 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9111 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9112 || (!TARGET_CPU_ZARCH
9113 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9114 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
9115 {
9116 set = PATTERN (insn);
9117 first = REGNO (SET_SRC (set));
9118 offset = const0_rtx;
9119 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9120 off = INTVAL (offset);
9121
9122 if (GET_CODE (base) != REG || off < 0)
9123 continue;
9124 if (REGNO (base) != STACK_POINTER_REGNUM
9125 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9126 continue;
9127
9128 remove_insn (insn);
9129 continue;
9130 }
9131
9132 if (GET_CODE (PATTERN (insn)) == PARALLEL
9133 && load_multiple_operation (PATTERN (insn), VOIDmode))
9134 {
9135 set = XVECEXP (PATTERN (insn), 0, 0);
9136 first = REGNO (SET_DEST (set));
9137 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9138 offset = const0_rtx;
9139 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9140 off = INTVAL (offset);
9141
9142 if (GET_CODE (base) != REG || off < 0)
9143 continue;
9144 if (cfun_frame_layout.first_restore_gpr != -1
9145 && (cfun_frame_layout.first_restore_gpr < first
9146 || cfun_frame_layout.last_restore_gpr > last))
9147 continue;
9148 if (REGNO (base) != STACK_POINTER_REGNUM
9149 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9150 continue;
9151 if (first > BASE_REGNUM || last < BASE_REGNUM)
9152 continue;
9153
9154 if (cfun_frame_layout.first_restore_gpr != -1)
9155 {
9156 new_insn = restore_gprs (base,
9157 off + (cfun_frame_layout.first_restore_gpr
9158 - first) * UNITS_PER_WORD,
9159 cfun_frame_layout.first_restore_gpr,
9160 cfun_frame_layout.last_restore_gpr);
9161 new_insn = emit_insn_before (new_insn, insn);
9162 INSN_ADDRESSES_NEW (new_insn, -1);
9163 }
9164
9165 remove_insn (insn);
9166 continue;
9167 }
9168
9169 if (cfun_frame_layout.first_restore_gpr == -1
9170 && GET_CODE (PATTERN (insn)) == SET
9171 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9172 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9173 || (!TARGET_CPU_ZARCH
9174 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9175 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
9176 {
9177 set = PATTERN (insn);
9178 first = REGNO (SET_DEST (set));
9179 offset = const0_rtx;
9180 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9181 off = INTVAL (offset);
9182
9183 if (GET_CODE (base) != REG || off < 0)
9184 continue;
9185 if (REGNO (base) != STACK_POINTER_REGNUM
9186 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9187 continue;
9188
9189 remove_insn (insn);
9190 continue;
9191 }
9192 }
9193 }
9194
9195 /* Perform machine-dependent processing. */
9196
9197 static void
9198 s390_reorg (void)
9199 {
9200 bool pool_overflow = false;
9201
9202 /* Make sure all splits have been performed; splits after
9203 machine_dependent_reorg might confuse insn length counts. */
9204 split_all_insns_noflow ();
9205
9206 /* From here on decomposed literal pool addresses must be accepted. */
9207 cfun->machine->decomposed_literal_pool_addresses_ok_p = true;
9208
9209 /* Install the main literal pool and the associated base
9210 register load insns.
9211
9212 In addition, there are two problematic situations we need
9213 to correct:
9214
9215 - the literal pool might be > 4096 bytes in size, so that
9216 some of its elements cannot be directly accessed
9217
9218 - a branch target might be > 64K away from the branch, so that
9219 it is not possible to use a PC-relative instruction.
9220
9221 To fix those, we split the single literal pool into multiple
9222 pool chunks, reloading the pool base register at various
9223 points throughout the function to ensure it always points to
9224 the pool chunk the following code expects, and / or replace
9225 PC-relative branches by absolute branches.
9226
9227 However, the two problems are interdependent: splitting the
9228 literal pool can move a branch further away from its target,
9229 causing the 64K limit to overflow, and on the other hand,
9230 replacing a PC-relative branch by an absolute branch means
9231 we need to put the branch target address into the literal
9232 pool, possibly causing it to overflow.
9233
9234 So, we loop trying to fix up both problems until we manage
9235 to satisfy both conditions at the same time. Note that the
9236 loop is guaranteed to terminate as every pass of the loop
9237 strictly decreases the total number of PC-relative branches
9238 in the function. (This is not completely true as there
9239 might be branch-over-pool insns introduced by chunkify_start.
9240 Those never need to be split however.) */
9241
9242 for (;;)
9243 {
9244 struct constant_pool *pool = NULL;
9245
9246 /* Collect the literal pool. */
9247 if (!pool_overflow)
9248 {
9249 pool = s390_mainpool_start ();
9250 if (!pool)
9251 pool_overflow = true;
9252 }
9253
9254 /* If literal pool overflowed, start to chunkify it. */
9255 if (pool_overflow)
9256 pool = s390_chunkify_start ();
9257
9258 /* Split out-of-range branches. If this has created new
9259 literal pool entries, cancel current chunk list and
9260 recompute it. zSeries machines have large branch
9261 instructions, so we never need to split a branch. */
9262 if (!TARGET_CPU_ZARCH && s390_split_branches ())
9263 {
9264 if (pool_overflow)
9265 s390_chunkify_cancel (pool);
9266 else
9267 s390_mainpool_cancel (pool);
9268
9269 continue;
9270 }
9271
9272 /* If we made it up to here, both conditions are satisfied.
9273 Finish up literal pool related changes. */
9274 if (pool_overflow)
9275 s390_chunkify_finish (pool);
9276 else
9277 s390_mainpool_finish (pool);
9278
9279 /* We're done splitting branches. */
9280 cfun->machine->split_branches_pending_p = false;
9281 break;
9282 }
9283
9284 /* Generate out-of-pool execute target insns. */
9285 if (TARGET_CPU_ZARCH)
9286 {
9287 rtx insn, label, target;
9288
9289 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9290 {
9291 label = s390_execute_label (insn);
9292 if (!label)
9293 continue;
9294
9295 gcc_assert (label != const0_rtx);
9296
9297 target = emit_label (XEXP (label, 0));
9298 INSN_ADDRESSES_NEW (target, -1);
9299
9300 target = emit_insn (s390_execute_target (insn));
9301 INSN_ADDRESSES_NEW (target, -1);
9302 }
9303 }
9304
9305 /* Try to optimize prologue and epilogue further. */
9306 s390_optimize_prologue ();
9307 }
9308
9309
9310 /* Initialize GCC target structure. */
9311
9312 #undef TARGET_ASM_ALIGNED_HI_OP
9313 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
9314 #undef TARGET_ASM_ALIGNED_DI_OP
9315 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
9316 #undef TARGET_ASM_INTEGER
9317 #define TARGET_ASM_INTEGER s390_assemble_integer
9318
9319 #undef TARGET_ASM_OPEN_PAREN
9320 #define TARGET_ASM_OPEN_PAREN ""
9321
9322 #undef TARGET_ASM_CLOSE_PAREN
9323 #define TARGET_ASM_CLOSE_PAREN ""
9324
9325 #undef TARGET_DEFAULT_TARGET_FLAGS
9326 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
9327 #undef TARGET_HANDLE_OPTION
9328 #define TARGET_HANDLE_OPTION s390_handle_option
9329
9330 #undef TARGET_ENCODE_SECTION_INFO
9331 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
9332
9333 #ifdef HAVE_AS_TLS
9334 #undef TARGET_HAVE_TLS
9335 #define TARGET_HAVE_TLS true
9336 #endif
9337 #undef TARGET_CANNOT_FORCE_CONST_MEM
9338 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
9339
9340 #undef TARGET_DELEGITIMIZE_ADDRESS
9341 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
9342
9343 #undef TARGET_RETURN_IN_MEMORY
9344 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
9345
9346 #undef TARGET_INIT_BUILTINS
9347 #define TARGET_INIT_BUILTINS s390_init_builtins
9348 #undef TARGET_EXPAND_BUILTIN
9349 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
9350
9351 #undef TARGET_ASM_OUTPUT_MI_THUNK
9352 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
9353 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
9354 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
9355
9356 #undef TARGET_SCHED_ADJUST_PRIORITY
9357 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
9358 #undef TARGET_SCHED_ISSUE_RATE
9359 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
9360 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
9361 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
9362
9363 #undef TARGET_CANNOT_COPY_INSN_P
9364 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
9365 #undef TARGET_RTX_COSTS
9366 #define TARGET_RTX_COSTS s390_rtx_costs
9367 #undef TARGET_ADDRESS_COST
9368 #define TARGET_ADDRESS_COST s390_address_cost
9369
9370 #undef TARGET_MACHINE_DEPENDENT_REORG
9371 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
9372
9373 #undef TARGET_VALID_POINTER_MODE
9374 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
9375
9376 #undef TARGET_BUILD_BUILTIN_VA_LIST
9377 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
9378 #undef TARGET_EXPAND_BUILTIN_VA_START
9379 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
9380 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
9381 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
9382
9383 #undef TARGET_PROMOTE_FUNCTION_ARGS
9384 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
9385 #undef TARGET_PROMOTE_FUNCTION_RETURN
9386 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
9387 #undef TARGET_PASS_BY_REFERENCE
9388 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
9389
9390 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
9391 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
9392
9393 #undef TARGET_FIXED_CONDITION_CODE_REGS
9394 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
9395
9396 #undef TARGET_CC_MODES_COMPATIBLE
9397 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
9398
9399 #undef TARGET_INVALID_WITHIN_DOLOOP
9400 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null
9401
9402 #ifdef HAVE_AS_TLS
9403 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
9404 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
9405 #endif
9406
9407 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
9408 #undef TARGET_MANGLE_TYPE
9409 #define TARGET_MANGLE_TYPE s390_mangle_type
9410 #endif
9411
9412 #undef TARGET_SCALAR_MODE_SUPPORTED_P
9413 #define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
9414
9415 #undef TARGET_SECONDARY_RELOAD
9416 #define TARGET_SECONDARY_RELOAD s390_secondary_reload
9417
9418 #undef TARGET_LIBGCC_CMP_RETURN_MODE
9419 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
9420
9421 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE
9422 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
9423
9424 struct gcc_target targetm = TARGET_INITIALIZER;
9425
9426 #include "gt-s390.h"