romp.c: Fix comment formatting.
[gcc.git] / gcc / config / s390 / s390.c
1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38 #include "recog.h"
39 #include "expr.h"
40 #include "toplev.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "ggc.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "debug.h"
47
48
/* Forward declarations for the target hook implementations defined
   later in this file.  */
static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int s390_adjust_priority PARAMS ((rtx, int));

/* Assembler directives for aligned 16-bit and 64-bit data items.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
/* All other integer output goes through s390_assemble_integer.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE s390_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE s390_function_epilogue

/* Emit nothing for expression grouping parentheses in assembler
   output.  */
#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

/* Instruction scheduling hooks.  */
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST s390_adjust_cost

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority

/* The table of target hooks, built from the macro overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
79
/* Nonzero once the reload pass has completed.  NOTE(review): this is
   owned by the reload pass; the extern here presumably duplicates a
   declaration from a core header -- confirm.  */
extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.
   Initialized in override_options via new_alias_set.  */
static int s390_sr_alias_set = 0;

/* Function count for creating unique internal labels in a compile unit.  */
int s390_function_count = 0;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;
91
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;	/* Base register, or NULL_RTX if absent.  */
  rtx indx;	/* Index register, or NULL_RTX if absent.  */
  rtx disp;	/* Displacement, or NULL_RTX if absent.  */
};

/* Structure containing information for prologue and epilogue.
   NOTE(review): field comments below are inferred from the names;
   confirm against s390_frame_info, which fills this in.  */

struct s390_frame
{
  int frame_pointer_p;		/* Nonzero if a frame pointer is required.  */
  int return_reg_saved_p;	/* Nonzero if the return register is saved.  */
  int save_fprs_p;		/* Nonzero if FP registers must be saved.  */
  int first_save_gpr;		/* First GPR saved in the prologue.  */
  int first_restore_gpr;	/* First GPR restored in the epilogue.  */
  int last_save_gpr;		/* Last GPR saved in the prologue.  */
  int arg_frame_offset;		/* Offset of the argument area.  */

  HOST_WIDE_INT frame_size;	/* Total size of the stack frame.  */
};
122
/* Forward declarations of static helper functions defined below.  */
static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
static int s390_branch_condition_mask PARAMS ((rtx));
static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
static int check_mode PARAMS ((rtx, enum machine_mode *));
static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
static int s390_decompose_address PARAMS ((rtx, struct s390_address *, int));
static int reg_used_in_mem_p PARAMS ((int, rtx));
static int addr_generation_dependency_p PARAMS ((rtx, rtx));
static int other_chunk PARAMS ((int *, int, int));
static int far_away PARAMS ((int, int));
static rtx check_and_change_labels PARAMS ((rtx, int *));
static void s390_final_chunkify PARAMS ((int));
static int save_fprs_p PARAMS ((void));
static int find_unused_clobbered_reg PARAMS ((void));
static void s390_frame_info PARAMS ((struct s390_frame *));
static rtx save_fpr PARAMS ((rtx, int, int));
static rtx restore_fpr PARAMS ((rtx, int, int));
static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
142
143 /* Return true if SET either doesn't set the CC register, or else
144 the source and destination have matching CC modes and that
145 CC mode is at least as constrained as REQ_MODE. */
146
147 static int
148 s390_match_ccmode_set (set, req_mode)
149 rtx set;
150 enum machine_mode req_mode;
151 {
152 enum machine_mode set_mode;
153
154 if (GET_CODE (set) != SET)
155 abort ();
156
157 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
158 return 1;
159
160 set_mode = GET_MODE (SET_DEST (set));
161 switch (set_mode)
162 {
163 case CCSmode:
164 if (req_mode != CCSmode)
165 return 0;
166 break;
167 case CCUmode:
168 if (req_mode != CCUmode)
169 return 0;
170 break;
171 case CCLmode:
172 if (req_mode != CCLmode)
173 return 0;
174 break;
175 case CCZmode:
176 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode)
177 return 0;
178 break;
179
180 default:
181 abort ();
182 }
183
184 return (GET_MODE (SET_SRC (set)) == set_mode);
185 }
186
187 /* Return true if every SET in INSN that sets the CC register
188 has source and destination with matching CC modes and that
189 CC mode is at least as constrained as REQ_MODE. */
190
191 int
192 s390_match_ccmode (insn, req_mode)
193 rtx insn;
194 enum machine_mode req_mode;
195 {
196 int i;
197
198 if (GET_CODE (PATTERN (insn)) == SET)
199 return s390_match_ccmode_set (PATTERN (insn), req_mode);
200
201 if (GET_CODE (PATTERN (insn)) == PARALLEL)
202 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
203 {
204 rtx set = XVECEXP (PATTERN (insn), 0, i);
205 if (GET_CODE (set) == SET)
206 if (!s390_match_ccmode_set (set, req_mode))
207 return 0;
208 }
209
210 return 1;
211 }
212
213 /* Given a comparison code OP (EQ, NE, etc.) and the operands
214 OP0 and OP1 of a COMPARE, return the mode to be used for the
215 comparison. */
216
217 enum machine_mode
218 s390_select_ccmode (code, op0, op1)
219 enum rtx_code code;
220 rtx op0;
221 rtx op1;
222 {
223 switch (code)
224 {
225 case EQ:
226 case NE:
227 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
228 || GET_CODE (op1) == NEG)
229 return CCLmode;
230
231 return CCZmode;
232
233 case LE:
234 case LT:
235 case GE:
236 case GT:
237 case UNORDERED:
238 case ORDERED:
239 case UNEQ:
240 case UNLE:
241 case UNLT:
242 case UNGE:
243 case UNGT:
244 case LTGT:
245 return CCSmode;
246
247 case LEU:
248 case LTU:
249 case GEU:
250 case GTU:
251 return CCUmode;
252
253 default:
254 abort ();
255 }
256 }
257
/* Return branch condition mask to implement a branch
   specified by CODE.  */

static int
s390_branch_condition_mask (code)
    rtx code;
{
  /* One bit per hardware condition-code value; CC0 is the most
     significant bit of the 4-bit branch mask.  */
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  /* CODE must be a comparison of the CC register against zero.  */
  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  /* The meaning of the individual CC values depends on the mode
     in which the comparison result was produced.  */
  switch (GET_MODE (XEXP (code, 0)))
    {
    /* Zero-test results: only equality is meaningful.  */
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
	case NE:	return CC1 | CC2 | CC3;
	default:
	  abort ();
        }
      break;

    /* Carry-style results from add/subtract logical.  */
    case CCLmode:
      switch (GET_CODE (code))
        {
	case EQ:	return CC0 | CC2;
	case NE:	return CC1 | CC3;
        case UNORDERED:	return CC2 | CC3;  /* carry */
        case ORDERED:	return CC0 | CC1;  /* no carry */
	default:
	  abort ();
        }
      break;

    /* Unsigned comparison results.  */
    case CCUmode:
      switch (GET_CODE (code))
        {
	case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC2;
	case LEU:	return CC0 | CC1;
	case GEU:	return CC0 | CC2;
	default:
	  abort ();
        }
      break;

    /* Signed / floating-point comparison results; CC3 indicates
       an unordered (NaN) result.  */
    case CCSmode:
      switch (GET_CODE (code))
        {
	case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2;
	case LE:	return CC0 | CC1;
	case GE:	return CC0 | CC2;
	case UNORDERED:	return CC3;
	case ORDERED:	return CC0 | CC1 | CC2;
	case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC1 | CC3;
        case UNGT:	return CC2 | CC3;
	case UNLE:	return CC0 | CC1 | CC3;
	case UNGE:	return CC0 | CC2 | CC3;
	case LTGT:	return CC1 | CC2;
	default:
	  abort ();
        }

    default:
      abort ();
    }
}
338
339 /* If INV is false, return assembler mnemonic string to implement
340 a branch specified by CODE. If INV is true, return mnemonic
341 for the corresponding inverted branch. */
342
343 static const char *
344 s390_branch_condition_mnemonic (code, inv)
345 rtx code;
346 int inv;
347 {
348 static const char *mnemonic[16] =
349 {
350 NULL, "o", "h", "nle",
351 "l", "nhe", "lh", "ne",
352 "e", "nlh", "he", "nl",
353 "le", "nh", "no", NULL
354 };
355
356 int mask = s390_branch_condition_mask (code);
357
358 if (inv)
359 mask ^= 15;
360
361 if (mask < 1 || mask > 14)
362 abort ();
363
364 return mnemonic[mask];
365 }
366
/* If OP is an integer constant of mode MODE with exactly one
   HImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all HImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */
371
int
s390_single_hi (op, mode, def)
    rtx op;
    enum machine_mode mode;
    int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode) / 2;	/* Number of HImode chunks.  */
      int i, part = -1;				/* Index of the differing chunk.  */

      /* Scan the chunks from least significant to most significant.  */
      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 16;

	  /* Record a chunk that differs from DEF; a second such
	     chunk means failure.  */
          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      /* Convert from the little-endian scan index to big-endian part
	 numbering (part 0 is the most significant chunk).  */
      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
	  /* Crossing from the low to the high word of the
	     CONST_DOUBLE.  */
          else if (i == HOST_BITS_PER_WIDE_INT / 16)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  /* Not an integer constant we can analyze.  */
  return -1;
}
433
434 /* Extract the HImode part number PART from integer
435 constant OP of mode MODE. */
436
437 int
438 s390_extract_hi (op, mode, part)
439 rtx op;
440 enum machine_mode mode;
441 int part;
442 {
443 int n_parts = GET_MODE_SIZE (mode) / 2;
444 if (part < 0 || part >= n_parts)
445 abort();
446 else
447 part = n_parts - 1 - part;
448
449 if (GET_CODE (op) == CONST_INT)
450 {
451 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
452 return ((value >> (16 * part)) & 0xffff);
453 }
454 else if (GET_CODE (op) == CONST_DOUBLE
455 && GET_MODE (op) == VOIDmode)
456 {
457 unsigned HOST_WIDE_INT value;
458 if (part < HOST_BITS_PER_WIDE_INT / 16)
459 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
460 else
461 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
462 part -= HOST_BITS_PER_WIDE_INT / 16;
463
464 return ((value >> (16 * part)) & 0xffff);
465 }
466
467 abort ();
468 }
469
/* If OP is an integer constant of mode MODE with exactly one
   QImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all QImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */
474
int
s390_single_qi (op, mode, def)
    rtx op;
    enum machine_mode mode;
    int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode);	/* Number of QImode chunks.  */
      int i, part = -1;				/* Index of the differing chunk.  */

      /* Scan the bytes from least significant to most significant.  */
      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 8;

	  /* Record a byte that differs from DEF; a second such byte
	     means failure.  */
          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      /* Convert from the little-endian scan index to big-endian part
	 numbering (part 0 is the most significant byte).  */
      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
	  /* Crossing from the low to the high word of the
	     CONST_DOUBLE.  */
          else if (i == HOST_BITS_PER_WIDE_INT / 8)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  /* Not an integer constant we can analyze.  */
  return -1;
}
536
537 /* Extract the QImode part number PART from integer
538 constant OP of mode MODE. */
539
540 int
541 s390_extract_qi (op, mode, part)
542 rtx op;
543 enum machine_mode mode;
544 int part;
545 {
546 int n_parts = GET_MODE_SIZE (mode);
547 if (part < 0 || part >= n_parts)
548 abort();
549 else
550 part = n_parts - 1 - part;
551
552 if (GET_CODE (op) == CONST_INT)
553 {
554 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
555 return ((value >> (8 * part)) & 0xff);
556 }
557 else if (GET_CODE (op) == CONST_DOUBLE
558 && GET_MODE (op) == VOIDmode)
559 {
560 unsigned HOST_WIDE_INT value;
561 if (part < HOST_BITS_PER_WIDE_INT / 8)
562 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
563 else
564 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
565 part -= HOST_BITS_PER_WIDE_INT / 8;
566
567 return ((value >> (8 * part)) & 0xff);
568 }
569
570 abort ();
571 }
572
573
574 /* Change optimizations to be performed, depending on the
575 optimization level.
576
577 LEVEL is the optimization level specified; 2 if `-O2' is
578 specified, 1 if `-O' is specified, and 0 if neither is specified.
579
580 SIZE is non-zero if `-Os' is specified and zero otherwise. */
581
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;  /* Only read under the #ifdef below.  */
     int size ATTRIBUTE_UNUSED;   /* Currently unused.  */
{
#ifdef HAVE_decrement_and_branch_on_count
  /* When optimizing, enable use of BRCT instruction.  */
  if (level >= 1)
    flag_branch_on_count_reg = 1;
#endif
}
593
/* Finalize target-specific option settings after command-line
   parsing.  */

void
override_options ()
{
  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();
}
600
601
/* Map for smallest class containing reg regno.  Entry 0 (GPR 0) is
   GENERAL_REGS because r0 cannot serve as an address base/index;
   entries 1-15 are the remaining GPRs, 16-31 the FP registers.
   NOTE(review): the final three entries presumably correspond to the
   argument pointer, condition code, and a fake address register --
   confirm against the register layout in s390.h.  */

enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  FP_REGS, FP_REGS, FP_REGS, FP_REGS,
  ADDR_REGS, NO_REGS, ADDR_REGS
};
615
616
/* Return true if OP is a (const_int 0) operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
const0_operand (op, mode)
    register rtx op;
    enum machine_mode mode;
{
  /* CONST0_RTX yields the shared zero rtx for MODE, so pointer
     comparison suffices.  */
  return op == CONST0_RTX (mode);
}
628
629 /* Return true if the mode of operand OP matches MODE.
630 If MODE is set to VOIDmode, set it to the mode of OP. */
631
632 static int
633 check_mode (op, mode)
634 register rtx op;
635 enum machine_mode *mode;
636 {
637 if (*mode == VOIDmode)
638 *mode = GET_MODE (op);
639 else
640 {
641 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
642 return 0;
643 }
644 return 1;
645 }
646
/* Return true if OP is a valid operand for the LARL instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (op, mode)
    register rtx op;
    enum machine_mode mode;
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF
      && (!flag_pic || SYMBOL_REF_FLAG (op)
          || CONSTANT_POOL_ADDRESS_P (op)))
    return 1;

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* constants.  LARL addresses are halfword
     aligned, so an odd offset cannot be expressed.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return 0;
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF
      && (!flag_pic || SYMBOL_REF_FLAG (op)
          || CONSTANT_POOL_ADDRESS_P (op)))
    return 1;

  /* Now we must have a @GOTENT offset or @PLT stub.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == 111)	/* @GOTENT */
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == 113)	/* @PLT */
    return 1;

  return 0;
}
699
700 /* Return true if OP is a valid FP-Register.
701 OP is the current operation.
702 MODE is the current operation mode. */
703
704 int
705 fp_operand (op, mode)
706 register rtx op;
707 enum machine_mode mode;
708 {
709 register enum rtx_code code = GET_CODE (op);
710 if (! check_mode (op, &mode))
711 return 0;
712 if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
713 return 1;
714 else
715 return 0;
716 }
717
/* Helper routine to implement s_operand and s_imm_operand.
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (op, mode, allow_immediate)
    register rtx op;
    enum machine_mode mode;
    int allow_immediate;
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants that we are sure will be forced to the
         literal pool in reload are OK as s-operand.  Note
         that we cannot call s390_preferred_reload_class here
         because it might not be known yet at this point
         whether the current function is a leaf or not.  */
      case CONST_INT:
      case CONST_DOUBLE:
	/* Immediates are only acceptable before reload, and only
	   when the caller asked for them.  */
        if (!allow_immediate || reload_completed)
          break;
	/* A constant that cannot be reloaded directly will end up
	   in the literal pool, hence is a valid S-operand.  */
        if (!legitimate_reload_constant_p (op))
          return 1;
	/* In 31-bit mode, all constants go to the pool.  */
        if (!TARGET_64BIT)
          return 1;
        break;

      /* Memory operands are OK unless they already use an
         index register.  */
      case MEM:
        if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
          return 1;
        if (s390_decompose_address (XEXP (op, 0), &addr, FALSE)
            && !addr.indx)
          return 1;
        break;

      default:
        break;
    }

  return 0;
}
777
/* Return true if OP is a valid S-type operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_operand (op, mode)
    register rtx op;
    enum machine_mode mode;
{
  /* Delegate to the common helper; immediates are not allowed.  */
  return general_s_operand (op, mode, 0);
}
789
/* Return true if OP is a valid S-type operand or an immediate
   operand that can be addressed as S-type operand by forcing
   it into the literal pool.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_imm_operand (op, mode)
    register rtx op;
    enum machine_mode mode;
{
  /* Delegate to the common helper; immediates are allowed.  */
  return general_s_operand (op, mode, 1);
}
803
804 /* Return true if OP is a valid operand for the BRAS instruction.
805 OP is the current operation.
806 MODE is the current operation mode. */
807
808 int
809 bras_sym_operand (op, mode)
810 register rtx op;
811 enum machine_mode mode ATTRIBUTE_UNUSED;
812 {
813 register enum rtx_code code = GET_CODE (op);
814
815 /* Allow SYMBOL_REFs. */
816 if (code == SYMBOL_REF)
817 return 1;
818
819 /* Allow @PLT stubs. */
820 if (code == CONST
821 && GET_CODE (XEXP (op, 0)) == UNSPEC
822 && XINT (XEXP (op, 0), 1) == 113)
823 return 1;
824 return 0;
825 }
826
827 \f
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
load_multiple_operation (op, mode)
    rtx op;
    enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  A load multiple
     needs at least two elements, the first being (set (reg) (mem)).  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
	   && GET_CODE (XEXP (src_addr, 0)) == REG
	   && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  /* Addresses based on the soft frame/arg pointer will be
     eliminated later and cannot be trusted here.  */
  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  /* Every following element must load the next consecutive register
     from the next consecutive word at the same base.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }

  return 1;
}
891
/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (op, mode)
    rtx op;
    enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  A store multiple
     needs at least two elements, the first being (set (mem) (reg)).  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
	   && GET_CODE (XEXP (dest_addr, 0)) == REG
	   && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  /* Addresses based on the soft frame/arg pointer will be
     eliminated later and cannot be trusted here.  */
  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  /* Every following element must store the next consecutive register
     to the next consecutive word at the same base.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }
  return 1;
}
953
954
955 /* Return true if OP contains a symbol reference */
956
957 int
958 symbolic_reference_mentioned_p (op)
959 rtx op;
960 {
961 register const char *fmt;
962 register int i;
963
964 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
965 return 1;
966
967 fmt = GET_RTX_FORMAT (GET_CODE (op));
968 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
969 {
970 if (fmt[i] == 'E')
971 {
972 register int j;
973
974 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
975 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
976 return 1;
977 }
978
979 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
980 return 1;
981 }
982
983 return 0;
984 }
985
986
987 /* Return true if OP is a legitimate general operand when
988 generating PIC code. It is given that flag_pic is on
989 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
990
991 int
992 legitimate_pic_operand_p (op)
993 register rtx op;
994 {
995 /* Accept all non-symbolic constants. */
996 if (!SYMBOLIC_CONST (op))
997 return 1;
998
999 /* Accept immediate LARL operands. */
1000 if (TARGET_64BIT)
1001 return larl_operand (op, VOIDmode);
1002
1003 /* Reject everything else; must be handled
1004 via emit_pic_move. */
1005 return 0;
1006 }
1007
1008 /* Returns true if the constant value OP is a legitimate general operand.
1009 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1010
1011 int
1012 legitimate_constant_p (op)
1013 register rtx op;
1014 {
1015 /* Accept all non-symbolic constants. */
1016 if (!SYMBOLIC_CONST (op))
1017 return 1;
1018
1019 /* In the PIC case, symbolic constants must *not* be
1020 forced into the literal pool. We accept them here,
1021 so that they will be handled by emit_pic_move. */
1022 if (flag_pic)
1023 return 1;
1024
1025 /* Even in the non-PIC case, we can accept immediate
1026 LARL operands here. */
1027 if (TARGET_64BIT)
1028 return larl_operand (op, VOIDmode);
1029
1030 /* All remaining non-PIC symbolic constants are
1031 forced into the literal pool. */
1032 return 0;
1033 }
1034
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.  */

int
legitimate_reload_constant_p (op)
    register rtx op;
{
  /* Accept l(g)hi operands ('K' constraint; presumably the 16-bit
     signed immediate range -- confirm against s390.h).  */
  if (GET_CODE (op) == CONST_INT
      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
    return 1;

  /* Accept lliXX operands: at most one HImode subpart differs
     from zero.  */
  if (TARGET_64BIT
      && s390_single_hi (op, DImode, 0) >= 0)
    return 1;

  /* Accept larl operands.  */
  if (TARGET_64BIT
      && larl_operand (op, VOIDmode))
    return 1;

  /* If reload is completed, and we do not already have a
     literal pool, and OP must be forced to the literal
     pool, then something must have gone wrong earlier.
     We *cannot* force the constant any more, because the
     prolog generation already decided we don't need to
     set up the base register.  */
  if (reload_completed && !regs_ever_live[BASE_REGISTER])
    abort ();

  /* Everything else cannot be handled without reload.  */
  return 0;
}
1072
/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  */

enum reg_class
s390_preferred_reload_class (op, class)
    rtx op;
    enum reg_class class;
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
         literal pool.  For constants we *could* handle directly,
         it might still be preferable to put them in the pool and
         use a memory-to-memory instruction.

         However, try to avoid needlessly allocating a literal
         pool in a routine that wouldn't otherwise need any.
         Heuristically, we assume that 64-bit leaf functions
         typically don't need a literal pool, all others do.  */
      case CONST_DOUBLE:
      case CONST_INT:
	/* Unreloadable constants always go to the pool.  */
        if (!legitimate_reload_constant_p (op))
          return NO_REGS;

	/* 64-bit leaf functions: keep the constant in a register to
	   avoid creating a pool.  */
        if (TARGET_64BIT && current_function_is_leaf)
          return class;

        return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
         it is most likely being used as an address.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
        return ADDR_REGS;

      default:
        break;
    }

  return class;
}
1122
1123 /* Decompose a RTL expression ADDR for a memory address into
1124 its components, returned in OUT. The boolean STRICT
1125 specifies whether strict register checking applies.
1126 Returns 0 if ADDR is not a valid memory address, nonzero
1127 otherwise. If OUT is NULL, don't return the components,
1128 but check for validity only.
1129
1130 Note: Only addresses in canonical form are recognized.
1131 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1132 canonical form so that they will be recognized. */
1133
static int
s390_decompose_address (addr, out, strict)
     register rtx addr;
     struct s390_address *out;
     int strict;
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;               /* index + base */
              base = op1;
            }

          else
            {
              base = op0;               /* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0);         /* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        {
          return FALSE;
        }
    }

  else
    disp = addr;                        /* displacement */


  /* Validate base register.  */
  if (base)
    {
      /* An UNSPEC base must be the base-register marker (unspec 101)
         wrapping a single register; unwrap it before the checks.  */
      if (GET_CODE (base) == UNSPEC)
        {
          if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
            return FALSE;
          base = XVECEXP (base, 0, 0);
        }

      if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
        return FALSE;

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
          || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
        return FALSE;
    }

  /* Validate index register.  Same rules as for the base register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        {
          if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
            return FALSE;
          indx = XVECEXP (indx, 0, 0);
        }

      if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
        return FALSE;

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (indx))
          || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (indx)))
        return FALSE;
    }

  /* Validate displacement.  */
  if (disp)
    {
      /* Allow integer constant in range.  The hardware D field is an
         unsigned 12-bit value, hence [0, 4096).  */
      if (GET_CODE (disp) == CONST_INT)
        {
          if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
            return FALSE;
        }

      /* In the small-PIC case, the linker converts @GOT12
         offsets to possible displacements.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == UNSPEC
               && XINT (XEXP (disp, 0), 1) == 110)
        {
          if (flag_pic != 1)
            return FALSE;
        }

      /* We can convert literal pool addresses to
         displacements by basing them off the base register.  */
      else
        {
          /* In some cases, we can accept an additional
             small constant offset.  Split these off here.  */

          unsigned int offset = 0;

          if (GET_CODE (disp) == CONST
              && GET_CODE (XEXP (disp, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
            {
              offset = INTVAL (XEXP (XEXP (disp, 0), 1));
              disp = XEXP (XEXP (disp, 0), 0);
            }

          /* Now we must have a literal pool address.  */
          if (GET_CODE (disp) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (disp))
            return FALSE;

          /* In 64-bit PIC mode we cannot accept symbolic
             constants in the constant pool.  */
          if (TARGET_64BIT && flag_pic
              && SYMBOLIC_CONST (get_pool_constant (disp)))
            return FALSE;

          /* If we have an offset, make sure it does not
             exceed the size of the constant pool entry.  */
          if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
            return FALSE;

          /* Either base or index must be free to
             hold the base register.  */
          if (base && indx)
            return FALSE;

          /* Convert the address: rewrite it as
             base register + literal-pool-relative offset (unspec 100).  */
          if (base)
            indx = gen_rtx_REG (Pmode, BASE_REGISTER);
          else
            base = gen_rtx_REG (Pmode, BASE_REGISTER);

          disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
          disp = gen_rtx_CONST (Pmode, disp);

          if (offset)
            disp = plus_constant (disp, offset);
        }
    }

  /* OUT may be NULL when the caller only wants the yes/no answer.  */
  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = disp;
    }

  return TRUE;
}
1305
1306 /* Return nonzero if ADDR is a valid memory address.
1307 STRICT specifies whether strict register checking applies. */
1308
1309 int
1310 legitimate_address_p (mode, addr, strict)
1311 enum machine_mode mode ATTRIBUTE_UNUSED;
1312 register rtx addr;
1313 int strict;
1314 {
1315 return s390_decompose_address (addr, NULL, strict);
1316 }
1317
1318 /* Return 1 if OP is a valid operand for the LA instruction.
1319 In 31-bit, we need to prove that the result is used as an
1320 address, as LA performs only a 31-bit addition. */
1321
1322 int
1323 legitimate_la_operand_p (op)
1324 register rtx op;
1325 {
1326 struct s390_address addr;
1327 if (!s390_decompose_address (op, &addr, FALSE))
1328 return FALSE;
1329
1330 if (TARGET_64BIT)
1331 return TRUE;
1332
1333 /* Use of the base or stack pointer implies address. */
1334
1335 if (addr.base && GET_CODE (addr.base) == REG)
1336 {
1337 if (REGNO (addr.base) == BASE_REGISTER
1338 || REGNO (addr.base) == STACK_POINTER_REGNUM
1339 || REGNO (addr.base) == FRAME_POINTER_REGNUM)
1340 return TRUE;
1341 }
1342
1343 if (addr.indx && GET_CODE (addr.indx) == REG)
1344 {
1345 if (REGNO (addr.indx) == BASE_REGISTER
1346 || REGNO (addr.indx) == STACK_POINTER_REGNUM
1347 || REGNO (addr.base) == FRAME_POINTER_REGNUM)
1348 return TRUE;
1349 }
1350
1351 return FALSE;
1352 }
1353
1354 /* Return a legitimate reference for ORIG (an address) using the
1355 register REG. If REG is 0, a new pseudo is generated.
1356
1357 There are two types of references that must be handled:
1358
1359 1. Global data references must load the address from the GOT, via
1360 the PIC reg. An insn is emitted to do this load, and the reg is
1361 returned.
1362
1363 2. Static data references, constant pool addresses, and code labels
1364 compute the address as an offset from the GOT, whose base is in
1365 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1366 differentiate them from global data objects. The returned
1367 address is the PIC reg + an unspec constant.
1368
1369 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1370 reg also appears in the address. */
1371
rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF
          && (SYMBOL_REF_FLAG (addr)
              || CONSTANT_POOL_ADDRESS_P (addr))))
    {
      /* This is a local symbol.  */
      if (TARGET_64BIT)
        {
          /* Access local symbols PC-relative via LARL.
             This is the same as in the non-PIC case, so it is
             handled automatically ...  */
        }
      else
        {
          /* Access local symbols relative to the literal pool.  */

          rtx temp = reg? reg : gen_reg_rtx (Pmode);

          /* Unspec 100 marks a literal-pool-relative offset; it is
             printed as "sym-.LTn_m" by s390_output_symbolic_const.  */
          addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
          addr = gen_rtx_CONST (SImode, addr);
          addr = force_const_mem (SImode, addr);
          emit_move_insn (temp, addr);

          /* Unspec 101 wraps the literal pool base register.  */
          base = gen_rtx_REG (Pmode, BASE_REGISTER);
          base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
          new = gen_rtx_PLUS (Pmode, base, temp);

          if (reg != 0)
            {
              emit_move_insn (reg, new);
              new = reg;
            }
        }
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
        {
          /* Assume GOT offset < 4k.  This is handled the same way
             in both 31- and 64-bit code (@GOT12).  */

          current_function_uses_pic_offset_table = 1;

          /* Unspec 110 is printed as "sym@GOT12".  */
          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
          new = gen_rtx_CONST (Pmode, new);
          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else if (TARGET_64BIT)
        {
          /* If the GOT offset might be >= 4k, we determine the position
             of the GOT entry via a PC-relative LARL (@GOTENT).  */

          rtx temp = gen_reg_rtx (Pmode);

          /* Unspec 111 is printed as "sym@GOTENT".  */
          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
          new = gen_rtx_CONST (Pmode, new);
          emit_move_insn (temp, new);

          new = gen_rtx_MEM (Pmode, temp);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else
        {
          /* If the GOT offset might be >= 4k, we have to load it
             from the literal pool (@GOT).  */

          rtx temp = gen_reg_rtx (Pmode);

          current_function_uses_pic_offset_table = 1;

          /* Unspec 112 is printed as "sym@GOT".  */
          addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
          addr = gen_rtx_CONST (SImode, addr);
          addr = force_const_mem (SImode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
    }
  else
    {
      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          if (GET_CODE (addr) == UNSPEC)
            {
              if (XVECLEN (addr, 0) != 1)
                abort ();
              switch (XINT (addr, 1))
                {
                  /* If someone moved an @GOT or lt-relative UNSPEC
                     out of the literal pool, force them back in.  */
                  case 100:
                  case 112:
                  case 114:
                    new = force_const_mem (SImode, orig);
                    break;

                  /* @GOTENT is OK as is.  */
                  case 111:
                    break;

                  /* @PLT is OK as is on 64-bit, must be converted to
                     lt-relative PLT on 31-bit.  */
                  case 113:
                    if (!TARGET_64BIT)
                      {
                        rtx temp = reg? reg : gen_reg_rtx (Pmode);

                        /* Re-wrap as unspec 114 ("sym@PLT-.LTn_m")
                           and load it from the literal pool.  */
                        addr = XVECEXP (addr, 0, 0);
                        addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
                        addr = gen_rtx_CONST (SImode, addr);
                        addr = force_const_mem (SImode, addr);
                        emit_move_insn (temp, addr);

                        base = gen_rtx_REG (Pmode, BASE_REGISTER);
                        base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                        new = gen_rtx_PLUS (Pmode, base, temp);

                        if (reg != 0)
                          {
                            emit_move_insn (reg, new);
                            new = reg;
                          }
                      }
                    break;

                  /* Everything else cannot happen.  */
                  default:
                    abort ();
                }
            }
          else if (GET_CODE (addr) != PLUS)
            abort ();
        }
      if (GET_CODE (addr) == PLUS)
        {
          rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
          /* Check first to see if this is a constant offset
             from a local symbol reference.  */
          if ((GET_CODE (op0) == LABEL_REF
                || (GET_CODE (op0) == SYMBOL_REF
                    && (SYMBOL_REF_FLAG (op0)
                        || CONSTANT_POOL_ADDRESS_P (op0))))
              && GET_CODE (op1) == CONST_INT)
            {
              if (TARGET_64BIT)
                {
                  if (INTVAL (op1) & 1)
                    {
                      /* LARL can't handle odd offsets, so emit a
                         pair of LARL and LA.  */
                      rtx temp = reg? reg : gen_reg_rtx (Pmode);

                      /* If the odd offset is also out of LA's
                         displacement range, fold all but 1 into the
                         (even) LARL target.  */
                      if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
                        {
                          int even = INTVAL (op1) - 1;
                          op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
                          op1 = GEN_INT (1);
                        }

                      emit_move_insn (temp, op0);
                      new = gen_rtx_PLUS (Pmode, temp, op1);

                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  else
                    {
                      /* If the offset is even, we can just use LARL.
                         This will happen automatically.  */
                    }
                }
              else
                {
                  /* Access local symbols relative to the literal pool.  */

                  rtx temp = reg? reg : gen_reg_rtx (Pmode);

                  /* Pool entry is "(sym-.LTn_m) + offset".  */
                  addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
                  addr = gen_rtx_PLUS (SImode, addr, op1);
                  addr = gen_rtx_CONST (SImode, addr);
                  addr = force_const_mem (SImode, addr);
                  emit_move_insn (temp, addr);

                  base = gen_rtx_REG (Pmode, BASE_REGISTER);
                  base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                  new = gen_rtx_PLUS (Pmode, base, temp);

                  if (reg != 0)
                    {
                      emit_move_insn (reg, new);
                      new = reg;
                    }
                }
            }

          /* Now, check whether it is an LT-relative symbol plus offset
             that was pulled out of the literal pool.  Force it back in.  */

          else if (GET_CODE (op0) == UNSPEC
                   && GET_CODE (op1) == CONST_INT)
            {
              if (XVECLEN (op0, 0) != 1)
                abort ();
              if (XINT (op0, 1) != 100)
                abort ();

              new = force_const_mem (SImode, orig);
            }

          /* Otherwise, compute the sum.  */
          else
            {
              /* Legitimize both addends recursively, then recombine.  */
              base = legitimize_pic_address (XEXP (addr, 0), reg);
              new  = legitimize_pic_address (XEXP (addr, 1),
                                             base == reg ? NULL_RTX : reg);
              if (GET_CODE (new) == CONST_INT)
                new = plus_constant (base, INTVAL (new));
              else
                {
                  /* Re-associate so any constant ends up outermost.  */
                  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
                    {
                      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
                      new = XEXP (new, 1);
                    }
                  new = gen_rtx_PLUS (Pmode, base, new);
                }

              if (GET_CODE (new) == CONST)
                new = XEXP (new, 0);
              new = force_operand (new, 0);
            }
        }
    }
  return new;
}
1633
1634 /* Emit insns to move operands[1] into operands[0]. */
1635
1636 void
1637 emit_pic_move (operands, mode)
1638 rtx *operands;
1639 enum machine_mode mode ATTRIBUTE_UNUSED;
1640 {
1641 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
1642
1643 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
1644 operands[1] = force_reg (Pmode, operands[1]);
1645 else
1646 operands[1] = legitimize_pic_address (operands[1], temp);
1647 }
1648
1649 /* Try machine-dependent ways of modifying an illegitimate address X
1650 to be legitimate. If we find one, return the new, valid address.
1651
1652 OLDX is the address as it was before break_out_memory_refs was called.
1653 In some cases it is useful to look at this to decide what needs to be done.
1654
1655 MODE is the mode of the operand pointed to by X.
1656
1657 When -fpic is used, special handling is needed for symbolic references.
1658 See comments by legitimize_pic_address for details. */
1659
1660 rtx
1661 legitimize_address (x, oldx, mode)
1662 register rtx x;
1663 register rtx oldx ATTRIBUTE_UNUSED;
1664 enum machine_mode mode ATTRIBUTE_UNUSED;
1665 {
1666 rtx constant_term = const0_rtx;
1667
1668 if (flag_pic)
1669 {
1670 if (SYMBOLIC_CONST (x)
1671 || (GET_CODE (x) == PLUS
1672 && (SYMBOLIC_CONST (XEXP (x, 0))
1673 || SYMBOLIC_CONST (XEXP (x, 1)))))
1674 x = legitimize_pic_address (x, 0);
1675
1676 if (legitimate_address_p (mode, x, FALSE))
1677 return x;
1678 }
1679
1680 x = eliminate_constant_term (x, &constant_term);
1681
1682 if (GET_CODE (x) == PLUS)
1683 {
1684 if (GET_CODE (XEXP (x, 0)) == REG)
1685 {
1686 register rtx temp = gen_reg_rtx (Pmode);
1687 register rtx val = force_operand (XEXP (x, 1), temp);
1688 if (val != temp)
1689 emit_move_insn (temp, val);
1690
1691 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
1692 }
1693
1694 else if (GET_CODE (XEXP (x, 1)) == REG)
1695 {
1696 register rtx temp = gen_reg_rtx (Pmode);
1697 register rtx val = force_operand (XEXP (x, 0), temp);
1698 if (val != temp)
1699 emit_move_insn (temp, val);
1700
1701 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
1702 }
1703 }
1704
1705 if (constant_term != const0_rtx)
1706 x = gen_rtx_PLUS (Pmode, x, constant_term);
1707
1708 return x;
1709 }
1710
1711
1712 /* Output symbolic constant X in assembler syntax to
1713 stdio stream FILE. */
1714
void
s390_output_symbolic_const (file, x)
     FILE *file;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* Wrappers carry no output of their own; print the operand.  */
      s390_output_symbolic_const (file, XEXP (x, 0));
      break;

    case PLUS:
      s390_output_symbolic_const (file, XEXP (x, 0));
      fprintf (file, "+");
      s390_output_symbolic_const (file, XEXP (x, 1));
      break;

    case MINUS:
      s390_output_symbolic_const (file, XEXP (x, 0));
      fprintf (file, "-");
      s390_output_symbolic_const (file, XEXP (x, 1));
      break;

    case CONST_INT:
      output_addr_const (file, x);
      break;

    case LABEL_REF:
    case CODE_LABEL:
      output_addr_const (file, x);
      break;

    case SYMBOL_REF:
      output_addr_const (file, x);
      /* Constant pool symbols get a per-chunk suffix once the first
         pool chunk has been emitted.  */
      if (CONSTANT_POOL_ADDRESS_P (x) && s390_pool_count != 0)
        fprintf (file, "_%X", s390_pool_count);
      break;

    case UNSPEC:
      if (XVECLEN (x, 0) != 1)
        output_operand_lossage ("invalid UNSPEC as operand (1)");
      switch (XINT (x, 1))
        {
        /* 100: literal-pool-relative offset.  */
        case 100:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "-.LT%X_%X",
                   s390_function_count, s390_pool_count);
          break;
        /* 110: 12-bit GOT offset.  */
        case 110:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@GOT12");
          break;
        /* 111: PC-relative GOT entry (for LARL).  */
        case 111:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@GOTENT");
          break;
        /* 112: full GOT offset.  */
        case 112:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@GOT");
          break;
        /* 113: PLT entry.  */
        case 113:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@PLT");
          break;
        /* 114: literal-pool-relative PLT entry.  */
        case 114:
          s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
          fprintf (file, "@PLT-.LT%X_%X",
                   s390_function_count, s390_pool_count);
          break;
        default:
          output_operand_lossage ("invalid UNSPEC as operand (2)");
          break;
        }
      break;

    default:
      fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
      break;
    }
}
1797
1798 /* Output address operand ADDR in assembler syntax to
1799 stdio stream FILE. */
1800
1801 void
1802 print_operand_address (file, addr)
1803 FILE *file;
1804 rtx addr;
1805 {
1806 struct s390_address ad;
1807
1808 if (!s390_decompose_address (addr, &ad, TRUE))
1809 output_operand_lossage ("Cannot decompose address.");
1810
1811 if (ad.disp)
1812 s390_output_symbolic_const (file, ad.disp);
1813 else
1814 fprintf (file, "0");
1815
1816 if (ad.base && ad.indx)
1817 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
1818 reg_names[REGNO (ad.base)]);
1819 else if (ad.base)
1820 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
1821 }
1822
1823 /* Output operand X in assembler syntax to stdio stream FILE.
1824 CODE specified the format flag. The following format flags
1825 are recognized:
1826
1827 'C': print opcode suffix for branch condition.
1828 'D': print opcode suffix for inverse branch condition.
1829 'Y': print current constant pool address (pc-relative).
1830 'y': print current constant pool address (absolute).
1831 'O': print only the displacement of a memory reference.
1832 'R': print only the base register of a memory reference.
1833 'N': print the second word of a DImode operand.
1834 'M': print the second word of a TImode operand.
1835
1836 'b': print integer X as if it's an unsigned byte.
1837 'x': print integer X as if it's an unsigned word.
1838 'h': print integer X as if it's a signed word. */
1839
1840 void
1841 print_operand (file, x, code)
1842 FILE *file;
1843 rtx x;
1844 int code;
1845 {
1846 switch (code)
1847 {
1848 case 'C':
1849 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
1850 return;
1851
1852 case 'D':
1853 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
1854 return;
1855
1856 case 'Y':
1857 fprintf (file, ".LT%X_%X-.", s390_function_count, s390_pool_count);
1858 return;
1859
1860 case 'y':
1861 fprintf (file, ".LT%X_%X", s390_function_count, s390_pool_count);
1862 return;
1863
1864 case 'O':
1865 {
1866 struct s390_address ad;
1867
1868 if (GET_CODE (x) != MEM
1869 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
1870 || ad.indx)
1871 abort ();
1872
1873 if (ad.disp)
1874 s390_output_symbolic_const (file, ad.disp);
1875 else
1876 fprintf (file, "0");
1877 }
1878 return;
1879
1880 case 'R':
1881 {
1882 struct s390_address ad;
1883
1884 if (GET_CODE (x) != MEM
1885 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
1886 || ad.indx)
1887 abort ();
1888
1889 if (ad.base)
1890 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
1891 else
1892 fprintf (file, "0");
1893 }
1894 return;
1895
1896 case 'N':
1897 if (GET_CODE (x) == REG)
1898 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
1899 else if (GET_CODE (x) == MEM)
1900 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
1901 else
1902 abort ();
1903 break;
1904
1905 case 'M':
1906 if (GET_CODE (x) == REG)
1907 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
1908 else if (GET_CODE (x) == MEM)
1909 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
1910 else
1911 abort ();
1912 break;
1913 }
1914
1915 switch (GET_CODE (x))
1916 {
1917 case REG:
1918 fprintf (file, "%s", reg_names[REGNO (x)]);
1919 break;
1920
1921 case MEM:
1922 output_address (XEXP (x, 0));
1923 break;
1924
1925 case CONST:
1926 case CODE_LABEL:
1927 case LABEL_REF:
1928 case SYMBOL_REF:
1929 s390_output_symbolic_const (file, x);
1930 break;
1931
1932 case CONST_INT:
1933 if (code == 'b')
1934 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
1935 else if (code == 'x')
1936 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
1937 else if (code == 'h')
1938 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
1939 else
1940 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
1941 break;
1942
1943 case CONST_DOUBLE:
1944 if (GET_MODE (x) != VOIDmode)
1945 abort ();
1946 if (code == 'b')
1947 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
1948 else if (code == 'x')
1949 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
1950 else if (code == 'h')
1951 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
1952 else
1953 abort ();
1954 break;
1955
1956 default:
1957 fatal_insn ("UNKNOWN in print_operand !?", x);
1958 break;
1959 }
1960 }
1961
1962 /* Target hook for assembling integer objects. We need to define it
1963 here to work a round a bug in some versions of GAS, which couldn't
1964 handle values smaller than INT_MIN when printed in decimal. */
1965
1966 static bool
1967 s390_assemble_integer (x, size, aligned_p)
1968 rtx x;
1969 unsigned int size;
1970 int aligned_p;
1971 {
1972 if (size == 8 && aligned_p
1973 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
1974 {
1975 fputs ("\t.quad\t", asm_out_file);
1976 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
1977 putc ('\n', asm_out_file);
1978 return true;
1979 }
1980 return default_assemble_integer (x, size, aligned_p);
1981 }
1982
1983
1984 #define DEBUG_SCHED 0
1985
1986 /* Returns true if register REGNO is used for forming
1987 a memory address in expression X. */
1988
1989 static int
1990 reg_used_in_mem_p (regno, x)
1991 int regno;
1992 rtx x;
1993 {
1994 enum rtx_code code = GET_CODE (x);
1995 int i, j;
1996 const char *fmt;
1997
1998 if (code == MEM)
1999 {
2000 if (refers_to_regno_p (regno, regno+1,
2001 XEXP (x, 0), 0))
2002 return 1;
2003 }
2004 else if (code == SET
2005 && GET_CODE (SET_DEST (x)) == PC)
2006 {
2007 if (refers_to_regno_p (regno, regno+1,
2008 SET_SRC (x), 0))
2009 return 1;
2010 }
2011
2012 fmt = GET_RTX_FORMAT (code);
2013 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2014 {
2015 if (fmt[i] == 'e'
2016 && reg_used_in_mem_p (regno, XEXP (x, i)))
2017 return 1;
2018
2019 else if (fmt[i] == 'E')
2020 for (j = 0; j < XVECLEN (x, i); j++)
2021 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2022 return 1;
2023 }
2024 return 0;
2025 }
2026
2027 /* Returns true if expression DEP_RTX sets an address register
2028 used by instruction INSN to address memory. */
2029
static int
addr_generation_dependency_p (dep_rtx, insn)
     rtx dep_rtx;
     rtx insn;
{
  rtx target, pat;

  /* Only a SET of a register can create an address dependency.  */
  if (GET_CODE (dep_rtx) == SET)
    {
      target = SET_DEST (dep_rtx);

      if (GET_CODE (target) == REG)
        {
          int regno = REGNO (target);

          /* For an LA insn, the address computation is the SET source
             itself; a PARALLEL here is expected to carry exactly the
             SET plus one other element.  */
          if (get_attr_type (insn) == TYPE_LA)
            {
              pat = PATTERN (insn);
              if (GET_CODE (pat) == PARALLEL)
                {
                  if (XVECLEN (pat, 0) != 2)
                    abort();
                  pat = XVECEXP (pat, 0, 0);
                }
              if (GET_CODE (pat) == SET)
                return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
              else
                abort();
            }
          /* For memory-type insns, scan the whole pattern for uses of
             REGNO inside a memory address.  */
          else if (get_attr_atype (insn) == ATYPE_MEM)
            return reg_used_in_mem_p (regno, PATTERN (insn));
        }
    }
  return 0;
}
2065
2066
2067 /* Return the modified cost of the dependency of instruction INSN
2068 on instruction DEP_INSN through the link LINK. COST is the
2069 default cost of that dependency.
2070
2071 Data dependencies are all handled without delay. However, if a
2072 register is modified and subsequently used as base or index
2073 register of a memory reference, at least 4 cycles need to pass
2074 between setting and using the register to avoid pipeline stalls.
2075 An exception is the LA instruction. An address generated by LA can
2076 be used by introducing only a one cycle stall on the pipeline. */
2077
2078 static int
2079 s390_adjust_cost (insn, link, dep_insn, cost)
2080 rtx insn;
2081 rtx link;
2082 rtx dep_insn;
2083 int cost;
2084 {
2085 rtx dep_rtx;
2086 int i;
2087
2088 /* If the dependence is an anti-dependence, there is no cost. For an
2089 output dependence, there is sometimes a cost, but it doesn't seem
2090 worth handling those few cases. */
2091
2092 if (REG_NOTE_KIND (link) != 0)
2093 return 0;
2094
2095 /* If we can't recognize the insns, we can't really do anything. */
2096 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2097 return cost;
2098
2099 dep_rtx = PATTERN (dep_insn);
2100
2101 if (GET_CODE (dep_rtx) == SET)
2102 {
2103 if (addr_generation_dependency_p (dep_rtx, insn))
2104 {
2105 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2106 if (DEBUG_SCHED)
2107 {
2108 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2109 cost);
2110 debug_rtx (dep_insn);
2111 debug_rtx (insn);
2112 }
2113 }
2114 }
2115 else if (GET_CODE (dep_rtx) == PARALLEL)
2116 {
2117 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2118 {
2119 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2120 insn))
2121 {
2122 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2123 if (DEBUG_SCHED)
2124 {
2125 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2126 ,cost);
2127 debug_rtx (dep_insn);
2128 debug_rtx (insn);
2129 }
2130 }
2131 }
2132 }
2133
2134 return cost;
2135 }
2136
2137
2138 /* A C statement (sans semicolon) to update the integer scheduling priority
2139 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2140 increase the priority to execute INSN later. Do not define this macro if
2141 you do not need to adjust the scheduling priorities of insns.
2142
2143 A LA instruction maybe scheduled later, since the pipeline bypasses the
2144 calculated value. */
2145
2146 static int
2147 s390_adjust_priority (insn, priority)
2148 rtx insn ATTRIBUTE_UNUSED;
2149 int priority;
2150 {
2151 if (! INSN_P (insn))
2152 return priority;
2153
2154 if (GET_CODE (PATTERN (insn)) == USE
2155 || GET_CODE (PATTERN (insn)) == CLOBBER)
2156 return priority;
2157
2158 switch (get_attr_type (insn))
2159 {
2160 default:
2161 break;
2162
2163 case TYPE_LA:
2164 if (priority >= 0 && priority < 0x01000000)
2165 priority <<= 3;
2166 break;
2167 case TYPE_LM:
2168 /* LM in epilogue should never be scheduled. This
2169 is due to literal access done in function body.
2170 The usage of register 13 is not mentioned explicitly,
2171 leading to scheduling 'LM' accross this instructions.
2172 */
2173 priority = 0x7fffffff;
2174 break;
2175 }
2176
2177 return priority;
2178 }
2179
2180
/* Pool concept for Linux 390:
   - The function prologue saves the used registers.
   - The literal pool is dumped in the prologue, and a BRAS jumps
     across it.
   - If a function has more than 4K of literals, an additional
     literal pool chunk is dumped at about every S390_CHUNK_MAX
     offset in the function.
   - In this case, a branch from one chunk to another chunk needs
     a reload of the base register at the code label branched to.  */
2189
/* Index of the constant pool chunk that is currently being processed.
   Set to -1 before function output has started.  */
int s390_pool_count = -1;

/* First insn using the constant pool chunk that is currently being
   processed.  */
rtx s390_pool_start_insn = NULL_RTX;

/* UID of the last insn using the constant pool chunk that is currently
   being processed.  */
static int pool_stop_uid;
2201
2202 /* Called from the ASM_OUTPUT_POOL_PROLOGUE macro to
2203 prepare for printing a literal pool chunk to stdio stream FILE.
2204
2205 FNAME and FNDECL specify the name and type of the current function.
2206 SIZE is the size in bytes of the current literal pool. */
2207
2208 void
2209 s390_asm_output_pool_prologue (file, fname, fndecl, size)
2210 FILE *file;
2211 const char *fname ATTRIBUTE_UNUSED;
2212 tree fndecl;
2213 int size ATTRIBUTE_UNUSED;
2214 {
2215
2216 if (s390_pool_count>0) {
2217 /*
2218 * We are in an internal pool, branch over
2219 */
2220 if (TARGET_64BIT)
2221 {
2222 fprintf (file, "\tlarl\t%s,.LT%X_%X\n",
2223 reg_names[BASE_REGISTER],
2224 s390_function_count, s390_pool_count);
2225 readonly_data_section ();
2226 ASM_OUTPUT_ALIGN (file, floor_log2 (3));
2227 fprintf (file, ".LT%X_%X:\t# Pool %d\n",
2228 s390_function_count, s390_pool_count, s390_pool_count);
2229 }
2230 else
2231 fprintf (file,"\t.align 4\n\tbras\t%s,0f\n.LT%X_%X:\t# Pool %d \n",
2232 reg_names[BASE_REGISTER],
2233 s390_function_count, s390_pool_count, s390_pool_count);
2234 }
2235 if (!TARGET_64BIT)
2236 function_section (fndecl);
2237 }
2238
2239 /* Return true if OTHER_ADDR is in different chunk than MY_ADDR.
2240 LTORG points to a list of all literal pools inserted
2241 into the current function. */
2242
static int
other_chunk (ltorg, my_addr, other_addr)
     int *ltorg;
     int my_addr;
     int other_addr;
{
  int uid;
  int mine = 0;
  int theirs = 0;

  /* Scan the zero-terminated pool list for the first pool at or
     beyond MY_ADDR ...  */
  while ((uid = ltorg[mine++]) != 0)
    if (INSN_ADDRESSES (uid) >= my_addr)
      break;

  /* ... and for the first pool strictly beyond OTHER_ADDR.  */
  while ((uid = ltorg[theirs++]) != 0)
    if (INSN_ADDRESSES (uid) > other_addr)
      break;

  /* The two addresses lie in different chunks exactly when the two
     scans stopped at different positions.  */
  return mine != theirs;
}
2266
2267 /* Return true if OTHER_ADDR is too far away from MY_ADDR
2268 to use a relative branch instruction. */
2269
2270 static int
2271 far_away (my_addr, other_addr)
2272 int my_addr;
2273 int other_addr;
2274 {
2275 /* In 64 bit mode we can jump +- 4GB. */
2276 if (TARGET_64BIT)
2277 return 0;
2278 if (abs (my_addr - other_addr) > S390_REL_MAX)
2279 return 1;
2280 return 0;
2281 }
2282
2283 /* Go through all insns in the current function (starting
2284 at INSN), replacing branch insn if necessary. A branch
2285 needs to be modified if either the distance to the
2286 target is too far to use a relative branch, or if the
2287 target uses a different literal pool than the origin.
2288 LTORG_UIDS points to a list of all literal pool insns
2289 that have been inserted. */
2290
static rtx
check_and_change_labels (insn, ltorg_uids)
     rtx insn;
     int *ltorg_uids;
{
  /* RETURN_REGNUM is used as scratch to hold the branch target.  */
  rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  rtx target, jump, cjump;
  rtx pattern, tmp, body, label1;
  int addr0, addr1;

  if (GET_CODE (insn) != JUMP_INSN)
    return insn;

  pattern = PATTERN (insn);

  addr0 = INSN_ADDRESSES (INSN_UID (insn));
  if (GET_CODE (pattern) == SET)
    {
      body = XEXP (pattern, 1);
      /* Unconditional jump to a label.  */
      if (GET_CODE (body) == LABEL_REF)
        {
          addr1 = INSN_ADDRESSES (INSN_UID (XEXP (body, 0)));

          /* Crossing a pool chunk boundary: mark the target so the
             base register is reloaded at the label.  */
          if (other_chunk (ltorg_uids, addr0, addr1))
            {
              SYMBOL_REF_USED (XEXP (body, 0)) = 1;
            }
          /* Too far for a relative branch: replace with a load of the
             target address and an indirect jump.  */
          if (far_away (addr0, addr1))
            {
              if (flag_pic)
                {
                  /* PIC: store the target literal-pool-relative
                     (unspec 100) and add the pool base at runtime.  */
                  target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, body), 100);
                  target = gen_rtx_CONST (SImode, target);
                  target = force_const_mem (SImode, target);
                  jump = gen_rtx_REG (Pmode, BASE_REGISTER);
                  jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
                }
              else
                {
                  target = force_const_mem (Pmode, body);
                  jump = temp_reg;
                }

              emit_insn_before (gen_movsi (temp_reg, target), insn);
              tmp = emit_jump_insn_before (gen_indirect_jump (jump), insn);
              remove_insn (insn);
              INSN_ADDRESSES_NEW (tmp, -1);
              return tmp;
            }
        }
      /* Conditional jump.  */
      else if (GET_CODE (body) == IF_THEN_ELSE)
        {
          /* Branch-if-true form: label in the THEN arm.  */
          if (GET_CODE (XEXP (body, 1)) == LABEL_REF)
            {
              addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XEXP (body, 1), 0)));

              if (other_chunk (ltorg_uids, addr0, addr1))
                {
                  SYMBOL_REF_USED (XEXP (XEXP (body, 1), 0)) = 1;
                }

              if (far_away (addr0, addr1))
                {
                  if (flag_pic)
                    {
                      target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, XEXP (body, 1)), 100);
                      target = gen_rtx_CONST (SImode, target);
                      target = force_const_mem (SImode, target);
                      jump = gen_rtx_REG (Pmode, BASE_REGISTER);
                      jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
                    }
                  else
                    {
                      target = force_const_mem (Pmode, XEXP (body, 1));
                      jump = temp_reg;
                    }

                  /* Invert the condition to skip around the new
                     indirect jump:
                       if (!cond) goto label1; load; jump; label1:  */
                  label1 = gen_label_rtx ();
                  cjump = gen_rtx_LABEL_REF (VOIDmode, label1);
                  cjump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (body, 0), pc_rtx, cjump);
                  cjump = gen_rtx_SET (VOIDmode, pc_rtx, cjump);
                  emit_jump_insn_before (cjump, insn);
                  emit_insn_before (gen_movsi (temp_reg, target), insn);
                  tmp = emit_jump_insn_before (gen_indirect_jump (jump), insn);
                  INSN_ADDRESSES_NEW (emit_label_before (label1, insn), -1);
                  remove_insn (insn);
                  return tmp;
                }
            }
          /* Branch-if-false form: label in the ELSE arm.  */
          else if (GET_CODE (XEXP (body, 2)) == LABEL_REF)
            {
              addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XEXP (body, 2), 0)));

              if (other_chunk (ltorg_uids, addr0, addr1))
                {
                  SYMBOL_REF_USED (XEXP (XEXP (body, 2), 0)) = 1;
                }

              if (far_away (addr0, addr1))
                {
                  if (flag_pic)
                    {
                      target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, XEXP (body, 2)), 100);
                      target = gen_rtx_CONST (SImode, target);
                      target = force_const_mem (SImode, target);
                      jump = gen_rtx_REG (Pmode, BASE_REGISTER);
                      jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
                    }
                  else
                    {
                      target = force_const_mem (Pmode, XEXP (body, 2));
                      jump = temp_reg;
                    }

                  /* Here the condition stays as-is; the fall-through
                     arm skips the new indirect jump.  */
                  label1 = gen_label_rtx ();
                  cjump = gen_rtx_LABEL_REF (VOIDmode, label1);
                  cjump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (body, 0), cjump, pc_rtx);
                  cjump = gen_rtx_SET (VOIDmode, pc_rtx, cjump);
                  emit_jump_insn_before (cjump, insn);
                  emit_insn_before (gen_movsi (temp_reg, target), insn);
                  tmp = emit_jump_insn_before (gen_indirect_jump (jump), insn);
                  INSN_ADDRESSES_NEW (emit_label_before (label1, insn), -1);
                  remove_insn (insn);
                  return tmp;
                }
            }
        }
    }
  /* Jump tables: only chunk-crossing needs marking; the table entries
     are absolute/relative addresses, not branch instructions.  */
  else if (GET_CODE (pattern) == ADDR_VEC ||
           GET_CODE (pattern) == ADDR_DIFF_VEC)
    {
      int i, diff_vec_p = GET_CODE (pattern) == ADDR_DIFF_VEC;
      int len = XVECLEN (pattern, diff_vec_p);

      for (i = 0; i < len; i++)
        {
          addr1 = INSN_ADDRESSES (INSN_UID (XEXP (XVECEXP (pattern, diff_vec_p, i), 0)));
          if (other_chunk (ltorg_uids, addr0, addr1))
            {
              SYMBOL_REF_USED (XEXP (XVECEXP (pattern, diff_vec_p, i), 0)) = 1;
            }
        }
    }
  return insn;
}
2436
2437 /* Called from s390_function_prologue to make final adjustments
2438 before outputting code. CHUNKIFY specifies whether we need
2439 to use multiple literal pools (because the total size of the
2440 literals exceeds 4K). */
2441
static void
s390_final_chunkify (chunkify)
     int chunkify;
{
  rtx insn, ninsn, tmp;
  int addr, naddr = 0, uids;
  int chunk_max = 0;

  /* insn_current_address at this point holds the address past the
     last insn, i.e. the estimated total code size.  */
  int size = insn_current_address;

  /* Zero-terminated list of UIDs of the insns after which a 'ltorg'
     (pool dump point) is to be placed.
     NOTE(review): the alloca size is in bytes but the buffer is used
     as an int array -- presumably a generous heuristic bound on the
     number of chunks; verify it cannot be exceeded.  */
  int *ltorg_uids;
  int max_ltorg=0;

  ltorg_uids = alloca (size / 1024 + 1024);
  memset (ltorg_uids, 0, size / 1024 + 1024);

  if (chunkify == 1)
    {
      /* Choose the chunk size so the per-chunk pool presumably stays
	 within displacement range, capped at S390_CHUNK_MAX.  */
      chunk_max = size * 2048 / get_pool_size ();
      chunk_max = chunk_max > S390_CHUNK_MAX
		    ? S390_CHUNK_MAX : chunk_max;
    }

  /* First pass: record where to dump a pool chunk.  Whenever the code
     crosses a chunk_max boundary, search forward for a safe spot -- a
     code label that does not start with a jump, or a call insn.  */
  for (insn=get_insns (); insn;insn = next_real_insn (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;

      addr = INSN_ADDRESSES (INSN_UID (insn));
      if ((ninsn = next_real_insn (insn)))
	{
	  naddr = INSN_ADDRESSES (INSN_UID (ninsn));
	}

      if (chunkify && (addr / chunk_max != naddr / chunk_max))
	{
	  for (tmp = insn; tmp; tmp = NEXT_INSN (tmp))
	    {
	      if (GET_CODE (tmp) == CODE_LABEL &&
		  GET_CODE (NEXT_INSN (tmp)) != JUMP_INSN)
		{
		  /* Dump just before the label.  */
		  ltorg_uids[max_ltorg++] = INSN_UID (prev_real_insn (tmp));
		  break;
		}
	      if (GET_CODE (tmp) == CALL_INSN)
		{
		  /* A call is also a safe dump point.  */
		  ltorg_uids[max_ltorg++] = INSN_UID (tmp);
		  break;
		}
	      /* Give up if no safe spot shows up within S390_CHUNK_OV
		 bytes past the chunk boundary.  */
	      if (INSN_ADDRESSES (INSN_UID (tmp)) - naddr > S390_CHUNK_OV)
		{
		  debug_rtx (insn);
		  debug_rtx (tmp);
		  fprintf (stderr, "s390 multiple literalpool support:\n No code label between this insn %X %X",
			   naddr, INSN_ADDRESSES (INSN_UID (tmp)));
		  abort ();
		}
	    }
	  if (tmp == NULL)
	    {
	      warning ("no code label found");
	    }
	}
    }
  /* Terminate the UID list.  */
  ltorg_uids[max_ltorg] = 0;

  /* Second pass: emit the 'ltorg' insns at the recorded positions and
     let check_and_change_labels rewrite jumps that cross chunks.  */
  for (insn=get_insns (),uids=0; insn;insn = next_real_insn (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;
      if (INSN_UID (insn) == ltorg_uids[uids])
	{
	  /* The ltorg carries the UID of the *next* dump point so the
	     output pass knows where this chunk's pool ends.  */
	  INSN_ADDRESSES_NEW (emit_insn_after (gen_ltorg (
				gen_rtx_CONST_INT (Pmode, ltorg_uids[++uids])),
			      insn), -1);
	}
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  insn = check_and_change_labels (insn, ltorg_uids);
	}
    }
  /* Emit base register reloads after every label that was marked
     (via SYMBOL_REF_USED) as a target of a cross-chunk jump.  */
  if (chunkify)
    {
      for (insn=get_insns (); insn;insn = next_insn (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    {
	      if (SYMBOL_REF_USED (insn))
		{
		  INSN_ADDRESSES_NEW (emit_insn_after (gen_reload_base (
			gen_rtx_LABEL_REF (Pmode, XEXP (insn, 0))), insn), -1);
		}
	    }
	}
    }
  /* First chunk ends at the first recorded UID (0 if none).  */
  pool_stop_uid = ltorg_uids[0];
}
2538
2539 /* Return true if INSN is a 'ltorg' insn. */
2540
2541 int
2542 s390_stop_dump_lit_p (insn)
2543 rtx insn;
2544 {
2545 rtx body=PATTERN (insn);
2546 if (GET_CODE (body) == PARALLEL
2547 && GET_CODE (XVECEXP (body, 0, 0)) == SET
2548 && GET_CODE (XVECEXP (body, 0, 1)) == USE
2549 && GET_CODE (XEXP ((XVECEXP (body, 0, 1)),0)) == CONST_INT
2550 && GET_CODE (SET_DEST (XVECEXP (body, 0, 0))) == REG
2551 && REGNO (SET_DEST (XVECEXP (body, 0, 0))) == BASE_REGISTER
2552 && SET_SRC (XVECEXP (body, 0, 0)) == pc_rtx) {
2553 return 1;
2554 }
2555 else
2556 return 0;
2557 }
2558
2559 /* Output literal pool chunk to be used for insns
2560 between insn ACT_INSN and the insn with UID STOP. */
2561
void
s390_dump_literal_pool (act_insn, stop)
     rtx act_insn;
     rtx stop;
{
  /* Record the start insn and the UID (given as a const_int in STOP)
     of the insn where this chunk's validity ends; the constant-pool
     output machinery reads these globals to select which entries to
     emit.  */
  s390_pool_start_insn = act_insn;
  pool_stop_uid = INTVAL (stop);
  /* Bump the chunk counter so pool labels stay unique.  */
  s390_pool_count++;
  output_constant_pool (current_function_name, current_function_decl);
  /* Switch back to the text section after dumping the pool.  */
  function_section (current_function_decl);
}
2573
2574 /* Number of elements of current constant pool. */
2575 int s390_nr_constants;
2576
2577 /* Return true if floating point registers need to be saved. */
2578
2579 static int
2580 save_fprs_p ()
2581 {
2582 int i;
2583 if (!TARGET_64BIT)
2584 return 0;
2585 for (i=24; i<=31; i++)
2586 {
2587 if (regs_ever_live[i] == 1)
2588 return 1;
2589 }
2590 return 0;
2591 }
2592
2593 /* Output main constant pool to stdio stream FILE. */
2594
void
s390_output_constant_pool (file)
     FILE *file;
{
  /* Output constant pool.  */
  if (s390_nr_constants)
    {
      s390_pool_count = 0;
      if (TARGET_64BIT)
	{
	  /* 64 bit: address the pool directly via larl; the pool
	     itself goes into the read-only data section.  */
	  fprintf (file, "\tlarl\t%s,.LT%X_%X\n", reg_names[BASE_REGISTER],
		   s390_function_count, s390_pool_count);
	  readonly_data_section ();
	  /* NOTE(review): floor_log2 (3) == 1, i.e. only 2-byte
	     alignment -- possibly floor_log2 (8) was intended for an
	     8-byte aligned pool; verify.  */
	  ASM_OUTPUT_ALIGN (file, floor_log2 (3));
	}
      else
	{
	  /* 31 bit: branch around the inline pool, loading its
	     address into the base register as a side effect.  */
	  fprintf (file, "\tbras\t%s,.LTN%X_%X\n", reg_names[BASE_REGISTER],
		   s390_function_count, s390_pool_count);
	}
      /* .LTx_y marks the pool start, .LTNx_y the first insn after it;
	 both labels are unique per function and per chunk.  */
      fprintf (file, ".LT%X_%X:\n", s390_function_count, s390_pool_count);
      output_constant_pool (current_function_name, current_function_decl);
      fprintf (file, ".LTN%X_%X:\n", s390_function_count,
	       s390_pool_count);
      if (TARGET_64BIT)
	function_section (current_function_decl);
    }
}
2623
/* Find first call clobbered register unused in a function.
   This could be used as base register in a leaf function
   or for holding the return address before epilogue.  */
2627
2628 static int
2629 find_unused_clobbered_reg ()
2630 {
2631 int i;
2632 for (i = 0; i < 6; i++)
2633 if (!regs_ever_live[i])
2634 return i;
2635 return 0;
2636 }
2637
2638 /* Fill FRAME with info about frame of current function. */
2639
static void
s390_frame_info (frame)
     struct s390_frame *frame;
{
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();

  if (fsize > 0x7fff0000)
    fatal_error ("Total size of local variables exceeds architecture limit.");

  /* fprs 8 - 15 are caller saved for 64 Bit ABI.  */
  frame->save_fprs_p = save_fprs_p ();

  /* Reserve 8 * 8 = 64 extra bytes when the FPR save area is needed.  */
  frame->frame_size = fsize + frame->save_fprs_p * 64;

  /* Does function need to setup frame and save area.  */

  if (! current_function_is_leaf
      || frame->frame_size > 0
      || current_function_calls_alloca
      || current_function_stdarg
      || current_function_varargs)
    frame->frame_size += STARTING_FRAME_OFFSET;

  /* If we need to allocate a frame, the stack pointer is changed.  */

  if (frame->frame_size > 0)
    regs_ever_live[STACK_POINTER_REGNUM] = 1;

  /* If there is (possibly) any pool entry, we need to
     load base register.  The base register is also needed when the
     frame size does not fit a 'K' displacement, or for PIC in 31-bit
     mode.  */

  if (get_pool_size ()
      || !CONST_OK_FOR_LETTER_P (frame->frame_size, 'K')
      || (!TARGET_64BIT && current_function_uses_pic_offset_table))
    regs_ever_live[BASE_REGISTER] = 1;

  /* If we need the GOT pointer, remember to save/restore it.  */

  if (current_function_uses_pic_offset_table)
    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

  /* Frame pointer needed.  */

  frame->frame_pointer_p = frame_pointer_needed;

  /* Find first and last gpr to be saved.  Note that marking
     registers live above forces them into this save range.  */

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      break;

  if (i == 16)
    {
      /* Nothing to save / restore.  */
      frame->first_save_gpr = -1;
      frame->first_restore_gpr = -1;
      frame->last_save_gpr = -1;
      frame->return_reg_saved_p = 0;
    }
  else
    {
      /* Save / Restore from gpr i to j.  */
      frame->first_save_gpr = i;
      frame->first_restore_gpr = i;
      frame->last_save_gpr = j;
      frame->return_reg_saved_p = (j >= RETURN_REGNUM && i <= RETURN_REGNUM);
    }

  if (current_function_stdarg || current_function_varargs)
    {
      /* Varargs function need to save from gpr 2 to gpr 15, so the
         anonymous argument registers land in the register save area.  */
      frame->first_save_gpr = 2;
    }
}
2719
2720 /* Return offset between argument pointer and frame pointer
2721 initially after prologue. */
2722
2723 int
2724 s390_arg_frame_offset ()
2725 {
2726 struct s390_frame frame;
2727
2728 /* Compute frame_info. */
2729
2730 s390_frame_info (&frame);
2731
2732 return frame.frame_size + STACK_POINTER_OFFSET;
2733 }
2734
2735 /* Emit insn to save fpr REGNUM at offset OFFSET relative
2736 to register BASE. Return generated insn. */
2737
2738 static rtx
2739 save_fpr (base, offset, regnum)
2740 rtx base;
2741 int offset;
2742 int regnum;
2743 {
2744 rtx addr;
2745 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
2746 set_mem_alias_set (addr, s390_sr_alias_set);
2747
2748 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
2749 }
2750
2751 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
2752 to register BASE. Return generated insn. */
2753
2754 static rtx
2755 restore_fpr (base, offset, regnum)
2756 rtx base;
2757 int offset;
2758 int regnum;
2759 {
2760 rtx addr;
2761 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
2762 set_mem_alias_set (addr, s390_sr_alias_set);
2763
2764 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
2765 }
2766
2767 /* Output the function prologue assembly code to the
2768 stdio stream FILE. The local frame size is passed
2769 in LSIZE. */
2770
2771 void
2772 s390_function_prologue (file, lsize)
2773 FILE *file ATTRIBUTE_UNUSED;
2774 HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
2775 {
2776 if (get_pool_size () > S390_POOL_MAX)
2777 s390_final_chunkify (1);
2778 else
2779 s390_final_chunkify (0);
2780 }
2781
2782 /* Output the function epilogue assembly code to the
2783 stdio stream FILE. The local frame size is passed
2784 in LSIZE. */
2785
void
s390_function_epilogue (file, lsize)
     FILE *file ATTRIBUTE_UNUSED;
     HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
{
  /* Reset per-function literal pool / PIC state.  */
  current_function_uses_pic_offset_table = 0;
  s390_pool_start_insn = NULL_RTX;
  s390_pool_count = -1;
  /* Counts finished functions; used to keep pool labels
     (.LT<function>_<chunk>) unique across the translation unit.  */
  s390_function_count++;
}
2796
2797 /* Expand the prologue into a bunch of separate insns. */
2798
void
s390_emit_prologue ()
{
  struct s390_frame frame;
  rtx insn, addr;
  rtx temp_reg;
  int i, limit;

  /* Compute frame_info.  */

  s390_frame_info (&frame);

  /* Choose best register to use for temp use within prologue.
     RETURN_REGNUM is free once it has been saved to the stack and its
     incoming value is not otherwise needed; else fall back to gpr 1.  */

  if (frame.return_reg_saved_p
      && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM))
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */

  if (frame.first_save_gpr != -1)
    {
      addr = plus_constant (stack_pointer_rtx,
			    frame.first_save_gpr * UNITS_PER_WORD);
      addr = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (addr, s390_sr_alias_set);

      if (frame.first_save_gpr != frame.last_save_gpr )
	{
	  /* More than one register: use store multiple.  */
	  insn = emit_insn (gen_store_multiple (addr,
			      gen_rtx_REG (Pmode, frame.first_save_gpr),
			      GEN_INT (frame.last_save_gpr
				       - frame.first_save_gpr + 1)));

	  /* Set RTX_FRAME_RELATED_P for all sets within store multiple.  */

	  limit = XVECLEN (PATTERN (insn), 0);

	  for (i = 0; i < limit; i++)
	    {
	      rtx x = XVECEXP (PATTERN (insn), 0, i);

	      if (GET_CODE (x) == SET)
		RTX_FRAME_RELATED_P (x) = 1;
	    }
	}
      else
	{
	  insn = emit_move_insn (addr,
				 gen_rtx_REG (Pmode, frame.first_save_gpr));
	}
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Dump constant pool and set constant pool register (13).  */

  insn = emit_insn (gen_lit ());

  /* Save fprs for variable args.  */

  if (current_function_stdarg || current_function_varargs)
    {
      /* Save fpr 0 and 2 (hard regs 16 and 17).  */

      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);

      if (TARGET_64BIT)
	{
	  /* Save fpr 4 and 6 (hard regs 18 and 19), which also pass
	     arguments in the 64-bit ABI.  */

	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	}
    }

  /* Save fprs 4 and 6 if used (31 bit ABI).  */

  if (!TARGET_64BIT)
    {
      /* Save fpr 4 and 6.  */
      if (regs_ever_live[18])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (regs_ever_live[19])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }

  /* Decrement stack pointer.  */

  if (frame.frame_size > 0)
    {
      rtx frame_off = GEN_INT (-frame.frame_size);

      /* Save incoming stack pointer into temp reg.  */

      if (TARGET_BACKCHAIN || frame.save_fprs_p)
	{
	  insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
	}

      /* Subtract frame size from stack pointer.  */

      /* NOTE(review): frame_off was already initialized to the same
	 value above; this reassignment is redundant but harmless.  */
      frame_off = GEN_INT (-frame.frame_size);
      if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Attach an explicit frame-related expression since frame_off
	 may have been forced to memory above.  */
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			     gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					   GEN_INT (-frame.frame_size))),
			   REG_NOTES (insn));

      /* Set backchain.  */

      if (TARGET_BACKCHAIN)
	{
	  /* Store the old stack pointer at the new stack bottom.  */
	  addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI).  temp_reg still holds the incoming
     stack pointer; back it up 64 bytes to address the FPR save area.  */

  if (frame.save_fprs_p)
    {
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));

      for (i = 24; i < 32; i++)
	if (regs_ever_live[i])
	  {
	    /* Address of the slot relative to the new stack pointer,
	       used only for the frame-related note below.  */
	    rtx addr = plus_constant (stack_pointer_rtx,
				      frame.frame_size - 64 + (i-24)*8);

	    insn = save_fpr (temp_reg, (i-24)*8, i);
	    RTX_FRAME_RELATED_P (insn) = 1;
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		gen_rtx_SET (VOIDmode,
			     gen_rtx_MEM (DFmode, addr),
			     gen_rtx_REG (DFmode, i)),
		REG_NOTES (insn));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame.frame_pointer_p)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (current_function_uses_pic_offset_table)
    {
      rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAG (got_symbol) = 1;

      if (TARGET_64BIT)
	{
	  insn = emit_insn (gen_movdi (pic_offset_table_rtx,
				       got_symbol));

	  /* It can happen that the GOT pointer isn't really needed ...  */
	  REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					       REG_NOTES (insn));
	}
      else
	{
	  /* 31 bit: load the GOT offset from the literal pool (unspec
	     100 marks it as GOT-relative) and add the base register.  */
	  got_symbol = gen_rtx_UNSPEC (VOIDmode,
				       gen_rtvec (1, got_symbol), 100);
	  got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
	  got_symbol = force_const_mem (Pmode, got_symbol);
	  insn = emit_move_insn (pic_offset_table_rtx,
				 got_symbol);
	  insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
					   gen_rtx_REG (Pmode, BASE_REGISTER)));

	  /* We need the GOT pointer even if we don't know it ...  */
	  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
	}
    }
}
2995
2996 /* Expand the epilogue into a bunch of separate insns. */
2997
void
s390_emit_epilogue ()
{
  struct s390_frame frame;
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset;
  rtvec p;

  /* Compute frame_info.  */

  s390_frame_info (&frame);

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = frame.frame_pointer_p ?
    hard_frame_pointer_rtx : stack_pointer_rtx;

  /* Compute which parts of the save area we need to access:
     [area_bottom, area_top) is the byte range relative to the
     register save area that has to be reloaded.  */

  if (frame.first_restore_gpr != -1)
    {
      area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
      area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
    }
  else
    {
      /* Empty range; may still be widened by the FPR checks below.  */
      area_bottom = INT_MAX;
      area_top = INT_MIN;
    }

  if (TARGET_64BIT)
    {
      if (frame.save_fprs_p)
	{
	  /* FPR save area sits in the 64 bytes below offset 0.  */
	  if (area_bottom > -64)
	    area_bottom = -64;
	  if (area_top < 0)
	    area_top = 0;
	}
    }
  else
    {
      if (regs_ever_live[18])
	{
	  if (area_bottom > STACK_POINTER_OFFSET - 16)
	    area_bottom = STACK_POINTER_OFFSET - 16;
	  if (area_top < STACK_POINTER_OFFSET - 8)
	    area_top = STACK_POINTER_OFFSET - 8;
	}
      if (regs_ever_live[19])
	{
	  if (area_bottom > STACK_POINTER_OFFSET - 8)
	    area_bottom = STACK_POINTER_OFFSET - 8;
	  if (area_top < STACK_POINTER_OFFSET)
	    area_top = STACK_POINTER_OFFSET;
	}
    }

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (frame.frame_size + area_bottom >= 0
	   && frame.frame_size + area_top <= 4096)
    {
      /* Area is in range of the 12-bit unsigned displacement.  */
      offset = frame.frame_size;
    }
  else
    {
      /* Move frame_pointer up so the whole area becomes addressable,
	 leaving OFFSET as the remaining displacement base.  */
      rtx insn, frame_off;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (frame.frame_size - offset);

      if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
	frame_off = force_const_mem (Pmode, frame_off);

      insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      int i;

      if (frame.save_fprs_p)
	for (i = 24; i < 32; i++)
	  if (regs_ever_live[i])
	    restore_fpr (frame_pointer,
			 offset - 64 + (i-24) * 8, i);
    }
  else
    {
      if (regs_ever_live[18])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
      if (regs_ever_live[19])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (frame.first_restore_gpr != -1)
    {
      rtx addr;

      /* Fetch return address from stack before load multiple,
	 this will do good for scheduling.  */

      if (frame.last_save_gpr >= RETURN_REGNUM
	  && frame.first_restore_gpr < RETURN_REGNUM)
	{
	  /* Use a scratch call-clobbered register (falling back to
	     gpr 4) so the early load does not conflict with the
	     load multiple below.  */
	  int return_regnum = find_unused_clobbered_reg();
	  if (!return_regnum)
	    return_regnum = 4;
	  return_reg = gen_rtx_REG (Pmode, return_regnum);

	  addr = plus_constant (frame_pointer,
				offset + RETURN_REGNUM * UNITS_PER_WORD);
	  addr = gen_rtx_MEM (Pmode, addr);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  emit_move_insn (return_reg, addr);
	}

      /* ??? As references to the base register are not made
	 explicit in insn RTX code, we have to add a barrier here
	 to prevent incorrect scheduling.  */

      emit_insn (gen_blockage());

      addr = plus_constant (frame_pointer,
			    offset + frame.first_restore_gpr * UNITS_PER_WORD);
      addr = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (addr, s390_sr_alias_set);

      if (frame.first_restore_gpr != frame.last_save_gpr)
	{
	  emit_insn (gen_load_multiple (
		       gen_rtx_REG (Pmode, frame.first_restore_gpr),
		       addr,
		       GEN_INT (frame.last_save_gpr - frame.first_restore_gpr + 1)));
	}
      else
	{
	  emit_move_insn (gen_rtx_REG (Pmode, frame.first_restore_gpr),
			  addr);
	}
    }

  /* Return to caller.  The USE keeps the (possibly non-standard)
     return register alive up to the return.  */

  p = rtvec_alloc (2);

  RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
  RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
}
3164
3165
3166 /* Return the size in bytes of a function argument of
3167 type TYPE and/or mode MODE. At least one of TYPE or
3168 MODE must be specified. */
3169
3170 static int
3171 s390_function_arg_size (mode, type)
3172 enum machine_mode mode;
3173 tree type;
3174 {
3175 if (type)
3176 return int_size_in_bytes (type);
3177
3178 /* No type info available for some library calls ... */
3179 if (mode != BLKmode)
3180 return GET_MODE_SIZE (mode);
3181
3182 /* If we have neither type nor mode, abort */
3183 abort ();
3184 }
3185
3186 /* Return 1 if a function argument of type TYPE and mode MODE
3187 is to be passed by reference. The ABI specifies that only
3188 structures of size 1, 2, 4, or 8 bytes are passed by value,
3189 all other structures (and complex numbers) are passed by
3190 reference. */
3191
3192 int
3193 s390_function_arg_pass_by_reference (mode, type)
3194 enum machine_mode mode;
3195 tree type;
3196 {
3197 int size = s390_function_arg_size (mode, type);
3198
3199 if (type)
3200 {
3201 if (AGGREGATE_TYPE_P (type) &&
3202 size != 1 && size != 2 && size != 4 && size != 8)
3203 return 1;
3204
3205 if (TREE_CODE (type) == COMPLEX_TYPE)
3206 return 1;
3207 }
3208 return 0;
3209
3210 }
3211
3212 /* Update the data in CUM to advance over an argument of mode MODE and
3213 data type TYPE. (TYPE is null for libcalls where that information
3214 may not be available.). The boolean NAMED specifies whether the
3215 argument is a named argument (as opposed to an unnamed argument
3216 matching an ellipsis). */
3217
3218 void
3219 s390_function_arg_advance (cum, mode, type, named)
3220 CUMULATIVE_ARGS *cum;
3221 enum machine_mode mode;
3222 tree type;
3223 int named ATTRIBUTE_UNUSED;
3224 {
3225 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
3226 {
3227 cum->fprs++;
3228 }
3229 else if (s390_function_arg_pass_by_reference (mode, type))
3230 {
3231 cum->gprs += 1;
3232 }
3233 else
3234 {
3235 int size = s390_function_arg_size (mode, type);
3236 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
3237 }
3238 }
3239
3240 /* Define where to put the arguments to a function.
3241 Value is zero to push the argument on the stack,
3242 or a hard register in which to store the argument.
3243
3244 MODE is the argument's machine mode.
3245 TYPE is the data type of the argument (as a tree).
3246 This is null for libcalls where that information may
3247 not be available.
3248 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3249 the preceding args and about the function being called.
3250 NAMED is nonzero if this argument is a named parameter
3251 (otherwise it is an extra parameter matching an ellipsis).
3252
3253 On S/390, we use general purpose registers 2 through 6 to
3254 pass integer, pointer, and certain structure arguments, and
3255 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
3256 to pass floating point arguments. All remaining arguments
3257 are pushed to the stack. */
3258
3259 rtx
3260 s390_function_arg (cum, mode, type, named)
3261 CUMULATIVE_ARGS *cum;
3262 enum machine_mode mode;
3263 tree type;
3264 int named ATTRIBUTE_UNUSED;
3265 {
3266 if (s390_function_arg_pass_by_reference (mode, type))
3267 return 0;
3268
3269 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
3270 {
3271 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
3272 return 0;
3273 else
3274 return gen_rtx (REG, mode, cum->fprs + 16);
3275 }
3276 else
3277 {
3278 int size = s390_function_arg_size (mode, type);
3279 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
3280
3281 if (cum->gprs + n_gprs > 5)
3282 return 0;
3283 else
3284 return gen_rtx (REG, mode, cum->gprs + 2);
3285 }
3286 }
3287
3288
3289 /* Create and return the va_list datatype.
3290
3291 On S/390, va_list is an array type equivalent to
3292
3293 typedef struct __va_list_tag
3294 {
3295 long __gpr;
3296 long __fpr;
3297 void *__overflow_arg_area;
3298 void *__reg_save_area;
3299
3300 } va_list[1];
3301
3302 where __gpr and __fpr hold the number of general purpose
3303 or floating point arguments used up to now, respectively,
3304 __overflow_arg_area points to the stack location of the
3305 next argument passed on the stack, and __reg_save_area
3306 always points to the start of the register area in the
3307 call frame of the current function. The function prologue
3308 saves all registers used for argument passing into this
3309 area if the function uses variable arguments. */
3310
tree
s390_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  record = make_lang_type (RECORD_TYPE);

  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Number of GPR arguments consumed so far.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
		      long_integer_type_node);
  /* Number of FPR arguments consumed so far.  */
  f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
		      long_integer_type_node);
  /* Next stack (overflow) argument.  */
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  /* Start of the register save area in the caller's frame.  */
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order; s390_va_start and
     s390_va_arg rely on exactly this order.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3347
3348 /* Implement va_start by filling the va_list structure VALIST.
3349 STDARG_P is true if implementing __builtin_stdarg_va_start,
3350 false if implementing __builtin_varargs_va_start. NEXTARG
3351 points to the first anonymous stack argument.
3352
3353 The following global variables are used to initialize
3354 the va_list structure:
3355
3356 current_function_args_info:
3357 holds number of gprs and fprs used for named arguments.
3358 current_function_arg_offset_rtx:
3359 holds the offset of the first anonymous stack argument
3360 (relative to the virtual arg pointer). */
3361
void
s390_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Walk the field chain in the order s390_build_va_list set up.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* VALIST is an array of one struct; deref to address the fields.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  /* For varargs (non-stdarg) the last named argument is itself
     anonymous, so back up one word.  */
  if (! stdarg_p)
    off = off > 0 ? off - UNITS_PER_WORD : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
	     (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));

  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area; it lies STACK_POINTER_OFFSET bytes
     below the virtual incoming argument pointer.  */
  t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-STACK_POINTER_OFFSET, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3422
3423 /* Implement va_arg by updating the va_list structure
3424 VALIST as required to retrieve an argument of type
3425 TYPE, and returning that argument.
3426
3427 Generates code equivalent to:
3428
3429 if (integral value) {
3430 if (size <= 4 && args.gpr < 5 ||
3431 size > 4 && args.gpr < 4 )
3432 ret = args.reg_save_area[args.gpr+8]
3433 else
3434 ret = *args.overflow_arg_area++;
3435 } else if (float value) {
3436 if (args.fgpr < 2)
3437 ret = args.reg_save_area[args.fpr+64]
3438 else
3439 ret = *args.overflow_arg_area++;
3440 } else if (aggregate value) {
3441 if (args.gpr < 5)
3442 ret = *args.reg_save_area[args.gpr]
3443 else
3444 ret = **args.overflow_arg_area++;
3445 } */
3446
/* Implement va_arg: retrieve an argument of type TYPE from the
   va_list VALIST, following the S/390 calling convention (see the
   pseudo-code comment above).  Returns an rtx holding the address
   of the argument; callers dereference it to get the value.  */

rtx
s390_va_arg (valist, type)
     tree valist;
     tree type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Fetch the four fields of the va_list record, in declaration
     order: gpr count, fpr count, overflow area pointer, register
     save area pointer.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REF trees for each field of *valist.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);

  /* Classify the argument and set up:
       indirect_p - argument is passed as a pointer to a copy
       reg        - which counter field (gpr/fpr) tracks it
       n_reg      - how many registers one argument consumes
       sav_ofs    - byte offset of this register class within the
                    register save area
       sav_scale  - bytes per register slot in the save area
       max_reg    - highest counter value still served from registers  */
  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 2 * UNITS_PER_WORD;
      sav_scale = UNITS_PER_WORD;
      /* Only the pointer is loaded here, so SIZE is one word.  */
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      /* FPR save slots follow the 16 GPR slots in the save area.  */
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT has up to 4 parameters in fprs.  */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      sav_ofs = 2 * UNITS_PER_WORD;
      /* Sub-word scalars are right-justified within their register
	 save slot; bump the offset to address the low-order bytes.  */
      if (TARGET_64BIT)
	sav_ofs += TYPE_MODE (type) == SImode ? 4 :
		   TYPE_MODE (type) == HImode ? 6 :
		   TYPE_MODE (type) == QImode ? 7 : 0;
      else
	sav_ofs += TYPE_MODE (type) == HImode ? 2 :
		   TYPE_MODE (type) == QImode ? 3 : 0;

      sav_scale = UNITS_PER_WORD;
      /* A multi-register argument must fit entirely in registers,
	 so the last usable starting counter value is lower.  */
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* If the register counter already exceeds MAX_REG, the argument
     lives in the overflow (stack) area: branch to lab_false.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
			   GEN_INT (max_reg),
			   GT, const1_rtx, Pmode, 0, lab_false);

  /* addr = sav + sav_ofs + reg * sav_scale.  */
  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  u = build (MULT_EXPR, long_integer_type_node,
	     reg, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);


  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... Otherwise out of the overflow area.  */

  t = save_expr (ovf);


  /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated.
     A small argument sits at the end of its slot, so advance OVF first.  */
  if (size < UNITS_PER_WORD)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);
    }

  /* addr = ovf; then post-increment ovf by the argument size.  */
  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* If less than max_regs a registers are retrieved out
     of register save area, increment.  Note this runs on both
     paths; the counter is bumped even for stack arguments.  */

  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	     build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;
  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* For pass-by-reference arguments we computed the address of the
     pointer; load through it to obtain the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }


  return addr_rtx;
}
3608
3609
3610 /* Output assembly code for the trampoline template to
3611 stdio stream FILE.
3612
3613 On S/390, we use gpr 1 internally in the trampoline code;
3614 gpr 0 is used to hold the static chain. */
3615
3616 void
3617 s390_trampoline_template (file)
3618 FILE *file;
3619 {
3620 if (TARGET_64BIT)
3621 {
3622 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
3623 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
3624 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
3625 fprintf (file, "br\t%s\n", reg_names[1]);
3626 fprintf (file, "0:\t.quad\t0\n");
3627 fprintf (file, ".quad\t0\n");
3628 }
3629 else
3630 {
3631 fprintf (file, "basr\t%s,0\n", reg_names[1]);
3632 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
3633 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
3634 fprintf (file, "br\t%s\n", reg_names[1]);
3635 fprintf (file, ".long\t0\n");
3636 fprintf (file, ".long\t0\n");
3637 }
3638 }
3639
3640 /* Emit RTL insns to initialize the variable parts of a trampoline.
3641 FNADDR is an RTX for the address of the function's pure code.
3642 CXT is an RTX for the static chain value for the function. */
3643
3644 void
3645 s390_initialize_trampoline (addr, fnaddr, cxt)
3646 rtx addr;
3647 rtx fnaddr;
3648 rtx cxt;
3649 {
3650 emit_move_insn (gen_rtx
3651 (MEM, Pmode,
3652 memory_address (Pmode,
3653 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
3654 emit_move_insn (gen_rtx
3655 (MEM, Pmode,
3656 memory_address (Pmode,
3657 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
3658 }
3659
3660 /* Return rtx for 64-bit constant formed from the 32-bit subwords
3661 LOW and HIGH, independent of the host word size. */
3662
3663 rtx
3664 s390_gen_rtx_const_DI (high, low)
3665 int high;
3666 int low;
3667 {
3668 #if HOST_BITS_PER_WIDE_INT >= 64
3669 HOST_WIDE_INT val;
3670 val = (HOST_WIDE_INT)high;
3671 val <<= 32;
3672 val |= (HOST_WIDE_INT)low;
3673
3674 return GEN_INT (val);
3675 #else
3676 #if HOST_BITS_PER_WIDE_INT >= 32
3677 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
3678 #else
3679 abort ();
3680 #endif
3681 #endif
3682 }
3683
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  Emits a call to _mcount, saving
   and restoring the return-address register around it.  */

void
s390_function_profiler (file, labelno)
     FILE *file;
     int labelno;
{
  rtx op[7];

  char label[128];
  sprintf (label, "%sP%d", LPREFIX, labelno);

  fprintf (file, "# function profiler \n");

  /* op[0]: return-address register.
     op[1]: its save slot, one word above the stack pointer.  */
  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  /* op[2]: scratch register (gpr 1); op[3]: the profiler label.  */
  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAG (op[3]) = 1;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      /* NOTE(review): unspec 113 presumably marks a PLT-style
	 PIC reference to _mcount — confirm against the unspec
	 numbering in s390.md.  */
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      /* 64-bit: save return register, load the label address with
	 larl, call _mcount directly, restore.  */
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      /* 31-bit non-PIC: bras branches over an inline literal pool
	 holding the label and _mcount addresses; load both and call
	 via basr.  */
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%3", op);
      output_asm_insn (".long\t%4", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      /* 31-bit PIC: the literal pool holds label-relative offsets
	 (%3-%l5, %4-%l5); add the pool base (in %2) back in to form
	 absolute addresses before calling.  */
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%3-%l5", op);
      output_asm_insn (".long\t%4-%l5", op);
      ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
3753