1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
53 /* Machine-specific symbol_ref flags. */
54 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
57 static bool s390_assemble_integer (rtx
, unsigned int, int);
58 static void s390_select_rtx_section (enum machine_mode
, rtx
,
59 unsigned HOST_WIDE_INT
);
60 static void s390_encode_section_info (tree
, rtx
, int);
61 static bool s390_cannot_force_const_mem (rtx
);
62 static rtx
s390_delegitimize_address (rtx
);
63 static bool s390_return_in_memory (tree
, tree
);
64 static void s390_init_builtins (void);
65 static rtx
s390_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
66 static void s390_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
68 static enum attr_type
s390_safe_attr_type (rtx
);
70 static int s390_adjust_cost (rtx
, rtx
, rtx
, int);
71 static int s390_adjust_priority (rtx
, int);
72 static int s390_issue_rate (void);
73 static int s390_use_dfa_pipeline_interface (void);
74 static int s390_first_cycle_multipass_dfa_lookahead (void);
75 static int s390_sched_reorder2 (FILE *, int, rtx
*, int *, int);
76 static bool s390_rtx_costs (rtx
, int, int, int *);
77 static int s390_address_cost (rtx
);
78 static void s390_reorg (void);
79 static bool s390_valid_pointer_mode (enum machine_mode
);
80 static tree
s390_build_builtin_va_list (void);
82 #undef TARGET_ASM_ALIGNED_HI_OP
83 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
84 #undef TARGET_ASM_ALIGNED_DI_OP
85 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
86 #undef TARGET_ASM_INTEGER
87 #define TARGET_ASM_INTEGER s390_assemble_integer
89 #undef TARGET_ASM_OPEN_PAREN
90 #define TARGET_ASM_OPEN_PAREN ""
92 #undef TARGET_ASM_CLOSE_PAREN
93 #define TARGET_ASM_CLOSE_PAREN ""
95 #undef TARGET_ASM_SELECT_RTX_SECTION
96 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
98 #undef TARGET_ENCODE_SECTION_INFO
99 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
102 #undef TARGET_HAVE_TLS
103 #define TARGET_HAVE_TLS true
105 #undef TARGET_CANNOT_FORCE_CONST_MEM
106 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
108 #undef TARGET_DELEGITIMIZE_ADDRESS
109 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
111 #undef TARGET_RETURN_IN_MEMORY
112 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
114 #undef TARGET_INIT_BUILTINS
115 #define TARGET_INIT_BUILTINS s390_init_builtins
116 #undef TARGET_EXPAND_BUILTIN
117 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
119 #undef TARGET_ASM_OUTPUT_MI_THUNK
120 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
121 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
122 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
124 #undef TARGET_SCHED_ADJUST_COST
125 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
126 #undef TARGET_SCHED_ADJUST_PRIORITY
127 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
128 #undef TARGET_SCHED_ISSUE_RATE
129 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
130 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
131 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
132 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
133 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
134 #undef TARGET_SCHED_REORDER2
135 #define TARGET_SCHED_REORDER2 s390_sched_reorder2
137 #undef TARGET_RTX_COSTS
138 #define TARGET_RTX_COSTS s390_rtx_costs
139 #undef TARGET_ADDRESS_COST
140 #define TARGET_ADDRESS_COST s390_address_cost
141 #undef TARGET_DIRECT_POOL_LOAD_P
142 #define TARGET_DIRECT_POOL_LOAD_P hook_bool_machine_mode_true
144 #undef TARGET_MACHINE_DEPENDENT_REORG
145 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
147 #undef TARGET_VALID_POINTER_MODE
148 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
150 #undef TARGET_BUILD_BUILTIN_VA_LIST
151 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
153 struct gcc_target targetm
= TARGET_INITIALIZER
;
155 extern int reload_completed
;
157 /* The alias set for prologue/epilogue register save/restore. */
158 static int s390_sr_alias_set
= 0;
160 /* Save information from a "cmpxx" operation until the branch or scc is
162 rtx s390_compare_op0
, s390_compare_op1
;
164 /* Structure used to hold the components of a S/390 memory
165 address. A legitimate address on S/390 is of the general
167 base + index + displacement
168 where any of the components is optional.
170 base and index are registers of the class ADDR_REGS,
171 displacement is an unsigned 12-bit immediate constant. */
181 /* Which cpu are we tuning for. */
182 enum processor_type s390_tune
;
183 enum processor_flags s390_tune_flags
;
184 /* Which instruction set architecture to use. */
185 enum processor_type s390_arch
;
186 enum processor_flags s390_arch_flags
;
188 /* Strings to hold which cpu and instruction set architecture to use. */
189 const char *s390_tune_string
; /* for -mtune=<xxx> */
190 const char *s390_arch_string
; /* for -march=<xxx> */
192 /* Define the structure for the machine field in struct function. */
194 struct machine_function
GTY(())
196 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
199 /* Set if return address needs to be saved. */
200 bool save_return_addr_p
;
202 /* Number of first and last gpr to be saved, restored. */
204 int first_restore_gpr
;
207 /* Size of stack frame. */
208 HOST_WIDE_INT frame_size
;
210 /* Some local-dynamic TLS symbol name. */
211 const char *some_ld_name
;
214 static int s390_match_ccmode_set (rtx
, enum machine_mode
);
215 static int s390_branch_condition_mask (rtx
);
216 static const char *s390_branch_condition_mnemonic (rtx
, int);
217 static int check_mode (rtx
, enum machine_mode
*);
218 static int general_s_operand (rtx
, enum machine_mode
, int);
219 static int s390_short_displacement (rtx
);
220 static int s390_decompose_address (rtx
, struct s390_address
*);
221 static rtx
get_thread_pointer (void);
222 static rtx
legitimize_tls_address (rtx
, rtx
);
223 static void print_shift_count_operand (FILE *, rtx
);
224 static const char *get_some_local_dynamic_name (void);
225 static int get_some_local_dynamic_name_1 (rtx
*, void *);
226 static int reg_used_in_mem_p (int, rtx
);
227 static int addr_generation_dependency_p (rtx
, rtx
);
228 static int s390_split_branches (void);
229 static void find_constant_pool_ref (rtx
, rtx
*);
230 static void replace_constant_pool_ref (rtx
*, rtx
, rtx
);
231 static rtx
find_ltrel_base (rtx
);
232 static void replace_ltrel_base (rtx
*, rtx
);
233 static void s390_optimize_prolog (bool);
234 static int find_unused_clobbered_reg (void);
235 static void s390_frame_info (void);
236 static rtx
save_fpr (rtx
, int, int);
237 static rtx
restore_fpr (rtx
, int, int);
238 static rtx
save_gprs (rtx
, int, int, int);
239 static rtx
restore_gprs (rtx
, int, int, int);
240 static int s390_function_arg_size (enum machine_mode
, tree
);
241 static bool s390_function_arg_float (enum machine_mode
, tree
);
242 static struct machine_function
* s390_init_machine_status (void);
/* Check whether integer displacement D fits the addressing range of a
   single instruction: a 20-bit signed displacement when the
   long-displacement facility is available, otherwise the classic
   12-bit unsigned displacement.  */
#define DISP_IN_RANGE(d)                                          \
  (TARGET_LONG_DISPLACEMENT ? ((d) >= -524288 && (d) <= 524287)   \
                            : ((d) >= 0 && (d) <= 4095))
249 /* Return true if SET either doesn't set the CC register, or else
250 the source and destination have matching CC modes and that
251 CC mode is at least as constrained as REQ_MODE. */
254 s390_match_ccmode_set (rtx set
, enum machine_mode req_mode
)
256 enum machine_mode set_mode
;
258 if (GET_CODE (set
) != SET
)
261 if (GET_CODE (SET_DEST (set
)) != REG
|| !CC_REGNO_P (REGNO (SET_DEST (set
))))
264 set_mode
= GET_MODE (SET_DEST (set
));
277 if (req_mode
!= set_mode
)
282 if (req_mode
!= CCSmode
&& req_mode
!= CCUmode
&& req_mode
!= CCTmode
283 && req_mode
!= CCSRmode
&& req_mode
!= CCURmode
)
289 if (req_mode
!= CCAmode
)
297 return (GET_MODE (SET_SRC (set
)) == set_mode
);
300 /* Return true if every SET in INSN that sets the CC register
301 has source and destination with matching CC modes and that
302 CC mode is at least as constrained as REQ_MODE.
303 If REQ_MODE is VOIDmode, always return false. */
306 s390_match_ccmode (rtx insn
, enum machine_mode req_mode
)
310 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
311 if (req_mode
== VOIDmode
)
314 if (GET_CODE (PATTERN (insn
)) == SET
)
315 return s390_match_ccmode_set (PATTERN (insn
), req_mode
);
317 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
318 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
320 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
321 if (GET_CODE (set
) == SET
)
322 if (!s390_match_ccmode_set (set
, req_mode
))
329 /* If a test-under-mask instruction can be used to implement
330 (compare (and ... OP1) OP2), return the CC mode required
331 to do that. Otherwise, return VOIDmode.
332 MIXED is true if the instruction can distinguish between
333 CC1 and CC2 for mixed selected bits (TMxx), it is false
334 if the instruction cannot (TM). */
337 s390_tm_ccmode (rtx op1
, rtx op2
, int mixed
)
341 /* ??? Fixme: should work on CONST_DOUBLE as well. */
342 if (GET_CODE (op1
) != CONST_INT
|| GET_CODE (op2
) != CONST_INT
)
345 /* Selected bits all zero: CC0. */
346 if (INTVAL (op2
) == 0)
349 /* Selected bits all one: CC3. */
350 if (INTVAL (op2
) == INTVAL (op1
))
353 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
356 bit1
= exact_log2 (INTVAL (op2
));
357 bit0
= exact_log2 (INTVAL (op1
) ^ INTVAL (op2
));
358 if (bit0
!= -1 && bit1
!= -1)
359 return bit0
> bit1
? CCT1mode
: CCT2mode
;
365 /* Given a comparison code OP (EQ, NE, etc.) and the operands
366 OP0 and OP1 of a COMPARE, return the mode to be used for the
370 s390_select_ccmode (enum rtx_code code
, rtx op0
, rtx op1
)
376 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
377 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0
, 1)), 'K'))
379 if ((GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
380 || GET_CODE (op1
) == NEG
)
381 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
384 if (GET_CODE (op0
) == AND
)
386 /* Check whether we can potentially do it via TM. */
387 enum machine_mode ccmode
;
388 ccmode
= s390_tm_ccmode (XEXP (op0
, 1), op1
, 1);
389 if (ccmode
!= VOIDmode
)
391 /* Relax CCTmode to CCZmode to allow fall-back to AND
392 if that turns out to be beneficial. */
393 return ccmode
== CCTmode
? CCZmode
: ccmode
;
397 if (register_operand (op0
, HImode
)
398 && GET_CODE (op1
) == CONST_INT
399 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 65535))
401 if (register_operand (op0
, QImode
)
402 && GET_CODE (op1
) == CONST_INT
403 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 255))
412 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
413 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0
, 1)), 'K'))
415 if (INTVAL (XEXP((op0
), 1)) < 0)
428 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
429 && GET_CODE (op1
) != CONST_INT
)
435 if (GET_CODE (op0
) == PLUS
436 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
439 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
440 && GET_CODE (op1
) != CONST_INT
)
446 if (GET_CODE (op0
) == MINUS
447 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
450 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
451 && GET_CODE (op1
) != CONST_INT
)
460 /* Return nonzero if OP is a valid comparison operator
461 for an ALC condition in mode MODE. */
464 s390_alc_comparison (rtx op
, enum machine_mode mode
)
466 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
469 if (GET_RTX_CLASS (GET_CODE (op
)) != '<')
472 if (GET_CODE (XEXP (op
, 0)) != REG
473 || REGNO (XEXP (op
, 0)) != CC_REGNUM
474 || XEXP (op
, 1) != const0_rtx
)
477 switch (GET_MODE (XEXP (op
, 0)))
480 return GET_CODE (op
) == LTU
;
483 return GET_CODE (op
) == LEU
;
486 return GET_CODE (op
) == GTU
;
489 return GET_CODE (op
) == LTU
;
492 return GET_CODE (op
) == UNGT
;
495 return GET_CODE (op
) == UNLT
;
502 /* Return nonzero if OP is a valid comparison operator
503 for an SLB condition in mode MODE. */
506 s390_slb_comparison (rtx op
, enum machine_mode mode
)
508 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
511 if (GET_RTX_CLASS (GET_CODE (op
)) != '<')
514 if (GET_CODE (XEXP (op
, 0)) != REG
515 || REGNO (XEXP (op
, 0)) != CC_REGNUM
516 || XEXP (op
, 1) != const0_rtx
)
519 switch (GET_MODE (XEXP (op
, 0)))
522 return GET_CODE (op
) == GEU
;
525 return GET_CODE (op
) == GTU
;
528 return GET_CODE (op
) == LEU
;
531 return GET_CODE (op
) == GEU
;
534 return GET_CODE (op
) == LE
;
537 return GET_CODE (op
) == GE
;
544 /* Return branch condition mask to implement a branch
545 specified by CODE. */
548 s390_branch_condition_mask (rtx code
)
550 const int CC0
= 1 << 3;
551 const int CC1
= 1 << 2;
552 const int CC2
= 1 << 1;
553 const int CC3
= 1 << 0;
555 if (GET_CODE (XEXP (code
, 0)) != REG
556 || REGNO (XEXP (code
, 0)) != CC_REGNUM
557 || XEXP (code
, 1) != const0_rtx
)
560 switch (GET_MODE (XEXP (code
, 0)))
563 switch (GET_CODE (code
))
566 case NE
: return CC1
| CC2
| CC3
;
573 switch (GET_CODE (code
))
576 case NE
: return CC0
| CC2
| CC3
;
583 switch (GET_CODE (code
))
586 case NE
: return CC0
| CC1
| CC3
;
593 switch (GET_CODE (code
))
596 case NE
: return CC0
| CC1
| CC2
;
603 switch (GET_CODE (code
))
605 case EQ
: return CC0
| CC2
;
606 case NE
: return CC1
| CC3
;
613 switch (GET_CODE (code
))
615 case LTU
: return CC2
| CC3
; /* carry */
616 case GEU
: return CC0
| CC1
; /* no carry */
623 switch (GET_CODE (code
))
625 case GTU
: return CC0
| CC1
; /* borrow */
626 case LEU
: return CC2
| CC3
; /* no borrow */
633 switch (GET_CODE (code
))
636 case NE
: return CC1
| CC2
| CC3
;
637 case LTU
: return CC1
;
638 case GTU
: return CC2
;
639 case LEU
: return CC0
| CC1
;
640 case GEU
: return CC0
| CC2
;
647 switch (GET_CODE (code
))
650 case NE
: return CC2
| CC1
| CC3
;
651 case LTU
: return CC2
;
652 case GTU
: return CC1
;
653 case LEU
: return CC0
| CC2
;
654 case GEU
: return CC0
| CC1
;
661 switch (GET_CODE (code
))
664 case NE
: return CC1
| CC2
| CC3
;
665 case LT
: return CC1
| CC3
;
667 case LE
: return CC0
| CC1
| CC3
;
668 case GE
: return CC0
| CC2
;
675 switch (GET_CODE (code
))
678 case NE
: return CC1
| CC2
| CC3
;
680 case GT
: return CC2
| CC3
;
681 case LE
: return CC0
| CC1
;
682 case GE
: return CC0
| CC2
| CC3
;
689 switch (GET_CODE (code
))
692 case NE
: return CC1
| CC2
| CC3
;
695 case LE
: return CC0
| CC1
;
696 case GE
: return CC0
| CC2
;
697 case UNORDERED
: return CC3
;
698 case ORDERED
: return CC0
| CC1
| CC2
;
699 case UNEQ
: return CC0
| CC3
;
700 case UNLT
: return CC1
| CC3
;
701 case UNGT
: return CC2
| CC3
;
702 case UNLE
: return CC0
| CC1
| CC3
;
703 case UNGE
: return CC0
| CC2
| CC3
;
704 case LTGT
: return CC1
| CC2
;
711 switch (GET_CODE (code
))
714 case NE
: return CC2
| CC1
| CC3
;
717 case LE
: return CC0
| CC2
;
718 case GE
: return CC0
| CC1
;
719 case UNORDERED
: return CC3
;
720 case ORDERED
: return CC0
| CC2
| CC1
;
721 case UNEQ
: return CC0
| CC3
;
722 case UNLT
: return CC2
| CC3
;
723 case UNGT
: return CC1
| CC3
;
724 case UNLE
: return CC0
| CC2
| CC3
;
725 case UNGE
: return CC0
| CC1
| CC3
;
726 case LTGT
: return CC2
| CC1
;
737 /* If INV is false, return assembler mnemonic string to implement
738 a branch specified by CODE. If INV is true, return mnemonic
739 for the corresponding inverted branch. */
742 s390_branch_condition_mnemonic (rtx code
, int inv
)
744 static const char *const mnemonic
[16] =
746 NULL
, "o", "h", "nle",
747 "l", "nhe", "lh", "ne",
748 "e", "nlh", "he", "nl",
749 "le", "nh", "no", NULL
752 int mask
= s390_branch_condition_mask (code
);
757 if (mask
< 1 || mask
> 14)
760 return mnemonic
[mask
];
763 /* If OP is an integer constant of mode MODE with exactly one
764 HImode subpart unequal to DEF, return the number of that
765 subpart. As a special case, all HImode subparts of OP are
766 equal to DEF, return zero. Otherwise, return -1. */
769 s390_single_hi (rtx op
, enum machine_mode mode
, int def
)
771 if (GET_CODE (op
) == CONST_INT
)
773 unsigned HOST_WIDE_INT value
= 0;
774 int n_parts
= GET_MODE_SIZE (mode
) / 2;
777 for (i
= 0; i
< n_parts
; i
++)
780 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
784 if ((value
& 0xffff) != (unsigned)(def
& 0xffff))
793 return part
== -1 ? 0 : (n_parts
- 1 - part
);
796 else if (GET_CODE (op
) == CONST_DOUBLE
797 && GET_MODE (op
) == VOIDmode
)
799 unsigned HOST_WIDE_INT value
= 0;
800 int n_parts
= GET_MODE_SIZE (mode
) / 2;
803 for (i
= 0; i
< n_parts
; i
++)
806 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
807 else if (i
== HOST_BITS_PER_WIDE_INT
/ 16)
808 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
);
812 if ((value
& 0xffff) != (unsigned)(def
& 0xffff))
821 return part
== -1 ? 0 : (n_parts
- 1 - part
);
827 /* Extract the HImode part number PART from integer
828 constant OP of mode MODE. */
831 s390_extract_hi (rtx op
, enum machine_mode mode
, int part
)
833 int n_parts
= GET_MODE_SIZE (mode
) / 2;
834 if (part
< 0 || part
>= n_parts
)
837 part
= n_parts
- 1 - part
;
839 if (GET_CODE (op
) == CONST_INT
)
841 unsigned HOST_WIDE_INT value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
842 return ((value
>> (16 * part
)) & 0xffff);
844 else if (GET_CODE (op
) == CONST_DOUBLE
845 && GET_MODE (op
) == VOIDmode
)
847 unsigned HOST_WIDE_INT value
;
848 if (part
< HOST_BITS_PER_WIDE_INT
/ 16)
849 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
851 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
),
852 part
-= HOST_BITS_PER_WIDE_INT
/ 16;
854 return ((value
>> (16 * part
)) & 0xffff);
860 /* If OP is an integer constant of mode MODE with exactly one
861 QImode subpart unequal to DEF, return the number of that
862 subpart. As a special case, all QImode subparts of OP are
863 equal to DEF, return zero. Otherwise, return -1. */
866 s390_single_qi (rtx op
, enum machine_mode mode
, int def
)
868 if (GET_CODE (op
) == CONST_INT
)
870 unsigned HOST_WIDE_INT value
= 0;
871 int n_parts
= GET_MODE_SIZE (mode
);
874 for (i
= 0; i
< n_parts
; i
++)
877 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
881 if ((value
& 0xff) != (unsigned)(def
& 0xff))
890 return part
== -1 ? 0 : (n_parts
- 1 - part
);
893 else if (GET_CODE (op
) == CONST_DOUBLE
894 && GET_MODE (op
) == VOIDmode
)
896 unsigned HOST_WIDE_INT value
= 0;
897 int n_parts
= GET_MODE_SIZE (mode
);
900 for (i
= 0; i
< n_parts
; i
++)
903 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
904 else if (i
== HOST_BITS_PER_WIDE_INT
/ 8)
905 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
);
909 if ((value
& 0xff) != (unsigned)(def
& 0xff))
918 return part
== -1 ? 0 : (n_parts
- 1 - part
);
924 /* Extract the QImode part number PART from integer
925 constant OP of mode MODE. */
928 s390_extract_qi (rtx op
, enum machine_mode mode
, int part
)
930 int n_parts
= GET_MODE_SIZE (mode
);
931 if (part
< 0 || part
>= n_parts
)
934 part
= n_parts
- 1 - part
;
936 if (GET_CODE (op
) == CONST_INT
)
938 unsigned HOST_WIDE_INT value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
939 return ((value
>> (8 * part
)) & 0xff);
941 else if (GET_CODE (op
) == CONST_DOUBLE
942 && GET_MODE (op
) == VOIDmode
)
944 unsigned HOST_WIDE_INT value
;
945 if (part
< HOST_BITS_PER_WIDE_INT
/ 8)
946 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
948 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
),
949 part
-= HOST_BITS_PER_WIDE_INT
/ 8;
951 return ((value
>> (8 * part
)) & 0xff);
957 /* Check whether we can (and want to) split a double-word
958 move in mode MODE from SRC to DST into two single-word
959 moves, moving the subword FIRST_SUBWORD first. */
962 s390_split_ok_p (rtx dst
, rtx src
, enum machine_mode mode
, int first_subword
)
964 /* Floating point registers cannot be split. */
965 if (FP_REG_P (src
) || FP_REG_P (dst
))
968 /* We don't need to split if operands are directly accessible. */
969 if (s_operand (src
, mode
) || s_operand (dst
, mode
))
972 /* Non-offsettable memory references cannot be split. */
973 if ((GET_CODE (src
) == MEM
&& !offsettable_memref_p (src
))
974 || (GET_CODE (dst
) == MEM
&& !offsettable_memref_p (dst
)))
977 /* Moving the first subword must not clobber a register
978 needed to move the second subword. */
979 if (register_operand (dst
, mode
))
981 rtx subreg
= operand_subword (dst
, first_subword
, 0, mode
);
982 if (reg_overlap_mentioned_p (subreg
, src
))
990 /* Change optimizations to be performed, depending on the
993 LEVEL is the optimization level specified; 2 if `-O2' is
994 specified, 1 if `-O' is specified, and 0 if neither is specified.
996 SIZE is nonzero if `-Os' is specified and zero otherwise. */
999 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1001 /* ??? There are apparently still problems with -fcaller-saves. */
1002 flag_caller_saves
= 0;
1004 /* By default, always emit DWARF-2 unwind info. This allows debugging
1005 without maintaining a stack frame back-chain. */
1006 flag_asynchronous_unwind_tables
= 1;
1010 override_options (void)
1015 const char *const name
; /* processor name or nickname. */
1016 const enum processor_type processor
;
1017 const enum processor_flags flags
;
1019 const processor_alias_table
[] =
1021 {"g5", PROCESSOR_9672_G5
, PF_IEEE_FLOAT
},
1022 {"g6", PROCESSOR_9672_G6
, PF_IEEE_FLOAT
},
1023 {"z900", PROCESSOR_2064_Z900
, PF_IEEE_FLOAT
| PF_ZARCH
},
1024 {"z990", PROCESSOR_2084_Z990
, PF_IEEE_FLOAT
| PF_ZARCH
1025 | PF_LONG_DISPLACEMENT
},
1028 int const pta_size
= ARRAY_SIZE (processor_alias_table
);
1030 /* Acquire a unique set number for our register saves and restores. */
1031 s390_sr_alias_set
= new_alias_set ();
1033 /* Set up function hooks. */
1034 init_machine_status
= s390_init_machine_status
;
1036 /* Architecture mode defaults according to ABI. */
1037 if (!(target_flags_explicit
& MASK_ZARCH
))
1040 target_flags
|= MASK_ZARCH
;
1042 target_flags
&= ~MASK_ZARCH
;
1045 /* Determine processor architectural level. */
1046 if (!s390_arch_string
)
1047 s390_arch_string
= TARGET_ZARCH
? "z900" : "g5";
1049 for (i
= 0; i
< pta_size
; i
++)
1050 if (! strcmp (s390_arch_string
, processor_alias_table
[i
].name
))
1052 s390_arch
= processor_alias_table
[i
].processor
;
1053 s390_arch_flags
= processor_alias_table
[i
].flags
;
1057 error ("Unknown cpu used in -march=%s.", s390_arch_string
);
1059 /* Determine processor to tune for. */
1060 if (!s390_tune_string
)
1062 s390_tune
= s390_arch
;
1063 s390_tune_flags
= s390_arch_flags
;
1064 s390_tune_string
= s390_arch_string
;
1068 for (i
= 0; i
< pta_size
; i
++)
1069 if (! strcmp (s390_tune_string
, processor_alias_table
[i
].name
))
1071 s390_tune
= processor_alias_table
[i
].processor
;
1072 s390_tune_flags
= processor_alias_table
[i
].flags
;
1076 error ("Unknown cpu used in -mtune=%s.", s390_tune_string
);
1079 /* Sanity checks. */
1080 if (TARGET_ZARCH
&& !(s390_arch_flags
& PF_ZARCH
))
1081 error ("z/Architecture mode not supported on %s.", s390_arch_string
);
1082 if (TARGET_64BIT
&& !TARGET_ZARCH
)
1083 error ("64-bit ABI not supported in ESA/390 mode.");
1086 /* Map for smallest class containing reg regno. */
1088 const enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
1089 { GENERAL_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1090 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1091 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1092 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1093 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1094 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1095 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1096 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1097 ADDR_REGS
, NO_REGS
, ADDR_REGS
1100 /* Return attribute type of insn. */
1102 static enum attr_type
1103 s390_safe_attr_type (rtx insn
)
1105 if (recog_memoized (insn
) >= 0)
1106 return get_attr_type (insn
);
1111 /* Return true if OP a (const_int 0) operand.
1112 OP is the current operation.
1113 MODE is the current operation mode. */
1116 const0_operand (register rtx op
, enum machine_mode mode
)
1118 return op
== CONST0_RTX (mode
);
1121 /* Return true if OP is constant.
1122 OP is the current operation.
1123 MODE is the current operation mode. */
1126 consttable_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1128 return CONSTANT_P (op
);
1131 /* Return true if the mode of operand OP matches MODE.
1132 If MODE is set to VOIDmode, set it to the mode of OP. */
1135 check_mode (register rtx op
, enum machine_mode
*mode
)
1137 if (*mode
== VOIDmode
)
1138 *mode
= GET_MODE (op
);
1141 if (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != *mode
)
1147 /* Return true if OP a valid operand for the LARL instruction.
1148 OP is the current operation.
1149 MODE is the current operation mode. */
1152 larl_operand (register rtx op
, enum machine_mode mode
)
1154 if (! check_mode (op
, &mode
))
1157 /* Allow labels and local symbols. */
1158 if (GET_CODE (op
) == LABEL_REF
)
1160 if (GET_CODE (op
) == SYMBOL_REF
)
1161 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1162 && SYMBOL_REF_TLS_MODEL (op
) == 0
1163 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1165 /* Everything else must have a CONST, so strip it. */
1166 if (GET_CODE (op
) != CONST
)
1170 /* Allow adding *even* in-range constants. */
1171 if (GET_CODE (op
) == PLUS
)
1173 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
1174 || (INTVAL (XEXP (op
, 1)) & 1) != 0)
1176 #if HOST_BITS_PER_WIDE_INT > 32
1177 if (INTVAL (XEXP (op
, 1)) >= (HOST_WIDE_INT
)1 << 32
1178 || INTVAL (XEXP (op
, 1)) < -((HOST_WIDE_INT
)1 << 32))
1184 /* Labels and local symbols allowed here as well. */
1185 if (GET_CODE (op
) == LABEL_REF
)
1187 if (GET_CODE (op
) == SYMBOL_REF
)
1188 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1189 && SYMBOL_REF_TLS_MODEL (op
) == 0
1190 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1192 /* Now we must have a @GOTENT offset or @PLT stub
1193 or an @INDNTPOFF TLS offset. */
1194 if (GET_CODE (op
) == UNSPEC
1195 && XINT (op
, 1) == UNSPEC_GOTENT
)
1197 if (GET_CODE (op
) == UNSPEC
1198 && XINT (op
, 1) == UNSPEC_PLT
)
1200 if (GET_CODE (op
) == UNSPEC
1201 && XINT (op
, 1) == UNSPEC_INDNTPOFF
)
1207 /* Helper routine to implement s_operand and s_imm_operand.
1208 OP is the current operation.
1209 MODE is the current operation mode.
1210 ALLOW_IMMEDIATE specifies whether immediate operands should
1211 be accepted or not. */
1214 general_s_operand (register rtx op
, enum machine_mode mode
,
1215 int allow_immediate
)
1217 struct s390_address addr
;
1219 /* Call general_operand first, so that we don't have to
1220 check for many special cases. */
1221 if (!general_operand (op
, mode
))
1224 /* Just like memory_operand, allow (subreg (mem ...))
1226 if (reload_completed
1227 && GET_CODE (op
) == SUBREG
1228 && GET_CODE (SUBREG_REG (op
)) == MEM
)
1229 op
= SUBREG_REG (op
);
1231 switch (GET_CODE (op
))
1233 /* Constants are OK as s-operand if ALLOW_IMMEDIATE
1234 is true and we are still before reload. */
1237 if (!allow_immediate
|| reload_completed
)
1241 /* Memory operands are OK unless they already use an
1244 if (GET_CODE (XEXP (op
, 0)) == ADDRESSOF
)
1246 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1250 /* Do not allow literal pool references unless ALLOW_IMMEDIATE
1251 is true. This prevents compares between two literal pool
1252 entries from being accepted. */
1253 if (!allow_immediate
1254 && addr
.base
&& REGNO (addr
.base
) == BASE_REGISTER
)
1265 /* Return true if OP is a valid S-type operand.
1266 OP is the current operation.
1267 MODE is the current operation mode. */
1270 s_operand (register rtx op
, enum machine_mode mode
)
1272 return general_s_operand (op
, mode
, 0);
1275 /* Return true if OP is a valid S-type operand or an immediate
1276 operand that can be addressed as S-type operand by forcing
1277 it into the literal pool.
1278 OP is the current operation.
1279 MODE is the current operation mode. */
1282 s_imm_operand (register rtx op
, enum machine_mode mode
)
1284 return general_s_operand (op
, mode
, 1);
1287 /* Return true if OP a valid shift count operand.
1288 OP is the current operation.
1289 MODE is the current operation mode. */
1292 shift_count_operand (rtx op
, enum machine_mode mode
)
1294 HOST_WIDE_INT offset
= 0;
1296 if (! check_mode (op
, &mode
))
1299 /* We can have an integer constant, an address register,
1300 or a sum of the two. Note that reload already checks
1301 that any register present is an address register, so
1302 we just check for any register here. */
1303 if (GET_CODE (op
) == CONST_INT
)
1305 offset
= INTVAL (op
);
1308 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
1310 offset
= INTVAL (XEXP (op
, 1));
1313 while (op
&& GET_CODE (op
) == SUBREG
)
1314 op
= SUBREG_REG (op
);
1315 if (op
&& GET_CODE (op
) != REG
)
1318 /* Unfortunately we have to reject constants that are invalid
1319 for an address, or else reload will get confused. */
1320 if (!DISP_IN_RANGE (offset
))
1326 /* Return true if DISP is a valid short displacement. */
1329 s390_short_displacement (rtx disp
)
1331 /* No displacement is OK. */
1335 /* Integer displacement in range. */
1336 if (GET_CODE (disp
) == CONST_INT
)
1337 return INTVAL (disp
) >= 0 && INTVAL (disp
) < 4096;
1339 /* GOT offset is not OK, the GOT can be large. */
1340 if (GET_CODE (disp
) == CONST
1341 && GET_CODE (XEXP (disp
, 0)) == UNSPEC
1342 && XINT (XEXP (disp
, 0), 1) == UNSPEC_GOT
)
1345 /* All other symbolic constants are literal pool references,
1346 which are OK as the literal pool must be small. */
1347 if (GET_CODE (disp
) == CONST
)
1353 /* Return true if OP is a valid operand for a C constraint. */
1356 s390_extra_constraint (rtx op
, int c
)
1358 struct s390_address addr
;
1363 if (GET_CODE (op
) != MEM
)
1365 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1370 if (TARGET_LONG_DISPLACEMENT
)
1372 if (!s390_short_displacement (addr
.disp
))
1378 if (GET_CODE (op
) != MEM
)
1381 if (TARGET_LONG_DISPLACEMENT
)
1383 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1385 if (!s390_short_displacement (addr
.disp
))
1391 if (!TARGET_LONG_DISPLACEMENT
)
1393 if (GET_CODE (op
) != MEM
)
1395 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1399 if (s390_short_displacement (addr
.disp
))
1404 if (!TARGET_LONG_DISPLACEMENT
)
1406 if (GET_CODE (op
) != MEM
)
1408 /* Any invalid address here will be fixed up by reload,
1409 so accept it for the most generic constraint. */
1410 if (s390_decompose_address (XEXP (op
, 0), &addr
)
1411 && s390_short_displacement (addr
.disp
))
1416 if (TARGET_LONG_DISPLACEMENT
)
1418 if (!s390_decompose_address (op
, &addr
))
1420 if (!s390_short_displacement (addr
.disp
))
1426 if (!TARGET_LONG_DISPLACEMENT
)
1428 /* Any invalid address here will be fixed up by reload,
1429 so accept it for the most generic constraint. */
1430 if (s390_decompose_address (op
, &addr
)
1431 && s390_short_displacement (addr
.disp
))
1436 return shift_count_operand (op
, VOIDmode
);
1445 /* Compute a (partial) cost for rtx X. Return true if the complete
1446 cost has been computed, and false if subexpressions should be
1447 scanned. In either case, *TOTAL contains the cost result. */
1450 s390_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
1455 if (GET_CODE (XEXP (x
, 0)) == MINUS
1456 && GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
1463 /* Force_const_mem does not work out of reload, because the
1464 saveable_obstack is set to reload_obstack, which does not
1465 live long enough. Because of this we cannot use force_const_mem
1466 in addsi3. This leads to problems with gen_add2_insn with a
1467 constant greater than a short. Because of that we give an
1468 addition of greater constants a cost of 3 (reload1.c 10096). */
1469 /* ??? saveable_obstack no longer exists. */
1470 if (outer_code
== PLUS
1471 && (INTVAL (x
) > 32767 || INTVAL (x
) < -32768))
1472 *total
= COSTS_N_INSNS (3);
1493 *total
= COSTS_N_INSNS (1);
1497 if (GET_MODE (XEXP (x
, 0)) == DImode
)
1498 *total
= COSTS_N_INSNS (40);
1500 *total
= COSTS_N_INSNS (7);
1507 *total
= COSTS_N_INSNS (33);
1515 /* Return the cost of an address rtx ADDR. */
1518 s390_address_cost (rtx addr
)
1520 struct s390_address ad
;
1521 if (!s390_decompose_address (addr
, &ad
))
1524 return ad
.indx
? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1527 /* Return true if OP is a valid operand for the BRAS instruction.
1528 OP is the current operation.
1529 MODE is the current operation mode. */
1532 bras_sym_operand (register rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1534 register enum rtx_code code
= GET_CODE (op
);
1536 /* Allow SYMBOL_REFs. */
1537 if (code
== SYMBOL_REF
)
1540 /* Allow @PLT stubs. */
1542 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1543 && XINT (XEXP (op
, 0), 1) == UNSPEC_PLT
)
1548 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1549 otherwise return 0. */
1552 tls_symbolic_operand (register rtx op
)
1554 if (GET_CODE (op
) != SYMBOL_REF
)
1556 return SYMBOL_REF_TLS_MODEL (op
);
1559 /* Return true if OP is a load multiple operation. It is known to be a
1560 PARALLEL and the first section will be tested.
1561 OP is the current operation.
1562 MODE is the current operation mode. */
1565 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1567 enum machine_mode elt_mode
;
1568 int count
= XVECLEN (op
, 0);
1569 unsigned int dest_regno
;
1574 /* Perform a quick check so we don't blow up below. */
1576 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1577 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
1578 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
1581 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
1582 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
1583 elt_mode
= GET_MODE (SET_DEST (XVECEXP (op
, 0, 0)));
1585 /* Check, is base, or base + displacement. */
1587 if (GET_CODE (src_addr
) == REG
)
1589 else if (GET_CODE (src_addr
) == PLUS
1590 && GET_CODE (XEXP (src_addr
, 0)) == REG
1591 && GET_CODE (XEXP (src_addr
, 1)) == CONST_INT
)
1593 off
= INTVAL (XEXP (src_addr
, 1));
1594 src_addr
= XEXP (src_addr
, 0);
1599 if (src_addr
== frame_pointer_rtx
|| src_addr
== arg_pointer_rtx
)
1602 for (i
= 1; i
< count
; i
++)
1604 rtx elt
= XVECEXP (op
, 0, i
);
1606 if (GET_CODE (elt
) != SET
1607 || GET_CODE (SET_DEST (elt
)) != REG
1608 || GET_MODE (SET_DEST (elt
)) != elt_mode
1609 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
1610 || GET_CODE (SET_SRC (elt
)) != MEM
1611 || GET_MODE (SET_SRC (elt
)) != elt_mode
1612 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
1613 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
1614 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
1615 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1))
1616 != off
+ i
* GET_MODE_SIZE (elt_mode
))
1623 /* Return true if OP is a store multiple operation. It is known to be a
1624 PARALLEL and the first section will be tested.
1625 OP is the current operation.
1626 MODE is the current operation mode. */
1629 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1631 enum machine_mode elt_mode
;
1632 int count
= XVECLEN (op
, 0);
1633 unsigned int src_regno
;
1637 /* Perform a quick check so we don't blow up below. */
1639 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1640 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
1641 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
1644 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
1645 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
1646 elt_mode
= GET_MODE (SET_SRC (XVECEXP (op
, 0, 0)));
1648 /* Check, is base, or base + displacement. */
1650 if (GET_CODE (dest_addr
) == REG
)
1652 else if (GET_CODE (dest_addr
) == PLUS
1653 && GET_CODE (XEXP (dest_addr
, 0)) == REG
1654 && GET_CODE (XEXP (dest_addr
, 1)) == CONST_INT
)
1656 off
= INTVAL (XEXP (dest_addr
, 1));
1657 dest_addr
= XEXP (dest_addr
, 0);
1662 if (dest_addr
== frame_pointer_rtx
|| dest_addr
== arg_pointer_rtx
)
1665 for (i
= 1; i
< count
; i
++)
1667 rtx elt
= XVECEXP (op
, 0, i
);
1669 if (GET_CODE (elt
) != SET
1670 || GET_CODE (SET_SRC (elt
)) != REG
1671 || GET_MODE (SET_SRC (elt
)) != elt_mode
1672 || REGNO (SET_SRC (elt
)) != src_regno
+ i
1673 || GET_CODE (SET_DEST (elt
)) != MEM
1674 || GET_MODE (SET_DEST (elt
)) != elt_mode
1675 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
1676 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
1677 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
1678 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1))
1679 != off
+ i
* GET_MODE_SIZE (elt_mode
))
1686 /* Return true if OP contains a symbol reference */
1689 symbolic_reference_mentioned_p (rtx op
)
1691 register const char *fmt
;
1694 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1697 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1698 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1704 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1705 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1709 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1716 /* Return true if OP contains a reference to a thread-local symbol. */
1719 tls_symbolic_reference_mentioned_p (rtx op
)
1721 register const char *fmt
;
1724 if (GET_CODE (op
) == SYMBOL_REF
)
1725 return tls_symbolic_operand (op
);
1727 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1728 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1734 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1735 if (tls_symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1739 else if (fmt
[i
] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op
, i
)))
1747 /* Return true if OP is a legitimate general operand when
1748 generating PIC code. It is given that flag_pic is on
1749 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1752 legitimate_pic_operand_p (register rtx op
)
1754 /* Accept all non-symbolic constants. */
1755 if (!SYMBOLIC_CONST (op
))
1758 /* Reject everything else; must be handled
1759 via emit_symbolic_move. */
1763 /* Returns true if the constant value OP is a legitimate general operand.
1764 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1767 legitimate_constant_p (register rtx op
)
1769 /* Accept all non-symbolic constants. */
1770 if (!SYMBOLIC_CONST (op
))
1773 /* Accept immediate LARL operands. */
1774 if (TARGET_CPU_ZARCH
&& larl_operand (op
, VOIDmode
))
1777 /* Thread-local symbols are never legal constants. This is
1778 so that emit_call knows that computing such addresses
1779 might require a function call. */
1780 if (TLS_SYMBOLIC_CONST (op
))
1783 /* In the PIC case, symbolic constants must *not* be
1784 forced into the literal pool. We accept them here,
1785 so that they will be handled by emit_symbolic_move. */
1789 /* All remaining non-PIC symbolic constants are
1790 forced into the literal pool. */
1794 /* Determine if it's legal to put X into the constant pool. This
1795 is not possible if X contains the address of a symbol that is
1796 not constant (TLS) or not known at final link time (PIC). */
1799 s390_cannot_force_const_mem (rtx x
)
1801 switch (GET_CODE (x
))
1805 /* Accept all non-symbolic constants. */
1809 /* Labels are OK iff we are non-PIC. */
1810 return flag_pic
!= 0;
1813 /* 'Naked' TLS symbol references are never OK,
1814 non-TLS symbols are OK iff we are non-PIC. */
1815 if (tls_symbolic_operand (x
))
1818 return flag_pic
!= 0;
1821 return s390_cannot_force_const_mem (XEXP (x
, 0));
1824 return s390_cannot_force_const_mem (XEXP (x
, 0))
1825 || s390_cannot_force_const_mem (XEXP (x
, 1));
1828 switch (XINT (x
, 1))
1830 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1831 case UNSPEC_LTREL_OFFSET
:
1839 case UNSPEC_GOTNTPOFF
:
1840 case UNSPEC_INDNTPOFF
:
1853 /* Returns true if the constant value OP is a legitimate general
1854 operand during and after reload. The difference to
1855 legitimate_constant_p is that this function will not accept
1856 a constant that would need to be forced to the literal pool
1857 before it can be used as operand. */
1860 legitimate_reload_constant_p (register rtx op
)
1862 /* Accept la(y) operands. */
1863 if (GET_CODE (op
) == CONST_INT
1864 && DISP_IN_RANGE (INTVAL (op
)))
1867 /* Accept l(g)hi operands. */
1868 if (GET_CODE (op
) == CONST_INT
1869 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1872 /* Accept lliXX operands. */
1874 && s390_single_hi (op
, DImode
, 0) >= 0)
1877 /* Accept larl operands. */
1878 if (TARGET_CPU_ZARCH
1879 && larl_operand (op
, VOIDmode
))
1882 /* Everything else cannot be handled without reload. */
1886 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1887 return the class of reg to actually use. */
1890 s390_preferred_reload_class (rtx op
, enum reg_class
class)
1892 /* This can happen if a floating point constant is being
1893 reloaded into an integer register. Leave well alone. */
1894 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1895 && class != FP_REGS
)
1898 switch (GET_CODE (op
))
1900 /* Constants we cannot reload must be forced into the
1905 if (legitimate_reload_constant_p (op
))
1910 /* If a symbolic constant or a PLUS is reloaded,
1911 it is most likely being used as an address, so
1912 prefer ADDR_REGS. If 'class' is not a superset
1913 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1918 if (reg_class_subset_p (ADDR_REGS
, class))
1930 /* Return the register class of a scratch register needed to
1931 load IN into a register of class CLASS in MODE.
1933 We need a temporary when loading a PLUS expression which
1934 is not a legitimate operand of the LOAD ADDRESS instruction. */
1937 s390_secondary_input_reload_class (enum reg_class
class ATTRIBUTE_UNUSED
,
1938 enum machine_mode mode
, rtx in
)
1940 if (s390_plus_operand (in
, mode
))
1946 /* Return the register class of a scratch register needed to
1947 store a register of class CLASS in MODE into OUT:
1949 We need a temporary when storing a double-word to a
1950 non-offsettable memory address. */
1953 s390_secondary_output_reload_class (enum reg_class
class,
1954 enum machine_mode mode
, rtx out
)
1956 if ((TARGET_64BIT
? mode
== TImode
1957 : (mode
== DImode
|| mode
== DFmode
))
1958 && reg_classes_intersect_p (GENERAL_REGS
, class)
1959 && GET_CODE (out
) == MEM
1960 && !offsettable_memref_p (out
)
1961 && !s_operand (out
, VOIDmode
))
1967 /* Return true if OP is a PLUS that is not a legitimate
1968 operand for the LA instruction.
1969 OP is the current operation.
1970 MODE is the current operation mode. */
1973 s390_plus_operand (register rtx op
, enum machine_mode mode
)
1975 if (!check_mode (op
, &mode
) || mode
!= Pmode
)
1978 if (GET_CODE (op
) != PLUS
)
1981 if (legitimate_la_operand_p (op
))
1987 /* Generate code to load SRC, which is PLUS that is not a
1988 legitimate operand for the LA instruction, into TARGET.
1989 SCRATCH may be used as scratch register. */
1992 s390_expand_plus_operand (register rtx target
, register rtx src
,
1993 register rtx scratch
)
1996 struct s390_address ad
;
1998 /* src must be a PLUS; get its two operands. */
1999 if (GET_CODE (src
) != PLUS
|| GET_MODE (src
) != Pmode
)
2002 /* Check if any of the two operands is already scheduled
2003 for replacement by reload. This can happen e.g. when
2004 float registers occur in an address. */
2005 sum1
= find_replacement (&XEXP (src
, 0));
2006 sum2
= find_replacement (&XEXP (src
, 1));
2007 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2009 /* If the address is already strictly valid, there's nothing to do. */
2010 if (!s390_decompose_address (src
, &ad
)
2011 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2012 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
2014 /* Otherwise, one of the operands cannot be an address register;
2015 we reload its value into the scratch register. */
2016 if (true_regnum (sum1
) < 1 || true_regnum (sum1
) > 15)
2018 emit_move_insn (scratch
, sum1
);
2021 if (true_regnum (sum2
) < 1 || true_regnum (sum2
) > 15)
2023 emit_move_insn (scratch
, sum2
);
2027 /* According to the way these invalid addresses are generated
2028 in reload.c, it should never happen (at least on s390) that
2029 *neither* of the PLUS components, after find_replacements
2030 was applied, is an address register. */
2031 if (sum1
== scratch
&& sum2
== scratch
)
2037 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2040 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2041 is only ever performed on addresses, so we can mark the
2042 sum as legitimate for LA in any case. */
2043 s390_load_address (target
, src
);
2047 /* Decompose a RTL expression ADDR for a memory address into
2048 its components, returned in OUT.
2050 Returns 0 if ADDR is not a valid memory address, nonzero
2051 otherwise. If OUT is NULL, don't return the components,
2052 but check for validity only.
2054 Note: Only addresses in canonical form are recognized.
2055 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2056 canonical form so that they will be recognized. */
2059 s390_decompose_address (register rtx addr
, struct s390_address
*out
)
2061 rtx base
= NULL_RTX
;
2062 rtx indx
= NULL_RTX
;
2063 rtx disp
= NULL_RTX
;
2064 int pointer
= FALSE
;
2065 int base_ptr
= FALSE
;
2066 int indx_ptr
= FALSE
;
2068 /* Decompose address into base + index + displacement. */
2070 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == UNSPEC
)
2073 else if (GET_CODE (addr
) == PLUS
)
2075 rtx op0
= XEXP (addr
, 0);
2076 rtx op1
= XEXP (addr
, 1);
2077 enum rtx_code code0
= GET_CODE (op0
);
2078 enum rtx_code code1
= GET_CODE (op1
);
2080 if (code0
== REG
|| code0
== UNSPEC
)
2082 if (code1
== REG
|| code1
== UNSPEC
)
2084 indx
= op0
; /* index + base */
2090 base
= op0
; /* base + displacement */
2095 else if (code0
== PLUS
)
2097 indx
= XEXP (op0
, 0); /* index + base + disp */
2098 base
= XEXP (op0
, 1);
2109 disp
= addr
; /* displacement */
2112 /* Validate base register. */
2115 if (GET_CODE (base
) == UNSPEC
)
2117 if (XVECLEN (base
, 0) != 1 || XINT (base
, 1) != UNSPEC_LTREL_BASE
)
2119 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2122 if (GET_CODE (base
) != REG
|| GET_MODE (base
) != Pmode
)
2125 if (REGNO (base
) == BASE_REGISTER
2126 || REGNO (base
) == STACK_POINTER_REGNUM
2127 || REGNO (base
) == FRAME_POINTER_REGNUM
2128 || ((reload_completed
|| reload_in_progress
)
2129 && frame_pointer_needed
2130 && REGNO (base
) == HARD_FRAME_POINTER_REGNUM
)
2131 || REGNO (base
) == ARG_POINTER_REGNUM
2132 || (REGNO (base
) >= FIRST_VIRTUAL_REGISTER
2133 && REGNO (base
) <= LAST_VIRTUAL_REGISTER
)
2135 && REGNO (base
) == PIC_OFFSET_TABLE_REGNUM
))
2136 pointer
= base_ptr
= TRUE
;
2139 /* Validate index register. */
2142 if (GET_CODE (indx
) == UNSPEC
)
2144 if (XVECLEN (indx
, 0) != 1 || XINT (indx
, 1) != UNSPEC_LTREL_BASE
)
2146 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2149 if (GET_CODE (indx
) != REG
|| GET_MODE (indx
) != Pmode
)
2152 if (REGNO (indx
) == BASE_REGISTER
2153 || REGNO (indx
) == STACK_POINTER_REGNUM
2154 || REGNO (indx
) == FRAME_POINTER_REGNUM
2155 || ((reload_completed
|| reload_in_progress
)
2156 && frame_pointer_needed
2157 && REGNO (indx
) == HARD_FRAME_POINTER_REGNUM
)
2158 || REGNO (indx
) == ARG_POINTER_REGNUM
2159 || (REGNO (indx
) >= FIRST_VIRTUAL_REGISTER
2160 && REGNO (indx
) <= LAST_VIRTUAL_REGISTER
)
2162 && REGNO (indx
) == PIC_OFFSET_TABLE_REGNUM
))
2163 pointer
= indx_ptr
= TRUE
;
2166 /* Prefer to use pointer as base, not index. */
2167 if (base
&& indx
&& !base_ptr
2168 && (indx_ptr
|| (!REG_POINTER (base
) && REG_POINTER (indx
))))
2175 /* Validate displacement. */
2178 /* Allow integer constant in range. */
2179 if (GET_CODE (disp
) == CONST_INT
)
2181 /* If the argument pointer is involved, the displacement will change
2182 later anyway as the argument pointer gets eliminated. This could
2183 make a valid displacement invalid, but it is more likely to make
2184 an invalid displacement valid, because we sometimes access the
2185 register save area via negative offsets to the arg pointer.
2186 Thus we don't check the displacement for validity here. If after
2187 elimination the displacement turns out to be invalid after all,
2188 this is fixed up by reload in any case. */
2189 if (base
!= arg_pointer_rtx
&& indx
!= arg_pointer_rtx
)
2191 if (!DISP_IN_RANGE (INTVAL (disp
)))
2196 /* In the small-PIC case, the linker converts @GOT
2197 and @GOTNTPOFF offsets to possible displacements. */
2198 else if (GET_CODE (disp
) == CONST
2199 && GET_CODE (XEXP (disp
, 0)) == UNSPEC
2200 && (XINT (XEXP (disp
, 0), 1) == UNSPEC_GOT
2201 || XINT (XEXP (disp
, 0), 1) == UNSPEC_GOTNTPOFF
))
2209 /* Accept chunkfied literal pool symbol references. */
2210 else if (GET_CODE (disp
) == CONST
2211 && GET_CODE (XEXP (disp
, 0)) == MINUS
2212 && GET_CODE (XEXP (XEXP (disp
, 0), 0)) == LABEL_REF
2213 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == LABEL_REF
)
2218 /* Likewise if a constant offset is present. */
2219 else if (GET_CODE (disp
) == CONST
2220 && GET_CODE (XEXP (disp
, 0)) == PLUS
2221 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
2222 && GET_CODE (XEXP (XEXP (disp
, 0), 0)) == MINUS
2223 && GET_CODE (XEXP (XEXP (XEXP (disp
, 0), 0), 0)) == LABEL_REF
2224 && GET_CODE (XEXP (XEXP (XEXP (disp
, 0), 0), 1)) == LABEL_REF
)
2229 /* We can convert literal pool addresses to
2230 displacements by basing them off the base register. */
2233 /* In some cases, we can accept an additional
2234 small constant offset. Split these off here. */
2236 unsigned int offset
= 0;
2238 if (GET_CODE (disp
) == CONST
2239 && GET_CODE (XEXP (disp
, 0)) == PLUS
2240 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
)
2242 offset
= INTVAL (XEXP (XEXP (disp
, 0), 1));
2243 disp
= XEXP (XEXP (disp
, 0), 0);
2246 /* Now we must have a literal pool address. */
2247 if (GET_CODE (disp
) != SYMBOL_REF
2248 || !CONSTANT_POOL_ADDRESS_P (disp
))
2251 /* If we have an offset, make sure it does not
2252 exceed the size of the constant pool entry. */
2253 if (offset
&& offset
>= GET_MODE_SIZE (get_pool_mode (disp
)))
2256 /* Either base or index must be free to
2257 hold the base register. */
2261 /* Convert the address. */
2263 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2265 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2267 disp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, disp
),
2268 UNSPEC_LTREL_OFFSET
);
2269 disp
= gen_rtx_CONST (Pmode
, disp
);
2272 disp
= plus_constant (disp
, offset
);
2286 out
->pointer
= pointer
;
2292 /* Return nonzero if ADDR is a valid memory address.
2293 STRICT specifies whether strict register checking applies. */
2296 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
2297 register rtx addr
, int strict
)
2299 struct s390_address ad
;
2300 if (!s390_decompose_address (addr
, &ad
))
2305 if (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2307 if (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
))
2312 if (ad
.base
&& !REG_OK_FOR_BASE_NONSTRICT_P (ad
.base
))
2314 if (ad
.indx
&& !REG_OK_FOR_INDEX_NONSTRICT_P (ad
.indx
))
2321 /* Return 1 if OP is a valid operand for the LA instruction.
2322 In 31-bit, we need to prove that the result is used as an
2323 address, as LA performs only a 31-bit addition. */
2326 legitimate_la_operand_p (register rtx op
)
2328 struct s390_address addr
;
2329 if (!s390_decompose_address (op
, &addr
))
2332 if (TARGET_64BIT
|| addr
.pointer
)
2338 /* Return 1 if OP is a valid operand for the LA instruction,
2339 and we prefer to use LA over addition to compute it. */
2342 preferred_la_operand_p (register rtx op
)
2344 struct s390_address addr
;
2345 if (!s390_decompose_address (op
, &addr
))
2348 if (!TARGET_64BIT
&& !addr
.pointer
)
2354 if ((addr
.base
&& REG_P (addr
.base
) && REG_POINTER (addr
.base
))
2355 || (addr
.indx
&& REG_P (addr
.indx
) && REG_POINTER (addr
.indx
)))
2361 /* Emit a forced load-address operation to load SRC into DST.
2362 This will use the LOAD ADDRESS instruction even in situations
2363 where legitimate_la_operand_p (SRC) returns false. */
2366 s390_load_address (rtx dst
, rtx src
)
2369 emit_move_insn (dst
, src
);
2371 emit_insn (gen_force_la_31 (dst
, src
));
2374 /* Return a legitimate reference for ORIG (an address) using the
2375 register REG. If REG is 0, a new pseudo is generated.
2377 There are two types of references that must be handled:
2379 1. Global data references must load the address from the GOT, via
2380 the PIC reg. An insn is emitted to do this load, and the reg is
2383 2. Static data references, constant pool addresses, and code labels
2384 compute the address as an offset from the GOT, whose base is in
2385 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2386 differentiate them from global data objects. The returned
2387 address is the PIC reg + an unspec constant.
2389 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2390 reg also appears in the address. */
2393 legitimize_pic_address (rtx orig
, rtx reg
)
2399 if (GET_CODE (addr
) == LABEL_REF
2400 || (GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (addr
)))
2402 /* This is a local symbol. */
2403 if (TARGET_CPU_ZARCH
&& larl_operand (addr
, VOIDmode
))
2405 /* Access local symbols PC-relative via LARL.
2406 This is the same as in the non-PIC case, so it is
2407 handled automatically ... */
2411 /* Access local symbols relative to the GOT. */
2413 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2415 if (reload_in_progress
|| reload_completed
)
2416 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2418 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTOFF
);
2419 addr
= gen_rtx_CONST (Pmode
, addr
);
2420 addr
= force_const_mem (Pmode
, addr
);
2421 emit_move_insn (temp
, addr
);
2423 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2426 emit_move_insn (reg
, new);
2431 else if (GET_CODE (addr
) == SYMBOL_REF
)
2434 reg
= gen_reg_rtx (Pmode
);
2438 /* Assume GOT offset < 4k. This is handled the same way
2439 in both 31- and 64-bit code (@GOT). */
2441 if (reload_in_progress
|| reload_completed
)
2442 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2444 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2445 new = gen_rtx_CONST (Pmode
, new);
2446 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2447 new = gen_rtx_MEM (Pmode
, new);
2448 RTX_UNCHANGING_P (new) = 1;
2449 emit_move_insn (reg
, new);
2452 else if (TARGET_CPU_ZARCH
)
2454 /* If the GOT offset might be >= 4k, we determine the position
2455 of the GOT entry via a PC-relative LARL (@GOTENT). */
2457 rtx temp
= gen_reg_rtx (Pmode
);
2459 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTENT
);
2460 new = gen_rtx_CONST (Pmode
, new);
2461 emit_move_insn (temp
, new);
2463 new = gen_rtx_MEM (Pmode
, temp
);
2464 RTX_UNCHANGING_P (new) = 1;
2465 emit_move_insn (reg
, new);
2470 /* If the GOT offset might be >= 4k, we have to load it
2471 from the literal pool (@GOT). */
2473 rtx temp
= gen_reg_rtx (Pmode
);
2475 if (reload_in_progress
|| reload_completed
)
2476 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2478 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2479 addr
= gen_rtx_CONST (Pmode
, addr
);
2480 addr
= force_const_mem (Pmode
, addr
);
2481 emit_move_insn (temp
, addr
);
2483 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2484 new = gen_rtx_MEM (Pmode
, new);
2485 RTX_UNCHANGING_P (new) = 1;
2486 emit_move_insn (reg
, new);
2492 if (GET_CODE (addr
) == CONST
)
2494 addr
= XEXP (addr
, 0);
2495 if (GET_CODE (addr
) == UNSPEC
)
2497 if (XVECLEN (addr
, 0) != 1)
2499 switch (XINT (addr
, 1))
2501 /* If someone moved a GOT-relative UNSPEC
2502 out of the literal pool, force them back in. */
2505 new = force_const_mem (Pmode
, orig
);
2508 /* @GOT is OK as is if small. */
2511 new = force_const_mem (Pmode
, orig
);
2514 /* @GOTENT is OK as is. */
2518 /* @PLT is OK as is on 64-bit, must be converted to
2519 GOT-relative @PLTOFF on 31-bit. */
2521 if (!TARGET_CPU_ZARCH
)
2523 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2525 if (reload_in_progress
|| reload_completed
)
2526 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2528 addr
= XVECEXP (addr
, 0, 0);
2529 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
),
2531 addr
= gen_rtx_CONST (Pmode
, addr
);
2532 addr
= force_const_mem (Pmode
, addr
);
2533 emit_move_insn (temp
, addr
);
2535 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2538 emit_move_insn (reg
, new);
2544 /* Everything else cannot happen. */
2549 else if (GET_CODE (addr
) != PLUS
)
2552 if (GET_CODE (addr
) == PLUS
)
2554 rtx op0
= XEXP (addr
, 0), op1
= XEXP (addr
, 1);
2555 /* Check first to see if this is a constant offset
2556 from a local symbol reference. */
2557 if ((GET_CODE (op0
) == LABEL_REF
2558 || (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (op0
)))
2559 && GET_CODE (op1
) == CONST_INT
)
2561 if (TARGET_CPU_ZARCH
&& larl_operand (op0
, VOIDmode
))
2563 if (INTVAL (op1
) & 1)
2565 /* LARL can't handle odd offsets, so emit a
2566 pair of LARL and LA. */
2567 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2569 if (!DISP_IN_RANGE (INTVAL (op1
)))
2571 int even
= INTVAL (op1
) - 1;
2572 op0
= gen_rtx_PLUS (Pmode
, op0
, GEN_INT (even
));
2573 op0
= gen_rtx_CONST (Pmode
, op0
);
2577 emit_move_insn (temp
, op0
);
2578 new = gen_rtx_PLUS (Pmode
, temp
, op1
);
2582 emit_move_insn (reg
, new);
2588 /* If the offset is even, we can just use LARL.
2589 This will happen automatically. */
2594 /* Access local symbols relative to the GOT. */
2596 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2598 if (reload_in_progress
|| reload_completed
)
2599 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2601 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op0
),
2603 addr
= gen_rtx_PLUS (Pmode
, addr
, op1
);
2604 addr
= gen_rtx_CONST (Pmode
, addr
);
2605 addr
= force_const_mem (Pmode
, addr
);
2606 emit_move_insn (temp
, addr
);
2608 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2611 emit_move_insn (reg
, new);
2617 /* Now, check whether it is a GOT relative symbol plus offset
2618 that was pulled out of the literal pool. Force it back in. */
2620 else if (GET_CODE (op0
) == UNSPEC
2621 && GET_CODE (op1
) == CONST_INT
)
2623 if (XVECLEN (op0
, 0) != 1)
2625 if (XINT (op0
, 1) != UNSPEC_GOTOFF
)
2628 new = force_const_mem (Pmode
, orig
);
2631 /* Otherwise, compute the sum. */
2634 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2635 new = legitimize_pic_address (XEXP (addr
, 1),
2636 base
== reg
? NULL_RTX
: reg
);
2637 if (GET_CODE (new) == CONST_INT
)
2638 new = plus_constant (base
, INTVAL (new));
2641 if (GET_CODE (new) == PLUS
&& CONSTANT_P (XEXP (new, 1)))
2643 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (new, 0));
2644 new = XEXP (new, 1);
2646 new = gen_rtx_PLUS (Pmode
, base
, new);
2649 if (GET_CODE (new) == CONST
)
2650 new = XEXP (new, 0);
2651 new = force_operand (new, 0);
2658 /* Load the thread pointer into a register. */
2661 get_thread_pointer (void)
2665 tp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TP
);
2666 tp
= force_reg (Pmode
, tp
);
2667 mark_reg_pointer (tp
, BITS_PER_WORD
);
2672 /* Construct the SYMBOL_REF for the tls_get_offset function. */
2674 static GTY(()) rtx s390_tls_symbol
;
2676 s390_tls_get_offset (void)
2678 if (!s390_tls_symbol
)
2679 s390_tls_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "__tls_get_offset");
2681 return s390_tls_symbol
;
2684 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2685 this (thread-local) address. REG may be used as temporary. */
2688 legitimize_tls_address (rtx addr
, rtx reg
)
2690 rtx
new, tls_call
, temp
, base
, r2
, insn
;
2692 if (GET_CODE (addr
) == SYMBOL_REF
)
2693 switch (tls_symbolic_operand (addr
))
2695 case TLS_MODEL_GLOBAL_DYNAMIC
:
2697 r2
= gen_rtx_REG (Pmode
, 2);
2698 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_TLSGD
);
2699 new = gen_rtx_CONST (Pmode
, tls_call
);
2700 new = force_const_mem (Pmode
, new);
2701 emit_move_insn (r2
, new);
2702 emit_call_insn (gen_call_value_tls (r2
, tls_call
));
2703 insn
= get_insns ();
2706 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
2707 temp
= gen_reg_rtx (Pmode
);
2708 emit_libcall_block (insn
, temp
, r2
, new);
2710 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2713 s390_load_address (reg
, new);
2718 case TLS_MODEL_LOCAL_DYNAMIC
:
2720 r2
= gen_rtx_REG (Pmode
, 2);
2721 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM
);
2722 new = gen_rtx_CONST (Pmode
, tls_call
);
2723 new = force_const_mem (Pmode
, new);
2724 emit_move_insn (r2
, new);
2725 emit_call_insn (gen_call_value_tls (r2
, tls_call
));
2726 insn
= get_insns ();
2729 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM_NTPOFF
);
2730 temp
= gen_reg_rtx (Pmode
);
2731 emit_libcall_block (insn
, temp
, r2
, new);
2733 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2734 base
= gen_reg_rtx (Pmode
);
2735 s390_load_address (base
, new);
2737 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_DTPOFF
);
2738 new = gen_rtx_CONST (Pmode
, new);
2739 new = force_const_mem (Pmode
, new);
2740 temp
= gen_reg_rtx (Pmode
);
2741 emit_move_insn (temp
, new);
2743 new = gen_rtx_PLUS (Pmode
, base
, temp
);
2746 s390_load_address (reg
, new);
2751 case TLS_MODEL_INITIAL_EXEC
:
2754 /* Assume GOT offset < 4k. This is handled the same way
2755 in both 31- and 64-bit code. */
2757 if (reload_in_progress
|| reload_completed
)
2758 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2760 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
2761 new = gen_rtx_CONST (Pmode
, new);
2762 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2763 new = gen_rtx_MEM (Pmode
, new);
2764 RTX_UNCHANGING_P (new) = 1;
2765 temp
= gen_reg_rtx (Pmode
);
2766 emit_move_insn (temp
, new);
2768 else if (TARGET_CPU_ZARCH
)
2770 /* If the GOT offset might be >= 4k, we determine the position
2771 of the GOT entry via a PC-relative LARL. */
2773 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
2774 new = gen_rtx_CONST (Pmode
, new);
2775 temp
= gen_reg_rtx (Pmode
);
2776 emit_move_insn (temp
, new);
2778 new = gen_rtx_MEM (Pmode
, temp
);
2779 RTX_UNCHANGING_P (new) = 1;
2780 temp
= gen_reg_rtx (Pmode
);
2781 emit_move_insn (temp
, new);
2785 /* If the GOT offset might be >= 4k, we have to load it
2786 from the literal pool. */
2788 if (reload_in_progress
|| reload_completed
)
2789 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2791 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
2792 new = gen_rtx_CONST (Pmode
, new);
2793 new = force_const_mem (Pmode
, new);
2794 temp
= gen_reg_rtx (Pmode
);
2795 emit_move_insn (temp
, new);
2797 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2798 new = gen_rtx_MEM (Pmode
, new);
2799 RTX_UNCHANGING_P (new) = 1;
2801 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
2802 temp
= gen_reg_rtx (Pmode
);
2803 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
2807 /* In position-dependent code, load the absolute address of
2808 the GOT entry from the literal pool. */
2810 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
2811 new = gen_rtx_CONST (Pmode
, new);
2812 new = force_const_mem (Pmode
, new);
2813 temp
= gen_reg_rtx (Pmode
);
2814 emit_move_insn (temp
, new);
2817 new = gen_rtx_MEM (Pmode
, new);
2818 RTX_UNCHANGING_P (new) = 1;
2820 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
2821 temp
= gen_reg_rtx (Pmode
);
2822 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
2825 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2828 s390_load_address (reg
, new);
2833 case TLS_MODEL_LOCAL_EXEC
:
2834 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
2835 new = gen_rtx_CONST (Pmode
, new);
2836 new = force_const_mem (Pmode
, new);
2837 temp
= gen_reg_rtx (Pmode
);
2838 emit_move_insn (temp
, new);
2840 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
2843 s390_load_address (reg
, new);
2852 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == UNSPEC
)
2854 switch (XINT (XEXP (addr
, 0), 1))
2856 case UNSPEC_INDNTPOFF
:
2857 if (TARGET_CPU_ZARCH
)
2869 abort (); /* for now ... */
2874 /* Emit insns to move operands[1] into operands[0]. */
/* NOTE(review): this chunk is an extraction artifact -- statements are
   split across physical lines and some original lines (return type,
   braces, an 'else') are missing.  Comments annotate visible code only.  */
2877 emit_symbolic_move (rtx
*operands
)
/* Scratch register: once no new pseudos may be created (during/after
   reload), reuse operands[0]; otherwise allocate a fresh Pmode pseudo.  */
2879 rtx temp
= no_new_pseudos
? operands
[0] : gen_reg_rtx (Pmode
);
/* A symbolic constant cannot be stored to memory directly; force it
   into a register first.  */
2881 if (GET_CODE (operands
[0]) == MEM
)
2882 operands
[1] = force_reg (Pmode
, operands
[1]);
/* TLS symbols get TLS-model-specific legitimization ...  */
2883 else if (TLS_SYMBOLIC_CONST (operands
[1]))
2884 operands
[1] = legitimize_tls_address (operands
[1], temp
);
/* ... everything else goes through PIC legitimization.  (The guarding
   'else' line was lost in extraction -- original line 2885.)  */
2886 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2889 /* Try machine-dependent ways of modifying an illegitimate address X
2890 to be legitimate. If we find one, return the new, valid address.
2892 OLDX is the address as it was before break_out_memory_refs was called.
2893 In some cases it is useful to look at this to decide what needs to be done.
2895 MODE is the mode of the operand pointed to by X.
2897 When -fpic is used, special handling is needed for symbolic references.
2898 See comments by legitimize_pic_address for details. */
/* NOTE(review): fragmented extraction -- braces, 'return x;' statements
   after the legitimate_address_p checks, and similar lines are missing.  */
2901 legitimize_address (register rtx x
, register rtx oldx ATTRIBUTE_UNUSED
,
2902 enum machine_mode mode ATTRIBUTE_UNUSED
)
2904 rtx constant_term
= const0_rtx
;
/* First give TLS references their model-specific form.  */
2906 if (TLS_SYMBOLIC_CONST (x
))
2908 x
= legitimize_tls_address (x
, 0);
2910 if (legitimate_address_p (mode
, x
, FALSE
))
/* Symbolic addresses (plain or inside a PLUS) need PIC treatment.  */
2915 if (SYMBOLIC_CONST (x
)
2916 || (GET_CODE (x
) == PLUS
2917 && (SYMBOLIC_CONST (XEXP (x
, 0))
2918 || SYMBOLIC_CONST (XEXP (x
, 1)))))
2919 x
= legitimize_pic_address (x
, 0);
2921 if (legitimate_address_p (mode
, x
, FALSE
))
/* Split off any constant addend so it can be range-checked below.  */
2925 x
= eliminate_constant_term (x
, &constant_term
);
2927 /* Optimize loading of large displacements by splitting them
2928 into the multiple of 4K and the rest; this allows the
2929 former to be CSE'd if possible.
2931 Don't do this if the displacement is added to a register
2932 pointing into the stack frame, as the offsets will
2933 change later anyway. */
2935 if (GET_CODE (constant_term
) == CONST_INT
2936 && !TARGET_LONG_DISPLACEMENT
2937 && !DISP_IN_RANGE (INTVAL (constant_term
))
2938 && !(REG_P (x
) && REGNO_PTR_FRAME_P (REGNO (x
))))
/* lower = low 12 bits (fits a short displacement); upper = the rest.
   XOR with lower is equivalent to clearing the low 12 bits here.  */
2940 HOST_WIDE_INT lower
= INTVAL (constant_term
) & 0xfff;
2941 HOST_WIDE_INT upper
= INTVAL (constant_term
) ^ lower
;
2943 rtx temp
= gen_reg_rtx (Pmode
);
2944 rtx val
= force_operand (GEN_INT (upper
), temp
);
2946 emit_move_insn (temp
, val
);
2948 x
= gen_rtx_PLUS (Pmode
, x
, temp
);
2949 constant_term
= GEN_INT (lower
);
/* Force one operand of a reg+x sum into a register so the result
   is a valid base+index form.  */
2952 if (GET_CODE (x
) == PLUS
)
2954 if (GET_CODE (XEXP (x
, 0)) == REG
)
2956 register rtx temp
= gen_reg_rtx (Pmode
);
2957 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2959 emit_move_insn (temp
, val
);
2961 x
= gen_rtx_PLUS (Pmode
, XEXP (x
, 0), temp
);
2964 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2966 register rtx temp
= gen_reg_rtx (Pmode
);
2967 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2969 emit_move_insn (temp
, val
);
2971 x
= gen_rtx_PLUS (Pmode
, temp
, XEXP (x
, 1));
/* Re-attach the (now in-range) constant displacement, if any.  */
2975 if (constant_term
!= const0_rtx
)
2976 x
= gen_rtx_PLUS (Pmode
, x
, constant_term
);
2981 /* Emit code to move LEN bytes from DST to SRC. */
2984 s390_expand_movstr (rtx dst
, rtx src
, rtx len
)
2986 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
2988 if (INTVAL (len
) > 0)
2989 emit_insn (gen_movstr_short (dst
, src
, GEN_INT (INTVAL (len
) - 1)));
2992 else if (TARGET_MVCLE
)
2994 emit_insn (gen_movstr_long (dst
, src
, convert_to_mode (Pmode
, len
, 1)));
2999 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3000 rtx end_label
= gen_label_rtx ();
3001 enum machine_mode mode
;
3004 mode
= GET_MODE (len
);
3005 if (mode
== VOIDmode
)
3008 type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3012 dst_addr
= gen_reg_rtx (Pmode
);
3013 src_addr
= gen_reg_rtx (Pmode
);
3014 count
= gen_reg_rtx (mode
);
3015 blocks
= gen_reg_rtx (mode
);
3017 convert_move (count
, len
, 1);
3018 emit_cmp_and_jump_insns (count
, const0_rtx
,
3019 EQ
, NULL_RTX
, mode
, 1, end_label
);
3021 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3022 emit_move_insn (src_addr
, force_operand (XEXP (src
, 0), NULL_RTX
));
3023 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3024 src
= change_address (src
, VOIDmode
, src_addr
);
3026 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3028 emit_move_insn (count
, temp
);
3030 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3032 emit_move_insn (blocks
, temp
);
3034 expand_start_loop (1);
3035 expand_exit_loop_top_cond (0, build (NE_EXPR
, type
,
3036 make_tree (type
, blocks
),
3037 make_tree (type
, const0_rtx
)));
3039 emit_insn (gen_movstr_short (dst
, src
, GEN_INT (255)));
3040 s390_load_address (dst_addr
,
3041 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3042 s390_load_address (src_addr
,
3043 gen_rtx_PLUS (Pmode
, src_addr
, GEN_INT (256)));
3045 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3047 emit_move_insn (blocks
, temp
);
3051 emit_insn (gen_movstr_short (dst
, src
,
3052 convert_to_mode (Pmode
, count
, 1)));
3053 emit_label (end_label
);
3057 /* Emit code to clear LEN bytes at DST. */
3060 s390_expand_clrstr (rtx dst
, rtx len
)
3062 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3064 if (INTVAL (len
) > 0)
3065 emit_insn (gen_clrstr_short (dst
, GEN_INT (INTVAL (len
) - 1)));
3068 else if (TARGET_MVCLE
)
3070 emit_insn (gen_clrstr_long (dst
, convert_to_mode (Pmode
, len
, 1)));
3075 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3076 rtx end_label
= gen_label_rtx ();
3077 enum machine_mode mode
;
3080 mode
= GET_MODE (len
);
3081 if (mode
== VOIDmode
)
3084 type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3088 dst_addr
= gen_reg_rtx (Pmode
);
3089 src_addr
= gen_reg_rtx (Pmode
);
3090 count
= gen_reg_rtx (mode
);
3091 blocks
= gen_reg_rtx (mode
);
3093 convert_move (count
, len
, 1);
3094 emit_cmp_and_jump_insns (count
, const0_rtx
,
3095 EQ
, NULL_RTX
, mode
, 1, end_label
);
3097 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3098 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3100 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3102 emit_move_insn (count
, temp
);
3104 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3106 emit_move_insn (blocks
, temp
);
3108 expand_start_loop (1);
3109 expand_exit_loop_top_cond (0, build (NE_EXPR
, type
,
3110 make_tree (type
, blocks
),
3111 make_tree (type
, const0_rtx
)));
3113 emit_insn (gen_clrstr_short (dst
, GEN_INT (255)));
3114 s390_load_address (dst_addr
,
3115 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3117 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3119 emit_move_insn (blocks
, temp
);
3123 emit_insn (gen_clrstr_short (dst
, convert_to_mode (Pmode
, count
, 1)));
3124 emit_label (end_label
);
3128 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3129 and return the result in TARGET. */
3132 s390_expand_cmpmem (rtx target
, rtx op0
, rtx op1
, rtx len
)
3134 rtx (*gen_result
) (rtx
) =
3135 GET_MODE (target
) == DImode
? gen_cmpint_di
: gen_cmpint_si
;
3137 op0
= protect_from_queue (op0
, 0);
3138 op1
= protect_from_queue (op1
, 0);
3139 len
= protect_from_queue (len
, 0);
3141 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3143 if (INTVAL (len
) > 0)
3145 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (INTVAL (len
) - 1)));
3146 emit_insn (gen_result (target
));
3149 emit_move_insn (target
, const0_rtx
);
3152 else /* if (TARGET_MVCLE) */
3154 emit_insn (gen_cmpmem_long (op0
, op1
, convert_to_mode (Pmode
, len
, 1)));
3155 emit_insn (gen_result (target
));
3159 /* Deactivate for now as profile code cannot cope with
3160 CC being live across basic block boundaries. */
3163 rtx addr0
, addr1
, count
, blocks
, temp
;
3164 rtx end_label
= gen_label_rtx ();
3165 enum machine_mode mode
;
3168 mode
= GET_MODE (len
);
3169 if (mode
== VOIDmode
)
3172 type
= lang_hooks
.types
.type_for_mode (mode
, 1);
3176 addr0
= gen_reg_rtx (Pmode
);
3177 addr1
= gen_reg_rtx (Pmode
);
3178 count
= gen_reg_rtx (mode
);
3179 blocks
= gen_reg_rtx (mode
);
3181 convert_move (count
, len
, 1);
3182 emit_cmp_and_jump_insns (count
, const0_rtx
,
3183 EQ
, NULL_RTX
, mode
, 1, end_label
);
3185 emit_move_insn (addr0
, force_operand (XEXP (op0
, 0), NULL_RTX
));
3186 emit_move_insn (addr1
, force_operand (XEXP (op1
, 0), NULL_RTX
));
3187 op0
= change_address (op0
, VOIDmode
, addr0
);
3188 op1
= change_address (op1
, VOIDmode
, addr1
);
3190 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3192 emit_move_insn (count
, temp
);
3194 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3196 emit_move_insn (blocks
, temp
);
3198 expand_start_loop (1);
3199 expand_exit_loop_top_cond (0, build (NE_EXPR
, type
,
3200 make_tree (type
, blocks
),
3201 make_tree (type
, const0_rtx
)));
3203 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (255)));
3204 temp
= gen_rtx_NE (VOIDmode
, gen_rtx_REG (CCSmode
, 33), const0_rtx
);
3205 temp
= gen_rtx_IF_THEN_ELSE (VOIDmode
, temp
,
3206 gen_rtx_LABEL_REF (VOIDmode
, end_label
), pc_rtx
);
3207 temp
= gen_rtx_SET (VOIDmode
, pc_rtx
, temp
);
3208 emit_jump_insn (temp
);
3210 s390_load_address (addr0
,
3211 gen_rtx_PLUS (Pmode
, addr0
, GEN_INT (256)));
3212 s390_load_address (addr1
,
3213 gen_rtx_PLUS (Pmode
, addr1
, GEN_INT (256)));
3215 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3217 emit_move_insn (blocks
, temp
);
3221 emit_insn (gen_cmpmem_short (op0
, op1
,
3222 convert_to_mode (Pmode
, count
, 1)));
3223 emit_label (end_label
);
3225 emit_insn (gen_result (target
));
3230 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3231 We need to emit DTP-relative relocations. */
3234 s390_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
/* Emit a 4-byte directive ...  (extraction note: the 'switch (size)' /
   case lines selecting between these two fputs calls are missing --
   presumably size 4 selects .long and size 8 selects .quad; verify
   against upstream s390.c.)  */
3239 fputs ("\t.long\t", file
);
/* ... or an 8-byte directive.  */
3242 fputs ("\t.quad\t", file
);
/* Print the symbol itself followed by the @DTPOFF relocation suffix.  */
3247 output_addr_const (file
, x
);
3248 fputs ("@DTPOFF", file
);
3251 /* In the name of slightly smaller debug output, and to cater to
3252 general assembler losage, recognize various UNSPEC sequences
3253 and turn them back into a direct symbol reference. */
3256 s390_delegitimize_address (rtx orig_x
)
/* (Extraction note: the local declarations of x/y and the line copying
   orig_x into x are missing from this fragment.)  */
3260 if (GET_CODE (x
) != MEM
)
/* Pattern 1: (mem (plus (reg pic) (const (unspec ... UNSPEC_GOT))))
   -- a GOT slot accessed via the PIC register; recover the symbol.  */
3264 if (GET_CODE (x
) == PLUS
3265 && GET_CODE (XEXP (x
, 1)) == CONST
3266 && GET_CODE (XEXP (x
, 0)) == REG
3267 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
3269 y
= XEXP (XEXP (x
, 1), 0);
3270 if (GET_CODE (y
) == UNSPEC
3271 && XINT (y
, 1) == UNSPEC_GOT
)
3272 return XVECEXP (y
, 0, 0);
/* Pattern 2: (mem (const (unspec ... UNSPEC_GOTENT))) -- a
   PC-relative GOT entry reference; recover the symbol likewise.  */
3276 if (GET_CODE (x
) == CONST
)
3279 if (GET_CODE (y
) == UNSPEC
3280 && XINT (y
, 1) == UNSPEC_GOTENT
)
3281 return XVECEXP (y
, 0, 0);
3288 /* Output shift count operand OP to stdio stream FILE. */
3291 print_shift_count_operand (FILE *file
, rtx op
)
3293 HOST_WIDE_INT offset
= 0;
3295 /* We can have an integer constant, an address register,
3296 or a sum of the two. */
3297 if (GET_CODE (op
) == CONST_INT
)
3299 offset
= INTVAL (op
);
/* reg+const form: peel off the constant part, keep the register in OP.
   (Extraction note: the line setting op to XEXP (op, 0) afterwards is
   not visible here.)  */
3302 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
3304 offset
= INTVAL (XEXP (op
, 1));
/* Strip any SUBREG wrappers to reach the underlying register.  */
3307 while (op
&& GET_CODE (op
) == SUBREG
)
3308 op
= SUBREG_REG (op
);
/* Sanity check: only a hard address register is acceptable here.  */
3311 if (op
&& (GET_CODE (op
) != REG
3312 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
3313 || REGNO_REG_CLASS (REGNO (op
)) != ADDR_REGS
))
3316 /* Shift counts are truncated to the low six bits anyway. */
3317 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, offset
& 63);
/* Append "(reg)" when a base register participates in the count.  */
3319 fprintf (file
, "(%s)", reg_names
[REGNO (op
)]);
3322 /* Locate some local-dynamic symbol still in use by this function
3323 so that we can print its name in local-dynamic base patterns. */
3326 get_some_local_dynamic_name (void)
/* Return the cached name if a previous call already found one.  */
3330 if (cfun
->machine
->some_ld_name
)
3331 return cfun
->machine
->some_ld_name
;
/* Otherwise scan every insn; get_some_local_dynamic_name_1 caches the
   first local-dynamic TLS symbol it finds into cfun->machine.  */
3333 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3335 && for_each_rtx (&PATTERN (insn
), get_some_local_dynamic_name_1
, 0))
3336 return cfun
->machine
->some_ld_name
;
/* for_each_rtx callback: record the first SYMBOL_REF with local-dynamic
   TLS model into cfun->machine->some_ld_name.  Returns nonzero to stop
   the walk once a name has been recorded.  */
3342 get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
/* Constant-pool symbols are looked through: recurse into the pooled
   constant itself.  */
3346 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
3348 x
= get_pool_constant (x
);
3349 return for_each_rtx (&x
, get_some_local_dynamic_name_1
, 0);
/* A local-dynamic TLS symbol: cache its assembler name.  */
3352 if (GET_CODE (x
) == SYMBOL_REF
3353 && tls_symbolic_operand (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
3355 cfun
->machine
->some_ld_name
= XSTR (x
, 0);
3362 /* Output symbolic constant X in assembler syntax to
3363 stdio stream FILE. */
3366 s390_output_symbolic_const (FILE *file
, rtx x
)
3368 switch (GET_CODE (x
))
3373 s390_output_symbolic_const (file
, XEXP (x
, 0));
3377 s390_output_symbolic_const (file
, XEXP (x
, 0));
3378 fprintf (file
, "+");
3379 s390_output_symbolic_const (file
, XEXP (x
, 1));
3383 s390_output_symbolic_const (file
, XEXP (x
, 0));
3384 fprintf (file
, "-");
3385 s390_output_symbolic_const (file
, XEXP (x
, 1));
3392 output_addr_const (file
, x
);
3396 if (XVECLEN (x
, 0) != 1)
3397 output_operand_lossage ("invalid UNSPEC as operand (1)");
3398 switch (XINT (x
, 1))
3401 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3402 fprintf (file
, "@GOTENT");
3405 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3406 fprintf (file
, "@GOT");
3409 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3410 fprintf (file
, "@GOTOFF");
3413 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3414 fprintf (file
, "@PLT");
3417 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3418 fprintf (file
, "@PLTOFF");
3421 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3422 fprintf (file
, "@TLSGD");
3425 assemble_name (file
, get_some_local_dynamic_name ());
3426 fprintf (file
, "@TLSLDM");
3429 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3430 fprintf (file
, "@DTPOFF");
3433 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3434 fprintf (file
, "@NTPOFF");
3436 case UNSPEC_GOTNTPOFF
:
3437 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3438 fprintf (file
, "@GOTNTPOFF");
3440 case UNSPEC_INDNTPOFF
:
3441 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
3442 fprintf (file
, "@INDNTPOFF");
3445 output_operand_lossage ("invalid UNSPEC as operand (2)");
3451 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x
);
3456 /* Output address operand ADDR in assembler syntax to
3457 stdio stream FILE. */
3460 print_operand_address (FILE *file
, rtx addr
)
3462 struct s390_address ad
;
/* Reject addresses that don't decompose into a valid strict
   base/index/displacement form.  */
3464 if (!s390_decompose_address (addr
, &ad
)
3465 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3466 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
3467 output_operand_lossage ("Cannot decompose address.");
/* Displacement first (or an explicit 0 when there is none) ...  */
3470 s390_output_symbolic_const (file
, ad
.disp
);
3472 fprintf (file
, "0");
/* ... then "(index,base)" or "(base)" in standard s390 syntax.  */
3474 if (ad
.base
&& ad
.indx
)
3475 fprintf (file
, "(%s,%s)", reg_names
[REGNO (ad
.indx
)],
3476 reg_names
[REGNO (ad
.base
)]);
3478 fprintf (file
, "(%s)", reg_names
[REGNO (ad
.base
)]);
3481 /* Output operand X in assembler syntax to stdio stream FILE.
3482 CODE specified the format flag. The following format flags
3485 'C': print opcode suffix for branch condition.
3486 'D': print opcode suffix for inverse branch condition.
3487 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3488 'O': print only the displacement of a memory reference.
3489 'R': print only the base register of a memory reference.
3490 'N': print the second word of a DImode operand.
3491 'M': print the second word of a TImode operand.
3492 'Y': print shift count operand.
3494 'b': print integer X as if it's an unsigned byte.
3495 'x': print integer X as if it's an unsigned word.
3496 'h': print integer X as if it's a signed word. */
3499 print_operand (FILE *file
, rtx x
, int code
)
3504 fprintf (file
, s390_branch_condition_mnemonic (x
, FALSE
));
3508 fprintf (file
, s390_branch_condition_mnemonic (x
, TRUE
));
3512 if (GET_CODE (x
) == SYMBOL_REF
)
3514 fprintf (file
, "%s", ":tls_load:");
3515 output_addr_const (file
, x
);
3517 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSGD
)
3519 fprintf (file
, "%s", ":tls_gdcall:");
3520 output_addr_const (file
, XVECEXP (x
, 0, 0));
3522 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSLDM
)
3524 fprintf (file
, "%s", ":tls_ldcall:");
3525 assemble_name (file
, get_some_local_dynamic_name ());
3533 struct s390_address ad
;
3535 if (GET_CODE (x
) != MEM
3536 || !s390_decompose_address (XEXP (x
, 0), &ad
)
3537 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3542 s390_output_symbolic_const (file
, ad
.disp
);
3544 fprintf (file
, "0");
3550 struct s390_address ad
;
3552 if (GET_CODE (x
) != MEM
3553 || !s390_decompose_address (XEXP (x
, 0), &ad
)
3554 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
3559 fprintf (file
, "%s", reg_names
[REGNO (ad
.base
)]);
3561 fprintf (file
, "0");
3566 if (GET_CODE (x
) == REG
)
3567 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
3568 else if (GET_CODE (x
) == MEM
)
3569 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 4));
3575 if (GET_CODE (x
) == REG
)
3576 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
3577 else if (GET_CODE (x
) == MEM
)
3578 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 8));
3584 print_shift_count_operand (file
, x
);
3588 switch (GET_CODE (x
))
3591 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
3595 output_address (XEXP (x
, 0));
3602 s390_output_symbolic_const (file
, x
);
3607 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xff);
3608 else if (code
== 'x')
3609 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xffff);
3610 else if (code
== 'h')
3611 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((INTVAL (x
) & 0xffff) ^ 0x8000) - 0x8000);
3613 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
3617 if (GET_MODE (x
) != VOIDmode
)
3620 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xff);
3621 else if (code
== 'x')
3622 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xffff);
3623 else if (code
== 'h')
3624 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((CONST_DOUBLE_LOW (x
) & 0xffff) ^ 0x8000) - 0x8000);
3630 fatal_insn ("UNKNOWN in print_operand !?", x
);
3635 /* Target hook for assembling integer objects. We need to define it
3636 here to work a round a bug in some versions of GAS, which couldn't
3637 handle values smaller than INT_MIN when printed in decimal. */
3640 s390_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
/* Only the problematic case -- an aligned 8-byte CONST_INT below
   INT_MIN -- is emitted by hand, in hex, to sidestep the GAS bug.  */
3642 if (size
== 8 && aligned_p
3643 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < INT_MIN
)
3645 fprintf (asm_out_file
, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX
"\n",
/* Everything else goes through the generic implementation.  */
3649 return default_assemble_integer (x
, size
, aligned_p
);
3652 /* Returns true if register REGNO is used for forming
3653 a memory address in expression X. */
3656 reg_used_in_mem_p (int regno
, rtx x
)
3658 enum rtx_code code
= GET_CODE (x
)
;
/* Case 1: X is itself a MEM -- check whether REGNO appears in its
   address.  (Extraction note: the 'if (code == MEM)' guard line is
   missing from this fragment.)  */
3664 if (refers_to_regno_p (regno
, regno
+1,
/* Case 2: a SET of the PC (a jump) -- address-like use as well.  */
3668 else if (code
== SET
3669 && GET_CODE (SET_DEST (x
)) == PC
)
3671 if (refers_to_regno_p (regno
, regno
+1,
/* Otherwise recurse over all sub-rtxes using the format string:
   'e' operands directly, 'E' vectors element by element.  */
3676 fmt
= GET_RTX_FORMAT (code
);
3677 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3680 && reg_used_in_mem_p (regno
, XEXP (x
, i
)))
3683 else if (fmt
[i
] == 'E')
3684 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3685 if (reg_used_in_mem_p (regno
, XVECEXP (x
, i
, j
)))
3691 /* Returns true if expression DEP_RTX sets an address register
3692 used by instruction INSN to address memory. */
3695 addr_generation_dependency_p (rtx dep_rtx
, rtx insn
)
/* Accept either a raw pattern or a full INSN as DEP_RTX.  */
3699 if (GET_CODE (dep_rtx
) == INSN
)
3700 dep_rtx
= PATTERN (dep_rtx
);
3702 if (GET_CODE (dep_rtx
) == SET
)
/* Peel STRICT_LOW_PART / SUBREG wrappers off the destination to
   find the register actually being set.  */
3704 target
= SET_DEST (dep_rtx
);
3705 if (GET_CODE (target
) == STRICT_LOW_PART
)
3706 target
= XEXP (target
, 0);
3707 while (GET_CODE (target
) == SUBREG
)
3708 target
= SUBREG_REG (target
);
3710 if (GET_CODE (target
) == REG
)
3712 int regno
= REGNO (target
);
/* LA-type insns: the dependency matters if REGNO feeds the
   address computation in the LA's source.  */
3714 if (s390_safe_attr_type (insn
) == TYPE_LA
)
3716 pat
= PATTERN (insn
);
3717 if (GET_CODE (pat
) == PARALLEL
)
/* Only the 2-element PARALLEL form is expected here.  */
3719 if (XVECLEN (pat
, 0) != 2)
3721 pat
= XVECEXP (pat
, 0, 0);
3723 if (GET_CODE (pat
) == SET
)
3724 return refers_to_regno_p (regno
, regno
+1, SET_SRC (pat
), 0);
/* Other address-generation insns: REGNO must form part of a
   memory address inside INSN's pattern.  */
3728 else if (get_attr_atype (insn
) == ATYPE_AGEN
)
3729 return reg_used_in_mem_p (regno
, PATTERN (insn
));
3735 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
3738 s390_agen_dep_p (rtx dep_insn
, rtx insn
)
3740 rtx dep_rtx
= PATTERN (dep_insn
)
;
/* Single SET: test it directly.  */
3743 if (GET_CODE (dep_rtx
) == SET
3744 && addr_generation_dependency_p (dep_rtx
, insn
))
/* PARALLEL: any member creating an address dependency counts.  */
3746 else if (GET_CODE (dep_rtx
) == PARALLEL
)
3748 for (i
= 0; i
< XVECLEN (dep_rtx
, 0); i
++)
3750 if (addr_generation_dependency_p (XVECEXP (dep_rtx
, 0, i
), insn
))
3757 /* Return the modified cost of the dependency of instruction INSN
3758 on instruction DEP_INSN through the link LINK. COST is the
3759 default cost of that dependency.
3761 Data dependencies are all handled without delay. However, if a
3762 register is modified and subsequently used as base or index
3763 register of a memory reference, at least 4 cycles need to pass
3764 between setting and using the register to avoid pipeline stalls.
3765 An exception is the LA instruction. An address generated by LA can
3766 be used by introducing only a one cycle stall on the pipeline. */
3769 s390_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
3774 /* If the dependence is an anti-dependence, there is no cost. For an
3775 output dependence, there is sometimes a cost, but it doesn't seem
3776 worth handling those few cases. */
3778 if (REG_NOTE_KIND (link
) != 0)
3781 /* If we can't recognize the insns, we can't really do anything. */
3782 if (recog_memoized (insn
) < 0 || recog_memoized (dep_insn
) < 0)
3785 /* DFA based scheduling checks address dependency in md file. */
3786 if (s390_use_dfa_pipeline_interface ())
3788 /* Operand forward in case of lr, load and la. */
3789 if (s390_tune
== PROCESSOR_2084_Z990
3791 && (s390_safe_attr_type (dep_insn
) == TYPE_LA
3792 || s390_safe_attr_type (dep_insn
) == TYPE_LR
3793 || s390_safe_attr_type (dep_insn
) == TYPE_LOAD
))
/* Non-DFA path: penalize address-generation dependencies by hand --
   +1 cycle after LA, +4 cycles after anything else.  */
3798 dep_rtx
= PATTERN (dep_insn
);
3800 if (GET_CODE (dep_rtx
) == SET
3801 && addr_generation_dependency_p (dep_rtx
, insn
))
3802 cost
+= (s390_safe_attr_type (dep_insn
) == TYPE_LA
) ? 1 : 4;
3803 else if (GET_CODE (dep_rtx
) == PARALLEL
)
3805 for (i
= 0; i
< XVECLEN (dep_rtx
, 0); i
++)
3807 if (addr_generation_dependency_p (XVECEXP (dep_rtx
, 0, i
), insn
))
3808 cost
+= (s390_safe_attr_type (dep_insn
) == TYPE_LA
) ? 1 : 4;
3814 /* A C statement (sans semicolon) to update the integer scheduling priority
3815 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3816 reduce the priority to execute INSN later. Do not define this macro if
3817 you do not need to adjust the scheduling priorities of insns.
3819 A STD instruction should be scheduled earlier,
3820 in order to use the bypass. */
3823 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
/* Only real insns on the z990 get their priority boosted.  */
3825 if (! INSN_P (insn
))
3828 if (s390_tune
!= PROCESSOR_2084_Z990
)
3831 switch (s390_safe_attr_type (insn
))
/* Strong boost (x8) for one insn class, weaker (x2) for another --
   the case labels selecting the classes were lost in extraction;
   presumably FP stores vs. other stores, per the STD comment above.  */
3835 priority
= priority
<< 3;
3838 priority
= priority
<< 1;
3846 /* The number of instructions that can be issued per cycle. */
3849 s390_issue_rate (void)
/* z990 issues more insns per cycle than older cores; the actual
   return values are on lines lost in extraction.  */
3851 if (s390_tune
== PROCESSOR_2084_Z990
)
3856 /* If the following function returns TRUE, we will use the the DFA
/* DFA scheduling is enabled only for the z900 and z990 pipelines.  */
3860 s390_use_dfa_pipeline_interface (void)
3862 if (s390_tune
== PROCESSOR_2064_Z900
3863 || s390_tune
== PROCESSOR_2084_Z990
)
/* Scheduler lookahead depth: 4 when the DFA pipeline description is in
   use, 0 (no multipass lookahead) otherwise.  */
3870 s390_first_cycle_multipass_dfa_lookahead (void)
3872 return s390_use_dfa_pipeline_interface () ? 4 : 0;
3875 /* Called after issuing each insn.
3876 Triggers default sort algorithm to better slot instructions. */
/* All parameters are unused; returning the issue rate simply lets the
   generic scheduler keep re-sorting the ready list.  */
3879 s390_sched_reorder2 (FILE *dump ATTRIBUTE_UNUSED
,
3880 int sched_verbose ATTRIBUTE_UNUSED
,
3881 rtx
*ready ATTRIBUTE_UNUSED
,
3882 int *pn_ready ATTRIBUTE_UNUSED
,
3883 int clock_var ATTRIBUTE_UNUSED
)
3885 return s390_issue_rate();
3889 /* Split all branches that exceed the maximum distance.
3890 Returns true if this created a new literal pool entry. */
3893 s390_split_branches (void)
3895 rtx temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
3896 int new_literal
= 0;
3897 rtx insn
, pat
, tmp
, target
;
3900 /* We need correct insn addresses. */
3902 shorten_branches (get_insns ());
3904 /* Find all branches that exceed 64KB, and split them. */
3906 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3908 if (GET_CODE (insn
) != JUMP_INSN
)
3911 pat
= PATTERN (insn
);
3912 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
3913 pat
= XVECEXP (pat
, 0, 0);
3914 if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
3917 if (GET_CODE (SET_SRC (pat
)) == LABEL_REF
)
3919 label
= &SET_SRC (pat
);
3921 else if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
3923 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
3924 label
= &XEXP (SET_SRC (pat
), 1);
3925 else if (GET_CODE (XEXP (SET_SRC (pat
), 2)) == LABEL_REF
)
3926 label
= &XEXP (SET_SRC (pat
), 2);
3933 if (get_attr_length (insn
) <= 4)
3936 /* We are going to use the return register as scratch register,
3937 make sure it will be saved/restored by the prologue/epilogue. */
3938 cfun
->machine
->save_return_addr_p
= 1;
3943 tmp
= force_const_mem (Pmode
, *label
);
3944 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, tmp
), insn
);
3945 INSN_ADDRESSES_NEW (tmp
, -1);
3952 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, *label
),
3953 UNSPEC_LTREL_OFFSET
);
3954 target
= gen_rtx_CONST (Pmode
, target
);
3955 target
= force_const_mem (Pmode
, target
);
3956 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, target
), insn
);
3957 INSN_ADDRESSES_NEW (tmp
, -1);
3959 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, XEXP (target
, 0)),
3961 target
= gen_rtx_PLUS (Pmode
, temp_reg
, target
);
3964 if (!validate_change (insn
, label
, target
, 0))
3972 /* Find a literal pool symbol referenced in RTX X, and store
3973 it at REF. Will abort if X contains references to more than
3974 one such pool symbol; multiple references to the same symbol
3975 are allowed, however.
3977 The rtx pointed to by REF must be initialized to NULL_RTX
3978 by the caller before calling this routine. */
3981 find_constant_pool_ref (rtx x
, rtx
*ref
)
3986 /* Ignore LTREL_BASE references. */
3987 if (GET_CODE (x
) == UNSPEC
3988 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
3990 /* Likewise POOL_ENTRY insns. */
3991 if (GET_CODE (x
) == UNSPEC_VOLATILE
3992 && XINT (x
, 1) == UNSPECV_POOL_ENTRY
)
/* A pool symbol: record it, first occurrence only.  (The abort on a
   conflicting second symbol is on lines lost in extraction.)  */
3995 if (GET_CODE (x
) == SYMBOL_REF
3996 && CONSTANT_POOL_ADDRESS_P (x
))
3998 if (*ref
== NULL_RTX
)
/* Standard recursive walk over sub-rtxes via the format string.  */
4004 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4005 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4009 find_constant_pool_ref (XEXP (x
, i
), ref
);
4011 else if (fmt
[i
] == 'E')
4013 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4014 find_constant_pool_ref (XVECEXP (x
, i
, j
), ref
);
4019 /* Replace every reference to the literal pool symbol REF
4020 in X by the address ADDR. Fix up MEMs as required. */
4023 replace_constant_pool_ref (rtx
*x
, rtx ref
, rtx addr
)
4031 /* Literal pool references can only occur inside a MEM ... */
4032 if (GET_CODE (*x
) == MEM
)
4034 rtx memref
= XEXP (*x
, 0);
/* Direct reference: (mem ref) becomes (mem addr).  (The guard
   comparing memref against ref is on a lost line.)  */
4038 *x
= replace_equiv_address (*x
, addr
);
/* Offsetted reference: (mem (const (plus ref off))) becomes
   (mem (addr + off)).  */
4042 if (GET_CODE (memref
) == CONST
4043 && GET_CODE (XEXP (memref
, 0)) == PLUS
4044 && GET_CODE (XEXP (XEXP (memref
, 0), 1)) == CONST_INT
4045 && XEXP (XEXP (memref
, 0), 0) == ref
)
4047 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (memref
, 0), 1));
4048 *x
= replace_equiv_address (*x
, plus_constant (addr
, off
));
4053 /* ... or a load-address type pattern. */
4054 if (GET_CODE (*x
) == SET
)
4056 rtx addrref
= SET_SRC (*x
)
;
/* Direct form: the SET source is replaced wholesale by ADDR.  */
4060 SET_SRC (*x
) = addr
;
/* Offsetted form, analogous to the MEM case above.  */
4064 if (GET_CODE (addrref
) == CONST
4065 && GET_CODE (XEXP (addrref
, 0)) == PLUS
4066 && GET_CODE (XEXP (XEXP (addrref
, 0), 1)) == CONST_INT
4067 && XEXP (XEXP (addrref
, 0), 0) == ref
)
4069 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (addrref
, 0), 1));
4070 SET_SRC (*x
) = plus_constant (addr
, off
);
/* Recurse over remaining sub-rtxes.  */
4075 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4076 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4080 replace_constant_pool_ref (&XEXP (*x
, i
), ref
, addr
);
4082 else if (fmt
[i
] == 'E')
4084 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4085 replace_constant_pool_ref (&XVECEXP (*x
, i
, j
), ref
, addr
);
4090 /* Check whether X contains an UNSPEC_LTREL_BASE.
4091 Return its constant pool symbol if found, NULL_RTX otherwise. */
4094 find_ltrel_base (rtx x
)
/* Direct hit: the UNSPEC's single operand is the pool symbol.  */
4099 if (GET_CODE (x
) == UNSPEC
4100 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
4101 return XVECEXP (x
, 0, 0);
/* Otherwise search sub-rtxes; the first non-NULL result is returned
   (the 'if (fnd) return fnd;' lines were lost in extraction).  */
4103 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4104 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4108 rtx fnd
= find_ltrel_base (XEXP (x
, i
));
4112 else if (fmt
[i
] == 'E')
4114 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4116 rtx fnd
= find_ltrel_base (XVECEXP (x
, i
, j
));
4126 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE. */
4129 replace_ltrel_base (rtx
*x
, rtx base
)
/* Direct hit: overwrite *x with BASE (the assignment itself is on a
   line lost in extraction) and stop recursing into it.  */
4134 if (GET_CODE (*x
) == UNSPEC
4135 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
/* Standard in-place recursive walk over sub-rtxes.  */
4141 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4142 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4146 replace_ltrel_base (&XEXP (*x
, i
), base
);
4148 else if (fmt
[i
] == 'E')
4150 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4151 replace_ltrel_base (&XVECEXP (*x
, i
, j
), base
);
4157 /* We keep a list of constants which we have to add to internal
4158 constant tables in the middle of large functions. */
4160 #define NR_C_MODES 7
4161 enum machine_mode constant_modes
[NR_C_MODES
] =
4172 struct constant
*next
;
4177 struct constant_pool
4179 struct constant_pool
*next
;
4184 struct constant
*constants
[NR_C_MODES
];
4189 static struct constant_pool
* s390_mainpool_start (void);
4190 static void s390_mainpool_finish (struct constant_pool
*, rtx base_reg
);
4191 static void s390_mainpool_cancel (struct constant_pool
*);
4193 static struct constant_pool
* s390_chunkify_start (rtx base_reg
);
4194 static void s390_chunkify_finish (struct constant_pool
*, rtx base_reg
);
4195 static void s390_chunkify_cancel (struct constant_pool
*);
4197 static struct constant_pool
*s390_start_pool (struct constant_pool
**, rtx
);
4198 static void s390_end_pool (struct constant_pool
*, rtx
);
4199 static void s390_add_pool_insn (struct constant_pool
*, rtx
);
4200 static struct constant_pool
*s390_find_pool (struct constant_pool
*, rtx
);
4201 static void s390_add_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4202 static rtx
s390_find_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4203 static rtx
s390_dump_pool (struct constant_pool
*, bool);
4204 static struct constant_pool
*s390_alloc_pool (void);
4205 static void s390_free_pool (struct constant_pool
*);
4207 /* Create new constant pool covering instructions starting at INSN
4208 and chain it to the end of POOL_LIST. */
4210 static struct constant_pool
*
4211 s390_start_pool (struct constant_pool
**pool_list
, rtx insn
)
4213 struct constant_pool
*pool
, **prev
;
4215 pool
= s390_alloc_pool ();
4216 pool
->first_insn
= insn
;
4218 for (prev
= pool_list
; *prev
; prev
= &(*prev
)->next
)
4225 /* End range of instructions covered by POOL at INSN and emit
4226 placeholder insn representing the pool. */
4229 s390_end_pool (struct constant_pool
*pool
, rtx insn
)
4231 rtx pool_size
= GEN_INT (pool
->size
+ 8 /* alignment slop */);
4234 insn
= get_last_insn ();
4236 pool
->pool_insn
= emit_insn_after (gen_pool (pool_size
), insn
);
4237 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4240 /* Add INSN to the list of insns covered by POOL. */
4243 s390_add_pool_insn (struct constant_pool
*pool
, rtx insn
)
4245 bitmap_set_bit (pool
->insns
, INSN_UID (insn
));
4248 /* Return pool out of POOL_LIST that covers INSN. */
4250 static struct constant_pool
*
4251 s390_find_pool (struct constant_pool
*pool_list
, rtx insn
)
4253 struct constant_pool
*pool
;
4255 for (pool
= pool_list
; pool
; pool
= pool
->next
)
4256 if (bitmap_bit_p (pool
->insns
, INSN_UID (insn
)))
4262 /* Add constant VAL of mode MODE to the constant pool POOL. */
4265 s390_add_constant (struct constant_pool
*pool
, rtx val
, enum machine_mode mode
)
4270 for (i
= 0; i
< NR_C_MODES
; i
++)
4271 if (constant_modes
[i
] == mode
)
4273 if (i
== NR_C_MODES
)
4276 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4277 if (rtx_equal_p (val
, c
->value
))
4282 c
= (struct constant
*) xmalloc (sizeof *c
);
4284 c
->label
= gen_label_rtx ();
4285 c
->next
= pool
->constants
[i
];
4286 pool
->constants
[i
] = c
;
4287 pool
->size
+= GET_MODE_SIZE (mode
);
4291 /* Find constant VAL of mode MODE in the constant pool POOL.
4292 Return an RTX describing the distance from the start of
4293 the pool to the location of the new constant. */
4296 s390_find_constant (struct constant_pool
*pool
, rtx val
,
4297 enum machine_mode mode
)
4303 for (i
= 0; i
< NR_C_MODES
; i
++)
4304 if (constant_modes
[i
] == mode
)
4306 if (i
== NR_C_MODES
)
4309 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4310 if (rtx_equal_p (val
, c
->value
))
4316 offset
= gen_rtx_MINUS (Pmode
, gen_rtx_LABEL_REF (Pmode
, c
->label
),
4317 gen_rtx_LABEL_REF (Pmode
, pool
->label
));
4318 offset
= gen_rtx_CONST (Pmode
, offset
);
4322 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
4323 do not emit the pool base label. */
4326 s390_dump_pool (struct constant_pool
*pool
, bool remote_label
)
4332 /* Pool start insn switches to proper section
4333 and guarantees necessary alignment. */
4334 if (TARGET_CPU_ZARCH
)
4335 insn
= emit_insn_after (gen_pool_start_64 (), pool
->pool_insn
);
4337 insn
= emit_insn_after (gen_pool_start_31 (), pool
->pool_insn
);
4338 INSN_ADDRESSES_NEW (insn
, -1);
4342 insn
= emit_label_after (pool
->label
, insn
);
4343 INSN_ADDRESSES_NEW (insn
, -1);
4346 /* Dump constants in descending alignment requirement order,
4347 ensuring proper alignment for every constant. */
4348 for (i
= 0; i
< NR_C_MODES
; i
++)
4349 for (c
= pool
->constants
[i
]; c
; c
= c
->next
)
4351 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4352 rtx value
= c
->value
;
4353 if (GET_CODE (value
) == CONST
4354 && GET_CODE (XEXP (value
, 0)) == UNSPEC
4355 && XINT (XEXP (value
, 0), 1) == UNSPEC_LTREL_OFFSET
4356 && XVECLEN (XEXP (value
, 0), 0) == 1)
4358 value
= gen_rtx_MINUS (Pmode
, XVECEXP (XEXP (value
, 0), 0, 0),
4359 gen_rtx_LABEL_REF (VOIDmode
, pool
->label
));
4360 value
= gen_rtx_CONST (VOIDmode
, value
);
4363 insn
= emit_label_after (c
->label
, insn
);
4364 INSN_ADDRESSES_NEW (insn
, -1);
4366 value
= gen_rtx_UNSPEC_VOLATILE (constant_modes
[i
],
4367 gen_rtvec (1, value
),
4368 UNSPECV_POOL_ENTRY
);
4369 insn
= emit_insn_after (value
, insn
);
4370 INSN_ADDRESSES_NEW (insn
, -1);
4373 /* Pool end insn switches back to previous section
4374 and guarantees necessary alignment. */
4375 if (TARGET_CPU_ZARCH
)
4376 insn
= emit_insn_after (gen_pool_end_64 (), insn
);
4378 insn
= emit_insn_after (gen_pool_end_31 (), insn
);
4379 INSN_ADDRESSES_NEW (insn
, -1);
4381 insn
= emit_barrier_after (insn
);
4382 INSN_ADDRESSES_NEW (insn
, -1);
4384 /* Remove placeholder insn. */
4385 remove_insn (pool
->pool_insn
);
4390 /* Allocate new constant_pool structure. */
4392 static struct constant_pool
*
4393 s390_alloc_pool (void)
4395 struct constant_pool
*pool
;
4398 pool
= (struct constant_pool
*) xmalloc (sizeof *pool
);
4400 for (i
= 0; i
< NR_C_MODES
; i
++)
4401 pool
->constants
[i
] = NULL
;
4403 pool
->label
= gen_label_rtx ();
4404 pool
->first_insn
= NULL_RTX
;
4405 pool
->pool_insn
= NULL_RTX
;
4406 pool
->insns
= BITMAP_XMALLOC ();
4412 /* Free all memory used by POOL. */
4415 s390_free_pool (struct constant_pool
*pool
)
4419 for (i
= 0; i
< NR_C_MODES
; i
++)
4421 struct constant
*c
= pool
->constants
[i
];
4424 struct constant
*next
= c
->next
;
4430 BITMAP_XFREE (pool
->insns
);
4435 /* Collect main literal pool. Return NULL on overflow. */
4437 static struct constant_pool
*
4438 s390_mainpool_start (void)
4440 struct constant_pool
*pool
;
4443 pool
= s390_alloc_pool ();
4445 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4447 if (GET_CODE (insn
) == INSN
4448 && GET_CODE (PATTERN (insn
)) == UNSPEC_VOLATILE
4449 && XINT (PATTERN (insn
), 1) == UNSPECV_MAIN_POOL
)
4451 if (pool
->pool_insn
)
4453 pool
->pool_insn
= insn
;
4456 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4458 rtx pool_ref
= NULL_RTX
;
4459 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4462 rtx constant
= get_pool_constant (pool_ref
);
4463 enum machine_mode mode
= get_pool_mode (pool_ref
);
4464 s390_add_constant (pool
, constant
, mode
);
4469 if (!pool
->pool_insn
)
4472 if (pool
->size
>= 4096)
4474 s390_free_pool (pool
);
4481 /* POOL holds the main literal pool as collected by s390_mainpool_start.
4482 Modify the current function to output the pool constants as well as
4483 the pool register setup instruction. BASE_REG is the register to
4484 be used as pool base register. */
4487 s390_mainpool_finish (struct constant_pool
*pool
, rtx base_reg
)
4491 /* If the pool is empty, we're done. */
4492 if (pool
->size
== 0)
4494 remove_insn (pool
->pool_insn
);
4495 s390_free_pool (pool
);
4499 /* We need correct insn addresses. */
4500 shorten_branches (get_insns ());
4502 /* On zSeries, we use a LARL to load the pool register. The pool is
4503 located in the .rodata section, so we emit it after the function. */
4504 if (TARGET_CPU_ZARCH
)
4506 insn
= gen_main_base_64 (base_reg
, pool
->label
);
4507 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4508 INSN_ADDRESSES_NEW (insn
, -1);
4509 remove_insn (pool
->pool_insn
);
4511 insn
= get_last_insn ();
4512 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4513 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4515 s390_dump_pool (pool
, 0);
4518 /* On S/390, if the total size of the function's code plus literal pool
4519 does not exceed 4096 bytes, we use BASR to set up a function base
4520 pointer, and emit the literal pool at the end of the function. */
4521 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
4522 + pool
->size
+ 8 /* alignment slop */ < 4096)
4524 insn
= gen_main_base_31_small (base_reg
, pool
->label
);
4525 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4526 INSN_ADDRESSES_NEW (insn
, -1);
4527 remove_insn (pool
->pool_insn
);
4529 insn
= emit_label_after (pool
->label
, insn
);
4530 INSN_ADDRESSES_NEW (insn
, -1);
4532 insn
= get_last_insn ();
4533 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4534 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4536 s390_dump_pool (pool
, 1);
4539 /* Otherwise, we emit an inline literal pool and use BASR to branch
4540 over it, setting up the pool register at the same time. */
4543 rtx pool_end
= gen_label_rtx ();
4545 insn
= gen_main_base_31_large (base_reg
, pool
->label
, pool_end
);
4546 insn
= emit_insn_after (insn
, pool
->pool_insn
);
4547 INSN_ADDRESSES_NEW (insn
, -1);
4548 remove_insn (pool
->pool_insn
);
4550 insn
= emit_label_after (pool
->label
, insn
);
4551 INSN_ADDRESSES_NEW (insn
, -1);
4553 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
4554 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4556 insn
= emit_label_after (pool_end
, pool
->pool_insn
);
4557 INSN_ADDRESSES_NEW (insn
, -1);
4559 s390_dump_pool (pool
, 1);
4563 /* Replace all literal pool references. */
4565 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4568 replace_ltrel_base (&PATTERN (insn
), base_reg
);
4570 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4572 rtx addr
, pool_ref
= NULL_RTX
;
4573 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4576 addr
= s390_find_constant (pool
, get_pool_constant (pool_ref
),
4577 get_pool_mode (pool_ref
));
4578 addr
= gen_rtx_PLUS (Pmode
, base_reg
, addr
);
4579 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
4580 INSN_CODE (insn
) = -1;
4586 /* Free the pool. */
4587 s390_free_pool (pool
);
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
4602 /* Chunkify the literal pool. BASE_REG is to be used as pool
4605 #define S390_POOL_CHUNK_MIN 0xc00
4606 #define S390_POOL_CHUNK_MAX 0xe00
4608 static struct constant_pool
*
4609 s390_chunkify_start (rtx base_reg
)
4611 struct constant_pool
*curr_pool
= NULL
, *pool_list
= NULL
;
4614 rtx pending_ltrel
= NULL_RTX
;
4617 rtx (*gen_reload_base
) (rtx
, rtx
) =
4618 TARGET_CPU_ZARCH
? gen_reload_base_64
: gen_reload_base_31
;
4621 /* We need correct insn addresses. */
4623 shorten_branches (get_insns ());
4625 /* Scan all insns and move literals to pool chunks. */
4627 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4629 /* Check for pending LTREL_BASE. */
4632 rtx ltrel_base
= find_ltrel_base (PATTERN (insn
));
4635 if (ltrel_base
== pending_ltrel
)
4636 pending_ltrel
= NULL_RTX
;
4642 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4644 rtx pool_ref
= NULL_RTX
;
4645 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4648 rtx constant
= get_pool_constant (pool_ref
);
4649 enum machine_mode mode
= get_pool_mode (pool_ref
);
4652 curr_pool
= s390_start_pool (&pool_list
, insn
);
4654 s390_add_constant (curr_pool
, constant
, mode
);
4655 s390_add_pool_insn (curr_pool
, insn
);
4657 /* Don't split the pool chunk between a LTREL_OFFSET load
4658 and the corresponding LTREL_BASE. */
4659 if (GET_CODE (constant
) == CONST
4660 && GET_CODE (XEXP (constant
, 0)) == UNSPEC
4661 && XINT (XEXP (constant
, 0), 1) == UNSPEC_LTREL_OFFSET
)
4665 pending_ltrel
= pool_ref
;
4670 if (GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == CODE_LABEL
)
4673 s390_add_pool_insn (curr_pool
, insn
);
4674 /* An LTREL_BASE must follow within the same basic block. */
4680 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn
)
4681 || INSN_ADDRESSES (INSN_UID (insn
)) == -1)
4684 if (TARGET_CPU_ZARCH
)
4686 if (curr_pool
->size
< S390_POOL_CHUNK_MAX
)
4689 s390_end_pool (curr_pool
, NULL_RTX
);
4694 int chunk_size
= INSN_ADDRESSES (INSN_UID (insn
))
4695 - INSN_ADDRESSES (INSN_UID (curr_pool
->first_insn
))
4698 /* We will later have to insert base register reload insns.
4699 Those will have an effect on code size, which we need to
4700 consider here. This calculation makes rather pessimistic
4701 worst-case assumptions. */
4702 if (GET_CODE (insn
) == CODE_LABEL
)
4705 if (chunk_size
< S390_POOL_CHUNK_MIN
4706 && curr_pool
->size
< S390_POOL_CHUNK_MIN
)
4709 /* Pool chunks can only be inserted after BARRIERs ... */
4710 if (GET_CODE (insn
) == BARRIER
)
4712 s390_end_pool (curr_pool
, insn
);
4717 /* ... so if we don't find one in time, create one. */
4718 else if ((chunk_size
> S390_POOL_CHUNK_MAX
4719 || curr_pool
->size
> S390_POOL_CHUNK_MAX
))
4721 rtx label
, jump
, barrier
;
4723 /* We can insert the barrier only after a 'real' insn. */
4724 if (GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != CALL_INSN
)
4726 if (get_attr_length (insn
) == 0)
4729 /* Don't separate LTREL_BASE from the corresponding
4730 LTREL_OFFSET load. */
4734 label
= gen_label_rtx ();
4735 jump
= emit_jump_insn_after (gen_jump (label
), insn
);
4736 barrier
= emit_barrier_after (jump
);
4737 insn
= emit_label_after (label
, barrier
);
4738 JUMP_LABEL (jump
) = label
;
4739 LABEL_NUSES (label
) = 1;
4741 INSN_ADDRESSES_NEW (jump
, -1);
4742 INSN_ADDRESSES_NEW (barrier
, -1);
4743 INSN_ADDRESSES_NEW (insn
, -1);
4745 s390_end_pool (curr_pool
, barrier
);
4753 s390_end_pool (curr_pool
, NULL_RTX
);
4758 /* Find all labels that are branched into
4759 from an insn belonging to a different chunk. */
4761 far_labels
= BITMAP_XMALLOC ();
4763 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4765 /* Labels marked with LABEL_PRESERVE_P can be target
4766 of non-local jumps, so we have to mark them.
4767 The same holds for named labels.
4769 Don't do that, however, if it is the label before
4772 if (GET_CODE (insn
) == CODE_LABEL
4773 && (LABEL_PRESERVE_P (insn
) || LABEL_NAME (insn
)))
4775 rtx vec_insn
= next_real_insn (insn
);
4776 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
4777 PATTERN (vec_insn
) : NULL_RTX
;
4779 || !(GET_CODE (vec_pat
) == ADDR_VEC
4780 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
4781 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (insn
));
4784 /* If we have a direct jump (conditional or unconditional)
4785 or a casesi jump, check all potential targets. */
4786 else if (GET_CODE (insn
) == JUMP_INSN
)
4788 rtx pat
= PATTERN (insn
);
4789 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
4790 pat
= XVECEXP (pat
, 0, 0);
4792 if (GET_CODE (pat
) == SET
)
4794 rtx label
= JUMP_LABEL (insn
);
4797 if (s390_find_pool (pool_list
, label
)
4798 != s390_find_pool (pool_list
, insn
))
4799 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
4802 else if (GET_CODE (pat
) == PARALLEL
4803 && XVECLEN (pat
, 0) == 2
4804 && GET_CODE (XVECEXP (pat
, 0, 0)) == SET
4805 && GET_CODE (XVECEXP (pat
, 0, 1)) == USE
4806 && GET_CODE (XEXP (XVECEXP (pat
, 0, 1), 0)) == LABEL_REF
)
4808 /* Find the jump table used by this casesi jump. */
4809 rtx vec_label
= XEXP (XEXP (XVECEXP (pat
, 0, 1), 0), 0);
4810 rtx vec_insn
= next_real_insn (vec_label
);
4811 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
4812 PATTERN (vec_insn
) : NULL_RTX
;
4814 && (GET_CODE (vec_pat
) == ADDR_VEC
4815 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
4817 int i
, diff_p
= GET_CODE (vec_pat
) == ADDR_DIFF_VEC
;
4819 for (i
= 0; i
< XVECLEN (vec_pat
, diff_p
); i
++)
4821 rtx label
= XEXP (XVECEXP (vec_pat
, diff_p
, i
), 0);
4823 if (s390_find_pool (pool_list
, label
)
4824 != s390_find_pool (pool_list
, insn
))
4825 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
4832 /* Insert base register reload insns before every pool. */
4834 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
4836 rtx new_insn
= gen_reload_base (base_reg
, curr_pool
->label
);
4837 rtx insn
= curr_pool
->first_insn
;
4838 INSN_ADDRESSES_NEW (emit_insn_before (new_insn
, insn
), -1);
4841 /* Insert base register reload insns at every far label. */
4843 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4844 if (GET_CODE (insn
) == CODE_LABEL
4845 && bitmap_bit_p (far_labels
, CODE_LABEL_NUMBER (insn
)))
4847 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
4850 rtx new_insn
= gen_reload_base (base_reg
, pool
->label
);
4851 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
4856 BITMAP_XFREE (far_labels
);
4859 /* Recompute insn addresses. */
4861 init_insn_lengths ();
4862 shorten_branches (get_insns ());
4867 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4868 After we have decided to use this list, finish implementing
4869 all changes to the current function as required. BASE_REG is
4870 to be used as pool base register. */
4873 s390_chunkify_finish (struct constant_pool
*pool_list
, rtx base_reg
)
4875 struct constant_pool
*curr_pool
= NULL
;
4879 /* Replace all literal pool references. */
4881 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4884 replace_ltrel_base (&PATTERN (insn
), base_reg
);
4886 curr_pool
= s390_find_pool (pool_list
, insn
);
4890 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
4892 rtx addr
, pool_ref
= NULL_RTX
;
4893 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
4896 addr
= s390_find_constant (curr_pool
, get_pool_constant (pool_ref
),
4897 get_pool_mode (pool_ref
));
4898 addr
= gen_rtx_PLUS (Pmode
, base_reg
, addr
);
4899 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
4900 INSN_CODE (insn
) = -1;
4905 /* Dump out all literal pools. */
4907 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
4908 s390_dump_pool (curr_pool
, 0);
4910 /* Free pool list. */
4914 struct constant_pool
*next
= pool_list
->next
;
4915 s390_free_pool (pool_list
);
4920 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4921 We have decided we cannot use this list, so revert all changes
4922 to the current function that were done by s390_chunkify_start. */
4925 s390_chunkify_cancel (struct constant_pool
*pool_list
)
4927 struct constant_pool
*curr_pool
= NULL
;
4930 /* Remove all pool placeholder insns. */
4932 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
4934 /* Did we insert an extra barrier? Remove it. */
4935 rtx barrier
= PREV_INSN (curr_pool
->pool_insn
);
4936 rtx jump
= barrier
? PREV_INSN (barrier
) : NULL_RTX
;
4937 rtx label
= NEXT_INSN (curr_pool
->pool_insn
);
4939 if (jump
&& GET_CODE (jump
) == JUMP_INSN
4940 && barrier
&& GET_CODE (barrier
) == BARRIER
4941 && label
&& GET_CODE (label
) == CODE_LABEL
4942 && GET_CODE (PATTERN (jump
)) == SET
4943 && SET_DEST (PATTERN (jump
)) == pc_rtx
4944 && GET_CODE (SET_SRC (PATTERN (jump
))) == LABEL_REF
4945 && XEXP (SET_SRC (PATTERN (jump
)), 0) == label
)
4948 remove_insn (barrier
);
4949 remove_insn (label
);
4952 remove_insn (curr_pool
->pool_insn
);
4955 /* Remove all base register reload insns. */
4957 for (insn
= get_insns (); insn
; )
4959 rtx next_insn
= NEXT_INSN (insn
);
4961 if (GET_CODE (insn
) == INSN
4962 && GET_CODE (PATTERN (insn
)) == SET
4963 && GET_CODE (SET_SRC (PATTERN (insn
))) == UNSPEC
4964 && XINT (SET_SRC (PATTERN (insn
)), 1) == UNSPEC_RELOAD_BASE
)
4970 /* Free pool list. */
4974 struct constant_pool
*next
= pool_list
->next
;
4975 s390_free_pool (pool_list
);
4981 /* Output to FILE the constant pool entry EXP in mode MODE
4982 with alignment ALIGN. */
4985 s390_output_pool_entry (FILE *file
, rtx exp
, enum machine_mode mode
,
4990 switch (GET_MODE_CLASS (mode
))
4993 if (GET_CODE (exp
) != CONST_DOUBLE
)
4996 REAL_VALUE_FROM_CONST_DOUBLE (r
, exp
);
4997 assemble_real (r
, mode
, align
);
5001 if (GET_CODE (exp
) == CONST
5002 || GET_CODE (exp
) == SYMBOL_REF
5003 || GET_CODE (exp
) == LABEL_REF
)
5005 fputs (integer_asm_op (GET_MODE_SIZE (mode
), TRUE
), file
);
5006 s390_output_symbolic_const (file
, exp
);
5011 assemble_integer (exp
, GET_MODE_SIZE (mode
), align
, 1);
5021 /* Rework the prolog/epilog to avoid saving/restoring
5022 registers unnecessarily. BASE_USED specifies whether
5023 the literal pool base register needs to be saved. */
5026 s390_optimize_prolog (bool base_used
)
5028 int save_first
, save_last
, restore_first
, restore_last
;
5030 rtx insn
, new_insn
, next_insn
;
5032 /* Recompute regs_ever_live data for special registers. */
5033 regs_ever_live
[BASE_REGISTER
] = base_used
;
5034 regs_ever_live
[RETURN_REGNUM
] = cfun
->machine
->save_return_addr_p
;
5035 regs_ever_live
[STACK_POINTER_REGNUM
] = cfun
->machine
->frame_size
> 0;
5038 /* Find first and last gpr to be saved. */
5040 for (i
= 6; i
< 16; i
++)
5041 if (regs_ever_live
[i
])
5043 || i
== STACK_POINTER_REGNUM
5044 || i
== RETURN_REGNUM
5045 || i
== BASE_REGISTER
5046 || (flag_pic
&& i
== (int)PIC_OFFSET_TABLE_REGNUM
))
5049 for (j
= 15; j
> i
; j
--)
5050 if (regs_ever_live
[j
])
5052 || j
== STACK_POINTER_REGNUM
5053 || j
== RETURN_REGNUM
5054 || j
== BASE_REGISTER
5055 || (flag_pic
&& j
== (int)PIC_OFFSET_TABLE_REGNUM
))
5060 /* Nothing to save/restore. */
5061 save_first
= restore_first
= -1;
5062 save_last
= restore_last
= -1;
5066 /* Save/restore from i to j. */
5067 save_first
= restore_first
= i
;
5068 save_last
= restore_last
= j
;
5071 /* Varargs functions need to save gprs 2 to 6. */
5072 if (current_function_stdarg
)
5080 /* If all special registers are in fact used, there's nothing we
5081 can do, so no point in walking the insn list. */
5082 if (i
<= BASE_REGISTER
&& j
>= BASE_REGISTER
5083 && (TARGET_CPU_ZARCH
|| (i
<= RETURN_REGNUM
&& j
>= RETURN_REGNUM
)))
5087 /* Search for prolog/epilog insns and replace them. */
5089 for (insn
= get_insns (); insn
; insn
= next_insn
)
5091 int first
, last
, off
;
5092 rtx set
, base
, offset
;
5094 next_insn
= NEXT_INSN (insn
);
5096 if (GET_CODE (insn
) != INSN
)
5099 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5100 && store_multiple_operation (PATTERN (insn
), VOIDmode
))
5102 set
= XVECEXP (PATTERN (insn
), 0, 0);
5103 first
= REGNO (SET_SRC (set
));
5104 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5105 offset
= const0_rtx
;
5106 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5107 off
= INTVAL (offset
) - first
* UNITS_PER_WORD
;
5109 if (GET_CODE (base
) != REG
|| off
< 0)
5111 if (first
> BASE_REGISTER
|| last
< BASE_REGISTER
)
5114 if (save_first
!= -1)
5116 new_insn
= save_gprs (base
, off
, save_first
, save_last
);
5117 new_insn
= emit_insn_before (new_insn
, insn
);
5118 INSN_ADDRESSES_NEW (new_insn
, -1);
5125 if (GET_CODE (PATTERN (insn
)) == SET
5126 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
5127 && REGNO (SET_SRC (PATTERN (insn
))) == BASE_REGISTER
5128 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
5130 set
= PATTERN (insn
);
5131 offset
= const0_rtx
;
5132 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5133 off
= INTVAL (offset
) - BASE_REGISTER
* UNITS_PER_WORD
;
5135 if (GET_CODE (base
) != REG
|| off
< 0)
5138 if (save_first
!= -1)
5140 new_insn
= save_gprs (base
, off
, save_first
, save_last
);
5141 new_insn
= emit_insn_before (new_insn
, insn
);
5142 INSN_ADDRESSES_NEW (new_insn
, -1);
5149 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5150 && load_multiple_operation (PATTERN (insn
), VOIDmode
))
5152 set
= XVECEXP (PATTERN (insn
), 0, 0);
5153 first
= REGNO (SET_DEST (set
));
5154 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5155 offset
= const0_rtx
;
5156 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5157 off
= INTVAL (offset
) - first
* UNITS_PER_WORD
;
5159 if (GET_CODE (base
) != REG
|| off
< 0)
5161 if (first
> BASE_REGISTER
|| last
< BASE_REGISTER
)
5164 if (restore_first
!= -1)
5166 new_insn
= restore_gprs (base
, off
, restore_first
, restore_last
);
5167 new_insn
= emit_insn_before (new_insn
, insn
);
5168 INSN_ADDRESSES_NEW (new_insn
, -1);
5175 if (GET_CODE (PATTERN (insn
)) == SET
5176 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
5177 && REGNO (SET_DEST (PATTERN (insn
))) == BASE_REGISTER
5178 && GET_CODE (SET_SRC (PATTERN (insn
))) == MEM
)
5180 set
= PATTERN (insn
);
5181 offset
= const0_rtx
;
5182 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5183 off
= INTVAL (offset
) - BASE_REGISTER
* UNITS_PER_WORD
;
5185 if (GET_CODE (base
) != REG
|| off
< 0)
5188 if (restore_first
!= -1)
5190 new_insn
= restore_gprs (base
, off
, restore_first
, restore_last
);
5191 new_insn
= emit_insn_before (new_insn
, insn
);
5192 INSN_ADDRESSES_NEW (new_insn
, -1);
5201 /* Perform machine-dependent processing. */
5206 rtx base_reg
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
5207 bool base_used
= false;
5208 bool pool_overflow
= false;
5210 /* Make sure all splits have been performed; splits after
5211 machine_dependent_reorg might confuse insn length counts. */
5212 split_all_insns_noflow ();
5215 /* In small leaf functions, try to use an unused call-clobbered
5216 register as base register to avoid save/restore overhead. */
5217 if (current_function_is_leaf
&& !regs_ever_live
[5])
5218 base_reg
= gen_rtx_REG (Pmode
, 5);
5221 /* Install the main literal pool and the associated base
5222 register load insns.
5224 In addition, there are two problematic situations we need
5227 - the literal pool might be > 4096 bytes in size, so that
5228 some of its elements cannot be directly accessed
5230 - a branch target might be > 64K away from the branch, so that
5231 it is not possible to use a PC-relative instruction.
5233 To fix those, we split the single literal pool into multiple
5234 pool chunks, reloading the pool base register at various
5235 points throughout the function to ensure it always points to
5236 the pool chunk the following code expects, and / or replace
5237 PC-relative branches by absolute branches.
5239 However, the two problems are interdependent: splitting the
5240 literal pool can move a branch further away from its target,
5241 causing the 64K limit to overflow, and on the other hand,
5242 replacing a PC-relative branch by an absolute branch means
5243 we need to put the branch target address into the literal
5244 pool, possibly causing it to overflow.
5246 So, we loop trying to fix up both problems until we manage
5247 to satisfy both conditions at the same time. Note that the
5248 loop is guaranteed to terminate as every pass of the loop
5249 strictly decreases the total number of PC-relative branches
5250 in the function. (This is not completely true as there
5251 might be branch-over-pool insns introduced by chunkify_start.
5252 Those never need to be split however.) */
5256 struct constant_pool
*pool
= NULL
;
5258 /* Collect the literal pool. */
5261 pool
= s390_mainpool_start ();
5263 pool_overflow
= true;
5266 /* If literal pool overflowed, start to chunkify it. */
5268 pool
= s390_chunkify_start (base_reg
);
5270 /* Split out-of-range branches. If this has created new
5271 literal pool entries, cancel current chunk list and
5272 recompute it. zSeries machines have large branch
5273 instructions, so we never need to split a branch. */
5274 if (!TARGET_CPU_ZARCH
&& s390_split_branches ())
5277 s390_chunkify_cancel (pool
);
5279 s390_mainpool_cancel (pool
);
5284 /* If we made it up to here, both conditions are satisfied.
5285 Finish up literal pool related changes. */
5286 if ((pool_overflow
|| pool
->size
> 0)
5287 && REGNO (base_reg
) == BASE_REGISTER
)
5291 s390_chunkify_finish (pool
, base_reg
);
5293 s390_mainpool_finish (pool
, base_reg
);
5298 s390_optimize_prolog (base_used
);
5302 /* Return an RTL expression representing the value of the return address
5303 for the frame COUNT steps up from the current frame. FRAME is the
5304 frame pointer of that frame. */
5307 s390_return_addr_rtx (int count
, rtx frame
)
5311 /* For the current frame, we need to make sure the initial
5312 value of RETURN_REGNUM is actually saved. */
5315 cfun
->machine
->save_return_addr_p
= true;
5317 /* To retrieve the return address we read the stack slot where the
5318 corresponding RETURN_REGNUM value was saved. */
5320 addr
= plus_constant (frame
, RETURN_REGNUM
* UNITS_PER_WORD
);
5321 addr
= memory_address (Pmode
, addr
);
5322 return gen_rtx_MEM (Pmode
, addr
);
5325 /* Find first call clobbered register unsused in a function.
5326 This could be used as base register in a leaf function
5327 or for holding the return address before epilogue. */
5330 find_unused_clobbered_reg (void)
5333 for (i
= 0; i
< 6; i
++)
5334 if (!regs_ever_live
[i
])
5339 /* Fill FRAME with info about frame of current function. */
5342 s390_frame_info (void)
5345 HOST_WIDE_INT fsize
= get_frame_size ();
5347 if (fsize
> 0x7fff0000)
5348 fatal_error ("Total size of local variables exceeds architecture limit.");
5350 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5351 cfun
->machine
->save_fprs_p
= 0;
5353 for (i
= 24; i
< 32; i
++)
5354 if (regs_ever_live
[i
] && !global_regs
[i
])
5356 cfun
->machine
->save_fprs_p
= 1;
5360 cfun
->machine
->frame_size
= fsize
+ cfun
->machine
->save_fprs_p
* 64;
5362 /* Does function need to setup frame and save area. */
5364 if (! current_function_is_leaf
5365 || cfun
->machine
->frame_size
> 0
5366 || current_function_calls_alloca
5367 || current_function_stdarg
)
5368 cfun
->machine
->frame_size
+= STARTING_FRAME_OFFSET
;
5370 /* If we use the return register, we'll need to make sure
5371 it is going to be saved/restored. */
5373 if (!current_function_is_leaf
5374 || regs_ever_live
[RETURN_REGNUM
])
5375 cfun
->machine
->save_return_addr_p
= 1;
5377 /* Find first and last gpr to be saved. Note that at this point,
5378 we assume the base register and -on S/390- the return register
5379 always need to be saved. This is done because the usage of these
5380 register might change even after the prolog was emitted.
5381 If it turns out later that we really don't need them, the
5382 prolog/epilog code is modified again. */
5384 regs_ever_live
[BASE_REGISTER
] = 1;
5385 if (!TARGET_CPU_ZARCH
|| cfun
->machine
->save_return_addr_p
)
5386 regs_ever_live
[RETURN_REGNUM
] = 1;
5387 regs_ever_live
[STACK_POINTER_REGNUM
] = cfun
->machine
->frame_size
> 0;
5389 for (i
= 6; i
< 16; i
++)
5390 if (regs_ever_live
[i
])
5392 || i
== STACK_POINTER_REGNUM
5393 || i
== RETURN_REGNUM
5394 || i
== BASE_REGISTER
5395 || (flag_pic
&& i
== (int)PIC_OFFSET_TABLE_REGNUM
))
5398 for (j
= 15; j
> i
; j
--)
5399 if (regs_ever_live
[j
])
5401 || j
== STACK_POINTER_REGNUM
5402 || j
== RETURN_REGNUM
5403 || j
== BASE_REGISTER
5404 || (flag_pic
&& j
== (int)PIC_OFFSET_TABLE_REGNUM
))
5407 /* Save / Restore from gpr i to j. */
5408 cfun
->machine
->first_save_gpr
= i
;
5409 cfun
->machine
->first_restore_gpr
= i
;
5410 cfun
->machine
->last_save_gpr
= j
;
5412 /* Varargs functions need to save gprs 2 to 6. */
5413 if (current_function_stdarg
)
5414 cfun
->machine
->first_save_gpr
= 2;
5417 /* Return offset between argument pointer and frame pointer
5418 initially after prologue. */
5421 s390_arg_frame_offset (void)
5423 HOST_WIDE_INT fsize
= get_frame_size ();
5426 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5429 for (i
= 24; i
< 32; i
++)
5430 if (regs_ever_live
[i
] && !global_regs
[i
])
5436 fsize
= fsize
+ save_fprs_p
* 64;
5438 /* Does function need to setup frame and save area. */
5440 if (! current_function_is_leaf
5442 || current_function_calls_alloca
5443 || current_function_stdarg
)
5444 fsize
+= STARTING_FRAME_OFFSET
;
5445 return fsize
+ STACK_POINTER_OFFSET
;
5448 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5449 to register BASE. Return generated insn. */
5452 save_fpr (rtx base
, int offset
, int regnum
)
5455 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
5456 set_mem_alias_set (addr
, s390_sr_alias_set
);
5458 return emit_move_insn (addr
, gen_rtx_REG (DFmode
, regnum
));
5461 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5462 to register BASE. Return generated insn. */
5465 restore_fpr (rtx base
, int offset
, int regnum
)
5468 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
5469 set_mem_alias_set (addr
, s390_sr_alias_set
);
5471 return emit_move_insn (gen_rtx_REG (DFmode
, regnum
), addr
);
5474 /* Generate insn to save registers FIRST to LAST into
5475 the register save area located at offset OFFSET
5476 relative to register BASE. */
5479 save_gprs (rtx base
, int offset
, int first
, int last
)
5481 rtx addr
, insn
, note
;
5484 addr
= plus_constant (base
, offset
+ first
* UNITS_PER_WORD
);
5485 addr
= gen_rtx_MEM (Pmode
, addr
);
5486 set_mem_alias_set (addr
, s390_sr_alias_set
);
5488 /* Special-case single register. */
5492 insn
= gen_movdi (addr
, gen_rtx_REG (Pmode
, first
));
5494 insn
= gen_movsi (addr
, gen_rtx_REG (Pmode
, first
));
5496 RTX_FRAME_RELATED_P (insn
) = 1;
5501 insn
= gen_store_multiple (addr
,
5502 gen_rtx_REG (Pmode
, first
),
5503 GEN_INT (last
- first
+ 1));
5506 /* We need to set the FRAME_RELATED flag on all SETs
5507 inside the store-multiple pattern.
5509 However, we must not emit DWARF records for registers 2..5
5510 if they are stored for use by variable arguments ...
5512 ??? Unfortunately, it is not enough to simply not the the
5513 FRAME_RELATED flags for those SETs, because the first SET
5514 of the PARALLEL is always treated as if it had the flag
5515 set, even if it does not. Therefore we emit a new pattern
5516 without those registers as REG_FRAME_RELATED_EXPR note. */
5520 rtx pat
= PATTERN (insn
);
5522 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
5523 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
5524 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
5526 RTX_FRAME_RELATED_P (insn
) = 1;
5530 addr
= plus_constant (base
, offset
+ 6 * UNITS_PER_WORD
);
5531 note
= gen_store_multiple (gen_rtx_MEM (Pmode
, addr
),
5532 gen_rtx_REG (Pmode
, 6),
5533 GEN_INT (last
- 6 + 1));
5534 note
= PATTERN (note
);
5537 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5538 note
, REG_NOTES (insn
));
5540 for (i
= 0; i
< XVECLEN (note
, 0); i
++)
5541 if (GET_CODE (XVECEXP (note
, 0, i
)) == SET
)
5542 RTX_FRAME_RELATED_P (XVECEXP (note
, 0, i
)) = 1;
5544 RTX_FRAME_RELATED_P (insn
) = 1;
5550 /* Generate insn to restore registers FIRST to LAST from
5551 the register save area located at offset OFFSET
5552 relative to register BASE. */
5555 restore_gprs (rtx base
, int offset
, int first
, int last
)
5559 addr
= plus_constant (base
, offset
+ first
* UNITS_PER_WORD
);
5560 addr
= gen_rtx_MEM (Pmode
, addr
);
5561 set_mem_alias_set (addr
, s390_sr_alias_set
);
5563 /* Special-case single register. */
5567 insn
= gen_movdi (gen_rtx_REG (Pmode
, first
), addr
);
5569 insn
= gen_movsi (gen_rtx_REG (Pmode
, first
), addr
);
5574 insn
= gen_load_multiple (gen_rtx_REG (Pmode
, first
),
5576 GEN_INT (last
- first
+ 1));
5580 /* Emit code to load the GOT register. If MAYBE_DEAD is true,
5581 annotate generated insns with REG_MAYBE_DEAD notes. */
5583 static GTY(()) rtx got_symbol
;
5585 s390_load_got (int maybe_dead
)
5589 got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
5590 SYMBOL_REF_FLAGS (got_symbol
) = SYMBOL_FLAG_LOCAL
;
5593 if (TARGET_CPU_ZARCH
)
5595 rtx insn
= emit_move_insn (pic_offset_table_rtx
, got_symbol
);
5597 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
5604 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, got_symbol
),
5605 UNSPEC_LTREL_OFFSET
);
5606 offset
= gen_rtx_CONST (Pmode
, offset
);
5607 offset
= force_const_mem (Pmode
, offset
);
5609 insn
= emit_move_insn (pic_offset_table_rtx
, offset
);
5611 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
5614 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, XEXP (offset
, 0)),
5616 offset
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
5618 insn
= emit_move_insn (pic_offset_table_rtx
, offset
);
5620 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
5625 /* Expand the prologue into a bunch of separate insns. */
5628 s390_emit_prologue (void)
5634 /* Compute frame_info. */
5638 /* Choose best register to use for temp use within prologue.
5639 See below for why TPF must use the register 1. */
5641 if (!current_function_is_leaf
5643 temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
5645 temp_reg
= gen_rtx_REG (Pmode
, 1);
5647 /* Save call saved gprs. */
5649 insn
= save_gprs (stack_pointer_rtx
, 0,
5650 cfun
->machine
->first_save_gpr
, cfun
->machine
->last_save_gpr
);
5653 /* Dummy insn to mark literal pool slot. */
5655 emit_insn (gen_main_pool ());
5657 /* Save fprs for variable args. */
5659 if (current_function_stdarg
)
5660 for (i
= 16; i
< (TARGET_64BIT
? 20 : 18); i
++)
5661 save_fpr (stack_pointer_rtx
, 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
5663 /* Save fprs 4 and 6 if used (31 bit ABI). */
5666 for (i
= 18; i
< 20; i
++)
5667 if (regs_ever_live
[i
] && !global_regs
[i
])
5669 insn
= save_fpr (stack_pointer_rtx
, 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
5670 RTX_FRAME_RELATED_P (insn
) = 1;
5673 /* Decrement stack pointer. */
5675 if (cfun
->machine
->frame_size
> 0)
5677 rtx frame_off
= GEN_INT (-cfun
->machine
->frame_size
);
5679 /* Save incoming stack pointer into temp reg. */
5681 if (TARGET_BACKCHAIN
|| cfun
->machine
->save_fprs_p
)
5683 insn
= emit_insn (gen_move_insn (temp_reg
, stack_pointer_rtx
));
5686 /* Subtract frame size from stack pointer. */
5688 if (DISP_IN_RANGE (INTVAL (frame_off
)))
5690 insn
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
5691 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
5693 insn
= emit_insn (insn
);
5697 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off
), 'K'))
5698 frame_off
= force_const_mem (Pmode
, frame_off
);
5700 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, frame_off
));
5703 RTX_FRAME_RELATED_P (insn
) = 1;
5705 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5706 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
5707 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
5708 GEN_INT (-cfun
->machine
->frame_size
))),
5711 /* Set backchain. */
5713 if (TARGET_BACKCHAIN
)
5715 addr
= gen_rtx_MEM (Pmode
, stack_pointer_rtx
);
5716 set_mem_alias_set (addr
, s390_sr_alias_set
);
5717 insn
= emit_insn (gen_move_insn (addr
, temp_reg
));
5720 /* If we support asynchronous exceptions (e.g. for Java),
5721 we need to make sure the backchain pointer is set up
5722 before any possibly trapping memory access. */
5724 if (TARGET_BACKCHAIN
&& flag_non_call_exceptions
)
5726 addr
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
5727 emit_insn (gen_rtx_CLOBBER (VOIDmode
, addr
));
5731 /* Save fprs 8 - 15 (64 bit ABI). */
5733 if (cfun
->machine
->save_fprs_p
)
5735 insn
= emit_insn (gen_add2_insn (temp_reg
, GEN_INT(-64)));
5737 for (i
= 24; i
< 32; i
++)
5738 if (regs_ever_live
[i
] && !global_regs
[i
])
5740 rtx addr
= plus_constant (stack_pointer_rtx
,
5741 cfun
->machine
->frame_size
- 64 + (i
-24)*8);
5743 insn
= save_fpr (temp_reg
, (i
-24)*8, i
);
5744 RTX_FRAME_RELATED_P (insn
) = 1;
5746 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
5747 gen_rtx_SET (VOIDmode
,
5748 gen_rtx_MEM (DFmode
, addr
),
5749 gen_rtx_REG (DFmode
, i
)),
5754 /* Set frame pointer, if needed. */
5756 if (frame_pointer_needed
)
5758 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
5759 RTX_FRAME_RELATED_P (insn
) = 1;
5762 /* Set up got pointer, if needed. */
5764 if (flag_pic
&& regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
])
5765 s390_load_got(true);
5769 /* Generate a BAS instruction to serve as a function
5770 entry intercept to facilitate the use of tracing
5771 algorithms located at the branch target.
5773 This must use register 1. */
5778 addr
= GEN_INT (0xfe0);
5779 unkn
= CONST0_RTX (SImode
);
5780 link
= gen_rtx_REG (Pmode
, 1);
5782 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode
, addr
), unkn
, link
));
5784 /* Emit a blockage here so that all code
5785 lies between the profiling mechanisms. */
5786 emit_insn (gen_blockage ());
5790 /* Expand the epilogue into a bunch of separate insns. */
5793 s390_emit_epilogue (void)
5795 rtx frame_pointer
, return_reg
;
5796 int area_bottom
, area_top
, offset
= 0;
5803 /* Generate a BAS instruction to serve as a function
5804 entry intercept to facilitate the use of tracing
5805 algorithms located at the branch target.
5807 This must use register 1. */
5813 addr
= GEN_INT (0xfe6);
5814 unkn
= CONST0_RTX (SImode
);
5815 link
= gen_rtx_REG (Pmode
, 1);
5817 /* Emit a blockage here so that all code
5818 lies between the profiling mechanisms. */
5819 emit_insn (gen_blockage ());
5821 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode
, addr
), unkn
, link
));
5824 /* Check whether to use frame or stack pointer for restore. */
5826 frame_pointer
= frame_pointer_needed
?
5827 hard_frame_pointer_rtx
: stack_pointer_rtx
;
5829 /* Compute which parts of the save area we need to access. */
5831 if (cfun
->machine
->first_restore_gpr
!= -1)
5833 area_bottom
= cfun
->machine
->first_restore_gpr
* UNITS_PER_WORD
;
5834 area_top
= (cfun
->machine
->last_save_gpr
+ 1) * UNITS_PER_WORD
;
5838 area_bottom
= INT_MAX
;
5844 if (cfun
->machine
->save_fprs_p
)
5846 if (area_bottom
> -64)
5854 for (i
= 18; i
< 20; i
++)
5855 if (regs_ever_live
[i
] && !global_regs
[i
])
5857 if (area_bottom
> 16*UNITS_PER_WORD
+ 8*(i
-16))
5858 area_bottom
= 16*UNITS_PER_WORD
+ 8*(i
-16);
5859 if (area_top
< 16*UNITS_PER_WORD
+ 8*(i
-16) + 8)
5860 area_top
= 16*UNITS_PER_WORD
+ 8*(i
-16) + 8;
5864 /* Check whether we can access the register save area.
5865 If not, increment the frame pointer as required. */
5867 if (area_top
<= area_bottom
)
5869 /* Nothing to restore. */
5871 else if (DISP_IN_RANGE (cfun
->machine
->frame_size
+ area_bottom
)
5872 && DISP_IN_RANGE (cfun
->machine
->frame_size
+ area_top
-1))
5874 /* Area is in range. */
5875 offset
= cfun
->machine
->frame_size
;
5879 rtx insn
, frame_off
;
5881 offset
= area_bottom
< 0 ? -area_bottom
: 0;
5882 frame_off
= GEN_INT (cfun
->machine
->frame_size
- offset
);
5884 if (DISP_IN_RANGE (INTVAL (frame_off
)))
5886 insn
= gen_rtx_SET (VOIDmode
, frame_pointer
,
5887 gen_rtx_PLUS (Pmode
, frame_pointer
, frame_off
));
5888 insn
= emit_insn (insn
);
5892 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off
), 'K'))
5893 frame_off
= force_const_mem (Pmode
, frame_off
);
5895 insn
= emit_insn (gen_add2_insn (frame_pointer
, frame_off
));
5899 /* Restore call saved fprs. */
5903 if (cfun
->machine
->save_fprs_p
)
5904 for (i
= 24; i
< 32; i
++)
5905 if (regs_ever_live
[i
] && !global_regs
[i
])
5906 restore_fpr (frame_pointer
,
5907 offset
- 64 + (i
-24) * 8, i
);
5911 for (i
= 18; i
< 20; i
++)
5912 if (regs_ever_live
[i
] && !global_regs
[i
])
5913 restore_fpr (frame_pointer
,
5914 offset
+ 16*UNITS_PER_WORD
+ 8*(i
-16), i
);
5917 /* Return register. */
5919 return_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
5921 /* Restore call saved gprs. */
5923 if (cfun
->machine
->first_restore_gpr
!= -1)
5928 /* Check for global register and save them
5929 to stack location from where they get restored. */
5931 for (i
= cfun
->machine
->first_restore_gpr
;
5932 i
<= cfun
->machine
->last_save_gpr
;
5935 /* These registers are special and need to be
5936 restored in any case. */
5937 if (i
== STACK_POINTER_REGNUM
5938 || i
== RETURN_REGNUM
5939 || i
== BASE_REGISTER
5940 || (flag_pic
&& i
== (int)PIC_OFFSET_TABLE_REGNUM
))
5945 addr
= plus_constant (frame_pointer
,
5946 offset
+ i
* UNITS_PER_WORD
);
5947 addr
= gen_rtx_MEM (Pmode
, addr
);
5948 set_mem_alias_set (addr
, s390_sr_alias_set
);
5949 emit_move_insn (addr
, gen_rtx_REG (Pmode
, i
));
5953 /* Fetch return address from stack before load multiple,
5954 this will do good for scheduling. */
5956 if (cfun
->machine
->save_return_addr_p
5957 || (cfun
->machine
->first_restore_gpr
< BASE_REGISTER
5958 && cfun
->machine
->last_save_gpr
> RETURN_REGNUM
))
5960 int return_regnum
= find_unused_clobbered_reg();
5963 return_reg
= gen_rtx_REG (Pmode
, return_regnum
);
5965 addr
= plus_constant (frame_pointer
,
5966 offset
+ RETURN_REGNUM
* UNITS_PER_WORD
);
5967 addr
= gen_rtx_MEM (Pmode
, addr
);
5968 set_mem_alias_set (addr
, s390_sr_alias_set
);
5969 emit_move_insn (return_reg
, addr
);
5972 /* ??? As references to the base register are not made
5973 explicit in insn RTX code, we have to add a barrier here
5974 to prevent incorrect scheduling. */
5976 emit_insn (gen_blockage());
5978 insn
= restore_gprs (frame_pointer
, offset
,
5979 cfun
->machine
->first_restore_gpr
,
5980 cfun
->machine
->last_save_gpr
);
5984 /* Return to caller. */
5986 p
= rtvec_alloc (2);
5988 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
5989 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
, return_reg
);
5990 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
5994 /* Return the size in bytes of a function argument of
5995 type TYPE and/or mode MODE. At least one of TYPE or
5996 MODE must be specified. */
5999 s390_function_arg_size (enum machine_mode mode
, tree type
)
6002 return int_size_in_bytes (type
);
6004 /* No type info available for some library calls ... */
6005 if (mode
!= BLKmode
)
6006 return GET_MODE_SIZE (mode
);
6008 /* If we have neither type nor mode, abort */
6012 /* Return true if a function argument of type TYPE and mode MODE
6013 is to be passed in a floating-point register, if available. */
6016 s390_function_arg_float (enum machine_mode mode
, tree type
)
6018 int size
= s390_function_arg_size (mode
, type
);
6022 /* Soft-float changes the ABI: no floating-point registers are used. */
6023 if (TARGET_SOFT_FLOAT
)
6026 /* No type info available for some library calls ... */
6028 return mode
== SFmode
|| mode
== DFmode
;
6030 /* The ABI says that record types with a single member are treated
6031 just like that member would be. */
6032 while (TREE_CODE (type
) == RECORD_TYPE
)
6034 tree field
, single
= NULL_TREE
;
6036 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
6038 if (TREE_CODE (field
) != FIELD_DECL
)
6041 if (single
== NULL_TREE
)
6042 single
= TREE_TYPE (field
);
6047 if (single
== NULL_TREE
)
6053 return TREE_CODE (type
) == REAL_TYPE
;
6056 /* Return true if a function argument of type TYPE and mode MODE
6057 is to be passed in an integer register, or a pair of integer
6058 registers, if available. */
6061 s390_function_arg_integer (enum machine_mode mode
, tree type
)
6063 int size
= s390_function_arg_size (mode
, type
);
6067 /* No type info available for some library calls ... */
6069 return GET_MODE_CLASS (mode
) == MODE_INT
6070 || (TARGET_SOFT_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
);
6072 /* We accept small integral (and similar) types. */
6073 if (INTEGRAL_TYPE_P (type
)
6074 || POINTER_TYPE_P (type
)
6075 || TREE_CODE (type
) == OFFSET_TYPE
6076 || (TARGET_SOFT_FLOAT
&& TREE_CODE (type
) == REAL_TYPE
))
6079 /* We also accept structs of size 1, 2, 4, 8 that are not
6080 passed in floating-point registers. */
6081 if (AGGREGATE_TYPE_P (type
)
6082 && exact_log2 (size
) >= 0
6083 && !s390_function_arg_float (mode
, type
))
6089 /* Return 1 if a function argument of type TYPE and mode MODE
6090 is to be passed by reference. The ABI specifies that only
6091 structures of size 1, 2, 4, or 8 bytes are passed by value,
6092 all other structures (and complex numbers) are passed by
6096 s390_function_arg_pass_by_reference (enum machine_mode mode
, tree type
)
6098 int size
= s390_function_arg_size (mode
, type
);
6104 if (AGGREGATE_TYPE_P (type
) && exact_log2 (size
) < 0)
6107 if (TREE_CODE (type
) == COMPLEX_TYPE
6108 || TREE_CODE (type
) == VECTOR_TYPE
)
6115 /* Update the data in CUM to advance over an argument of mode MODE and
6116 data type TYPE. (TYPE is null for libcalls where that information
6117 may not be available.). The boolean NAMED specifies whether the
6118 argument is a named argument (as opposed to an unnamed argument
6119 matching an ellipsis). */
6122 s390_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
6123 tree type
, int named ATTRIBUTE_UNUSED
)
6125 if (s390_function_arg_pass_by_reference (mode
, type
))
6129 else if (s390_function_arg_float (mode
, type
))
6133 else if (s390_function_arg_integer (mode
, type
))
6135 int size
= s390_function_arg_size (mode
, type
);
6136 cum
->gprs
+= ((size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
);
6142 /* Define where to put the arguments to a function.
6143 Value is zero to push the argument on the stack,
6144 or a hard register in which to store the argument.
6146 MODE is the argument's machine mode.
6147 TYPE is the data type of the argument (as a tree).
6148 This is null for libcalls where that information may
6150 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6151 the preceding args and about the function being called.
6152 NAMED is nonzero if this argument is a named parameter
6153 (otherwise it is an extra parameter matching an ellipsis).
6155 On S/390, we use general purpose registers 2 through 6 to
6156 pass integer, pointer, and certain structure arguments, and
6157 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6158 to pass floating point arguments. All remaining arguments
6159 are pushed to the stack. */
6162 s390_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
6163 int named ATTRIBUTE_UNUSED
)
6165 if (s390_function_arg_pass_by_reference (mode
, type
))
6168 if (s390_function_arg_float (mode
, type
))
6170 if (cum
->fprs
+ 1 > (TARGET_64BIT
? 4 : 2))
6173 return gen_rtx (REG
, mode
, cum
->fprs
+ 16);
6175 else if (s390_function_arg_integer (mode
, type
))
6177 int size
= s390_function_arg_size (mode
, type
);
6178 int n_gprs
= (size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
;
6180 if (cum
->gprs
+ n_gprs
> 5)
6183 return gen_rtx (REG
, mode
, cum
->gprs
+ 2);
6186 /* After the real arguments, expand_call calls us once again
6187 with a void_type_node type. Whatever we return here is
6188 passed as operand 2 to the call expanders.
6190 We don't need this feature ... */
6191 else if (type
== void_type_node
)
6197 /* Return true if return values of type TYPE should be returned
6198 in a memory buffer whose address is passed by the caller as
6199 hidden first argument. */
6202 s390_return_in_memory (tree type
, tree fundecl ATTRIBUTE_UNUSED
)
6204 /* We accept small integral (and similar) types. */
6205 if (INTEGRAL_TYPE_P (type
)
6206 || POINTER_TYPE_P (type
)
6207 || TREE_CODE (type
) == OFFSET_TYPE
6208 || TREE_CODE (type
) == REAL_TYPE
)
6209 return int_size_in_bytes (type
) > 8;
6211 /* Aggregates and similar constructs are always returned
6213 if (AGGREGATE_TYPE_P (type
)
6214 || TREE_CODE (type
) == COMPLEX_TYPE
6215 || TREE_CODE (type
) == VECTOR_TYPE
)
6218 /* ??? We get called on all sorts of random stuff from
6219 aggregate_value_p. We can't abort, but it's not clear
6220 what's safe to return. Pretend it's a struct I guess. */
6224 /* Define where to return a (scalar) value of type TYPE.
6225 If TYPE is null, define where to return a (scalar)
6226 value of mode MODE from a libcall. */
6229 s390_function_value (tree type
, enum machine_mode mode
)
6233 int unsignedp
= TREE_UNSIGNED (type
);
6234 mode
= promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1);
6237 if (GET_MODE_CLASS (mode
) != MODE_INT
6238 && GET_MODE_CLASS (mode
) != MODE_FLOAT
)
6240 if (GET_MODE_SIZE (mode
) > 8)
6243 if (TARGET_HARD_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
)
6244 return gen_rtx_REG (mode
, 16);
6246 return gen_rtx_REG (mode
, 2);
6250 /* Create and return the va_list datatype.
6252 On S/390, va_list is an array type equivalent to
6254 typedef struct __va_list_tag
6258 void *__overflow_arg_area;
6259 void *__reg_save_area;
6262 where __gpr and __fpr hold the number of general purpose
6263 or floating point arguments used up to now, respectively,
6264 __overflow_arg_area points to the stack location of the
6265 next argument passed on the stack, and __reg_save_area
6266 always points to the start of the register area in the
6267 call frame of the current function. The function prologue
6268 saves all registers used for argument passing into this
6269 area if the function uses variable arguments. */
6272 s390_build_builtin_va_list (void)
6274 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
6276 record
= lang_hooks
.types
.make_type (RECORD_TYPE
);
6279 build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
6281 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("__gpr"),
6282 long_integer_type_node
);
6283 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("__fpr"),
6284 long_integer_type_node
);
6285 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("__overflow_arg_area"),
6287 f_sav
= build_decl (FIELD_DECL
, get_identifier ("__reg_save_area"),
6290 DECL_FIELD_CONTEXT (f_gpr
) = record
;
6291 DECL_FIELD_CONTEXT (f_fpr
) = record
;
6292 DECL_FIELD_CONTEXT (f_ovf
) = record
;
6293 DECL_FIELD_CONTEXT (f_sav
) = record
;
6295 TREE_CHAIN (record
) = type_decl
;
6296 TYPE_NAME (record
) = type_decl
;
6297 TYPE_FIELDS (record
) = f_gpr
;
6298 TREE_CHAIN (f_gpr
) = f_fpr
;
6299 TREE_CHAIN (f_fpr
) = f_ovf
;
6300 TREE_CHAIN (f_ovf
) = f_sav
;
6302 layout_type (record
);
6304 /* The correct type is an array type of one element. */
6305 return build_array_type (record
, build_index_type (size_zero_node
));
6308 /* Implement va_start by filling the va_list structure VALIST.
6309 STDARG_P is always true, and ignored.
6310 NEXTARG points to the first anonymous stack argument.
6312 The following global variables are used to initialize
6313 the va_list structure:
6315 current_function_args_info:
6316 holds number of gprs and fprs used for named arguments.
6317 current_function_arg_offset_rtx:
6318 holds the offset of the first anonymous stack argument
6319 (relative to the virtual arg pointer). */
6322 s390_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
6324 HOST_WIDE_INT n_gpr
, n_fpr
;
6326 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
6327 tree gpr
, fpr
, ovf
, sav
, t
;
6329 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
6330 f_fpr
= TREE_CHAIN (f_gpr
);
6331 f_ovf
= TREE_CHAIN (f_fpr
);
6332 f_sav
= TREE_CHAIN (f_ovf
);
6334 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
6335 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
6336 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
6337 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
6338 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
6340 /* Count number of gp and fp argument registers used. */
6342 n_gpr
= current_function_args_info
.gprs
;
6343 n_fpr
= current_function_args_info
.fprs
;
6345 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
6346 TREE_SIDE_EFFECTS (t
) = 1;
6347 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6349 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
6350 TREE_SIDE_EFFECTS (t
) = 1;
6351 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6353 /* Find the overflow area. */
6354 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
6356 off
= INTVAL (current_function_arg_offset_rtx
);
6357 off
= off
< 0 ? 0 : off
;
6358 if (TARGET_DEBUG_ARG
)
6359 fprintf (stderr
, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
6360 (int)n_gpr
, (int)n_fpr
, off
);
6362 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
, build_int_2 (off
, 0));
6364 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
6365 TREE_SIDE_EFFECTS (t
) = 1;
6366 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6368 /* Find the register save area. */
6369 t
= make_tree (TREE_TYPE (sav
), virtual_incoming_args_rtx
);
6370 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
6371 build_int_2 (-STACK_POINTER_OFFSET
, -1));
6372 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
6373 TREE_SIDE_EFFECTS (t
) = 1;
6374 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6377 /* Implement va_arg by updating the va_list structure
6378 VALIST as required to retrieve an argument of type
6379 TYPE, and returning that argument.
6381 Generates code equivalent to:
6383 if (integral value) {
6384 if (size <= 4 && args.gpr < 5 ||
6385 size > 4 && args.gpr < 4 )
6386 ret = args.reg_save_area[args.gpr+8]
6388 ret = *args.overflow_arg_area++;
6389 } else if (float value) {
6391 ret = args.reg_save_area[args.fpr+64]
6393 ret = *args.overflow_arg_area++;
6394 } else if (aggregate value) {
6396 ret = *args.reg_save_area[args.gpr]
6398 ret = **args.overflow_arg_area++;
6402 s390_va_arg (tree valist
, tree type
)
6404 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
6405 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
6406 int indirect_p
, size
, n_reg
, sav_ofs
, sav_scale
, max_reg
;
6407 rtx lab_false
, lab_over
, addr_rtx
, r
;
6409 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
6410 f_fpr
= TREE_CHAIN (f_gpr
);
6411 f_ovf
= TREE_CHAIN (f_fpr
);
6412 f_sav
= TREE_CHAIN (f_ovf
);
6414 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
6415 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
6416 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
6417 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
6418 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
6420 size
= int_size_in_bytes (type
);
6422 if (s390_function_arg_pass_by_reference (TYPE_MODE (type
), type
))
6424 if (TARGET_DEBUG_ARG
)
6426 fprintf (stderr
, "va_arg: aggregate type");
6430 /* Aggregates are passed by reference. */
6434 sav_ofs
= 2 * UNITS_PER_WORD
;
6435 sav_scale
= UNITS_PER_WORD
;
6436 size
= UNITS_PER_WORD
;
6439 else if (s390_function_arg_float (TYPE_MODE (type
), type
))
6441 if (TARGET_DEBUG_ARG
)
6443 fprintf (stderr
, "va_arg: float type");
6447 /* FP args go in FP registers, if present. */
6451 sav_ofs
= 16 * UNITS_PER_WORD
;
6453 /* TARGET_64BIT has up to 4 parameter in fprs */
6454 max_reg
= TARGET_64BIT
? 3 : 1;
6458 if (TARGET_DEBUG_ARG
)
6460 fprintf (stderr
, "va_arg: other type");
6464 /* Otherwise into GP registers. */
6467 n_reg
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
6468 sav_ofs
= 2 * UNITS_PER_WORD
;
6470 if (size
< UNITS_PER_WORD
)
6471 sav_ofs
+= UNITS_PER_WORD
- size
;
6473 sav_scale
= UNITS_PER_WORD
;
6480 /* Pull the value out of the saved registers ... */
6482 lab_false
= gen_label_rtx ();
6483 lab_over
= gen_label_rtx ();
6484 addr_rtx
= gen_reg_rtx (Pmode
);
6486 emit_cmp_and_jump_insns (expand_expr (reg
, NULL_RTX
, Pmode
, EXPAND_NORMAL
),
6488 GT
, const1_rtx
, Pmode
, 0, lab_false
);
6491 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
6495 u
= build (MULT_EXPR
, long_integer_type_node
,
6496 reg
, build_int_2 (sav_scale
, 0));
6497 TREE_SIDE_EFFECTS (u
) = 1;
6499 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
6500 TREE_SIDE_EFFECTS (t
) = 1;
6502 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
6504 emit_move_insn (addr_rtx
, r
);
6507 emit_jump_insn (gen_jump (lab_over
));
6509 emit_label (lab_false
);
6511 /* ... Otherwise out of the overflow area. */
6513 t
= save_expr (ovf
);
6516 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
6517 if (size
< UNITS_PER_WORD
)
6519 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (UNITS_PER_WORD
-size
, 0));
6520 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
6521 TREE_SIDE_EFFECTS (t
) = 1;
6522 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6524 t
= save_expr (ovf
);
6527 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
6529 emit_move_insn (addr_rtx
, r
);
6531 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
6532 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
6533 TREE_SIDE_EFFECTS (t
) = 1;
6534 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6536 emit_label (lab_over
);
6538 /* If less than max_regs a registers are retrieved out
6539 of register save area, increment. */
6541 u
= build (PREINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
6542 build_int_2 (n_reg
, 0));
6543 TREE_SIDE_EFFECTS (u
) = 1;
6544 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6548 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
6549 set_mem_alias_set (r
, get_varargs_alias_set ());
6550 emit_move_insn (addr_rtx
, r
);
6562 S390_BUILTIN_THREAD_POINTER
,
6563 S390_BUILTIN_SET_THREAD_POINTER
,
6568 static unsigned int const code_for_builtin_64
[S390_BUILTIN_max
] = {
6573 static unsigned int const code_for_builtin_31
[S390_BUILTIN_max
] = {
6579 s390_init_builtins (void)
6583 ftype
= build_function_type (ptr_type_node
, void_list_node
);
6584 builtin_function ("__builtin_thread_pointer", ftype
,
6585 S390_BUILTIN_THREAD_POINTER
, BUILT_IN_MD
,
6588 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
6589 builtin_function ("__builtin_set_thread_pointer", ftype
,
6590 S390_BUILTIN_SET_THREAD_POINTER
, BUILT_IN_MD
,
6594 /* Expand an expression EXP that calls a built-in function,
6595 with result going to TARGET if that's convenient
6596 (and in mode MODE if that's convenient).
6597 SUBTARGET may be used as the target for computing one of EXP's operands.
6598 IGNORE is nonzero if the value is to be ignored. */
6601 s390_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
6602 enum machine_mode mode ATTRIBUTE_UNUSED
,
6603 int ignore ATTRIBUTE_UNUSED
)
6607 unsigned int const *code_for_builtin
=
6608 TARGET_64BIT
? code_for_builtin_64
: code_for_builtin_31
;
6610 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6611 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6612 tree arglist
= TREE_OPERAND (exp
, 1);
6613 enum insn_code icode
;
6614 rtx op
[MAX_ARGS
], pat
;
6618 if (fcode
>= S390_BUILTIN_max
)
6619 internal_error ("bad builtin fcode");
6620 icode
= code_for_builtin
[fcode
];
6622 internal_error ("bad builtin fcode");
6624 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
6626 for (arglist
= TREE_OPERAND (exp
, 1), arity
= 0;
6628 arglist
= TREE_CHAIN (arglist
), arity
++)
6630 const struct insn_operand_data
*insn_op
;
6632 tree arg
= TREE_VALUE (arglist
);
6633 if (arg
== error_mark_node
)
6635 if (arity
> MAX_ARGS
)
6638 insn_op
= &insn_data
[icode
].operand
[arity
+ nonvoid
];
6640 op
[arity
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, 0);
6642 if (!(*insn_op
->predicate
) (op
[arity
], insn_op
->mode
))
6643 op
[arity
] = copy_to_mode_reg (insn_op
->mode
, op
[arity
]);
6648 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6650 || GET_MODE (target
) != tmode
6651 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6652 target
= gen_reg_rtx (tmode
);
6658 pat
= GEN_FCN (icode
) (target
);
6662 pat
= GEN_FCN (icode
) (target
, op
[0]);
6664 pat
= GEN_FCN (icode
) (op
[0]);
6667 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
6683 /* Output assembly code for the trampoline template to
6686 On S/390, we use gpr 1 internally in the trampoline code;
6687 gpr 0 is used to hold the static chain. */
6690 s390_trampoline_template (FILE *file
)
6694 fprintf (file
, "larl\t%s,0f\n", reg_names
[1]);
6695 fprintf (file
, "lg\t%s,0(%s)\n", reg_names
[0], reg_names
[1]);
6696 fprintf (file
, "lg\t%s,8(%s)\n", reg_names
[1], reg_names
[1]);
6697 fprintf (file
, "br\t%s\n", reg_names
[1]);
6698 fprintf (file
, "0:\t.quad\t0\n");
6699 fprintf (file
, ".quad\t0\n");
6703 fprintf (file
, "basr\t%s,0\n", reg_names
[1]);
6704 fprintf (file
, "l\t%s,10(%s)\n", reg_names
[0], reg_names
[1]);
6705 fprintf (file
, "l\t%s,14(%s)\n", reg_names
[1], reg_names
[1]);
6706 fprintf (file
, "br\t%s\n", reg_names
[1]);
6707 fprintf (file
, ".long\t0\n");
6708 fprintf (file
, ".long\t0\n");
6712 /* Emit RTL insns to initialize the variable parts of a trampoline.
6713 FNADDR is an RTX for the address of the function's pure code.
6714 CXT is an RTX for the static chain value for the function. */
6717 s390_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
6719 emit_move_insn (gen_rtx
6721 memory_address (Pmode
,
6722 plus_constant (addr
, (TARGET_64BIT
? 20 : 12) ))), cxt
);
6723 emit_move_insn (gen_rtx
6725 memory_address (Pmode
,
6726 plus_constant (addr
, (TARGET_64BIT
? 28 : 16) ))), fnaddr
);
6729 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6730 LOW and HIGH, independent of the host word size. */
6733 s390_gen_rtx_const_DI (int high
, int low
)
6735 #if HOST_BITS_PER_WIDE_INT >= 64
6737 val
= (HOST_WIDE_INT
)high
;
6739 val
|= (HOST_WIDE_INT
)low
;
6741 return GEN_INT (val
);
6743 #if HOST_BITS_PER_WIDE_INT >= 32
6744 return immed_double_const ((HOST_WIDE_INT
)low
, (HOST_WIDE_INT
)high
, DImode
);
6751 /* Output assembler code to FILE to increment profiler label # LABELNO
6752 for profiling a function entry. */
6755 s390_function_profiler (FILE *file
, int labelno
)
6760 ASM_GENERATE_INTERNAL_LABEL (label
, "LP", labelno
);
6762 fprintf (file
, "# function profiler \n");
6764 op
[0] = gen_rtx_REG (Pmode
, RETURN_REGNUM
);
6765 op
[1] = gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
6766 op
[1] = gen_rtx_MEM (Pmode
, plus_constant (op
[1], UNITS_PER_WORD
));
6768 op
[2] = gen_rtx_REG (Pmode
, 1);
6769 op
[3] = gen_rtx_SYMBOL_REF (Pmode
, label
);
6770 SYMBOL_REF_FLAGS (op
[3]) = SYMBOL_FLAG_LOCAL
;
6772 op
[4] = gen_rtx_SYMBOL_REF (Pmode
, "_mcount");
6775 op
[4] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[4]), UNSPEC_PLT
);
6776 op
[4] = gen_rtx_CONST (Pmode
, op
[4]);
6781 output_asm_insn ("stg\t%0,%1", op
);
6782 output_asm_insn ("larl\t%2,%3", op
);
6783 output_asm_insn ("brasl\t%0,%4", op
);
6784 output_asm_insn ("lg\t%0,%1", op
);
6788 op
[6] = gen_label_rtx ();
6790 output_asm_insn ("st\t%0,%1", op
);
6791 output_asm_insn ("bras\t%2,%l6", op
);
6792 output_asm_insn (".long\t%4", op
);
6793 output_asm_insn (".long\t%3", op
);
6794 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
6795 output_asm_insn ("l\t%0,0(%2)", op
);
6796 output_asm_insn ("l\t%2,4(%2)", op
);
6797 output_asm_insn ("basr\t%0,%0", op
);
6798 output_asm_insn ("l\t%0,%1", op
);
6802 op
[5] = gen_label_rtx ();
6803 op
[6] = gen_label_rtx ();
6805 output_asm_insn ("st\t%0,%1", op
);
6806 output_asm_insn ("bras\t%2,%l6", op
);
6807 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[5]));
6808 output_asm_insn (".long\t%4-%l5", op
);
6809 output_asm_insn (".long\t%3-%l5", op
);
6810 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
6811 output_asm_insn ("lr\t%0,%2", op
);
6812 output_asm_insn ("a\t%0,0(%2)", op
);
6813 output_asm_insn ("a\t%2,4(%2)", op
);
6814 output_asm_insn ("basr\t%0,%0", op
);
6815 output_asm_insn ("l\t%0,%1", op
);
6819 /* Select section for constant in constant pool. In 32-bit mode,
6820 constants go in the function section; in 64-bit mode in .rodata. */
6823 s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED
,
6824 rtx x ATTRIBUTE_UNUSED
,
6825 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
6827 if (TARGET_CPU_ZARCH
)
6828 readonly_data_section ();
6830 function_section (current_function_decl
);
6833 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6834 into its SYMBOL_REF_FLAGS. */
6837 s390_encode_section_info (tree decl
, rtx rtl
, int first
)
6839 default_encode_section_info (decl
, rtl
, first
);
6841 /* If a variable has a forced alignment to < 2 bytes, mark it with
6842 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
6843 if (TREE_CODE (decl
) == VAR_DECL
6844 && DECL_USER_ALIGN (decl
) && DECL_ALIGN (decl
) < 16)
6845 SYMBOL_REF_FLAGS (XEXP (rtl
, 0)) |= SYMBOL_FLAG_ALIGN1
;
6848 /* Output thunk to FILE that implements a C++ virtual function call (with
6849 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
6850 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6851 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6852 relative to the resulting this pointer. */
6855 s390_output_mi_thunk (FILE *file
, tree thunk ATTRIBUTE_UNUSED
,
6856 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
6862 /* Operand 0 is the target function. */
6863 op
[0] = XEXP (DECL_RTL (function
), 0);
6864 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (op
[0]))
6867 op
[0] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[0]),
6868 TARGET_64BIT
? UNSPEC_PLT
: UNSPEC_GOT
);
6869 op
[0] = gen_rtx_CONST (Pmode
, op
[0]);
6872 /* Operand 1 is the 'this' pointer. */
6873 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
6874 op
[1] = gen_rtx_REG (Pmode
, 3);
6876 op
[1] = gen_rtx_REG (Pmode
, 2);
6878 /* Operand 2 is the delta. */
6879 op
[2] = GEN_INT (delta
);
6881 /* Operand 3 is the vcall_offset. */
6882 op
[3] = GEN_INT (vcall_offset
);
6884 /* Operand 4 is the temporary register. */
6885 op
[4] = gen_rtx_REG (Pmode
, 1);
6887 /* Operands 5 to 8 can be used as labels. */
6893 /* Operand 9 can be used for temporary register. */
6896 /* Generate code. */
6899 /* Setup literal pool pointer if required. */
6900 if ((!DISP_IN_RANGE (delta
)
6901 && !CONST_OK_FOR_LETTER_P (delta
, 'K'))
6902 || (!DISP_IN_RANGE (vcall_offset
)
6903 && !CONST_OK_FOR_LETTER_P (vcall_offset
, 'K')))
6905 op
[5] = gen_label_rtx ();
6906 output_asm_insn ("larl\t%4,%5", op
);
6909 /* Add DELTA to this pointer. */
6912 if (CONST_OK_FOR_LETTER_P (delta
, 'J'))
6913 output_asm_insn ("la\t%1,%2(%1)", op
);
6914 else if (DISP_IN_RANGE (delta
))
6915 output_asm_insn ("lay\t%1,%2(%1)", op
);
6916 else if (CONST_OK_FOR_LETTER_P (delta
, 'K'))
6917 output_asm_insn ("aghi\t%1,%2", op
);
6920 op
[6] = gen_label_rtx ();
6921 output_asm_insn ("agf\t%1,%6-%5(%4)", op
);
6925 /* Perform vcall adjustment. */
6928 if (DISP_IN_RANGE (vcall_offset
))
6930 output_asm_insn ("lg\t%4,0(%1)", op
);
6931 output_asm_insn ("ag\t%1,%3(%4)", op
);
6933 else if (CONST_OK_FOR_LETTER_P (vcall_offset
, 'K'))
6935 output_asm_insn ("lghi\t%4,%3", op
);
6936 output_asm_insn ("ag\t%4,0(%1)", op
);
6937 output_asm_insn ("ag\t%1,0(%4)", op
);
6941 op
[7] = gen_label_rtx ();
6942 output_asm_insn ("llgf\t%4,%7-%5(%4)", op
);
6943 output_asm_insn ("ag\t%4,0(%1)", op
);
6944 output_asm_insn ("ag\t%1,0(%4)", op
);
6948 /* Jump to target. */
6949 output_asm_insn ("jg\t%0", op
);
6951 /* Output literal pool if required. */
6954 output_asm_insn (".align\t4", op
);
6955 targetm
.asm_out
.internal_label (file
, "L",
6956 CODE_LABEL_NUMBER (op
[5]));
6960 targetm
.asm_out
.internal_label (file
, "L",
6961 CODE_LABEL_NUMBER (op
[6]));
6962 output_asm_insn (".long\t%2", op
);
6966 targetm
.asm_out
.internal_label (file
, "L",
6967 CODE_LABEL_NUMBER (op
[7]));
6968 output_asm_insn (".long\t%3", op
);
6973 /* Setup base pointer if required. */
6975 || (!DISP_IN_RANGE (delta
)
6976 && !CONST_OK_FOR_LETTER_P (delta
, 'K'))
6977 || (!DISP_IN_RANGE (delta
)
6978 && !CONST_OK_FOR_LETTER_P (vcall_offset
, 'K')))
6980 op
[5] = gen_label_rtx ();
6981 output_asm_insn ("basr\t%4,0", op
);
6982 targetm
.asm_out
.internal_label (file
, "L",
6983 CODE_LABEL_NUMBER (op
[5]));
6986 /* Add DELTA to this pointer. */
6989 if (CONST_OK_FOR_LETTER_P (delta
, 'J'))
6990 output_asm_insn ("la\t%1,%2(%1)", op
);
6991 else if (DISP_IN_RANGE (delta
))
6992 output_asm_insn ("lay\t%1,%2(%1)", op
);
6993 else if (CONST_OK_FOR_LETTER_P (delta
, 'K'))
6994 output_asm_insn ("ahi\t%1,%2", op
);
6997 op
[6] = gen_label_rtx ();
6998 output_asm_insn ("a\t%1,%6-%5(%4)", op
);
7002 /* Perform vcall adjustment. */
7005 if (CONST_OK_FOR_LETTER_P (vcall_offset
, 'J'))
7007 output_asm_insn ("lg\t%4,0(%1)", op
);
7008 output_asm_insn ("a\t%1,%3(%4)", op
);
7010 else if (DISP_IN_RANGE (vcall_offset
))
7012 output_asm_insn ("lg\t%4,0(%1)", op
);
7013 output_asm_insn ("ay\t%1,%3(%4)", op
);
7015 else if (CONST_OK_FOR_LETTER_P (vcall_offset
, 'K'))
7017 output_asm_insn ("lhi\t%4,%3", op
);
7018 output_asm_insn ("a\t%4,0(%1)", op
);
7019 output_asm_insn ("a\t%1,0(%4)", op
);
7023 op
[7] = gen_label_rtx ();
7024 output_asm_insn ("l\t%4,%7-%5(%4)", op
);
7025 output_asm_insn ("a\t%4,0(%1)", op
);
7026 output_asm_insn ("a\t%1,0(%4)", op
);
7029 /* We had to clobber the base pointer register.
7030 Re-setup the base pointer (with a different base). */
7031 op
[5] = gen_label_rtx ();
7032 output_asm_insn ("basr\t%4,0", op
);
7033 targetm
.asm_out
.internal_label (file
, "L",
7034 CODE_LABEL_NUMBER (op
[5]));
7037 /* Jump to target. */
7038 op
[8] = gen_label_rtx ();
7041 output_asm_insn ("l\t%4,%8-%5(%4)", op
);
7043 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7044 /* We cannot call through .plt, since .plt requires %r12 loaded. */
7045 else if (flag_pic
== 1)
7047 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7048 output_asm_insn ("l\t%4,%0(%4)", op
);
7050 else if (flag_pic
== 2)
7052 op
[9] = gen_rtx_REG (Pmode
, 0);
7053 output_asm_insn ("l\t%9,%8-4-%5(%4)", op
);
7054 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
7055 output_asm_insn ("ar\t%4,%9", op
);
7056 output_asm_insn ("l\t%4,0(%4)", op
);
7059 output_asm_insn ("br\t%4", op
);
7061 /* Output literal pool. */
7062 output_asm_insn (".align\t4", op
);
7064 if (nonlocal
&& flag_pic
== 2)
7065 output_asm_insn (".long\t%0", op
);
7068 op
[0] = gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
7069 SYMBOL_REF_FLAGS (op
[0]) = SYMBOL_FLAG_LOCAL
;
7072 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[8]));
7074 output_asm_insn (".long\t%0", op
);
7076 output_asm_insn (".long\t%0-%5", op
);
7080 targetm
.asm_out
.internal_label (file
, "L",
7081 CODE_LABEL_NUMBER (op
[6]));
7082 output_asm_insn (".long\t%2", op
);
7086 targetm
.asm_out
.internal_label (file
, "L",
7087 CODE_LABEL_NUMBER (op
[7]));
7088 output_asm_insn (".long\t%3", op
);
7094 s390_valid_pointer_mode (enum machine_mode mode
)
7096 return (mode
== SImode
|| (TARGET_64BIT
&& mode
== DImode
));
7099 /* How to allocate a 'struct machine_function'. */
7101 static struct machine_function
*
7102 s390_init_machine_status (void)
7104 return ggc_alloc_cleared (sizeof (struct machine_function
));
7107 #include "gt-s390.h"