1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* A vector constant is "easy" when every element is the same value n
   in the signed 5-bit range -16..15 (i.e. loadable with vspltis*).  */
#define EASY_VECTOR_15(n,x,y) ((n) >= -16 && (n) <= 15 \
				 && easy_vector_same (x, y))

/* Values 0x10..0x1e can be synthesized as m+m from the easy constant
   m = n/2, which requires n to be even.
   NOTE(review): the "!((n) & 1)" evenness test was missing from the
   garbled listing and has been restored — confirm against upstream.  */
#define EASY_VECTOR_15_ADD_SELF(n,x,y) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1)		   \
					  && easy_vector_same (x, y))
/* Minimum/maximum of two values.  NOTE: function-like macros — an
   argument may be evaluated twice, so do not pass expressions with
   side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack
{
75 int first_gp_reg_save
; /* first callee saved GP register used */
76 int first_fp_reg_save
; /* first callee saved FP register used */
77 int first_altivec_reg_save
; /* first callee saved AltiVec register used */
78 int lr_save_p
; /* true if the link reg needs to be saved */
79 int cr_save_p
; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask
; /* mask of vec registers to save */
81 int toc_save_p
; /* true if the TOC needs to be saved */
82 int push_p
; /* true if we need to allocate stack space */
83 int calls_p
; /* true if the function makes any calls */
84 enum rs6000_abi abi
; /* which ABI to use */
85 int gp_save_offset
; /* offset to save GP regs from initial SP */
86 int fp_save_offset
; /* offset to save FP regs from initial SP */
87 int altivec_save_offset
; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset
; /* offset to save LR from initial SP */
89 int cr_save_offset
; /* offset to save CR from initial SP */
90 int vrsave_save_offset
; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset
; /* offset to save spe 64-bit gprs */
92 int toc_save_offset
; /* offset to save the TOC pointer */
93 int varargs_save_offset
; /* offset to save the varargs registers */
94 int ehrd_offset
; /* offset to EH return data */
95 int reg_size
; /* register size (4 or 8) */
96 int varargs_size
; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size
; /* variable save area size */
98 int parm_size
; /* outgoing parameter size */
99 int save_size
; /* save area size */
100 int fixed_size
; /* fixed size of stack frame */
101 int gp_size
; /* size of saved GP registers */
102 int fp_size
; /* size of saved FP registers */
103 int altivec_size
; /* size of saved AltiVec registers */
104 int cr_size
; /* size to hold CR if not in save_size */
105 int lr_size
; /* size to hold LR if not in save_size */
106 int vrsave_size
; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size
; /* size of altivec alignment padding if
109 int spe_gp_size
; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size
;
111 int toc_size
; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size
; /* total bytes allocated for stack */
113 int spe_64bit_regs_used
;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu
;
119 struct rs6000_cpu_select rs6000_select
[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str
;
130 int rs6000_sched_restricted_insns_priority
;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str
;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep
;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str
;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops
;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string
;
142 int rs6000_long_double_type_size
;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi
;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave
;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string
;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs
= 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string
;
168 /* String from -misel=. */
169 const char *rs6000_isel_string
;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string
;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined
;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0
, rs6000_compare_op1
;
180 int rs6000_compare_fp_p
;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno
;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name
;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name
= (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno
= 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size
= 32;
202 const char *rs6000_tls_size_string
;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi
;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string
;
211 const char *rs6000_debug_name
;
212 int rs6000_debug_stack
; /* debug stack applications */
213 int rs6000_debug_arg
; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node
;
217 static GTY(()) tree opaque_V2SF_type_node
;
218 static GTY(()) tree opaque_p_V2SI_type_node
;
220 const char *rs6000_traceback_name
;
222 traceback_default
= 0,
228 /* Flag to say the TOC is initialized */
230 char toc_label_name
[10];
232 /* Alias set for saves and restores from the rs6000 stack. */
233 static int rs6000_sr_alias_set
;
235 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
236 The only place that looks at this is rs6000_set_default_type_attributes;
237 everywhere else should rely on the presence or absence of a longcall
238 attribute on the function declaration. */
239 int rs6000_default_long_calls
;
240 const char *rs6000_longcall_switch
;
242 /* Control alignment for fields within structures. */
243 /* String from -malign-XXXXX. */
244 const char *rs6000_alignment_string
;
245 int rs6000_alignment_flags
;
247 struct builtin_description
249 /* mask is not const because we're going to alter it below. This
250 nonsense will go away when we rewrite the -march infrastructure
251 to give us more target flag bits. */
253 const enum insn_code icode
;
254 const char *const name
;
255 const enum rs6000_builtins code
;
258 static bool rs6000_function_ok_for_sibcall (tree
, tree
);
259 static int num_insns_constant_wide (HOST_WIDE_INT
);
260 static void validate_condition_mode (enum rtx_code
, enum machine_mode
);
261 static rtx
rs6000_generate_compare (enum rtx_code
);
262 static void rs6000_maybe_dead (rtx
);
263 static void rs6000_emit_stack_tie (void);
264 static void rs6000_frame_related (rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
);
265 static rtx
spe_synthesize_frame_save (rtx
);
266 static bool spe_func_has_64bit_regs_p (void);
267 static void emit_frame_save (rtx
, rtx
, enum machine_mode
, unsigned int,
269 static rtx
gen_frame_mem_offset (enum machine_mode
, rtx
, int);
270 static void rs6000_emit_allocate_stack (HOST_WIDE_INT
, int);
271 static unsigned rs6000_hash_constant (rtx
);
272 static unsigned toc_hash_function (const void *);
273 static int toc_hash_eq (const void *, const void *);
274 static int constant_pool_expr_1 (rtx
, int *, int *);
275 static bool constant_pool_expr_p (rtx
);
276 static bool toc_relative_expr_p (rtx
);
277 static bool legitimate_small_data_p (enum machine_mode
, rtx
);
278 static bool legitimate_offset_address_p (enum machine_mode
, rtx
, int);
279 static bool legitimate_indexed_address_p (rtx
, int);
280 static bool legitimate_indirect_address_p (rtx
, int);
281 static bool macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
);
282 static bool legitimate_lo_sum_address_p (enum machine_mode
, rtx
, int);
283 static struct machine_function
* rs6000_init_machine_status (void);
284 static bool rs6000_assemble_integer (rtx
, unsigned int, int);
285 #ifdef HAVE_GAS_HIDDEN
286 static void rs6000_assemble_visibility (tree
, int);
288 static int rs6000_ra_ever_killed (void);
289 static tree
rs6000_handle_longcall_attribute (tree
*, tree
, tree
, int, bool *);
290 extern const struct attribute_spec rs6000_attribute_table
[];
291 static void rs6000_set_default_type_attributes (tree
);
292 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT
);
293 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT
);
294 static void rs6000_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
, HOST_WIDE_INT
,
296 static rtx
rs6000_emit_set_long_const (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
);
297 static bool rs6000_return_in_memory (tree
, tree
);
298 static void rs6000_file_start (void);
300 static unsigned int rs6000_elf_section_type_flags (tree
, const char *, int);
301 static void rs6000_elf_asm_out_constructor (rtx
, int);
302 static void rs6000_elf_asm_out_destructor (rtx
, int);
303 static void rs6000_elf_select_section (tree
, int, unsigned HOST_WIDE_INT
);
304 static void rs6000_elf_unique_section (tree
, int);
305 static void rs6000_elf_select_rtx_section (enum machine_mode
, rtx
,
306 unsigned HOST_WIDE_INT
);
307 static void rs6000_elf_encode_section_info (tree
, rtx
, int)
309 static bool rs6000_elf_in_small_data_p (tree
);
312 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
313 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
314 static void rs6000_xcoff_select_section (tree
, int, unsigned HOST_WIDE_INT
);
315 static void rs6000_xcoff_unique_section (tree
, int);
316 static void rs6000_xcoff_select_rtx_section (enum machine_mode
, rtx
,
317 unsigned HOST_WIDE_INT
);
318 static const char * rs6000_xcoff_strip_name_encoding (const char *);
319 static unsigned int rs6000_xcoff_section_type_flags (tree
, const char *, int);
320 static void rs6000_xcoff_file_start (void);
321 static void rs6000_xcoff_file_end (void);
324 static bool rs6000_binds_local_p (tree
);
326 static int rs6000_use_dfa_pipeline_interface (void);
327 static int rs6000_variable_issue (FILE *, int, rtx
, int);
328 static bool rs6000_rtx_costs (rtx
, int, int, int *);
329 static int rs6000_adjust_cost (rtx
, rtx
, rtx
, int);
330 static bool is_microcoded_insn (rtx
);
331 static int is_dispatch_slot_restricted (rtx
);
332 static bool is_cracked_insn (rtx
);
333 static bool is_branch_slot_insn (rtx
);
334 static int rs6000_adjust_priority (rtx
, int);
335 static int rs6000_issue_rate (void);
336 static bool rs6000_is_costly_dependence (rtx
, rtx
, rtx
, int, int);
337 static rtx
get_next_active_insn (rtx
, rtx
);
338 static bool insn_terminates_group_p (rtx
, enum group_termination
);
339 static bool is_costly_group (rtx
*, rtx
);
340 static int force_new_group (int, FILE *, rtx
*, rtx
, bool *, int, int *);
341 static int redefine_groups (FILE *, int, rtx
, rtx
);
342 static int pad_groups (FILE *, int, rtx
, rtx
);
343 static void rs6000_sched_finish (FILE *, int);
344 static int rs6000_use_sched_lookahead (void);
346 static void rs6000_init_builtins (void);
347 static rtx
rs6000_expand_unop_builtin (enum insn_code
, tree
, rtx
);
348 static rtx
rs6000_expand_binop_builtin (enum insn_code
, tree
, rtx
);
349 static rtx
rs6000_expand_ternop_builtin (enum insn_code
, tree
, rtx
);
350 static rtx
rs6000_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
351 static void altivec_init_builtins (void);
352 static void rs6000_common_init_builtins (void);
353 static void rs6000_init_libfuncs (void);
355 static void enable_mask_for_builtins (struct builtin_description
*, int,
356 enum rs6000_builtins
,
357 enum rs6000_builtins
);
358 static void spe_init_builtins (void);
359 static rtx
spe_expand_builtin (tree
, rtx
, bool *);
360 static rtx
spe_expand_predicate_builtin (enum insn_code
, tree
, rtx
);
361 static rtx
spe_expand_evsel_builtin (enum insn_code
, tree
, rtx
);
362 static int rs6000_emit_int_cmove (rtx
, rtx
, rtx
, rtx
);
363 static rs6000_stack_t
*rs6000_stack_info (void);
364 static void debug_stack_info (rs6000_stack_t
*);
366 static rtx
altivec_expand_builtin (tree
, rtx
, bool *);
367 static rtx
altivec_expand_ld_builtin (tree
, rtx
, bool *);
368 static rtx
altivec_expand_st_builtin (tree
, rtx
, bool *);
369 static rtx
altivec_expand_dst_builtin (tree
, rtx
, bool *);
370 static rtx
altivec_expand_abs_builtin (enum insn_code
, tree
, rtx
);
371 static rtx
altivec_expand_predicate_builtin (enum insn_code
,
372 const char *, tree
, rtx
);
373 static rtx
altivec_expand_lv_builtin (enum insn_code
, tree
, rtx
);
374 static rtx
altivec_expand_stv_builtin (enum insn_code
, tree
);
375 static void rs6000_parse_abi_options (void);
376 static void rs6000_parse_alignment_option (void);
377 static void rs6000_parse_tls_size_option (void);
378 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
379 static int first_altivec_reg_to_save (void);
380 static unsigned int compute_vrsave_mask (void);
381 static void is_altivec_return_reg (rtx
, void *);
382 static rtx
generate_set_vrsave (rtx
, rs6000_stack_t
*, int);
383 int easy_vector_constant (rtx
, enum machine_mode
);
384 static int easy_vector_same (rtx
, enum machine_mode
);
385 static bool is_ev64_opaque_type (tree
);
386 static rtx
rs6000_dwarf_register_span (rtx
);
387 static rtx
rs6000_legitimize_tls_address (rtx
, enum tls_model
);
388 static rtx
rs6000_tls_get_addr (void);
389 static rtx
rs6000_got_sym (void);
390 static inline int rs6000_tls_symbol_ref_1 (rtx
*, void *);
391 static const char *rs6000_get_some_local_dynamic_name (void);
392 static int rs6000_get_some_local_dynamic_name_1 (rtx
*, void *);
393 static rtx
rs6000_complex_function_value (enum machine_mode
);
394 static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS
*,
395 enum machine_mode
, tree
);
396 static rtx
rs6000_mixed_function_arg (CUMULATIVE_ARGS
*,
397 enum machine_mode
, tree
, int);
398 static void rs6000_move_block_from_reg(int regno
, rtx x
, int nregs
);
399 static void setup_incoming_varargs (CUMULATIVE_ARGS
*,
400 enum machine_mode
, tree
,
403 static void macho_branch_islands (void);
404 static void add_compiler_branch_island (tree
, tree
, int);
405 static int no_previous_def (tree function_name
);
406 static tree
get_prev_label (tree function_name
);
409 static tree
rs6000_build_builtin_va_list (void);
411 /* Hash table stuff for keeping track of TOC entries. */
413 struct toc_hash_struct
GTY(())
415 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
416 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
418 enum machine_mode key_mode
;
422 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
/* Default register names: 32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields,
   xer, 32 AltiVec registers, vrsave/vscr, and the SPE registers.
   NOTE(review): the surrounding braces and the trailing entries (xer,
   vrsave, vscr, spe_acc, spefscr) were missing from the garbled listing
   and have been reconstructed — the count must match FIRST_PSEUDO_REGISTER;
   confirm against rs6000.h.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "vrsave", "vscr",
     /* SPE registers.  */
     "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate register names used with -mregnames; must parallel
   rs6000_reg_names entry for entry.
   NOTE(review): the surrounding braces, the trailing entries and the
   closing #endif were missing from the garbled listing and have been
   reconstructed — confirm against upstream.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
/* NOTE(review): the two #endif lines below were missing from the garbled
   listing and have been restored.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Return 1 for a symbol ref for a thread-local storage symbol.  */
#define RS6000_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
487 /* Initialize the GCC target structure. */
488 #undef TARGET_ATTRIBUTE_TABLE
489 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
490 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
491 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
493 #undef TARGET_ASM_ALIGNED_DI_OP
494 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
496 /* Default unaligned ops are only provided for ELF. Find the ops needed
497 for non-ELF systems. */
498 #ifndef OBJECT_FORMAT_ELF
500 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
502 #undef TARGET_ASM_UNALIGNED_HI_OP
503 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
504 #undef TARGET_ASM_UNALIGNED_SI_OP
505 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
506 #undef TARGET_ASM_UNALIGNED_DI_OP
507 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
510 #undef TARGET_ASM_UNALIGNED_HI_OP
511 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
512 #undef TARGET_ASM_UNALIGNED_SI_OP
513 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
517 /* This hook deals with fixups for relocatable code and DI-mode objects
519 #undef TARGET_ASM_INTEGER
520 #define TARGET_ASM_INTEGER rs6000_assemble_integer
522 #ifdef HAVE_GAS_HIDDEN
523 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
524 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
527 #undef TARGET_HAVE_TLS
528 #define TARGET_HAVE_TLS HAVE_AS_TLS
530 #undef TARGET_CANNOT_FORCE_CONST_MEM
531 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
533 #undef TARGET_ASM_FUNCTION_PROLOGUE
534 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
535 #undef TARGET_ASM_FUNCTION_EPILOGUE
536 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
538 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
539 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
540 #undef TARGET_SCHED_VARIABLE_ISSUE
541 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
543 #undef TARGET_SCHED_ISSUE_RATE
544 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
545 #undef TARGET_SCHED_ADJUST_COST
546 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
547 #undef TARGET_SCHED_ADJUST_PRIORITY
548 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
549 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
550 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
551 #undef TARGET_SCHED_FINISH
552 #define TARGET_SCHED_FINISH rs6000_sched_finish
554 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
555 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
557 #undef TARGET_INIT_BUILTINS
558 #define TARGET_INIT_BUILTINS rs6000_init_builtins
560 #undef TARGET_EXPAND_BUILTIN
561 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
563 #undef TARGET_INIT_LIBFUNCS
564 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
567 #undef TARGET_BINDS_LOCAL_P
568 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
571 #undef TARGET_ASM_OUTPUT_MI_THUNK
572 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
574 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
575 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
577 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
578 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
580 #undef TARGET_RTX_COSTS
581 #define TARGET_RTX_COSTS rs6000_rtx_costs
582 #undef TARGET_ADDRESS_COST
583 #define TARGET_ADDRESS_COST hook_int_rtx_0
585 #undef TARGET_VECTOR_OPAQUE_P
586 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
588 #undef TARGET_DWARF_REGISTER_SPAN
589 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
591 /* On rs6000, function arguments are promoted, as are function return
593 #undef TARGET_PROMOTE_FUNCTION_ARGS
594 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
595 #undef TARGET_PROMOTE_FUNCTION_RETURN
596 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
598 /* Structure return values are passed as an extra parameter. */
599 #undef TARGET_STRUCT_VALUE_RTX
600 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
602 #undef TARGET_RETURN_IN_MEMORY
603 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
605 #undef TARGET_SETUP_INCOMING_VARARGS
606 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
608 /* Always strict argument naming on rs6000. */
609 #undef TARGET_STRICT_ARGUMENT_NAMING
610 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
611 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
612 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
614 #undef TARGET_BUILD_BUILTIN_VA_LIST
615 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
617 struct gcc_target targetm
= TARGET_INITIALIZER
;
619 /* Override command line options. Mostly we process the processor
620 type and sometimes adjust other TARGET_ options. */
623 rs6000_override_options (const char *default_cpu
)
626 struct rs6000_cpu_select
*ptr
;
629 /* Simplifications for entries below. */
632 POWERPC_BASE_MASK
= MASK_POWERPC
| MASK_NEW_MNEMONICS
,
633 POWERPC_7400_MASK
= POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_ALTIVEC
636 /* This table occasionally claims that a processor does not support
637 a particular feature even though it does, but the feature is slower
638 than the alternative. Thus, it shouldn't be relied on as a
639 complete description of the processor's support.
641 Please keep this list in order, and don't forget to update the
642 documentation in invoke.texi when adding a new processor or
646 const char *const name
; /* Canonical processor name. */
647 const enum processor_type processor
; /* Processor type enum value. */
648 const int target_enable
; /* Target flags to enable. */
649 } const processor_target_table
[]
650 = {{"401", PROCESSOR_PPC403
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
651 {"403", PROCESSOR_PPC403
,
652 POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
| MASK_STRICT_ALIGN
},
653 {"405", PROCESSOR_PPC405
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
654 {"405fp", PROCESSOR_PPC405
, POWERPC_BASE_MASK
},
655 {"440", PROCESSOR_PPC440
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
656 {"440fp", PROCESSOR_PPC440
, POWERPC_BASE_MASK
},
657 {"505", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
},
658 {"601", PROCESSOR_PPC601
,
659 MASK_POWER
| POWERPC_BASE_MASK
| MASK_MULTIPLE
| MASK_STRING
},
660 {"602", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
661 {"603", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
662 {"603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
663 {"604", PROCESSOR_PPC604
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
664 {"604e", PROCESSOR_PPC604e
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
665 {"620", PROCESSOR_PPC620
,
666 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
667 {"630", PROCESSOR_PPC630
,
668 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
669 {"740", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
670 {"7400", PROCESSOR_PPC7400
, POWERPC_7400_MASK
},
671 {"7450", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
672 {"750", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
673 {"801", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
674 {"821", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
675 {"823", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
676 {"8540", PROCESSOR_PPC8540
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
677 {"860", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
678 {"970", PROCESSOR_POWER4
,
679 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
680 {"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
},
681 {"ec603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
682 {"G3", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
683 {"G4", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
684 {"G5", PROCESSOR_POWER4
,
685 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
686 {"power", PROCESSOR_POWER
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
687 {"power2", PROCESSOR_POWER
,
688 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
689 {"power3", PROCESSOR_PPC630
,
690 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
691 {"power4", PROCESSOR_POWER4
,
692 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
693 {"powerpc", PROCESSOR_POWERPC
, POWERPC_BASE_MASK
},
694 {"powerpc64", PROCESSOR_POWERPC64
,
695 POWERPC_BASE_MASK
| MASK_POWERPC64
},
696 {"rios", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
697 {"rios1", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
698 {"rios2", PROCESSOR_RIOS2
,
699 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
700 {"rsc", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
701 {"rsc1", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
702 {"rs64a", PROCESSOR_RS64A
, POWERPC_BASE_MASK
| MASK_POWERPC64
},
705 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
707 /* Save current -mmultiple/-mno-multiple status. */
708 int multiple
= TARGET_MULTIPLE
;
709 /* Save current -mstring/-mno-string status. */
710 int string
= TARGET_STRING
;
712 /* Some OSs don't support saving the high part of 64-bit registers on
713 context switch. Other OSs don't support saving Altivec registers.
714 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
715 settings; if the user wants either, the user must explicitly specify
716 them and we won't interfere with the user's specification. */
719 POWER_MASKS
= MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
720 POWERPC_MASKS
= (POWERPC_BASE_MASK
| MASK_PPC_GPOPT
721 | MASK_PPC_GFXOPT
| MASK_POWERPC64
| MASK_ALTIVEC
724 set_masks
= POWER_MASKS
| POWERPC_MASKS
| MASK_SOFT_FLOAT
;
725 #ifdef OS_MISSING_POWERPC64
726 if (OS_MISSING_POWERPC64
)
727 set_masks
&= ~MASK_POWERPC64
;
729 #ifdef OS_MISSING_ALTIVEC
730 if (OS_MISSING_ALTIVEC
)
731 set_masks
&= ~MASK_ALTIVEC
;
734 /* Identify the processor type. */
735 rs6000_select
[0].string
= default_cpu
;
736 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
738 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
740 ptr
= &rs6000_select
[i
];
741 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
743 for (j
= 0; j
< ptt_size
; j
++)
744 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
747 rs6000_cpu
= processor_target_table
[j
].processor
;
751 target_flags
&= ~set_masks
;
752 target_flags
|= (processor_target_table
[j
].target_enable
759 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
766 /* If we are optimizing big endian systems for space, use the load/store
767 multiple and string instructions. */
768 if (BYTES_BIG_ENDIAN
&& optimize_size
)
769 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
771 /* If -mmultiple or -mno-multiple was explicitly used, don't
772 override with the processor default */
773 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
774 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
776 /* If -mstring or -mno-string was explicitly used, don't override
777 with the processor default. */
778 if ((target_flags_explicit
& MASK_STRING
) != 0)
779 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
781 /* Don't allow -mmultiple or -mstring on little endian systems
782 unless the cpu is a 750, because the hardware doesn't support the
783 instructions used in little endian mode, and causes an alignment
784 trap. The 750 does not cause an alignment trap (except when the
785 target is unaligned). */
787 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
791 target_flags
&= ~MASK_MULTIPLE
;
792 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
793 warning ("-mmultiple is not supported on little endian systems");
798 target_flags
&= ~MASK_STRING
;
799 if ((target_flags_explicit
& MASK_STRING
) != 0)
800 warning ("-mstring is not supported on little endian systems");
804 /* Set debug flags */
805 if (rs6000_debug_name
)
807 if (! strcmp (rs6000_debug_name
, "all"))
808 rs6000_debug_stack
= rs6000_debug_arg
= 1;
809 else if (! strcmp (rs6000_debug_name
, "stack"))
810 rs6000_debug_stack
= 1;
811 else if (! strcmp (rs6000_debug_name
, "arg"))
812 rs6000_debug_arg
= 1;
814 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
817 if (rs6000_traceback_name
)
819 if (! strncmp (rs6000_traceback_name
, "full", 4))
820 rs6000_traceback
= traceback_full
;
821 else if (! strncmp (rs6000_traceback_name
, "part", 4))
822 rs6000_traceback
= traceback_part
;
823 else if (! strncmp (rs6000_traceback_name
, "no", 2))
824 rs6000_traceback
= traceback_none
;
826 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
827 rs6000_traceback_name
);
830 /* Set size of long double */
831 rs6000_long_double_type_size
= 64;
832 if (rs6000_long_double_size_string
)
835 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
836 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
837 error ("Unknown switch -mlong-double-%s",
838 rs6000_long_double_size_string
);
840 rs6000_long_double_type_size
= size
;
843 /* Handle -mabi= options. */
844 rs6000_parse_abi_options ();
846 /* Handle -malign-XXXXX option. */
847 rs6000_parse_alignment_option ();
849 /* Handle generic -mFOO=YES/NO options. */
850 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
851 &rs6000_altivec_vrsave
);
852 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
854 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
855 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
858 /* Handle -mtls-size option. */
859 rs6000_parse_tls_size_option ();
861 #ifdef SUBTARGET_OVERRIDE_OPTIONS
862 SUBTARGET_OVERRIDE_OPTIONS
;
864 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
865 SUBSUBTARGET_OVERRIDE_OPTIONS
;
870 /* The e500 does not have string instructions, and we set
871 MASK_STRING above when optimizing for size. */
872 if ((target_flags
& MASK_STRING
) != 0)
873 target_flags
= target_flags
& ~MASK_STRING
;
875 /* No SPE means 64-bit long doubles, even if an E500. */
876 if (rs6000_spe_string
!= 0
877 && !strcmp (rs6000_spe_string
, "no"))
878 rs6000_long_double_type_size
= 64;
880 else if (rs6000_select
[1].string
!= NULL
)
882 /* For the powerpc-eabispe configuration, we set all these by
883 default, so let's unset them if we manually set another
884 CPU that is not the E500. */
885 if (rs6000_abi_string
== 0)
887 if (rs6000_spe_string
== 0)
889 if (rs6000_float_gprs_string
== 0)
890 rs6000_float_gprs
= 0;
891 if (rs6000_isel_string
== 0)
893 if (rs6000_long_double_size_string
== 0)
894 rs6000_long_double_type_size
= 64;
897 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
898 using TARGET_OPTIONS to handle a toggle switch, but we're out of
899 bits in target_flags so TARGET_SWITCHES cannot be used.
900 Assumption here is that rs6000_longcall_switch points into the
901 text of the complete option, rather than being a copy, so we can
902 scan back for the presence or absence of the no- modifier. */
903 if (rs6000_longcall_switch
)
905 const char *base
= rs6000_longcall_switch
;
906 while (base
[-1] != 'm') base
--;
908 if (*rs6000_longcall_switch
!= '\0')
909 error ("invalid option `%s'", base
);
910 rs6000_default_long_calls
= (base
[0] != 'n');
913 /* Handle -mprioritize-restricted-insns option. */
914 rs6000_sched_restricted_insns_priority
= DEFAULT_RESTRICTED_INSNS_PRIORITY
;
915 if (rs6000_sched_restricted_insns_priority_str
)
916 rs6000_sched_restricted_insns_priority
=
917 atoi (rs6000_sched_restricted_insns_priority_str
);
919 /* Handle -msched-costly-dep option. */
920 rs6000_sched_costly_dep
= DEFAULT_SCHED_COSTLY_DEP
;
921 if (rs6000_sched_costly_dep_str
)
923 if (! strcmp (rs6000_sched_costly_dep_str
, "no"))
924 rs6000_sched_costly_dep
= no_dep_costly
;
925 else if (! strcmp (rs6000_sched_costly_dep_str
, "all"))
926 rs6000_sched_costly_dep
= all_deps_costly
;
927 else if (! strcmp (rs6000_sched_costly_dep_str
, "true_store_to_load"))
928 rs6000_sched_costly_dep
= true_store_to_load_dep_costly
;
929 else if (! strcmp (rs6000_sched_costly_dep_str
, "store_to_load"))
930 rs6000_sched_costly_dep
= store_to_load_dep_costly
;
932 rs6000_sched_costly_dep
= atoi (rs6000_sched_costly_dep_str
);
935 /* Handle -minsert-sched-nops option. */
936 rs6000_sched_insert_nops
= DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME
;
937 if (rs6000_sched_insert_nops_str
)
939 if (! strcmp (rs6000_sched_insert_nops_str
, "no"))
940 rs6000_sched_insert_nops
= sched_finish_none
;
941 else if (! strcmp (rs6000_sched_insert_nops_str
, "pad"))
942 rs6000_sched_insert_nops
= sched_finish_pad_groups
;
943 else if (! strcmp (rs6000_sched_insert_nops_str
, "regroup_exact"))
944 rs6000_sched_insert_nops
= sched_finish_regroup_exact
;
946 rs6000_sched_insert_nops
= atoi (rs6000_sched_insert_nops_str
);
949 #ifdef TARGET_REGNAMES
950 /* If the user desires alternate register names, copy in the
951 alternate names now. */
953 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
956 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
957 If -maix-struct-return or -msvr4-struct-return was explicitly
958 used, don't override with the ABI default. */
959 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
961 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
962 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
964 target_flags
|= MASK_AIX_STRUCT_RET
;
967 if (TARGET_LONG_DOUBLE_128
968 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
969 REAL_MODE_FORMAT (TFmode
) = &ibm_extended_format
;
971 /* Allocate an alias set for register saves & restores from stack. */
972 rs6000_sr_alias_set
= new_alias_set ();
975 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
977 /* We can only guarantee the availability of DI pseudo-ops when
978 assembling for 64-bit targets. */
981 targetm
.asm_out
.aligned_op
.di
= NULL
;
982 targetm
.asm_out
.unaligned_op
.di
= NULL
;
985 /* Set maximum branch target alignment at two instructions, eight bytes. */
986 align_jumps_max_skip
= 8;
987 align_loops_max_skip
= 8;
989 /* Arrange to save and restore machine status around nested functions. */
990 init_machine_status
= rs6000_init_machine_status
;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* An absent value leaves the flag at its default.  */
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1011 /* Handle -mabi= options. */
1013 rs6000_parse_abi_options (void)
1015 if (rs6000_abi_string
== 0)
1017 else if (! strcmp (rs6000_abi_string
, "altivec"))
1018 rs6000_altivec_abi
= 1;
1019 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
1020 rs6000_altivec_abi
= 0;
1021 else if (! strcmp (rs6000_abi_string
, "spe"))
1024 if (!TARGET_SPE_ABI
)
1025 error ("not configured for ABI: '%s'", rs6000_abi_string
);
1028 else if (! strcmp (rs6000_abi_string
, "no-spe"))
1031 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
1034 /* Handle -malign-XXXXXX options. */
1036 rs6000_parse_alignment_option (void)
1038 if (rs6000_alignment_string
== 0)
1040 else if (! strcmp (rs6000_alignment_string
, "power"))
1041 rs6000_alignment_flags
= MASK_ALIGN_POWER
;
1042 else if (! strcmp (rs6000_alignment_string
, "natural"))
1043 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
1045 error ("unknown -malign-XXXXX option specified: '%s'",
1046 rs6000_alignment_string
);
1049 /* Validate and record the size specified with the -mtls-size option. */
1052 rs6000_parse_tls_size_option (void)
1054 if (rs6000_tls_size_string
== 0)
1056 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
1057 rs6000_tls_size
= 16;
1058 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
1059 rs6000_tls_size
= 32;
1060 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
1061 rs6000_tls_size
= 64;
1063 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string
);
1067 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1071 /* Do anything needed at the start of the asm file. */
1074 rs6000_file_start (void)
1078 const char *start
= buffer
;
1079 struct rs6000_cpu_select
*ptr
;
1080 const char *default_cpu
= TARGET_CPU_DEFAULT
;
1081 FILE *file
= asm_out_file
;
1083 default_file_start ();
1085 #ifdef TARGET_BI_ARCH
1086 if ((TARGET_DEFAULT
^ target_flags
) & MASK_64BIT
)
1090 if (flag_verbose_asm
)
1092 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
1093 rs6000_select
[0].string
= default_cpu
;
1095 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1097 ptr
= &rs6000_select
[i
];
1098 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1100 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
1105 #ifdef USING_ELFOS_H
1106 switch (rs6000_sdata
)
1108 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
1109 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
1110 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
1111 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
1114 if (rs6000_sdata
&& g_switch_value
)
1116 fprintf (file
, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED
, start
,
1127 /* Return nonzero if this function is known to have a null epilogue. */
1130 direct_return (void)
1132 if (reload_completed
)
1134 rs6000_stack_t
*info
= rs6000_stack_info ();
1136 if (info
->first_gp_reg_save
== 32
1137 && info
->first_fp_reg_save
== 64
1138 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
1139 && ! info
->lr_save_p
1140 && ! info
->cr_save_p
1141 && info
->vrsave_mask
== 0
1149 /* Returns 1 always. */
1152 any_operand (rtx op ATTRIBUTE_UNUSED
,
1153 enum machine_mode mode ATTRIBUTE_UNUSED
)
1158 /* Returns 1 if op is the count register. */
1160 count_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1162 if (GET_CODE (op
) != REG
)
1165 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
1168 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
1174 /* Returns 1 if op is an altivec register. */
1176 altivec_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1179 return (register_operand (op
, mode
)
1180 && (GET_CODE (op
) != REG
1181 || REGNO (op
) > FIRST_PSEUDO_REGISTER
1182 || ALTIVEC_REGNO_P (REGNO (op
))));
1186 xer_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1188 if (GET_CODE (op
) != REG
)
1191 if (XER_REGNO_P (REGNO (op
)))
1197 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1198 by such constants completes more quickly. */
1201 s8bit_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1203 return ( GET_CODE (op
) == CONST_INT
1204 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
1207 /* Return 1 if OP is a constant that can fit in a D field. */
1210 short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1212 return (GET_CODE (op
) == CONST_INT
1213 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1216 /* Similar for an unsigned D field. */
1219 u_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1221 return (GET_CODE (op
) == CONST_INT
1222 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1225 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1228 non_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1230 return (GET_CODE (op
) == CONST_INT
1231 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1234 /* Returns 1 if OP is a CONST_INT that is a positive value
1235 and an exact power of 2. */
1238 exact_log2_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1240 return (GET_CODE (op
) == CONST_INT
1242 && exact_log2 (INTVAL (op
)) >= 0);
1245 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1249 gpc_reg_operand (rtx op
, enum machine_mode mode
)
1251 return (register_operand (op
, mode
)
1252 && (GET_CODE (op
) != REG
1253 || (REGNO (op
) >= ARG_POINTER_REGNUM
1254 && !XER_REGNO_P (REGNO (op
)))
1255 || REGNO (op
) < MQ_REGNO
));
1258 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1262 cc_reg_operand (rtx op
, enum machine_mode mode
)
1264 return (register_operand (op
, mode
)
1265 && (GET_CODE (op
) != REG
1266 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1267 || CR_REGNO_P (REGNO (op
))));
1270 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1271 CR field that isn't CR0. */
1274 cc_reg_not_cr0_operand (rtx op
, enum machine_mode mode
)
1276 return (register_operand (op
, mode
)
1277 && (GET_CODE (op
) != REG
1278 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1279 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1282 /* Returns 1 if OP is either a constant integer valid for a D-field or
1283 a non-special register. If a register, it must be in the proper
1284 mode unless MODE is VOIDmode. */
1287 reg_or_short_operand (rtx op
, enum machine_mode mode
)
1289 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1292 /* Similar, except check if the negation of the constant would be
1293 valid for a D-field. */
1296 reg_or_neg_short_operand (rtx op
, enum machine_mode mode
)
1298 if (GET_CODE (op
) == CONST_INT
)
1299 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1301 return gpc_reg_operand (op
, mode
);
1304 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1305 a non-special register. If a register, it must be in the proper
1306 mode unless MODE is VOIDmode. */
1309 reg_or_aligned_short_operand (rtx op
, enum machine_mode mode
)
1311 if (gpc_reg_operand (op
, mode
))
1313 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1320 /* Return 1 if the operand is either a register or an integer whose
1321 high-order 16 bits are zero. */
1324 reg_or_u_short_operand (rtx op
, enum machine_mode mode
)
1326 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1329 /* Return 1 is the operand is either a non-special register or ANY
1330 constant integer. */
1333 reg_or_cint_operand (rtx op
, enum machine_mode mode
)
1335 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1338 /* Return 1 is the operand is either a non-special register or ANY
1339 32-bit signed constant integer. */
1342 reg_or_arith_cint_operand (rtx op
, enum machine_mode mode
)
1344 return (gpc_reg_operand (op
, mode
)
1345 || (GET_CODE (op
) == CONST_INT
1346 #if HOST_BITS_PER_WIDE_INT != 32
1347 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1348 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1353 /* Return 1 is the operand is either a non-special register or a 32-bit
1354 signed constant integer valid for 64-bit addition. */
1357 reg_or_add_cint64_operand (rtx op
, enum machine_mode mode
)
1359 return (gpc_reg_operand (op
, mode
)
1360 || (GET_CODE (op
) == CONST_INT
1361 #if HOST_BITS_PER_WIDE_INT == 32
1362 && INTVAL (op
) < 0x7fff8000
1364 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1370 /* Return 1 is the operand is either a non-special register or a 32-bit
1371 signed constant integer valid for 64-bit subtraction. */
1374 reg_or_sub_cint64_operand (rtx op
, enum machine_mode mode
)
1376 return (gpc_reg_operand (op
, mode
)
1377 || (GET_CODE (op
) == CONST_INT
1378 #if HOST_BITS_PER_WIDE_INT == 32
1379 && (- INTVAL (op
)) < 0x7fff8000
1381 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1387 /* Return 1 is the operand is either a non-special register or ANY
1388 32-bit unsigned constant integer. */
1391 reg_or_logical_cint_operand (rtx op
, enum machine_mode mode
)
1393 if (GET_CODE (op
) == CONST_INT
)
1395 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1397 if (GET_MODE_BITSIZE (mode
) <= 32)
1400 if (INTVAL (op
) < 0)
1404 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1405 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1407 else if (GET_CODE (op
) == CONST_DOUBLE
)
1409 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1413 return CONST_DOUBLE_HIGH (op
) == 0;
1416 return gpc_reg_operand (op
, mode
);
1419 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1422 got_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1424 return (GET_CODE (op
) == SYMBOL_REF
1425 || GET_CODE (op
) == CONST
1426 || GET_CODE (op
) == LABEL_REF
);
1429 /* Return 1 if the operand is a simple references that can be loaded via
1430 the GOT (labels involving addition aren't allowed). */
1433 got_no_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1435 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1438 /* Return the number of instructions it takes to form a constant in an
1439 integer register. */
1442 num_insns_constant_wide (HOST_WIDE_INT value
)
1444 /* signed constant loadable with {cal|addi} */
1445 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1448 /* constant loadable with {cau|addis} */
1449 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1452 #if HOST_BITS_PER_WIDE_INT == 64
1453 else if (TARGET_POWERPC64
)
1455 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1456 HOST_WIDE_INT high
= value
>> 31;
1458 if (high
== 0 || high
== -1)
1464 return num_insns_constant_wide (high
) + 1;
1466 return (num_insns_constant_wide (high
)
1467 + num_insns_constant_wide (low
) + 1);
1476 num_insns_constant (rtx op
, enum machine_mode mode
)
1478 if (GET_CODE (op
) == CONST_INT
)
1480 #if HOST_BITS_PER_WIDE_INT == 64
1481 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1482 && mask64_operand (op
, mode
))
1486 return num_insns_constant_wide (INTVAL (op
));
1489 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1494 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1495 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1496 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1499 else if (GET_CODE (op
) == CONST_DOUBLE
)
1505 int endian
= (WORDS_BIG_ENDIAN
== 0);
1507 if (mode
== VOIDmode
|| mode
== DImode
)
1509 high
= CONST_DOUBLE_HIGH (op
);
1510 low
= CONST_DOUBLE_LOW (op
);
1514 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1515 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1517 low
= l
[1 - endian
];
1521 return (num_insns_constant_wide (low
)
1522 + num_insns_constant_wide (high
));
1526 if (high
== 0 && low
>= 0)
1527 return num_insns_constant_wide (low
);
1529 else if (high
== -1 && low
< 0)
1530 return num_insns_constant_wide (low
);
1532 else if (mask64_operand (op
, mode
))
1536 return num_insns_constant_wide (high
) + 1;
1539 return (num_insns_constant_wide (high
)
1540 + num_insns_constant_wide (low
) + 1);
1548 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1549 register with one instruction per word. We only do this if we can
1550 safely read CONST_DOUBLE_{LOW,HIGH}. */
1553 easy_fp_constant (rtx op
, enum machine_mode mode
)
1555 if (GET_CODE (op
) != CONST_DOUBLE
1556 || GET_MODE (op
) != mode
1557 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1560 /* Consider all constants with -msoft-float to be easy. */
1561 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1565 /* If we are using V.4 style PIC, consider all constants to be hard. */
1566 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1569 #ifdef TARGET_RELOCATABLE
1570 /* Similarly if we are using -mrelocatable, consider all constants
1572 if (TARGET_RELOCATABLE
)
1581 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1582 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1584 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1585 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1586 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1587 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1590 else if (mode
== DFmode
)
1595 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1596 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1598 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1599 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1602 else if (mode
== SFmode
)
1607 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1608 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1610 return num_insns_constant_wide (l
) == 1;
1613 else if (mode
== DImode
)
1614 return ((TARGET_POWERPC64
1615 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1616 || (num_insns_constant (op
, DImode
) <= 2));
1618 else if (mode
== SImode
)
1624 /* Return nonzero if all elements of a vector have the same value. */
1627 easy_vector_same (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1631 units
= CONST_VECTOR_NUNITS (op
);
1633 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1634 for (i
= 1; i
< units
; ++i
)
1635 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
1642 /* Return 1 if the operand is a CONST_INT and can be put into a
1643 register without using memory. */
1646 easy_vector_constant (rtx op
, enum machine_mode mode
)
1650 if (GET_CODE (op
) != CONST_VECTOR
1655 if (zero_constant (op
, mode
)
1656 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
1657 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
1660 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
1663 if (TARGET_SPE
&& mode
== V1DImode
)
1666 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1667 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
1669 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1671 evmergelo r0, r0, r0
1674 I don't know how efficient it would be to allow bigger constants,
1675 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1676 instructions is better than a 64-bit memory load, but I don't
1677 have the e500 timing specs. */
1678 if (TARGET_SPE
&& mode
== V2SImode
1679 && cst
>= -0x7fff && cst
<= 0x7fff
1680 && cst2
>= -0x7fff && cst2
<= 0x7fff)
1687 if (EASY_VECTOR_15 (cst
, op
, mode
))
1689 if ((cst
& 0xffff) != ((cst
>> 16) & 0xffff))
1693 if (EASY_VECTOR_15 (cst
, op
, mode
))
1695 if ((cst
& 0xff) != ((cst
>> 8) & 0xff))
1699 if (EASY_VECTOR_15 (cst
, op
, mode
))
1705 if (TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
))
1711 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1714 easy_vector_constant_add_self (rtx op
, enum machine_mode mode
)
1718 if (!easy_vector_constant (op
, mode
))
1721 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1723 return TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
);
1727 output_vec_const_move (rtx
*operands
)
1730 enum machine_mode mode
;
1736 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
1737 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
1738 mode
= GET_MODE (dest
);
1742 if (zero_constant (vec
, mode
))
1743 return "vxor %0,%0,%0";
1744 else if (EASY_VECTOR_15_ADD_SELF (cst
, vec
, mode
))
1746 else if (easy_vector_constant (vec
, mode
))
1748 operands
[1] = GEN_INT (cst
);
1752 if (EASY_VECTOR_15 (cst
, vec
, mode
))
1754 operands
[1] = GEN_INT (cst
);
1755 return "vspltisw %0,%1";
1759 if (EASY_VECTOR_15 (cst
, vec
, mode
))
1761 operands
[1] = GEN_INT (cst
);
1762 return "vspltish %0,%1";
1766 if (EASY_VECTOR_15 (cst
, vec
, mode
))
1768 operands
[1] = GEN_INT (cst
);
1769 return "vspltisb %0,%1";
1781 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1782 pattern of V1DI, V4HI, and V2SF.
1784 FIXME: We should probably return # and add post reload
1785 splitters for these, but this way is so easy ;-).
1787 operands
[1] = GEN_INT (cst
);
1788 operands
[2] = GEN_INT (cst2
);
1790 return "li %0,%1\n\tevmergelo %0,%0,%0";
1792 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1798 /* Return 1 if the operand is the constant 0. This works for scalars
1799 as well as vectors. */
1801 zero_constant (rtx op
, enum machine_mode mode
)
1803 return op
== CONST0_RTX (mode
);
1806 /* Return 1 if the operand is 0.0. */
1808 zero_fp_constant (rtx op
, enum machine_mode mode
)
1810 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1813 /* Return 1 if the operand is in volatile memory. Note that during
1814 the RTL generation phase, memory_operand does not return TRUE for
1815 volatile memory references. So this function allows us to
1816 recognize volatile references where its safe. */
1819 volatile_mem_operand (rtx op
, enum machine_mode mode
)
1821 if (GET_CODE (op
) != MEM
)
1824 if (!MEM_VOLATILE_P (op
))
1827 if (mode
!= GET_MODE (op
))
1830 if (reload_completed
)
1831 return memory_operand (op
, mode
);
1833 if (reload_in_progress
)
1834 return strict_memory_address_p (mode
, XEXP (op
, 0));
1836 return memory_address_p (mode
, XEXP (op
, 0));
1839 /* Return 1 if the operand is an offsettable memory operand. */
1842 offsettable_mem_operand (rtx op
, enum machine_mode mode
)
1844 return ((GET_CODE (op
) == MEM
)
1845 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1846 mode
, XEXP (op
, 0)));
1849 /* Return 1 if the operand is either an easy FP constant (see above) or
1853 mem_or_easy_const_operand (rtx op
, enum machine_mode mode
)
1855 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1858 /* Return 1 if the operand is either a non-special register or an item
1859 that can be used as the operand of a `mode' add insn. */
1862 add_operand (rtx op
, enum machine_mode mode
)
1864 if (GET_CODE (op
) == CONST_INT
)
1865 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1866 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1868 return gpc_reg_operand (op
, mode
);
1871 /* Return 1 if OP is a constant but not a valid add_operand. */
1874 non_add_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1876 return (GET_CODE (op
) == CONST_INT
1877 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1878 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1881 /* Return 1 if the operand is a non-special register or a constant that
1882 can be used as the operand of an OR or XOR insn on the RS/6000. */
1885 logical_operand (rtx op
, enum machine_mode mode
)
1887 HOST_WIDE_INT opl
, oph
;
1889 if (gpc_reg_operand (op
, mode
))
1892 if (GET_CODE (op
) == CONST_INT
)
1894 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1896 #if HOST_BITS_PER_WIDE_INT <= 32
1897 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1901 else if (GET_CODE (op
) == CONST_DOUBLE
)
1903 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1906 opl
= CONST_DOUBLE_LOW (op
);
1907 oph
= CONST_DOUBLE_HIGH (op
);
1914 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1915 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1918 /* Return 1 if C is a constant that is not a logical operand (as
1919 above), but could be split into one. */
1922 non_logical_cint_operand (rtx op
, enum machine_mode mode
)
1924 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1925 && ! logical_operand (op
, mode
)
1926 && reg_or_logical_cint_operand (op
, mode
));
1929 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1930 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1931 Reject all ones and all zeros, since these should have been optimized
1932 away and confuse the making of MB and ME. */
1935 mask_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1937 HOST_WIDE_INT c
, lsb
;
1939 if (GET_CODE (op
) != CONST_INT
)
1944 /* Fail in 64-bit mode if the mask wraps around because the upper
1945 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1946 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1949 /* We don't change the number of transitions by inverting,
1950 so make sure we start with the LS bit zero. */
1954 /* Reject all zeros or all ones. */
1958 /* Find the first transition. */
1961 /* Invert to look for a second transition. */
1964 /* Erase first transition. */
1967 /* Find the second transition (if any). */
1970 /* Match if all the bits above are 1's (or c is zero). */
1974 /* Return 1 for the PowerPC64 rlwinm corner case. */
1977 mask_operand_wrap (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1979 HOST_WIDE_INT c
, lsb
;
1981 if (GET_CODE (op
) != CONST_INT
)
1986 if ((c
& 0x80000001) != 0x80000001)
2000 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2001 It is if there are no more than one 1->0 or 0->1 transitions.
2002 Reject all zeros, since zero should have been optimized away and
2003 confuses the making of MB and ME. */
2006 mask64_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2008 if (GET_CODE (op
) == CONST_INT
)
2010 HOST_WIDE_INT c
, lsb
;
2014 /* Reject all zeros. */
2018 /* We don't change the number of transitions by inverting,
2019 so make sure we start with the LS bit zero. */
2023 /* Find the transition, and check that all bits above are 1's. */
2026 /* Match if all the bits above are 1's (or c is zero). */
2032 /* Like mask64_operand, but allow up to three transitions. This
2033 predicate is used by insn patterns that generate two rldicl or
2034 rldicr machine insns. */
2037 mask64_2_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2039 if (GET_CODE (op
) == CONST_INT
)
2041 HOST_WIDE_INT c
, lsb
;
2045 /* Disallow all zeros. */
2049 /* We don't change the number of transitions by inverting,
2050 so make sure we start with the LS bit zero. */
2054 /* Find the first transition. */
2057 /* Invert to look for a second transition. */
2060 /* Erase first transition. */
2063 /* Find the second transition. */
2066 /* Invert to look for a third transition. */
2069 /* Erase second transition. */
2072 /* Find the third transition (if any). */
2075 /* Match if all the bits above are 1's (or c is zero). */
2081 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2082 implement ANDing by the mask IN. */
2084 build_mask64_2_operands (rtx in
, rtx
*out
)
2086 #if HOST_BITS_PER_WIDE_INT >= 64
2087 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
2090 if (GET_CODE (in
) != CONST_INT
)
2096 /* Assume c initially something like 0x00fff000000fffff. The idea
2097 is to rotate the word so that the middle ^^^^^^ group of zeros
2098 is at the MS end and can be cleared with an rldicl mask. We then
2099 rotate back and clear off the MS ^^ group of zeros with a
2101 c
= ~c
; /* c == 0xff000ffffff00000 */
2102 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
2103 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
2104 c
= ~c
; /* c == 0x00fff000000fffff */
2105 c
&= -lsb
; /* c == 0x00fff00000000000 */
2106 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2107 c
= ~c
; /* c == 0xff000fffffffffff */
2108 c
&= -lsb
; /* c == 0xff00000000000000 */
2110 while ((lsb
>>= 1) != 0)
2111 shift
++; /* shift == 44 on exit from loop */
2112 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2113 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2114 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2118 /* Assume c initially something like 0xff000f0000000000. The idea
2119 is to rotate the word so that the ^^^ middle group of zeros
2120 is at the LS end and can be cleared with an rldicr mask. We then
2121 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2123 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2124 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2125 c
= ~c
; /* c == 0x00fff0ffffffffff */
2126 c
&= -lsb
; /* c == 0x00fff00000000000 */
2127 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2128 c
= ~c
; /* c == 0xff000fffffffffff */
2129 c
&= -lsb
; /* c == 0xff00000000000000 */
2131 while ((lsb
>>= 1) != 0)
2132 shift
++; /* shift == 44 on exit from loop */
2133 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2134 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2135 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2138 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2139 masks will be all 1's. We are guaranteed more than one transition. */
2140 out
[0] = GEN_INT (64 - shift
);
2141 out
[1] = GEN_INT (m1
);
2142 out
[2] = GEN_INT (shift
);
2143 out
[3] = GEN_INT (m2
);
2151 /* Return 1 if the operand is either a non-special register or a constant
2152 that can be used as the operand of a PowerPC64 logical AND insn. */
2155 and64_operand (rtx op
, enum machine_mode mode
)
2157 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2158 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
2160 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
2163 /* Like the above, but also match constants that can be implemented
2164 with two rldicl or rldicr insns. */
2167 and64_2_operand (rtx op
, enum machine_mode mode
)
2169 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2170 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2172 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2175 /* Return 1 if the operand is either a non-special register or a
2176 constant that can be used as the operand of an RS/6000 logical AND insn. */
2179 and_operand (rtx op
, enum machine_mode mode
)
2181 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2182 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2184 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2187 /* Return 1 if the operand is a general register or memory operand. */
2190 reg_or_mem_operand (rtx op
, enum machine_mode mode
)
2192 return (gpc_reg_operand (op
, mode
)
2193 || memory_operand (op
, mode
)
2194 || macho_lo_sum_memory_operand (op
, mode
)
2195 || volatile_mem_operand (op
, mode
));
2198 /* Return 1 if the operand is a general register or memory operand without
2199 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2203 lwa_operand (rtx op
, enum machine_mode mode
)
2207 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2208 inner
= SUBREG_REG (inner
);
2210 return gpc_reg_operand (inner
, mode
)
2211 || (memory_operand (inner
, mode
)
2212 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2213 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2214 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2215 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2216 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2219 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2222 symbol_ref_operand (rtx op
, enum machine_mode mode
)
2224 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2227 return (GET_CODE (op
) == SYMBOL_REF
2228 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
)));
2231 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2232 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2235 call_operand (rtx op
, enum machine_mode mode
)
2237 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2240 return (GET_CODE (op
) == SYMBOL_REF
2241 || (GET_CODE (op
) == REG
2242 && (REGNO (op
) == LINK_REGISTER_REGNUM
2243 || REGNO (op
) == COUNT_REGISTER_REGNUM
2244 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2247 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2251 current_file_function_operand (rtx op
,
2252 enum machine_mode mode ATTRIBUTE_UNUSED
)
2254 return (GET_CODE (op
) == SYMBOL_REF
2255 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
))
2256 && (SYMBOL_REF_LOCAL_P (op
)
2257 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))));
2260 /* Return 1 if this operand is a valid input for a move insn. */
2263 input_operand (rtx op
, enum machine_mode mode
)
2265 /* Memory is always valid. */
2266 if (memory_operand (op
, mode
))
2269 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2270 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2273 /* For floating-point, easy constants are valid. */
2274 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2276 && easy_fp_constant (op
, mode
))
2279 /* Allow any integer constant. */
2280 if (GET_MODE_CLASS (mode
) == MODE_INT
2281 && (GET_CODE (op
) == CONST_INT
2282 || GET_CODE (op
) == CONST_DOUBLE
))
2285 /* Allow easy vector constants. */
2286 if (GET_CODE (op
) == CONST_VECTOR
2287 && easy_vector_constant (op
, mode
))
2290 /* For floating-point or multi-word mode, the only remaining valid type
2292 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2293 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2294 return register_operand (op
, mode
);
2296 /* The only cases left are integral modes one word or smaller (we
2297 do not get called for MODE_CC values). These can be in any
2299 if (register_operand (op
, mode
))
2302 /* A SYMBOL_REF referring to the TOC is valid. */
2303 if (legitimate_constant_pool_address_p (op
))
2306 /* A constant pool expression (relative to the TOC) is valid */
2307 if (toc_relative_expr_p (op
))
2310 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2312 if (DEFAULT_ABI
== ABI_V4
2313 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2314 && small_data_operand (op
, Pmode
))
2320 /* Return 1 for an operand in small memory on V.4/eabi. */
2323 small_data_operand (rtx op ATTRIBUTE_UNUSED
,
2324 enum machine_mode mode ATTRIBUTE_UNUSED
)
2329 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2332 if (DEFAULT_ABI
!= ABI_V4
)
2335 if (GET_CODE (op
) == SYMBOL_REF
)
2338 else if (GET_CODE (op
) != CONST
2339 || GET_CODE (XEXP (op
, 0)) != PLUS
2340 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2341 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2346 rtx sum
= XEXP (op
, 0);
2347 HOST_WIDE_INT summand
;
2349 /* We have to be careful here, because it is the referenced address
2350 that must be 32k from _SDA_BASE_, not just the symbol. */
2351 summand
= INTVAL (XEXP (sum
, 1));
2352 if (summand
< 0 || (unsigned HOST_WIDE_INT
) summand
> g_switch_value
)
2355 sym_ref
= XEXP (sum
, 0);
2358 return SYMBOL_REF_SMALL_P (sym_ref
);
2364 /* Return true, if operand is a memory operand and has a
2365 displacement divisible by 4. */
2368 word_offset_memref_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2373 if (!memory_operand (op
, mode
))
2376 addr
= XEXP (op
, 0);
2377 if (GET_CODE (addr
) == PLUS
2378 && GET_CODE (XEXP (addr
, 0)) == REG
2379 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
2380 off
= INTVAL (XEXP (addr
, 1));
2382 return (off
% 4) == 0;
2385 /* Return true if either operand is a general purpose register. */
2388 gpr_or_gpr_p (rtx op0
, rtx op1
)
2390 return ((REG_P (op0
) && INT_REGNO_P (REGNO (op0
)))
2391 || (REG_P (op1
) && INT_REGNO_P (REGNO (op1
))));
2395 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2398 constant_pool_expr_1 (rtx op
, int *have_sym
, int *have_toc
)
2400 switch (GET_CODE(op
))
2403 if (RS6000_SYMBOL_REF_TLS_P (op
))
2405 else if (CONSTANT_POOL_ADDRESS_P (op
))
2407 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2415 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2424 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2425 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2427 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2436 constant_pool_expr_p (rtx op
)
2440 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2444 toc_relative_expr_p (rtx op
)
2448 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2451 /* SPE offset addressing is limited to 5-bits worth of double words. */
2452 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2455 legitimate_constant_pool_address_p (rtx x
)
2458 && GET_CODE (x
) == PLUS
2459 && GET_CODE (XEXP (x
, 0)) == REG
2460 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
2461 && constant_pool_expr_p (XEXP (x
, 1)));
2465 legitimate_small_data_p (enum machine_mode mode
, rtx x
)
2467 return (DEFAULT_ABI
== ABI_V4
2468 && !flag_pic
&& !TARGET_TOC
2469 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
2470 && small_data_operand (x
, mode
));
2474 legitimate_offset_address_p (enum machine_mode mode
, rtx x
, int strict
)
2476 unsigned HOST_WIDE_INT offset
, extra
;
2478 if (GET_CODE (x
) != PLUS
)
2480 if (GET_CODE (XEXP (x
, 0)) != REG
)
2482 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2484 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2487 offset
= INTVAL (XEXP (x
, 1));
2495 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2496 which leaves the only valid constant offset of zero, which by
2497 canonicalization rules is also invalid. */
2504 /* SPE vector modes. */
2505 return SPE_CONST_OFFSET_OK (offset
);
2509 if (mode
== DFmode
|| !TARGET_POWERPC64
)
2511 else if (offset
& 3)
2517 if (mode
== TFmode
|| !TARGET_POWERPC64
)
2519 else if (offset
& 3)
2529 return (offset
+ extra
>= offset
) && (offset
+ extra
+ 0x8000 < 0x10000);
2533 legitimate_indexed_address_p (rtx x
, int strict
)
2537 if (GET_CODE (x
) != PLUS
)
2542 if (!REG_P (op0
) || !REG_P (op1
))
2545 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
2546 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
2547 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
2548 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
2552 legitimate_indirect_address_p (rtx x
, int strict
)
2554 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
2558 macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
)
2560 if (!TARGET_MACHO
|| !flag_pic
2561 || mode
!= SImode
|| GET_CODE(x
) != MEM
)
2565 if (GET_CODE (x
) != LO_SUM
)
2567 if (GET_CODE (XEXP (x
, 0)) != REG
)
2569 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), 0))
2573 return CONSTANT_P (x
);
2577 legitimate_lo_sum_address_p (enum machine_mode mode
, rtx x
, int strict
)
2579 if (GET_CODE (x
) != LO_SUM
)
2581 if (GET_CODE (XEXP (x
, 0)) != REG
)
2583 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2587 if (TARGET_ELF
|| TARGET_MACHO
)
2589 if (DEFAULT_ABI
!= ABI_AIX
&& DEFAULT_ABI
!= ABI_DARWIN
&& flag_pic
)
2593 if (GET_MODE_NUNITS (mode
) != 1)
2595 if (GET_MODE_BITSIZE (mode
) > 32
2596 && !(TARGET_HARD_FLOAT
&& TARGET_FPRS
&& mode
== DFmode
))
2599 return CONSTANT_P (x
);
2606 /* Try machine-dependent ways of modifying an illegitimate address
2607 to be legitimate. If we find one, return the new, valid address.
2608 This is used from only one place: `memory_address' in explow.c.
2610 OLDX is the address as it was before break_out_memory_refs was
2611 called. In some cases it is useful to look at this to decide what
2614 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2616 It is always safe for this function to do nothing. It exists to
2617 recognize opportunities to optimize the output.
2619 On RS/6000, first check for the sum of a register with a constant
2620 integer that is out of range. If so, generate code to add the
2621 constant with the low-order 16 bits masked to the register and force
2622 this result into another register (this can be done with `cau').
2623 Then generate an address of REG+(CONST&0xffff), allowing for the
2624 possibility of bit 16 being a one.
2626 Then check for the sum of a register and something not constant, try to
2627 load the other things into a register and return the sum. */
2630 rs6000_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
2631 enum machine_mode mode
)
2633 if (GET_CODE (x
) == SYMBOL_REF
)
2635 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
2637 return rs6000_legitimize_tls_address (x
, model
);
2640 if (GET_CODE (x
) == PLUS
2641 && GET_CODE (XEXP (x
, 0)) == REG
2642 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2643 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2645 HOST_WIDE_INT high_int
, low_int
;
2647 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2648 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2649 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2650 GEN_INT (high_int
)), 0);
2651 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2653 else if (GET_CODE (x
) == PLUS
2654 && GET_CODE (XEXP (x
, 0)) == REG
2655 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2656 && GET_MODE_NUNITS (mode
) == 1
2657 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2659 || (mode
!= DFmode
&& mode
!= TFmode
))
2660 && (TARGET_POWERPC64
|| mode
!= DImode
)
2663 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2664 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2666 else if (ALTIVEC_VECTOR_MODE (mode
))
2670 /* Make sure both operands are registers. */
2671 if (GET_CODE (x
) == PLUS
)
2672 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2673 force_reg (Pmode
, XEXP (x
, 1)));
2675 reg
= force_reg (Pmode
, x
);
2678 else if (SPE_VECTOR_MODE (mode
))
2680 /* We accept [reg + reg] and [reg + OFFSET]. */
2682 if (GET_CODE (x
) == PLUS
)
2684 rtx op1
= XEXP (x
, 0);
2685 rtx op2
= XEXP (x
, 1);
2687 op1
= force_reg (Pmode
, op1
);
2689 if (GET_CODE (op2
) != REG
2690 && (GET_CODE (op2
) != CONST_INT
2691 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2692 op2
= force_reg (Pmode
, op2
);
2694 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2697 return force_reg (Pmode
, x
);
2703 && GET_CODE (x
) != CONST_INT
2704 && GET_CODE (x
) != CONST_DOUBLE
2706 && GET_MODE_NUNITS (mode
) == 1
2707 && (GET_MODE_BITSIZE (mode
) <= 32
2708 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2710 rtx reg
= gen_reg_rtx (Pmode
);
2711 emit_insn (gen_elf_high (reg
, x
));
2712 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
2714 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2717 && ! MACHO_DYNAMIC_NO_PIC_P
2719 && GET_CODE (x
) != CONST_INT
2720 && GET_CODE (x
) != CONST_DOUBLE
2722 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2726 rtx reg
= gen_reg_rtx (Pmode
);
2727 emit_insn (gen_macho_high (reg
, x
));
2728 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
2731 && constant_pool_expr_p (x
)
2732 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2734 return create_TOC_reference (x
);
2740 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2742 static GTY(()) rtx rs6000_tls_symbol
;
2744 rs6000_tls_get_addr (void)
2746 if (!rs6000_tls_symbol
)
2747 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
2749 return rs6000_tls_symbol
;
2752 /* Construct the SYMBOL_REF for TLS GOT references. */
2754 static GTY(()) rtx rs6000_got_symbol
;
2756 rs6000_got_sym (void)
2758 if (!rs6000_got_symbol
)
2760 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
2761 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
2762 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
2765 return rs6000_got_symbol
;
2768 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2769 this (thread-local) address. */
2772 rs6000_legitimize_tls_address (rtx addr
, enum tls_model model
)
2776 dest
= gen_reg_rtx (Pmode
);
2777 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
2783 tlsreg
= gen_rtx_REG (Pmode
, 13);
2784 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
2788 tlsreg
= gen_rtx_REG (Pmode
, 2);
2789 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
2793 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
2797 tmp
= gen_reg_rtx (Pmode
);
2800 tlsreg
= gen_rtx_REG (Pmode
, 13);
2801 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
2805 tlsreg
= gen_rtx_REG (Pmode
, 2);
2806 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
2810 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
2812 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
2817 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
2820 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
2824 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
2827 rtx gsym
= rs6000_got_sym ();
2828 got
= gen_reg_rtx (Pmode
);
2830 rs6000_emit_move (got
, gsym
, Pmode
);
2834 static int tls_got_labelno
= 0;
2835 rtx tempLR
, lab
, tmp3
, mem
;
2838 ASM_GENERATE_INTERNAL_LABEL (buf
, "LTLS", tls_got_labelno
++);
2839 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
2840 tempLR
= gen_reg_rtx (Pmode
);
2841 tmp1
= gen_reg_rtx (Pmode
);
2842 tmp2
= gen_reg_rtx (Pmode
);
2843 tmp3
= gen_reg_rtx (Pmode
);
2844 mem
= gen_rtx_MEM (Pmode
, tmp1
);
2845 RTX_UNCHANGING_P (mem
) = 1;
2847 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, lab
,
2849 emit_move_insn (tmp1
, tempLR
);
2850 emit_move_insn (tmp2
, mem
);
2851 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
2852 last
= emit_move_insn (got
, tmp3
);
2853 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
2855 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
2857 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
2863 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
2865 r3
= gen_rtx_REG (Pmode
, 3);
2867 insn
= gen_tls_gd_64 (r3
, got
, addr
);
2869 insn
= gen_tls_gd_32 (r3
, got
, addr
);
2872 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2873 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2874 insn
= emit_call_insn (insn
);
2875 CONST_OR_PURE_CALL_P (insn
) = 1;
2876 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2877 insn
= get_insns ();
2879 emit_libcall_block (insn
, dest
, r3
, addr
);
2881 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
2883 r3
= gen_rtx_REG (Pmode
, 3);
2885 insn
= gen_tls_ld_64 (r3
, got
);
2887 insn
= gen_tls_ld_32 (r3
, got
);
2890 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2891 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2892 insn
= emit_call_insn (insn
);
2893 CONST_OR_PURE_CALL_P (insn
) = 1;
2894 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2895 insn
= get_insns ();
2897 tmp1
= gen_reg_rtx (Pmode
);
2898 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
2900 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
2901 if (rs6000_tls_size
== 16)
2904 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
2906 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
2908 else if (rs6000_tls_size
== 32)
2910 tmp2
= gen_reg_rtx (Pmode
);
2912 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
2914 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
2917 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
2919 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
2923 tmp2
= gen_reg_rtx (Pmode
);
2925 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
2927 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
2929 insn
= gen_rtx_SET (Pmode
, dest
,
2930 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
2936 /* IE, or 64 bit offset LE. */
2937 tmp2
= gen_reg_rtx (Pmode
);
2939 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
2941 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
2944 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
2946 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
2954 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2955 instruction definitions. */
2958 rs6000_tls_symbol_ref (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2960 return RS6000_SYMBOL_REF_TLS_P (x
);
2963 /* Return 1 if X contains a thread-local symbol. */
2966 rs6000_tls_referenced_p (rtx x
)
2968 if (! TARGET_HAVE_TLS
)
2971 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
2974 /* Return 1 if *X is a thread-local symbol. This is the same as
2975 rs6000_tls_symbol_ref except for the type of the unused argument. */
2978 rs6000_tls_symbol_ref_1 (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
2980 return RS6000_SYMBOL_REF_TLS_P (*x
);
2983 /* The convention appears to be to define this wherever it is used.
2984 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2985 is now used here. */
2986 #ifndef REG_MODE_OK_FOR_BASE_P
2987 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2990 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2991 replace the input X, or the original X if no replacement is called for.
2992 The output parameter *WIN is 1 if the calling macro should goto WIN,
2995 For RS/6000, we wish to handle large displacements off a base
2996 register by splitting the addend across an addiu/addis and the mem insn.
2997 This cuts number of extra insns needed from 3 to 1.
2999 On Darwin, we use this to generate code for floating point constants.
3000 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3001 The Darwin code is inside #if TARGET_MACHO because only then is
3002 machopic_function_base_name() defined. */
3004 rs6000_legitimize_reload_address (rtx x
, enum machine_mode mode
,
3005 int opnum
, int type
, int ind_levels ATTRIBUTE_UNUSED
, int *win
)
3007 /* We must recognize output that we have already generated ourselves. */
3008 if (GET_CODE (x
) == PLUS
3009 && GET_CODE (XEXP (x
, 0)) == PLUS
3010 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
3011 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3012 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3014 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3015 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3016 opnum
, (enum reload_type
)type
);
3022 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
3023 && GET_CODE (x
) == LO_SUM
3024 && GET_CODE (XEXP (x
, 0)) == PLUS
3025 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
3026 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
3027 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
3028 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
3029 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
3030 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
3031 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
3033 /* Result of previous invocation of this function on Darwin
3034 floating point constant. */
3035 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3036 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3037 opnum
, (enum reload_type
)type
);
3042 if (GET_CODE (x
) == PLUS
3043 && GET_CODE (XEXP (x
, 0)) == REG
3044 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
3045 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
3046 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3047 && !SPE_VECTOR_MODE (mode
)
3048 && !ALTIVEC_VECTOR_MODE (mode
))
3050 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
3051 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
3053 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3055 /* Check for 32-bit overflow. */
3056 if (high
+ low
!= val
)
3062 /* Reload the high part into a base reg; leave the low part
3063 in the mem directly. */
3065 x
= gen_rtx_PLUS (GET_MODE (x
),
3066 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
3070 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3071 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3072 opnum
, (enum reload_type
)type
);
3077 if (GET_CODE (x
) == SYMBOL_REF
3078 && DEFAULT_ABI
== ABI_DARWIN
3079 && !ALTIVEC_VECTOR_MODE (mode
)
3080 && (flag_pic
|| MACHO_DYNAMIC_NO_PIC_P
)
3081 /* Don't do this for TFmode, since the result isn't offsettable. */
3086 rtx offset
= gen_rtx_CONST (Pmode
,
3087 gen_rtx_MINUS (Pmode
, x
,
3088 gen_rtx_SYMBOL_REF (Pmode
,
3089 machopic_function_base_name ())));
3090 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3091 gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
3092 gen_rtx_HIGH (Pmode
, offset
)), offset
);
3095 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3096 gen_rtx_HIGH (Pmode
, x
), x
);
3098 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3099 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3100 opnum
, (enum reload_type
)type
);
3106 && constant_pool_expr_p (x
)
3107 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
3109 (x
) = create_TOC_reference (x
);
3117 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3118 that is a valid memory address for an instruction.
3119 The MODE argument is the machine mode for the MEM expression
3120 that wants to use this address.
3122 On the RS/6000, there are four valid address: a SYMBOL_REF that
3123 refers to a constant pool entry of an address (or the sum of it
3124 plus a constant), a short (16-bit signed) constant plus a register,
3125 the sum of two registers, or a register indirect, possibly with an
3126 auto-increment. For DFmode and DImode with a constant plus register,
3127 we must ensure that both words are addressable or PowerPC64 with offset
3130 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3131 32-bit DImode, TImode), indexed addressing cannot be used because
3132 adjacent memory cells are accessed by adding word-sized offsets
3133 during assembly output. */
3135 rs6000_legitimate_address (enum machine_mode mode
, rtx x
, int reg_ok_strict
)
3137 if (RS6000_SYMBOL_REF_TLS_P (x
))
3139 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3141 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3142 && !ALTIVEC_VECTOR_MODE (mode
)
3143 && !SPE_VECTOR_MODE (mode
)
3145 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3147 if (legitimate_small_data_p (mode
, x
))
3149 if (legitimate_constant_pool_address_p (x
))
3151 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3153 && GET_CODE (x
) == PLUS
3154 && GET_CODE (XEXP (x
, 0)) == REG
3155 && XEXP (x
, 0) == virtual_stack_vars_rtx
3156 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3158 if (legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3161 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3163 || (mode
!= DFmode
&& mode
!= TFmode
))
3164 && (TARGET_POWERPC64
|| mode
!= DImode
)
3165 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3167 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3172 /* Go to LABEL if ADDR (a legitimate address expression)
3173 has an effect that depends on the machine mode it is used for.
3175 On the RS/6000 this is true of all integral offsets (since AltiVec
3176 modes don't allow them) or is a pre-increment or decrement.
3178 ??? Except that due to conceptual problems in offsettable_address_p
3179 we can't really report the problems of integral offsets. So leave
3180 this assuming that the adjustable offset must be valid for the
3181 sub-words of a TFmode operand, which is what we had before. */
3184 rs6000_mode_dependent_address (rtx addr
)
3186 switch (GET_CODE (addr
))
3189 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3191 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3192 return val
+ 12 + 0x8000 >= 0x10000;
3201 return TARGET_UPDATE
;
3210 /* Try to output insns to set TARGET equal to the constant C if it can
3211 be done in less than N insns. Do all computations in MODE.
3212 Returns the place where the output has been placed if it can be
3213 done and the insns have been emitted. If it would take more than N
3214 insns, zero is returned and no insns and emitted. */
3217 rs6000_emit_set_const (rtx dest
, enum machine_mode mode
,
3218 rtx source
, int n ATTRIBUTE_UNUSED
)
3220 rtx result
, insn
, set
;
3221 HOST_WIDE_INT c0
, c1
;
3223 if (mode
== QImode
|| mode
== HImode
)
3226 dest
= gen_reg_rtx (mode
);
3227 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
3230 else if (mode
== SImode
)
3232 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
3234 emit_insn (gen_rtx_SET (VOIDmode
, result
,
3235 GEN_INT (INTVAL (source
)
3236 & (~ (HOST_WIDE_INT
) 0xffff))));
3237 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
3238 gen_rtx_IOR (SImode
, result
,
3239 GEN_INT (INTVAL (source
) & 0xffff))));
3242 else if (mode
== DImode
)
3244 if (GET_CODE (source
) == CONST_INT
)
3246 c0
= INTVAL (source
);
3249 else if (GET_CODE (source
) == CONST_DOUBLE
)
3251 #if HOST_BITS_PER_WIDE_INT >= 64
3252 c0
= CONST_DOUBLE_LOW (source
);
3255 c0
= CONST_DOUBLE_LOW (source
);
3256 c1
= CONST_DOUBLE_HIGH (source
);
3262 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
3267 insn
= get_last_insn ();
3268 set
= single_set (insn
);
3269 if (! CONSTANT_P (SET_SRC (set
)))
3270 set_unique_reg_note (insn
, REG_EQUAL
, source
);
3275 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3276 fall back to a straight forward decomposition. We do this to avoid
3277 exponential run times encountered when looking for longer sequences
3278 with rs6000_emit_set_const. */
3280 rs6000_emit_set_long_const (rtx dest
, HOST_WIDE_INT c1
, HOST_WIDE_INT c2
)
3282 if (!TARGET_POWERPC64
)
3284 rtx operand1
, operand2
;
3286 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
3288 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
3290 emit_move_insn (operand1
, GEN_INT (c1
));
3291 emit_move_insn (operand2
, GEN_INT (c2
));
3295 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
3298 ud2
= (c1
& 0xffff0000) >> 16;
3299 #if HOST_BITS_PER_WIDE_INT >= 64
3303 ud4
= (c2
& 0xffff0000) >> 16;
3305 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
3306 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
3309 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
3311 emit_move_insn (dest
, GEN_INT (ud1
));
3314 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
3315 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
3318 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
3321 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
3323 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3325 else if ((ud4
== 0xffff && (ud3
& 0x8000))
3326 || (ud4
== 0 && ! (ud3
& 0x8000)))
3329 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
3332 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
3335 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
3336 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
3338 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3343 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
3346 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
3349 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
3351 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
3353 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
3354 GEN_INT (ud2
<< 16)));
3356 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3362 /* Emit a move from SOURCE to DEST in mode MODE. */
3364 rs6000_emit_move (rtx dest
, rtx source
, enum machine_mode mode
)
3368 operands
[1] = source
;
3370 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3371 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3372 && ! FLOAT_MODE_P (mode
)
3373 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3375 /* FIXME. This should never happen. */
3376 /* Since it seems that it does, do the safe thing and convert
3378 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
3380 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3381 && ! FLOAT_MODE_P (mode
)
3382 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
3383 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
3384 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
3385 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
3388 /* Check if GCC is setting up a block move that will end up using FP
3389 registers as temporaries. We must make sure this is acceptable. */
3390 if (GET_CODE (operands
[0]) == MEM
3391 && GET_CODE (operands
[1]) == MEM
3393 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
3394 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
3395 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
3396 ? 32 : MEM_ALIGN (operands
[0])))
3397 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
3399 : MEM_ALIGN (operands
[1]))))
3400 && ! MEM_VOLATILE_P (operands
[0])
3401 && ! MEM_VOLATILE_P (operands
[1]))
3403 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
3404 adjust_address (operands
[1], SImode
, 0));
3405 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
3406 adjust_address (operands
[1], SImode
, 4));
3410 if (!no_new_pseudos
)
3412 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
3413 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
3414 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
3416 rtx reg
= gen_reg_rtx (word_mode
);
3418 emit_insn (gen_rtx_SET (word_mode
, reg
,
3419 gen_rtx_ZERO_EXTEND (word_mode
,
3421 operands
[1] = gen_lowpart (mode
, reg
);
3423 if (GET_CODE (operands
[0]) != REG
)
3424 operands
[1] = force_reg (mode
, operands
[1]);
3427 if (mode
== SFmode
&& ! TARGET_POWERPC
3428 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3429 && GET_CODE (operands
[0]) == MEM
)
3433 if (reload_in_progress
|| reload_completed
)
3434 regnum
= true_regnum (operands
[1]);
3435 else if (GET_CODE (operands
[1]) == REG
)
3436 regnum
= REGNO (operands
[1]);
3440 /* If operands[1] is a register, on POWER it may have
3441 double-precision data in it, so truncate it to single
3443 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
3446 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
3447 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
3448 operands
[1] = newreg
;
3452 /* Recognize the case where operand[1] is a reference to thread-local
3453 data and load its address to a register. */
3454 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
3456 enum tls_model model
= SYMBOL_REF_TLS_MODEL (operands
[1]);
3458 operands
[1] = rs6000_legitimize_tls_address (operands
[1], model
);
3461 /* Handle the case where reload calls us with an invalid address. */
3462 if (reload_in_progress
&& mode
== Pmode
3463 && (! general_operand (operands
[1], mode
)
3464 || ! nonimmediate_operand (operands
[0], mode
)))
3467 /* Handle the case of CONSTANT_P_RTX. */
3468 if (GET_CODE (operands
[1]) == CONSTANT_P_RTX
)
3471 /* 128-bit constant floating-point values on Darwin should really be
3472 loaded as two parts. */
3473 if ((DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
3474 && TARGET_HARD_FLOAT
&& TARGET_FPRS
&& TARGET_LONG_DOUBLE_128
3475 && mode
== TFmode
&& GET_CODE (operands
[1]) == CONST_DOUBLE
)
3477 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3478 know how to get a DFmode SUBREG of a TFmode. */
3479 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
, 0),
3480 simplify_gen_subreg (DImode
, operands
[1], mode
, 0),
3482 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
,
3483 GET_MODE_SIZE (DImode
)),
3484 simplify_gen_subreg (DImode
, operands
[1], mode
,
3485 GET_MODE_SIZE (DImode
)),
3490 /* FIXME: In the long term, this switch statement should go away
3491 and be replaced by a sequence of tests based on things like
3497 if (CONSTANT_P (operands
[1])
3498 && GET_CODE (operands
[1]) != CONST_INT
)
3499 operands
[1] = force_const_mem (mode
, operands
[1]);
3505 if (CONSTANT_P (operands
[1])
3506 && ! easy_fp_constant (operands
[1], mode
))
3507 operands
[1] = force_const_mem (mode
, operands
[1]);
3518 if (CONSTANT_P (operands
[1])
3519 && !easy_vector_constant (operands
[1], mode
))
3520 operands
[1] = force_const_mem (mode
, operands
[1]);
3525 /* Use default pattern for address of ELF small data */
3528 && DEFAULT_ABI
== ABI_V4
3529 && (GET_CODE (operands
[1]) == SYMBOL_REF
3530 || GET_CODE (operands
[1]) == CONST
)
3531 && small_data_operand (operands
[1], mode
))
3533 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3537 if (DEFAULT_ABI
== ABI_V4
3538 && mode
== Pmode
&& mode
== SImode
3539 && flag_pic
== 1 && got_operand (operands
[1], mode
))
3541 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
3545 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
3549 && CONSTANT_P (operands
[1])
3550 && GET_CODE (operands
[1]) != HIGH
3551 && GET_CODE (operands
[1]) != CONST_INT
)
3553 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
3555 /* If this is a function address on -mcall-aixdesc,
3556 convert it to the address of the descriptor. */
3557 if (DEFAULT_ABI
== ABI_AIX
3558 && GET_CODE (operands
[1]) == SYMBOL_REF
3559 && XSTR (operands
[1], 0)[0] == '.')
3561 const char *name
= XSTR (operands
[1], 0);
3563 while (*name
== '.')
3565 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
3566 CONSTANT_POOL_ADDRESS_P (new_ref
)
3567 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
3568 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
3569 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
3570 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
3571 operands
[1] = new_ref
;
3574 if (DEFAULT_ABI
== ABI_DARWIN
)
3577 if (MACHO_DYNAMIC_NO_PIC_P
)
3579 /* Take care of any required data indirection. */
3580 operands
[1] = rs6000_machopic_legitimize_pic_address (
3581 operands
[1], mode
, operands
[0]);
3582 if (operands
[0] != operands
[1])
3583 emit_insn (gen_rtx_SET (VOIDmode
,
3584 operands
[0], operands
[1]));
3588 emit_insn (gen_macho_high (target
, operands
[1]));
3589 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
3593 emit_insn (gen_elf_high (target
, operands
[1]));
3594 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
3598 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3599 and we have put it in the TOC, we just need to make a TOC-relative
3602 && GET_CODE (operands
[1]) == SYMBOL_REF
3603 && constant_pool_expr_p (operands
[1])
3604 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
3605 get_pool_mode (operands
[1])))
3607 operands
[1] = create_TOC_reference (operands
[1]);
3609 else if (mode
== Pmode
3610 && CONSTANT_P (operands
[1])
3611 && ((GET_CODE (operands
[1]) != CONST_INT
3612 && ! easy_fp_constant (operands
[1], mode
))
3613 || (GET_CODE (operands
[1]) == CONST_INT
3614 && num_insns_constant (operands
[1], mode
) > 2)
3615 || (GET_CODE (operands
[0]) == REG
3616 && FP_REGNO_P (REGNO (operands
[0]))))
3617 && GET_CODE (operands
[1]) != HIGH
3618 && ! legitimate_constant_pool_address_p (operands
[1])
3619 && ! toc_relative_expr_p (operands
[1]))
3621 /* Emit a USE operation so that the constant isn't deleted if
3622 expensive optimizations are turned on because nobody
3623 references it. This should only be done for operands that
3624 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3625 This should not be done for operands that contain LABEL_REFs.
3626 For now, we just handle the obvious case. */
3627 if (GET_CODE (operands
[1]) != LABEL_REF
)
3628 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3631 /* Darwin uses a special PIC legitimizer. */
3632 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3635 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3637 if (operands
[0] != operands
[1])
3638 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3643 /* If we are to limit the number of things we put in the TOC and
3644 this is a symbol plus a constant we can add in one insn,
3645 just put the symbol in the TOC and add the constant. Don't do
3646 this if reload is in progress. */
3647 if (GET_CODE (operands
[1]) == CONST
3648 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3649 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3650 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3651 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3652 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3653 && ! side_effects_p (operands
[0]))
3656 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3657 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3659 sym
= force_reg (mode
, sym
);
3661 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3663 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3667 operands
[1] = force_const_mem (mode
, operands
[1]);
3670 && constant_pool_expr_p (XEXP (operands
[1], 0))
3671 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3672 get_pool_constant (XEXP (operands
[1], 0)),
3673 get_pool_mode (XEXP (operands
[1], 0))))
3676 = gen_rtx_MEM (mode
,
3677 create_TOC_reference (XEXP (operands
[1], 0)));
3678 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3679 RTX_UNCHANGING_P (operands
[1]) = 1;
3685 if (GET_CODE (operands
[0]) == MEM
3686 && GET_CODE (XEXP (operands
[0], 0)) != REG
3687 && ! reload_in_progress
)
3689 = replace_equiv_address (operands
[0],
3690 copy_addr_to_reg (XEXP (operands
[0], 0)));
3692 if (GET_CODE (operands
[1]) == MEM
3693 && GET_CODE (XEXP (operands
[1], 0)) != REG
3694 && ! reload_in_progress
)
3696 = replace_equiv_address (operands
[1],
3697 copy_addr_to_reg (XEXP (operands
[1], 0)));
3700 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3702 gen_rtx_SET (VOIDmode
,
3703 operands
[0], operands
[1]),
3704 gen_rtx_CLOBBER (VOIDmode
,
3705 gen_rtx_SCRATCH (SImode
)))));
3714 /* Above, we may have called force_const_mem which may have returned
3715 an invalid address. If we can, fix this up; otherwise, reload will
3716 have to deal with it. */
3717 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
3718 operands
[1] = validize_mem (operands
[1]);
3721 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3724 /* Nonzero if we can use a floating-point register to pass this arg. */
3725 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3726 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3727 && (CUM)->fregno <= FP_ARG_MAX_REG \
3728 && TARGET_HARD_FLOAT && TARGET_FPRS)
3730 /* Nonzero if we can use an AltiVec register to pass this arg. */
3731 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3732 (ALTIVEC_VECTOR_MODE (MODE) \
3733 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3734 && TARGET_ALTIVEC_ABI \
3737 /* Return a nonzero value to say to return the function value in
3738 memory, just as large structures are always returned. TYPE will be
3739 the data type of the value, and FNTYPE will be the type of the
3740 function doing the returning, or @code{NULL} for libcalls.
3742 The AIX ABI for the RS/6000 specifies that all structures are
3743 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3744 specifies that structures <= 8 bytes are returned in r3/r4, but a
3745 draft put them in memory, and GCC used to implement the draft
3746 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3747 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3748 compatibility can change DRAFT_V4_STRUCT_RET to override the
3749 default, and -m switches get the final word. See
3750 rs6000_override_options for more details.
3752 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3753 long double support is enabled. These values are returned in memory.
3755 int_size_in_bytes returns -1 for variable size objects, which go in
3756 memory always. The cast to unsigned makes -1 > 8. */
/* NOTE(review): this view is garbled -- the original lines carrying the
   return type, braces and return statements are missing, so the code is
   left byte-identical and only comments are added.  */
/* Decide whether a value of TYPE is returned in memory rather than in
   registers; FNTYPE is unused (see the block comment above for the
   ABI rationale).  */
3759 rs6000_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
/* Aggregates go in memory under TARGET_AIX_STRUCT_RET, or whenever
   larger than 8 bytes; the unsigned cast makes the -1 of a
   variable-sized object compare greater than 8 (per the comment at
   the top of this function in SOURCE).  */
3761 if (AGGREGATE_TYPE_P (type
)
3762 && (TARGET_AIX_STRUCT_RET
3763 || (unsigned HOST_WIDE_INT
) int_size_in_bytes (type
) > 8))
/* Under the SVR4 (V.4) ABI, 128-bit long double (TFmode) is also
   returned in memory.  */
3765 if (DEFAULT_ABI
== ABI_V4
&& TYPE_MODE (type
) == TFmode
)
3770 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3771 for a call to a function whose data type is FNTYPE.
3772 For a library call, FNTYPE is 0.
3774 For incoming args we set the number of arguments in the prototype large
3775 so we never return a PARALLEL. */
/* NOTE(review): extraction is garbled here; several original lines
   (braces, conditions such as the `incoming` test and the longcall
   guard's first line) are missing.  Code left byte-identical; only
   comments added.  */
/* Initialize *CUM for a call to a function of type FNTYPE (0 for a
   library call).  INCOMING selects the callee-side setup; LIBCALL
   marks compiler-generated library calls.  */
3778 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
3779 rtx libname ATTRIBUTE_UNUSED
, int incoming
, int libcall
)
3781 static CUMULATIVE_ARGS zero_cumulative
;
/* Start from an all-zero state, then fill in the register cursors.  */
3783 *cum
= zero_cumulative
;
3785 cum
->fregno
= FP_ARG_MIN_REG
;
3786 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
3787 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
/* V.4 library calls are tagged so the call expander can distinguish
   them from normal calls.  */
3788 cum
->call_cookie
= ((DEFAULT_ABI
== ABI_V4
&& libcall
)
3789 ? CALL_LIBCALL
: CALL_NORMAL
);
3790 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
/* stdarg: prototype present and its last argument is not `void',
   i.e. the parameter list ends with an ellipsis.  */
3791 cum
->stdarg
= fntype
3792 && (TYPE_ARG_TYPES (fntype
) != 0
3793 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3794 != void_type_node
));
3797 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
3799 else if (cum
->prototype
)
3800 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
3801 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
3802 || rs6000_return_in_memory (TREE_TYPE (fntype
),
3806 cum
->nargs_prototype
= 0;
3808 /* Check for a longcall attribute. */
/* NOTE(review): the line guarding this with `fntype &&' appears to be
   missing from this view.  shortcall overrides longcall.  */
3810 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
3811 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
3812 cum
->call_cookie
= CALL_LONG
;
/* Optional -mdebug=arg trace of the computed state.  */
3814 if (TARGET_DEBUG_ARG
)
3816 fprintf (stderr
, "\ninit_cumulative_args:");
3819 tree ret_type
= TREE_TYPE (fntype
);
3820 fprintf (stderr
, " ret code = %s,",
3821 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3824 if (cum
->call_cookie
& CALL_LONG
)
3825 fprintf (stderr
, " longcall,");
3827 fprintf (stderr
, " proto = %d, nargs = %d\n",
3828 cum
->prototype
, cum
->nargs_prototype
);
3832 /* If defined, a C expression which determines whether, and in which
3833 direction, to pad out an argument with extra space. The value
3834 should be of type `enum direction': either `upward' to pad above
3835 the argument, `downward' to pad below, or `none' to inhibit
3838 For the AIX ABI structs are always stored left shifted in their
/* NOTE(review): garbled extraction -- the return type line, braces,
   `#endif's and the `return upward/downward' lines are missing.  Code
   left byte-identical; only comments added.  */
/* Direction (enum direction) in which an argument of MODE/TYPE is
   padded; see the block comment preceding this function.  */
3842 function_arg_padding (enum machine_mode mode
, tree type
)
/* Per-target overrides; default both knobs to 0 when the target
   headers did not define them.  */
3844 #ifndef AGGREGATE_PADDING_FIXED
3845 #define AGGREGATE_PADDING_FIXED 0
3847 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3848 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3851 if (!AGGREGATE_PADDING_FIXED
)
3853 /* GCC used to pass structures of the same size as integer types as
3854 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3855 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3856 passed padded downward, except that -mstrict-align further
3857 muddied the water in that multi-component structures of 2 and 4
3858 bytes in size were passed padded upward.
3860 The following arranges for best compatibility with previous
3861 versions of gcc, but removes the -mstrict-align dependency. */
3862 if (BYTES_BIG_ENDIAN
)
/* Compute the argument's byte size: from TYPE when the mode is BLK
   and the size is a compile-time constant, else from the mode.  */
3864 HOST_WIDE_INT size
= 0;
3866 if (mode
== BLKmode
)
3868 if (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
)
3869 size
= int_size_in_bytes (type
);
3872 size
= GET_MODE_SIZE (mode
);
3874 if (size
== 1 || size
== 2 || size
== 4)
3880 if (AGGREGATES_PAD_UPWARD_ALWAYS
)
3882 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3886 /* Fall back to the default. */
3887 return DEFAULT_FUNCTION_ARG_PADDING (mode
, type
);
3890 /* If defined, a C expression that gives the alignment boundary, in bits,
3891 of an argument with the specified mode and type. If it is not defined,
3892 PARM_BOUNDARY is used for all arguments.
3894 V.4 wants long longs to be double word aligned. */
/* NOTE(review): the `return' lines for the first three branches are
   missing from this garbled view (presumably 64/64/128 bits -- TODO
   confirm against the original source).  Code left byte-identical.  */
/* Alignment boundary, in bits, for an argument of MODE; V.4 wants
   long long / double to be doubleword aligned (see comment above).  */
3897 function_arg_boundary (enum machine_mode mode
, tree type ATTRIBUTE_UNUSED
)
3899 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3901 else if (SPE_VECTOR_MODE (mode
))
3903 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
/* Everything else uses the default parameter boundary.  */
3906 return PARM_BOUNDARY
;
3909 /* Update the data in CUM to advance over an argument
3910 of mode MODE and data type TYPE.
3911 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): garbled extraction with many dropped lines (braces,
   some register-advance statements).  Code left byte-identical; only
   comments added.  */
/* Advance *CUM past an argument of MODE/TYPE (TYPE may be null for
   libcalls); NAMED is nonzero for a named parameter.  */
3914 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
3915 tree type
, int named
)
3917 cum
->nargs_prototype
--;
/* --- AltiVec vector arguments --- */
3919 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3921 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
3924 /* In variable-argument functions, vector arguments get GPRs allocated
3925 even if they are going to be passed in a vector register. */
3926 if (cum
->stdarg
&& DEFAULT_ABI
!= ABI_V4
)
3930 /* Vector parameters must be 16-byte aligned. This places
3931 them at 2 mod 4 in terms of words in 32-bit mode, since
3932 the parameter save area starts at offset 24 from the
3933 stack. In 64-bit mode, they just have to start on an
3934 even word, since the parameter save area is 16-byte
3935 aligned. Space for GPRs is reserved even if the argument
3936 will be passed in memory. */
3938 align
= ((6 - (cum
->words
& 3)) & 3);
3940 align
= cum
->words
& 1;
3941 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3943 if (TARGET_DEBUG_ARG
)
3945 fprintf (stderr
, "function_adv: words = %2d, align=%d, ",
3947 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s\n",
3948 cum
->nargs_prototype
, cum
->prototype
,
3949 GET_MODE_NAME (mode
));
/* --- SPE vectors that still fit in GPRs --- */
3953 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3955 && cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
/* --- SVR4 (V.4) ABI --- */
3957 else if (DEFAULT_ABI
== ABI_V4
)
/* Scalar float goes in FPRs while any remain, else on the stack
   (doubleword-aligned there).  */
3959 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3960 && (mode
== SFmode
|| mode
== DFmode
))
3962 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3967 cum
->words
+= cum
->words
& 1;
3968 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3974 int gregno
= cum
->sysv_gregno
;
3976 /* Aggregates and IEEE quad get passed by reference. */
3977 if ((type
&& AGGREGATE_TYPE_P (type
))
3981 n_words
= RS6000_ARG_SIZE (mode
, type
);
3983 /* Long long and SPE vectors are put in odd registers. */
3984 if (n_words
== 2 && (gregno
& 1) == 0)
3987 /* Long long and SPE vectors are not split between registers
3989 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3991 /* Long long is aligned on the stack. */
3993 cum
->words
+= cum
->words
& 1;
3994 cum
->words
+= n_words
;
3997 /* Note: continuing to accumulate gregno past when we've started
3998 spilling to the stack indicates the fact that we've started
3999 spilling to the stack to expand_builtin_saveregs. */
4000 cum
->sysv_gregno
= gregno
+ n_words
;
4003 if (TARGET_DEBUG_ARG
)
4005 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4006 cum
->words
, cum
->fregno
);
4007 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
4008 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
4009 fprintf (stderr
, "mode = %4s, named = %d\n",
4010 GET_MODE_NAME (mode
), named
);
/* --- AIX / Darwin ABIs --- */
/* Pad to a doubleword boundary in 32-bit mode when the argument
   requires 64-bit alignment.  */
4015 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
4016 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
4018 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
/* Floats also consume FPRs; TFmode takes two.  */
4020 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
4021 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4022 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
4024 if (TARGET_DEBUG_ARG
)
4026 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4027 cum
->words
, cum
->fregno
);
4028 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
4029 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
4030 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
4035 /* Determine where to put a SIMD argument on the SPE. */
/* NOTE(review): garbled view -- the TYPE parameter line, braces and a
   trailing `return NULL_RTX'-style fallthrough appear to be missing.
   Code left byte-identical; only comments added.  */
/* Where to put an SPE SIMD argument: either a PARALLEL of two SImode
   GPR pieces or a single register, per the cursor in *CUM.  */
4038 rs6000_spe_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4043 int gregno
= cum
->sysv_gregno
;
4044 int n_words
= RS6000_ARG_SIZE (mode
, type
);
4046 /* SPE vectors are put in odd registers. */
4047 if (n_words
== 2 && (gregno
& 1) == 0)
4050 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
/* Describe the value as two SImode halves at byte offsets 0 and 4.  */
4053 enum machine_mode m
= SImode
;
4055 r1
= gen_rtx_REG (m
, gregno
);
4056 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
4057 r2
= gen_rtx_REG (m
, gregno
+ 1);
4058 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
4059 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
4066 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
4067 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
4073 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
/* NOTE(review): heavily garbled -- many interior lines of the
   gen_rtx_* argument lists are missing, so the PARALLEL shapes below
   are only partially visible.  Code left byte-identical; comments
   added.  */
/* Placement of DFmode/DImode/BLKmode arguments for -mpowerpc64 with
   the 32-bit ABI, where such values may be split across GPRs and the
   stack (see the comment above this function).  */
4076 rs6000_mixed_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4077 tree type
, int align_words
)
4081 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4082 in vararg list into zero, one or two GPRs */
/* No GPRs left: PARALLEL with a NULL_RTX first entry marks a value
   passed entirely (or partly) in memory.  */
4083 if (align_words
>= GP_ARG_NUM_REG
)
4084 return gen_rtx_PARALLEL (DFmode
,
4086 gen_rtx_EXPR_LIST (VOIDmode
,
4087 NULL_RTX
, const0_rtx
),
4088 gen_rtx_EXPR_LIST (VOIDmode
,
4092 else if (align_words
+ RS6000_ARG_SIZE (mode
, type
)
4094 /* If this is partially on the stack, then we only
4095 include the portion actually in registers here. */
4096 return gen_rtx_PARALLEL (DFmode
,
4098 gen_rtx_EXPR_LIST (VOIDmode
,
4099 gen_rtx_REG (SImode
,
4103 gen_rtx_EXPR_LIST (VOIDmode
,
4108 /* split a DFmode arg into two GPRs */
4109 return gen_rtx_PARALLEL (DFmode
,
4111 gen_rtx_EXPR_LIST (VOIDmode
,
4112 gen_rtx_REG (SImode
,
4116 gen_rtx_EXPR_LIST (VOIDmode
,
4117 gen_rtx_REG (SImode
,
4121 gen_rtx_EXPR_LIST (VOIDmode
,
4122 gen_rtx_REG (mode
, cum
->fregno
),
4125 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
/* DImode: two GPR halves when both fit, else last-GPR + memory.  */
4127 else if (mode
== DImode
)
4129 if (align_words
< GP_ARG_NUM_REG
- 1)
4130 return gen_rtx_PARALLEL (DImode
,
4132 gen_rtx_EXPR_LIST (VOIDmode
,
4133 gen_rtx_REG (SImode
,
4137 gen_rtx_EXPR_LIST (VOIDmode
,
4138 gen_rtx_REG (SImode
,
4142 else if (align_words
== GP_ARG_NUM_REG
- 1)
4143 return gen_rtx_PARALLEL (DImode
,
4145 gen_rtx_EXPR_LIST (VOIDmode
,
4146 NULL_RTX
, const0_rtx
),
4147 gen_rtx_EXPR_LIST (VOIDmode
,
4148 gen_rtx_REG (SImode
,
/* BLKmode aggregate: describe as many SImode word pieces as fit in
   the remaining GPRs.  */
4153 else if (mode
== BLKmode
&& align_words
<= (GP_ARG_NUM_REG
- 1))
4156 int size
= int_size_in_bytes (type
);
4157 int no_units
= ((size
- 1) / 4) + 1;
4158 int max_no_words
= GP_ARG_NUM_REG
- align_words
;
4159 int rtlvec_len
= no_units
< max_no_words
? no_units
: max_no_words
;
4160 rtx
*rtlvec
= (rtx
*) alloca (rtlvec_len
* sizeof (rtx
));
4162 memset ((char *) rtlvec
, 0, rtlvec_len
* sizeof (rtx
));
4164 for (k
=0; k
< rtlvec_len
; k
++)
4165 rtlvec
[k
] = gen_rtx_EXPR_LIST (VOIDmode
,
4166 gen_rtx_REG (SImode
,
4169 k
== 0 ? const0_rtx
: GEN_INT (k
*4));
4171 return gen_rtx_PARALLEL (BLKmode
, gen_rtvec_v (k
, rtlvec
));
4177 /* Determine where to put an argument to a function.
4178 Value is zero to push the argument on the stack,
4179 or a hard register in which to store the argument.
4181 MODE is the argument's machine mode.
4182 TYPE is the data type of the argument (as a tree).
4183 This is null for libcalls where that information may
4185 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4186 the preceding args and about the function being called.
4187 NAMED is nonzero if this argument is a named parameter
4188 (otherwise it is an extra parameter matching an ellipsis).
4190 On RS/6000 the first eight words of non-FP are normally in registers
4191 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4192 Under V.4, the first 8 FP args are in registers.
4194 If this is floating-point and no prototype is specified, we use
4195 both an FP and integer register (or possibly FP reg and stack). Library
4196 functions (when CALL_LIBCALL is set) always have the proper types for args,
4197 so we can pass the FP value just in one register. emit_library_function
4198 doesn't support PARALLEL anyway. */
/* NOTE(review): garbled extraction; numerous interior lines (braces,
   some conditions and PARALLEL entries) are missing from this view.
   Code left byte-identical; only comments added.  */
/* Return the rtx (register, PARALLEL, or 0 meaning "push on stack")
   for passing an argument of MODE/TYPE; see the long block comment
   preceding this function for the per-ABI rules.  */
4201 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4202 tree type
, int named
)
4204 enum rs6000_abi abi
= DEFAULT_ABI
;
4206 /* Return a marker to indicate whether CR1 needs to set or clear the
4207 bit that V.4 uses to say fp args were passed in registers.
4208 Assume that we don't need the marker for software floating point,
4209 or compiler generated library calls. */
4210 if (mode
== VOIDmode
)
4213 && cum
->nargs_prototype
< 0
4214 && (cum
->call_cookie
& CALL_LIBCALL
) == 0
4215 && (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
4217 /* For the SPE, we need to crxor CR6 always. */
4219 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
4220 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4221 return GEN_INT (cum
->call_cookie
4222 | ((cum
->fregno
== FP_ARG_MIN_REG
)
4223 ? CALL_V4_SET_FP_ARGS
4224 : CALL_V4_CLEAR_FP_ARGS
));
4227 return GEN_INT (cum
->call_cookie
);
/* --- AltiVec vector arguments --- */
4230 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4231 return gen_rtx_REG (mode
, cum
->vregno
);
4232 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
4234 if (named
|| abi
== ABI_V4
)
4238 /* Vector parameters to varargs functions under AIX or Darwin
4239 get passed in memory and possibly also in GPRs. */
4240 int align
, align_words
;
4241 enum machine_mode part_mode
= mode
;
4243 /* Vector parameters must be 16-byte aligned. This places them at
4244 2 mod 4 in terms of words in 32-bit mode, since the parameter
4245 save area starts at offset 24 from the stack. In 64-bit mode,
4246 they just have to start on an even word, since the parameter
4247 save area is 16-byte aligned. */
4249 align
= ((6 - (cum
->words
& 3)) & 3);
4251 align
= cum
->words
& 1;
4252 align_words
= cum
->words
+ align
;
4254 /* Out of registers? Memory, then. */
4255 if (align_words
>= GP_ARG_NUM_REG
)
4258 /* The vector value goes in GPRs. Only the part of the
4259 value in GPRs is reported here. */
4260 if (align_words
+ CLASS_MAX_NREGS (mode
, GENERAL_REGS
)
4262 /* Fortunately, there are only two possibilities, the value
4263 is either wholly in GPRs or half in GPRs and half not. */
4266 return gen_rtx_REG (part_mode
, GP_ARG_MIN_REG
+ align_words
);
/* --- SPE vectors --- */
4269 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
4270 return rs6000_spe_function_arg (cum
, mode
, type
);
/* --- SVR4 (V.4) --- */
4271 else if (abi
== ABI_V4
)
4273 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
4274 && (mode
== SFmode
|| mode
== DFmode
))
4276 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
4277 return gen_rtx_REG (mode
, cum
->fregno
);
4284 int gregno
= cum
->sysv_gregno
;
4286 /* Aggregates and IEEE quad get passed by reference. */
4287 if ((type
&& AGGREGATE_TYPE_P (type
))
4291 n_words
= RS6000_ARG_SIZE (mode
, type
);
4293 /* Long long and SPE vectors are put in odd registers. */
4294 if (n_words
== 2 && (gregno
& 1) == 0)
4297 /* Long long do not split between registers and stack. */
4298 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
4299 return gen_rtx_REG (mode
, gregno
);
/* --- AIX / Darwin --- */
4306 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
4307 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
4308 int align_words
= cum
->words
+ align
;
4310 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
/* 32-bit ABI with 64-bit registers: hand off the split logic.  */
4313 if (TARGET_32BIT
&& TARGET_POWERPC64
4314 && (mode
== DFmode
|| mode
== DImode
|| mode
== BLKmode
))
4315 return rs6000_mixed_function_arg (cum
, mode
, type
, align_words
);
4317 if (USE_FP_FOR_ARG_P (cum
, mode
, type
))
4320 || ((cum
->nargs_prototype
> 0)
4321 /* IBM AIX extended its linkage convention definition always
4322 to require FP args after register save area hole on the
4324 && (DEFAULT_ABI
!= ABI_AIX
4326 || (align_words
< GP_ARG_NUM_REG
))))
4327 return gen_rtx_REG (mode
, cum
->fregno
);
/* Unprototyped FP arg: report both its GPR and FPR homes.  */
4329 return gen_rtx_PARALLEL (mode
,
4331 gen_rtx_EXPR_LIST (VOIDmode
,
4332 ((align_words
>= GP_ARG_NUM_REG
)
4335 + RS6000_ARG_SIZE (mode
, type
)
4337 /* If this is partially on the stack, then
4338 we only include the portion actually
4339 in registers here. */
4340 ? gen_rtx_REG (SImode
,
4341 GP_ARG_MIN_REG
+ align_words
)
4342 : gen_rtx_REG (mode
,
4343 GP_ARG_MIN_REG
+ align_words
))),
4345 gen_rtx_EXPR_LIST (VOIDmode
,
4346 gen_rtx_REG (mode
, cum
->fregno
),
4349 else if (align_words
< GP_ARG_NUM_REG
)
4350 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4356 /* For an arg passed partly in registers and partly in memory,
4357 this is the number of registers used.
4358 For args passed entirely in registers or entirely in memory, zero. */
/* NOTE(review): garbled view -- the early `return 0' lines and braces
   are missing.  Code left byte-identical; comments added.  */
/* Number of registers used by an argument passed partly in registers
   and partly in memory; 0 when it is wholly in one or the other.  */
4361 function_arg_partial_nregs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4362 tree type
, int named
)
4364 if (DEFAULT_ABI
== ABI_V4
)
4367 if (USE_FP_FOR_ARG_P (cum
, mode
, type
)
4368 || USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4370 if (cum
->nargs_prototype
>= 0)
/* The argument straddles the last GPR: the part below
   GP_ARG_NUM_REG is in registers, the rest in memory.  */
4374 if (cum
->words
< GP_ARG_NUM_REG
4375 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
4377 int ret
= GP_ARG_NUM_REG
- cum
->words
;
4378 if (ret
&& TARGET_DEBUG_ARG
)
4379 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
4387 /* A C expression that indicates when an argument must be passed by
4388 reference. If nonzero for an argument, a copy of that argument is
4389 made in memory and a pointer to the argument is passed instead of
4390 the argument itself. The pointer is passed in whatever way is
4391 appropriate for passing a pointer to that type.
4393 Under V.4, structures and unions are passed by reference.
4395 As an extension to all ABIs, variable sized types are passed by
/* NOTE(review): garbled view -- the TFmode half of the V.4 condition
   and the `return 1' are missing.  Code left byte-identical.  */
/* Nonzero when an argument of TYPE must be passed by reference:
   V.4 aggregates, and (for all ABIs) variable-sized types.  */
4399 function_arg_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
4400 enum machine_mode mode ATTRIBUTE_UNUSED
,
4401 tree type
, int named ATTRIBUTE_UNUSED
)
4403 if (DEFAULT_ABI
== ABI_V4
4404 && ((type
&& AGGREGATE_TYPE_P (type
))
4407 if (TARGET_DEBUG_ARG
)
4408 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
/* int_size_in_bytes is -1 for variable-sized types -- those are
   always by-reference.  */
4412 return type
&& int_size_in_bytes (type
) < 0;
/* NOTE(review): garbled view -- return type, braces, the declaration
   of `i' and an abort/continue path around NULL_RTX are missing.
   Code left byte-identical; comments added.  */
/* Store NREGS consecutive hard registers starting at REGNO into the
   memory block X, one word (SImode/DImode) at a time.  */
4416 rs6000_move_block_from_reg(int regno
, rtx x
, int nregs
)
4419 enum machine_mode reg_mode
= TARGET_32BIT
? SImode
: DImode
;
4424 for (i
= 0; i
< nregs
; i
++)
4426 rtx tem
= adjust_address_nv (x
, reg_mode
, i
*GET_MODE_SIZE(reg_mode
));
/* After reload, addresses must be strictly valid; fall back to a
   subreg of X when the adjusted address is not.  */
4427 if (reload_completed
)
4429 if (! strict_memory_address_p (reg_mode
, XEXP (tem
, 0)))
4432 tem
= simplify_gen_subreg (reg_mode
, x
, BLKmode
,
4433 i
* GET_MODE_SIZE(reg_mode
));
4436 tem
= replace_equiv_address (tem
, XEXP (tem
, 0));
4438 if (tem
== NULL_RTX
)
4441 emit_move_insn (tem
, gen_rtx_REG (reg_mode
, regno
+ i
));
4446 /* Perform any needed actions needed for a function that is receiving a
4447 variable number of arguments.
4451 MODE and TYPE are the mode and type of the current parameter.
4453 PRETEND_SIZE is a variable that should be set to the amount of stack
4454 that must be pushed by the prolog to pretend that our caller pushed
4457 Normally, this macro will push all remaining incoming registers on the
4458 stack and set PRETEND_SIZE to the length of the registers pushed. */
/* NOTE(review): garbled extraction -- braces, the `next_cum = *cum'
   copy, parts of the conditional-jump emission and the loop's `off'
   advance are missing from this view.  Code left byte-identical.  */
/* Spill unnamed incoming argument registers to the varargs save area;
   see the block comment preceding this function.  */
4461 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4462 tree type
, int *pretend_size ATTRIBUTE_UNUSED
, int no_rtl
)
4464 CUMULATIVE_ARGS next_cum
;
4465 int reg_size
= TARGET_32BIT
? 4 : 8;
4466 rtx save_area
= NULL_RTX
, mem
;
4467 int first_reg_offset
, set
;
4469 /* Skip the last named argument. */
4471 function_arg_advance (&next_cum
, mode
, type
, 1);
4473 if (DEFAULT_ABI
== ABI_V4
)
4475 /* Indicate to allocate space on the stack for varargs save area. */
4476 cfun
->machine
->sysv_varargs_p
= 1;
4478 save_area
= plus_constant (virtual_stack_vars_rtx
,
4479 - RS6000_VARARGS_SIZE
);
4481 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
/* Non-V.4: save into the incoming-args area instead.  */
4485 first_reg_offset
= next_cum
.words
;
4486 save_area
= virtual_incoming_args_rtx
;
4487 cfun
->machine
->sysv_varargs_p
= 0;
4489 if (MUST_PASS_IN_STACK (mode
, type
))
4490 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
/* Dump the remaining unnamed GPRs into the save area.  */
4493 set
= get_varargs_alias_set ();
4494 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
4496 mem
= gen_rtx_MEM (BLKmode
,
4497 plus_constant (save_area
,
4498 first_reg_offset
* reg_size
)),
4499 set_mem_alias_set (mem
, set
);
4500 set_mem_align (mem
, BITS_PER_WORD
);
4502 rs6000_move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
4503 GP_ARG_NUM_REG
- first_reg_offset
);
4506 /* Save FP registers if needed. */
4507 if (DEFAULT_ABI
== ABI_V4
4508 && TARGET_HARD_FLOAT
&& TARGET_FPRS
4510 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
4512 int fregno
= next_cum
.fregno
;
4513 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
4514 rtx lab
= gen_label_rtx ();
4515 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
/* Branch around the FPR saves when CR1 says no FP args were
   passed in registers (the V.4 marker set at the call site).  */
4517 emit_jump_insn (gen_rtx_SET (VOIDmode
,
4519 gen_rtx_IF_THEN_ELSE (VOIDmode
,
4520 gen_rtx_NE (VOIDmode
, cr1
,
4522 gen_rtx_LABEL_REF (VOIDmode
, lab
),
4525 while (fregno
<= FP_ARG_V4_MAX_REG
)
4527 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
4528 set_mem_alias_set (mem
, set
);
4529 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
4538 /* Create the va_list data type. */
/* NOTE(review): garbled view -- the return type line, braces and the
   pointer-type second arguments of the overflow/save-area field
   declarations are missing.  Code left byte-identical.  */
/* Build the V.4 va_list record type (gpr, fpr, reserved,
   overflow_arg_area, reg_save_area); other ABIs just use `char *'.  */
4541 rs6000_build_builtin_va_list (void)
4543 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
, record
, type_decl
;
4545 /* For AIX, prefer 'char *' because that's what the system
4546 header files like. */
4547 if (DEFAULT_ABI
!= ABI_V4
)
4548 return build_pointer_type (char_type_node
);
4550 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
4551 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
/* One-byte counters of GP/FP argument registers already consumed.  */
4553 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
4554 unsigned_char_type_node
);
4555 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
4556 unsigned_char_type_node
);
4557 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4559 f_res
= build_decl (FIELD_DECL
, get_identifier ("reserved"),
4560 short_unsigned_type_node
);
4561 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
4563 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
/* Attach the fields to the record and chain them in order.  */
4566 DECL_FIELD_CONTEXT (f_gpr
) = record
;
4567 DECL_FIELD_CONTEXT (f_fpr
) = record
;
4568 DECL_FIELD_CONTEXT (f_res
) = record
;
4569 DECL_FIELD_CONTEXT (f_ovf
) = record
;
4570 DECL_FIELD_CONTEXT (f_sav
) = record
;
4572 TREE_CHAIN (record
) = type_decl
;
4573 TYPE_NAME (record
) = type_decl
;
4574 TYPE_FIELDS (record
) = f_gpr
;
4575 TREE_CHAIN (f_gpr
) = f_fpr
;
4576 TREE_CHAIN (f_fpr
) = f_res
;
4577 TREE_CHAIN (f_res
) = f_ovf
;
4578 TREE_CHAIN (f_ovf
) = f_sav
;
4580 layout_type (record
);
4582 /* The correct type is an array type of one element. */
4583 return build_array_type (record
, build_index_type (size_zero_node
));
4586 /* Implement va_start. */
/* NOTE(review): garbled view -- braces and the early `return' after
   the std_expand_builtin_va_start call are missing.  Code left
   byte-identical; comments added.  */
/* Expand va_start for the V.4 va_list record; other ABIs defer to the
   generic expander.  */
4589 rs6000_va_start (tree valist
, rtx nextarg
)
4591 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
4592 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
4593 tree gpr
, fpr
, ovf
, sav
, t
;
4595 /* Only SVR4 needs something special. */
4596 if (DEFAULT_ABI
!= ABI_V4
)
4598 std_expand_builtin_va_start (valist
, nextarg
);
/* Walk the va_list record's field chain (order fixed by
   rs6000_build_builtin_va_list).  */
4602 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4603 f_fpr
= TREE_CHAIN (f_gpr
);
4604 f_res
= TREE_CHAIN (f_fpr
);
4605 f_ovf
= TREE_CHAIN (f_res
);
4606 f_sav
= TREE_CHAIN (f_ovf
);
4608 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4609 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4610 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4611 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4612 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4614 /* Count number of gp and fp argument registers used. */
4615 words
= current_function_args_info
.words
;
4616 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
4617 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
4619 if (TARGET_DEBUG_ARG
)
4620 fprintf (stderr
, "va_start: words = "HOST_WIDE_INT_PRINT_DEC
", n_gpr = "
4621 HOST_WIDE_INT_PRINT_DEC
", n_fpr = "HOST_WIDE_INT_PRINT_DEC
"\n",
4622 words
, n_gpr
, n_fpr
);
/* Store the consumed-register counts into the gpr/fpr fields.  */
4624 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
4625 TREE_SIDE_EFFECTS (t
) = 1;
4626 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4628 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
4629 TREE_SIDE_EFFECTS (t
) = 1;
4630 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4632 /* Find the overflow area. */
4633 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
4635 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
4636 build_int_2 (words
* UNITS_PER_WORD
, 0));
4637 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4638 TREE_SIDE_EFFECTS (t
) = 1;
4639 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4641 /* Find the register save area. */
4642 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
4643 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
4644 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
4645 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
4646 TREE_SIDE_EFFECTS (t
) = 1;
4647 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4650 /* Implement va_arg. */
4653 rs6000_va_arg (tree valist
, tree type
)
4655 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
4656 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
4657 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
4658 rtx lab_false
, lab_over
, addr_rtx
, r
;
4660 if (DEFAULT_ABI
!= ABI_V4
)
4662 /* Variable sized types are passed by reference. */
4663 if (int_size_in_bytes (type
) < 0)
4665 u
= build_pointer_type (type
);
4667 /* Args grow upward. */
4668 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
4669 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
4670 TREE_SIDE_EFFECTS (t
) = 1;
4672 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
4673 TREE_SIDE_EFFECTS (t
) = 1;
4675 t
= build1 (INDIRECT_REF
, u
, t
);
4676 TREE_SIDE_EFFECTS (t
) = 1;
4678 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4682 /* Altivec arguments must be aligned to a 128-bit boundary. */
4683 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (TYPE_MODE (type
)))
4685 tree vtype
= TREE_TYPE (valist
);
4686 tree new_valist
, modify
;
4688 /* Round address up to multiple of 16. Computes
4690 new_valist
= fold (build (BIT_AND_EXPR
, vtype
,
4691 fold (build (PLUS_EXPR
, vtype
, valist
,
4692 build_int_2 (15, 0))),
4693 build_int_2 (~15, -1)));
4695 /* Update valist. */
4696 modify
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
4698 TREE_SIDE_EFFECTS (modify
) = 1;
4699 expand_expr (modify
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4702 return std_expand_builtin_va_arg (valist
, type
);
4706 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4707 f_fpr
= TREE_CHAIN (f_gpr
);
4708 f_res
= TREE_CHAIN (f_fpr
);
4709 f_ovf
= TREE_CHAIN (f_res
);
4710 f_sav
= TREE_CHAIN (f_ovf
);
4712 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4713 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4714 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4715 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4716 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4718 size
= int_size_in_bytes (type
);
4719 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
4721 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
4723 /* Aggregates and long doubles are passed by reference. */
4729 size
= UNITS_PER_WORD
;
4732 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4734 /* FP args go in FP registers, if present. */
4743 /* Otherwise into GP registers. */
4751 /* Pull the value out of the saved registers.... */
4753 lab_false
= gen_label_rtx ();
4754 lab_over
= gen_label_rtx ();
4755 addr_rtx
= gen_reg_rtx (Pmode
);
4757 /* AltiVec vectors never go in registers. */
4758 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
4760 TREE_THIS_VOLATILE (reg
) = 1;
4761 emit_cmp_and_jump_insns
4762 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
4763 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
4766 /* Long long is aligned in the registers. */
4769 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
4770 build_int_2 (n_reg
- 1, 0));
4771 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
4772 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
4773 TREE_SIDE_EFFECTS (u
) = 1;
4774 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4778 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
4782 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
4783 build_int_2 (n_reg
, 0));
4784 TREE_SIDE_EFFECTS (u
) = 1;
4786 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
4787 TREE_SIDE_EFFECTS (u
) = 1;
4789 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
4790 TREE_SIDE_EFFECTS (u
) = 1;
4792 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
4793 TREE_SIDE_EFFECTS (t
) = 1;
4795 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4797 emit_move_insn (addr_rtx
, r
);
4799 emit_jump_insn (gen_jump (lab_over
));
4803 emit_label (lab_false
);
4805 /* ... otherwise out of the overflow area. */
4807 /* Make sure we don't find reg 7 for the next int arg.
4809 All AltiVec vectors go in the overflow area. So in the AltiVec
4810 case we need to get the vectors from the overflow area, but
4811 remember where the GPRs and FPRs are. */
4812 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
4813 || !TARGET_ALTIVEC
))
4815 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
4816 TREE_SIDE_EFFECTS (t
) = 1;
4817 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4820 /* Care for on-stack alignment if needed. */
4827 /* AltiVec vectors are 16 byte aligned. */
4828 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
4833 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
4834 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
4838 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4840 emit_move_insn (addr_rtx
, r
);
4842 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
4843 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4844 TREE_SIDE_EFFECTS (t
) = 1;
4845 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4847 emit_label (lab_over
);
4851 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
4852 set_mem_alias_set (r
, get_varargs_alias_set ());
4853 emit_move_insn (addr_rtx
, r
);
/* Register the machine-specific builtin NAME of type TYPE, expanded
   by insn CODE, but only when the target_flags bits in MASK are
   enabled.  Wrapped in do { } while (0) so it behaves as a single
   statement.  NOTE(review): the do/while wrapper and trailing
   builtin_function arguments were restored from upstream after
   extraction damage — verify against upstream.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
4868 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4870 static const struct builtin_description bdesc_3arg
[] =
4872 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
4873 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
4874 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
4875 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
4876 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
4877 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
4878 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
4879 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
4880 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
4881 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
4882 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
4883 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
4884 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
4885 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
4886 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
4887 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
4888 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
4889 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
4890 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
4891 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
4892 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
4893 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
4894 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
4897 /* DST operations: void foo (void *, const int, const char). */
4899 static const struct builtin_description bdesc_dst
[] =
4901 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
4902 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
4903 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
4904 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
4907 /* Simple binary operations: VECc = foo (VECa, VECb). */
4909 static struct builtin_description bdesc_2arg
[] =
4911 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
4912 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
4913 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
4914 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
4915 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
4916 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
4917 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
4918 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
4919 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
4920 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
4921 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
4922 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
4923 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
4924 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
4925 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
4926 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
4927 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
4928 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
4929 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
4930 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
4931 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
4932 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
4933 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
4934 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
4935 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
4936 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
4937 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
4938 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
4939 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
4940 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
4941 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
4942 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
4943 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
4944 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
4945 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
4946 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
4947 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
4948 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
4949 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
4950 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
4951 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
4952 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
4953 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
4954 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
4955 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
4956 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
4957 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
4958 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
4959 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
4960 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
4961 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
4962 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
4963 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
4964 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
4965 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
4966 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
4967 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
4968 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
4969 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
4970 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
4971 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
4972 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
4973 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
4974 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
4975 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
4976 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
4977 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
4978 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
4979 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
4980 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
4981 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
4982 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
4983 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
4984 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
4985 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
4986 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
4987 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
4988 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
4989 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
4990 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
4991 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
4992 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
4993 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
4994 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
4995 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
4996 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
4997 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
4998 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
4999 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
5000 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
5001 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
5002 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
5003 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
5004 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
5005 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
5006 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
5007 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
5008 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
5009 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
5010 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
5011 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
5012 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
5013 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
5014 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
5015 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
5016 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
5017 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
5018 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
5019 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
5020 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
5021 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
5022 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
5023 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
5025 /* Place holder, leave as first spe builtin. */
5026 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
5027 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
5028 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
5029 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
5030 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
5031 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
5032 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
5033 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
5034 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
5035 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
5036 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
5037 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
5038 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
5039 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
5040 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
5041 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
5042 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
5043 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
5044 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
5045 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
5046 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
5047 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
5048 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
5049 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
5050 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
5051 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
5052 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
5053 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
5054 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
5055 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
5056 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
5057 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
5058 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
5059 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
5060 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
5061 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
5062 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
5063 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
5064 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
5065 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
5066 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
5067 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
5068 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
5069 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
5070 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
5071 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
5072 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
5073 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
5074 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
5075 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
5076 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
5077 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
5078 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
5079 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
5080 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
5081 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
5082 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
5083 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
5084 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
5085 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
5086 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
5087 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
5088 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
5089 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
5090 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
5091 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
5092 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
5093 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
5094 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
5095 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
5096 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
5097 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
5098 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
5099 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
5100 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
5101 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
5102 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
5103 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
5104 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
5105 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
5106 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
5107 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
5108 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
5109 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
5110 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
5111 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
5112 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
5113 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
5114 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
5115 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
5116 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
5117 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
5118 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
5119 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
5120 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
5121 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
5122 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
5123 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
5124 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
5125 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
5126 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
5127 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
5128 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
5129 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
5130 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
5131 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
5132 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
5133 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
5134 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
5136 /* SPE binary operations expecting a 5-bit unsigned literal. */
5137 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
5139 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
5140 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
5141 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
5142 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
5143 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
5144 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
5145 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
5146 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
5147 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
5148 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
5149 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
5150 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
5151 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
5152 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
5153 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
5154 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
5155 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
5156 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
5157 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
5158 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
5159 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
5160 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
5161 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
5162 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
5163 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
5164 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
5166 /* Place-holder. Leave as last binary SPE builtin. */
5167 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
5170 /* AltiVec predicates. */
5172 struct builtin_description_predicates
5174 const unsigned int mask
;
5175 const enum insn_code icode
;
5177 const char *const name
;
5178 const enum rs6000_builtins code
;
5181 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
5183 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
5184 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
5185 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
5186 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
5187 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
5188 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
5189 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
5190 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
5191 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
5192 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
5193 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
5194 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
5195 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
5198 /* SPE predicates. */
5199 static struct builtin_description bdesc_spe_predicates
[] =
5201 /* Place-holder. Leave as first. */
5202 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
5203 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
5204 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
5205 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
5206 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
5207 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
5208 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
5209 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
5210 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
5211 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
5212 /* Place-holder. Leave as last. */
5213 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
5216 /* SPE evsel predicates. */
5217 static struct builtin_description bdesc_spe_evsel
[] =
5219 /* Place-holder. Leave as first. */
5220 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
5221 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
5222 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
5223 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
5224 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
5225 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
5226 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
5227 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
5228 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
5229 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
5230 /* Place-holder. Leave as last. */
5231 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
5234 /* ABS* operations. */
5236 static const struct builtin_description bdesc_abs
[] =
5238 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
5239 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
5240 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
5241 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
5242 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
5243 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
5244 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
5247 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5250 static struct builtin_description bdesc_1arg
[] =
5252 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
5253 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
5254 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
5255 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
5256 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
5257 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
5258 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
5259 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
5260 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
5261 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
5262 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
5263 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
5264 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
5265 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
5266 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
5267 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
5268 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
5270 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5271 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5272 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
5273 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
5274 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
5275 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
5276 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
5277 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
5278 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
5279 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
5280 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
5281 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
5282 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
5283 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
5284 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
5285 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
5286 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
5287 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
5288 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
5289 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
5290 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
5291 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
5292 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
5293 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
5294 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
5295 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
5296 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
5297 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
5298 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
5299 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
5301 /* Place-holder. Leave as last unary SPE builtin. */
5302 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
5306 rs6000_expand_unop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
5309 tree arg0
= TREE_VALUE (arglist
);
5310 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5311 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5312 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5314 if (icode
== CODE_FOR_nothing
)
5315 /* Builtin not supported on this processor. */
5318 /* If we got invalid arguments bail out before generating bad rtl. */
5319 if (arg0
== error_mark_node
)
5322 if (icode
== CODE_FOR_altivec_vspltisb
5323 || icode
== CODE_FOR_altivec_vspltish
5324 || icode
== CODE_FOR_altivec_vspltisw
5325 || icode
== CODE_FOR_spe_evsplatfi
5326 || icode
== CODE_FOR_spe_evsplati
)
5328 /* Only allow 5-bit *signed* literals. */
5329 if (GET_CODE (op0
) != CONST_INT
5330 || INTVAL (op0
) > 0x1f
5331 || INTVAL (op0
) < -0x1f)
5333 error ("argument 1 must be a 5-bit signed literal");
5339 || GET_MODE (target
) != tmode
5340 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5341 target
= gen_reg_rtx (tmode
);
5343 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5344 op0
= copy_to_mode_reg (mode0
, op0
);
5346 pat
= GEN_FCN (icode
) (target
, op0
);
5355 altivec_expand_abs_builtin (enum insn_code icode
, tree arglist
, rtx target
)
5357 rtx pat
, scratch1
, scratch2
;
5358 tree arg0
= TREE_VALUE (arglist
);
5359 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5360 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5361 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5363 /* If we have invalid arguments, bail out before generating bad rtl. */
5364 if (arg0
== error_mark_node
)
5368 || GET_MODE (target
) != tmode
5369 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5370 target
= gen_reg_rtx (tmode
);
5372 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5373 op0
= copy_to_mode_reg (mode0
, op0
);
5375 scratch1
= gen_reg_rtx (mode0
);
5376 scratch2
= gen_reg_rtx (mode0
);
5378 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
5387 rs6000_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
5390 tree arg0
= TREE_VALUE (arglist
);
5391 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5392 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5393 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5394 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5395 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5396 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5398 if (icode
== CODE_FOR_nothing
)
5399 /* Builtin not supported on this processor. */
5402 /* If we got invalid arguments bail out before generating bad rtl. */
5403 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5406 if (icode
== CODE_FOR_altivec_vcfux
5407 || icode
== CODE_FOR_altivec_vcfsx
5408 || icode
== CODE_FOR_altivec_vctsxs
5409 || icode
== CODE_FOR_altivec_vctuxs
5410 || icode
== CODE_FOR_altivec_vspltb
5411 || icode
== CODE_FOR_altivec_vsplth
5412 || icode
== CODE_FOR_altivec_vspltw
5413 || icode
== CODE_FOR_spe_evaddiw
5414 || icode
== CODE_FOR_spe_evldd
5415 || icode
== CODE_FOR_spe_evldh
5416 || icode
== CODE_FOR_spe_evldw
5417 || icode
== CODE_FOR_spe_evlhhesplat
5418 || icode
== CODE_FOR_spe_evlhhossplat
5419 || icode
== CODE_FOR_spe_evlhhousplat
5420 || icode
== CODE_FOR_spe_evlwhe
5421 || icode
== CODE_FOR_spe_evlwhos
5422 || icode
== CODE_FOR_spe_evlwhou
5423 || icode
== CODE_FOR_spe_evlwhsplat
5424 || icode
== CODE_FOR_spe_evlwwsplat
5425 || icode
== CODE_FOR_spe_evrlwi
5426 || icode
== CODE_FOR_spe_evslwi
5427 || icode
== CODE_FOR_spe_evsrwis
5428 || icode
== CODE_FOR_spe_evsubifw
5429 || icode
== CODE_FOR_spe_evsrwiu
)
5431 /* Only allow 5-bit unsigned literals. */
5432 if (TREE_CODE (arg1
) != INTEGER_CST
5433 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5435 error ("argument 2 must be a 5-bit unsigned literal");
5441 || GET_MODE (target
) != tmode
5442 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5443 target
= gen_reg_rtx (tmode
);
5445 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5446 op0
= copy_to_mode_reg (mode0
, op0
);
5447 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5448 op1
= copy_to_mode_reg (mode1
, op1
);
5450 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5459 altivec_expand_predicate_builtin (enum insn_code icode
, const char *opcode
,
5460 tree arglist
, rtx target
)
5463 tree cr6_form
= TREE_VALUE (arglist
);
5464 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5465 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5466 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5467 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5468 enum machine_mode tmode
= SImode
;
5469 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5470 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5473 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
5475 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5479 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
5484 /* If we have invalid arguments, bail out before generating bad rtl. */
5485 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5489 || GET_MODE (target
) != tmode
5490 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5491 target
= gen_reg_rtx (tmode
);
5493 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5494 op0
= copy_to_mode_reg (mode0
, op0
);
5495 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5496 op1
= copy_to_mode_reg (mode1
, op1
);
5498 scratch
= gen_reg_rtx (mode0
);
5500 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
5501 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
5506 /* The vec_any* and vec_all* predicates use the same opcodes for two
5507 different operations, but the bits in CR6 will be different
5508 depending on what information we want. So we have to play tricks
5509 with CR6 to get the right bits out.
5511 If you think this is disgusting, look at the specs for the
5512 AltiVec predicates. */
5514 switch (cr6_form_int
)
5517 emit_insn (gen_cr6_test_for_zero (target
));
5520 emit_insn (gen_cr6_test_for_zero_reverse (target
));
5523 emit_insn (gen_cr6_test_for_lt (target
));
5526 emit_insn (gen_cr6_test_for_lt_reverse (target
));
5529 error ("argument 1 of __builtin_altivec_predicate is out of range");
5537 altivec_expand_lv_builtin (enum insn_code icode
, tree arglist
, rtx target
)
5540 tree arg0
= TREE_VALUE (arglist
);
5541 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5542 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5543 enum machine_mode mode0
= Pmode
;
5544 enum machine_mode mode1
= Pmode
;
5545 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5546 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5548 if (icode
== CODE_FOR_nothing
)
5549 /* Builtin not supported on this processor. */
5552 /* If we got invalid arguments bail out before generating bad rtl. */
5553 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5557 || GET_MODE (target
) != tmode
5558 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5559 target
= gen_reg_rtx (tmode
);
5561 op1
= copy_to_mode_reg (mode1
, op1
);
5563 if (op0
== const0_rtx
)
5565 addr
= gen_rtx_MEM (tmode
, op1
);
5569 op0
= copy_to_mode_reg (mode0
, op0
);
5570 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op0
, op1
));
5573 pat
= GEN_FCN (icode
) (target
, addr
);
5583 altivec_expand_stv_builtin (enum insn_code icode
, tree arglist
)
5585 tree arg0
= TREE_VALUE (arglist
);
5586 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5587 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5588 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5589 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5590 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5592 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5593 enum machine_mode mode1
= Pmode
;
5594 enum machine_mode mode2
= Pmode
;
5596 /* Invalid arguments. Bail before doing anything stoopid! */
5597 if (arg0
== error_mark_node
5598 || arg1
== error_mark_node
5599 || arg2
== error_mark_node
)
5602 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, tmode
))
5603 op0
= copy_to_mode_reg (tmode
, op0
);
5605 op2
= copy_to_mode_reg (mode2
, op2
);
5607 if (op1
== const0_rtx
)
5609 addr
= gen_rtx_MEM (tmode
, op2
);
5613 op1
= copy_to_mode_reg (mode1
, op1
);
5614 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op1
, op2
));
5617 pat
= GEN_FCN (icode
) (addr
, op0
);
5624 rs6000_expand_ternop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
5627 tree arg0
= TREE_VALUE (arglist
);
5628 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5629 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5630 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5631 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5632 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5633 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5634 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5635 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5636 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
5638 if (icode
== CODE_FOR_nothing
)
5639 /* Builtin not supported on this processor. */
5642 /* If we got invalid arguments bail out before generating bad rtl. */
5643 if (arg0
== error_mark_node
5644 || arg1
== error_mark_node
5645 || arg2
== error_mark_node
)
5648 if (icode
== CODE_FOR_altivec_vsldoi_4sf
5649 || icode
== CODE_FOR_altivec_vsldoi_4si
5650 || icode
== CODE_FOR_altivec_vsldoi_8hi
5651 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
5653 /* Only allow 4-bit unsigned literals. */
5654 if (TREE_CODE (arg2
) != INTEGER_CST
5655 || TREE_INT_CST_LOW (arg2
) & ~0xf)
5657 error ("argument 3 must be a 4-bit unsigned literal");
5663 || GET_MODE (target
) != tmode
5664 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5665 target
= gen_reg_rtx (tmode
);
5667 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5668 op0
= copy_to_mode_reg (mode0
, op0
);
5669 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5670 op1
= copy_to_mode_reg (mode1
, op1
);
5671 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
5672 op2
= copy_to_mode_reg (mode2
, op2
);
5674 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
5682 /* Expand the lvx builtins. */
5684 altivec_expand_ld_builtin (tree exp
, rtx target
, bool *expandedp
)
5686 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5687 tree arglist
= TREE_OPERAND (exp
, 1);
5688 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5690 enum machine_mode tmode
, mode0
;
5692 enum insn_code icode
;
5696 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
5697 icode
= CODE_FOR_altivec_lvx_16qi
;
5699 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
5700 icode
= CODE_FOR_altivec_lvx_8hi
;
5702 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
5703 icode
= CODE_FOR_altivec_lvx_4si
;
5705 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
5706 icode
= CODE_FOR_altivec_lvx_4sf
;
5715 arg0
= TREE_VALUE (arglist
);
5716 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5717 tmode
= insn_data
[icode
].operand
[0].mode
;
5718 mode0
= insn_data
[icode
].operand
[1].mode
;
5721 || GET_MODE (target
) != tmode
5722 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5723 target
= gen_reg_rtx (tmode
);
5725 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5726 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5728 pat
= GEN_FCN (icode
) (target
, op0
);
5735 /* Expand the stvx builtins. */
5737 altivec_expand_st_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5740 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5741 tree arglist
= TREE_OPERAND (exp
, 1);
5742 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5744 enum machine_mode mode0
, mode1
;
5746 enum insn_code icode
;
5750 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
5751 icode
= CODE_FOR_altivec_stvx_16qi
;
5753 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
5754 icode
= CODE_FOR_altivec_stvx_8hi
;
5756 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
5757 icode
= CODE_FOR_altivec_stvx_4si
;
5759 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
5760 icode
= CODE_FOR_altivec_stvx_4sf
;
5767 arg0
= TREE_VALUE (arglist
);
5768 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5769 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5770 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5771 mode0
= insn_data
[icode
].operand
[0].mode
;
5772 mode1
= insn_data
[icode
].operand
[1].mode
;
5774 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5775 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5776 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5777 op1
= copy_to_mode_reg (mode1
, op1
);
5779 pat
= GEN_FCN (icode
) (op0
, op1
);
5787 /* Expand the dst builtins. */
5789 altivec_expand_dst_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
5792 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5793 tree arglist
= TREE_OPERAND (exp
, 1);
5794 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5795 tree arg0
, arg1
, arg2
;
5796 enum machine_mode mode0
, mode1
, mode2
;
5797 rtx pat
, op0
, op1
, op2
;
5798 struct builtin_description
*d
;
5803 /* Handle DST variants. */
5804 d
= (struct builtin_description
*) bdesc_dst
;
5805 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5806 if (d
->code
== fcode
)
5808 arg0
= TREE_VALUE (arglist
);
5809 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5810 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5811 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5812 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5813 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5814 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5815 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5816 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5818 /* Invalid arguments, bail out before generating bad rtl. */
5819 if (arg0
== error_mark_node
5820 || arg1
== error_mark_node
5821 || arg2
== error_mark_node
)
5824 if (TREE_CODE (arg2
) != INTEGER_CST
5825 || TREE_INT_CST_LOW (arg2
) & ~0x3)
5827 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
5831 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
5832 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5833 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
5834 op1
= copy_to_mode_reg (mode1
, op1
);
5836 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
5847 /* Expand the builtin in EXP and store the result in TARGET. Store
5848 true in *EXPANDEDP if we found a builtin to expand. */
5850 altivec_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
5852 struct builtin_description
*d
;
5853 struct builtin_description_predicates
*dp
;
5855 enum insn_code icode
;
5856 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5857 tree arglist
= TREE_OPERAND (exp
, 1);
5860 enum machine_mode tmode
, mode0
;
5861 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5863 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
5867 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
5871 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
5879 case ALTIVEC_BUILTIN_STVX
:
5880 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
5881 case ALTIVEC_BUILTIN_STVEBX
:
5882 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
5883 case ALTIVEC_BUILTIN_STVEHX
:
5884 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
5885 case ALTIVEC_BUILTIN_STVEWX
:
5886 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
5887 case ALTIVEC_BUILTIN_STVXL
:
5888 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
5890 case ALTIVEC_BUILTIN_MFVSCR
:
5891 icode
= CODE_FOR_altivec_mfvscr
;
5892 tmode
= insn_data
[icode
].operand
[0].mode
;
5895 || GET_MODE (target
) != tmode
5896 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5897 target
= gen_reg_rtx (tmode
);
5899 pat
= GEN_FCN (icode
) (target
);
5905 case ALTIVEC_BUILTIN_MTVSCR
:
5906 icode
= CODE_FOR_altivec_mtvscr
;
5907 arg0
= TREE_VALUE (arglist
);
5908 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5909 mode0
= insn_data
[icode
].operand
[0].mode
;
5911 /* If we got invalid arguments bail out before generating bad rtl. */
5912 if (arg0
== error_mark_node
)
5915 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5916 op0
= copy_to_mode_reg (mode0
, op0
);
5918 pat
= GEN_FCN (icode
) (op0
);
5923 case ALTIVEC_BUILTIN_DSSALL
:
5924 emit_insn (gen_altivec_dssall ());
5927 case ALTIVEC_BUILTIN_DSS
:
5928 icode
= CODE_FOR_altivec_dss
;
5929 arg0
= TREE_VALUE (arglist
);
5930 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5931 mode0
= insn_data
[icode
].operand
[0].mode
;
5933 /* If we got invalid arguments bail out before generating bad rtl. */
5934 if (arg0
== error_mark_node
)
5937 if (TREE_CODE (arg0
) != INTEGER_CST
5938 || TREE_INT_CST_LOW (arg0
) & ~0x3)
5940 error ("argument to dss must be a 2-bit unsigned literal");
5944 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5945 op0
= copy_to_mode_reg (mode0
, op0
);
5947 emit_insn (gen_altivec_dss (op0
));
5951 /* Expand abs* operations. */
5952 d
= (struct builtin_description
*) bdesc_abs
;
5953 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5954 if (d
->code
== fcode
)
5955 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
5957 /* Expand the AltiVec predicates. */
5958 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5959 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5960 if (dp
->code
== fcode
)
5961 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
5963 /* LV* are funky. We initialized them differently. */
5966 case ALTIVEC_BUILTIN_LVSL
:
5967 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl
,
5969 case ALTIVEC_BUILTIN_LVSR
:
5970 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr
,
5972 case ALTIVEC_BUILTIN_LVEBX
:
5973 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx
,
5975 case ALTIVEC_BUILTIN_LVEHX
:
5976 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx
,
5978 case ALTIVEC_BUILTIN_LVEWX
:
5979 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx
,
5981 case ALTIVEC_BUILTIN_LVXL
:
5982 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl
,
5984 case ALTIVEC_BUILTIN_LVX
:
5985 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx
,
5996 /* Binops that need to be initialized manually, but can be expanded
5997 automagically by rs6000_expand_binop_builtin. */
5998 static struct builtin_description bdesc_2arg_spe
[] =
6000 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
6001 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
6002 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
6003 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
6004 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
6005 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
6006 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
6007 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
6008 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
6009 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
6010 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
6011 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
6012 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
6013 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
6014 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
6015 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
6016 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
6017 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
6018 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
6019 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
6020 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
6021 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
6024 /* Expand the builtin in EXP and store the result in TARGET. Store
6025 true in *EXPANDEDP if we found a builtin to expand.
6027 This expands the SPE builtins that are not simple unary and binary
6030 spe_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6032 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6033 tree arglist
= TREE_OPERAND (exp
, 1);
6035 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6036 enum insn_code icode
;
6037 enum machine_mode tmode
, mode0
;
6039 struct builtin_description
*d
;
6044 /* Syntax check for a 5-bit unsigned immediate. */
6047 case SPE_BUILTIN_EVSTDD
:
6048 case SPE_BUILTIN_EVSTDH
:
6049 case SPE_BUILTIN_EVSTDW
:
6050 case SPE_BUILTIN_EVSTWHE
:
6051 case SPE_BUILTIN_EVSTWHO
:
6052 case SPE_BUILTIN_EVSTWWE
:
6053 case SPE_BUILTIN_EVSTWWO
:
6054 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6055 if (TREE_CODE (arg1
) != INTEGER_CST
6056 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6058 error ("argument 2 must be a 5-bit unsigned literal");
6066 /* The evsplat*i instructions are not quite generic. */
6069 case SPE_BUILTIN_EVSPLATFI
:
6070 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi
,
6072 case SPE_BUILTIN_EVSPLATI
:
6073 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati
,
6079 d
= (struct builtin_description
*) bdesc_2arg_spe
;
6080 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
6081 if (d
->code
== fcode
)
6082 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
6084 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6085 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
6086 if (d
->code
== fcode
)
6087 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
6089 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6090 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
6091 if (d
->code
== fcode
)
6092 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
6096 case SPE_BUILTIN_EVSTDDX
:
6097 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
6098 case SPE_BUILTIN_EVSTDHX
:
6099 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
6100 case SPE_BUILTIN_EVSTDWX
:
6101 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
6102 case SPE_BUILTIN_EVSTWHEX
:
6103 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
6104 case SPE_BUILTIN_EVSTWHOX
:
6105 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
6106 case SPE_BUILTIN_EVSTWWEX
:
6107 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
6108 case SPE_BUILTIN_EVSTWWOX
:
6109 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
6110 case SPE_BUILTIN_EVSTDD
:
6111 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
6112 case SPE_BUILTIN_EVSTDH
:
6113 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
6114 case SPE_BUILTIN_EVSTDW
:
6115 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
6116 case SPE_BUILTIN_EVSTWHE
:
6117 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
6118 case SPE_BUILTIN_EVSTWHO
:
6119 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
6120 case SPE_BUILTIN_EVSTWWE
:
6121 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
6122 case SPE_BUILTIN_EVSTWWO
:
6123 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
6124 case SPE_BUILTIN_MFSPEFSCR
:
6125 icode
= CODE_FOR_spe_mfspefscr
;
6126 tmode
= insn_data
[icode
].operand
[0].mode
;
6129 || GET_MODE (target
) != tmode
6130 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6131 target
= gen_reg_rtx (tmode
);
6133 pat
= GEN_FCN (icode
) (target
);
6138 case SPE_BUILTIN_MTSPEFSCR
:
6139 icode
= CODE_FOR_spe_mtspefscr
;
6140 arg0
= TREE_VALUE (arglist
);
6141 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6142 mode0
= insn_data
[icode
].operand
[0].mode
;
6144 if (arg0
== error_mark_node
)
6147 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6148 op0
= copy_to_mode_reg (mode0
, op0
);
6150 pat
= GEN_FCN (icode
) (op0
);
6163 spe_expand_predicate_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6165 rtx pat
, scratch
, tmp
;
6166 tree form
= TREE_VALUE (arglist
);
6167 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
6168 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6169 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6170 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6171 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6172 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6176 if (TREE_CODE (form
) != INTEGER_CST
)
6178 error ("argument 1 of __builtin_spe_predicate must be a constant");
6182 form_int
= TREE_INT_CST_LOW (form
);
6187 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6191 || GET_MODE (target
) != SImode
6192 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
6193 target
= gen_reg_rtx (SImode
);
6195 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6196 op0
= copy_to_mode_reg (mode0
, op0
);
6197 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6198 op1
= copy_to_mode_reg (mode1
, op1
);
6200 scratch
= gen_reg_rtx (CCmode
);
6202 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
6207 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6208 _lower_. We use one compare, but look in different bits of the
6209 CR for each variant.
6211 There are 2 elements in each SPE simd type (upper/lower). The CR
6212 bits are set as follows:
6214 BIT0 | BIT 1 | BIT 2 | BIT 3
6215 U | L | (U | L) | (U & L)
6217 So, for an "all" relationship, BIT 3 would be set.
6218 For an "any" relationship, BIT 2 would be set. Etc.
6220 Following traditional nomenclature, these bits map to:
6222 BIT0 | BIT 1 | BIT 2 | BIT 3
6225 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6230 /* All variant. OV bit. */
6232 /* We need to get to the OV bit, which is the ORDERED bit. We
6233 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6234 that's ugly and will trigger a validate_condition_mode abort.
6235 So let's just use another pattern. */
6236 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
6238 /* Any variant. EQ bit. */
6242 /* Upper variant. LT bit. */
6246 /* Lower variant. GT bit. */
6251 error ("argument 1 of __builtin_spe_predicate is out of range");
6255 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
6256 emit_move_insn (target
, tmp
);
6261 /* The evsel builtins look like this:
6263 e = __builtin_spe_evsel_OP (a, b, c, d);
6267 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6268 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6272 spe_expand_evsel_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6275 tree arg0
= TREE_VALUE (arglist
);
6276 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6277 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6278 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
6279 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6280 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6281 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6282 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
6283 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6284 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6289 if (arg0
== error_mark_node
|| arg1
== error_mark_node
6290 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
6294 || GET_MODE (target
) != mode0
6295 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
6296 target
= gen_reg_rtx (mode0
);
6298 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6299 op0
= copy_to_mode_reg (mode0
, op0
);
6300 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
6301 op1
= copy_to_mode_reg (mode0
, op1
);
6302 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
6303 op2
= copy_to_mode_reg (mode0
, op2
);
6304 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
6305 op3
= copy_to_mode_reg (mode0
, op3
);
6307 /* Generate the compare. */
6308 scratch
= gen_reg_rtx (CCmode
);
6309 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
6314 if (mode0
== V2SImode
)
6315 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
6317 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
6322 /* Expand an expression EXP that calls a built-in function,
6323 with result going to TARGET if that's convenient
6324 (and in mode MODE if that's convenient).
6325 SUBTARGET may be used as the target for computing one of EXP's operands.
6326 IGNORE is nonzero if the value is to be ignored. */
6329 rs6000_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
6330 enum machine_mode mode ATTRIBUTE_UNUSED
,
6331 int ignore ATTRIBUTE_UNUSED
)
6333 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6334 tree arglist
= TREE_OPERAND (exp
, 1);
6335 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6336 struct builtin_description
*d
;
6343 ret
= altivec_expand_builtin (exp
, target
, &success
);
6350 ret
= spe_expand_builtin (exp
, target
, &success
);
6356 if (TARGET_ALTIVEC
|| TARGET_SPE
)
6358 /* Handle simple unary operations. */
6359 d
= (struct builtin_description
*) bdesc_1arg
;
6360 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6361 if (d
->code
== fcode
)
6362 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
6364 /* Handle simple binary operations. */
6365 d
= (struct builtin_description
*) bdesc_2arg
;
6366 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6367 if (d
->code
== fcode
)
6368 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
6370 /* Handle simple ternary operations. */
6371 d
= (struct builtin_description
*) bdesc_3arg
;
6372 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
6373 if (d
->code
== fcode
)
6374 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
6382 rs6000_init_builtins (void)
6384 opaque_V2SI_type_node
= copy_node (V2SI_type_node
);
6385 opaque_V2SF_type_node
= copy_node (V2SF_type_node
);
6386 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
6389 spe_init_builtins ();
6391 altivec_init_builtins ();
6392 if (TARGET_ALTIVEC
|| TARGET_SPE
)
6393 rs6000_common_init_builtins ();
6396 /* Search through a set of builtins and enable the mask bits.
6397 DESC is an array of builtins.
6398 SIZE is the total number of builtins.
6399 START is the builtin enum at which to start.
6400 END is the builtin enum at which to end. */
6402 enable_mask_for_builtins (struct builtin_description
*desc
, int size
,
6403 enum rs6000_builtins start
,
6404 enum rs6000_builtins end
)
6408 for (i
= 0; i
< size
; ++i
)
6409 if (desc
[i
].code
== start
)
6415 for (; i
< size
; ++i
)
6417 /* Flip all the bits on. */
6418 desc
[i
].mask
= target_flags
;
6419 if (desc
[i
].code
== end
)
6425 spe_init_builtins (void)
6427 tree endlink
= void_list_node
;
6428 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
6429 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
6430 struct builtin_description
*d
;
6433 tree v2si_ftype_4_v2si
6434 = build_function_type
6435 (opaque_V2SI_type_node
,
6436 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6437 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6438 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6439 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6442 tree v2sf_ftype_4_v2sf
6443 = build_function_type
6444 (opaque_V2SF_type_node
,
6445 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6446 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6447 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6448 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6451 tree int_ftype_int_v2si_v2si
6452 = build_function_type
6454 tree_cons (NULL_TREE
, integer_type_node
,
6455 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6456 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6459 tree int_ftype_int_v2sf_v2sf
6460 = build_function_type
6462 tree_cons (NULL_TREE
, integer_type_node
,
6463 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6464 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6467 tree void_ftype_v2si_puint_int
6468 = build_function_type (void_type_node
,
6469 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6470 tree_cons (NULL_TREE
, puint_type_node
,
6471 tree_cons (NULL_TREE
,
6475 tree void_ftype_v2si_puint_char
6476 = build_function_type (void_type_node
,
6477 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6478 tree_cons (NULL_TREE
, puint_type_node
,
6479 tree_cons (NULL_TREE
,
6483 tree void_ftype_v2si_pv2si_int
6484 = build_function_type (void_type_node
,
6485 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6486 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6487 tree_cons (NULL_TREE
,
6491 tree void_ftype_v2si_pv2si_char
6492 = build_function_type (void_type_node
,
6493 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6494 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6495 tree_cons (NULL_TREE
,
6500 = build_function_type (void_type_node
,
6501 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
6504 = build_function_type (integer_type_node
, endlink
);
6506 tree v2si_ftype_pv2si_int
6507 = build_function_type (opaque_V2SI_type_node
,
6508 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6509 tree_cons (NULL_TREE
, integer_type_node
,
6512 tree v2si_ftype_puint_int
6513 = build_function_type (opaque_V2SI_type_node
,
6514 tree_cons (NULL_TREE
, puint_type_node
,
6515 tree_cons (NULL_TREE
, integer_type_node
,
6518 tree v2si_ftype_pushort_int
6519 = build_function_type (opaque_V2SI_type_node
,
6520 tree_cons (NULL_TREE
, pushort_type_node
,
6521 tree_cons (NULL_TREE
, integer_type_node
,
6524 tree v2si_ftype_signed_char
6525 = build_function_type (opaque_V2SI_type_node
,
6526 tree_cons (NULL_TREE
, signed_char_type_node
,
6529 /* The initialization of the simple binary and unary builtins is
6530 done in rs6000_common_init_builtins, but we have to enable the
6531 mask bits here manually because we have run out of `target_flags'
6532 bits. We really need to redesign this mask business. */
6534 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
6535 ARRAY_SIZE (bdesc_2arg
),
6538 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
6539 ARRAY_SIZE (bdesc_1arg
),
6541 SPE_BUILTIN_EVSUBFUSIAAW
);
6542 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
6543 ARRAY_SIZE (bdesc_spe_predicates
),
6544 SPE_BUILTIN_EVCMPEQ
,
6545 SPE_BUILTIN_EVFSTSTLT
);
6546 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
6547 ARRAY_SIZE (bdesc_spe_evsel
),
6548 SPE_BUILTIN_EVSEL_CMPGTS
,
6549 SPE_BUILTIN_EVSEL_FSTSTEQ
);
6551 (*lang_hooks
.decls
.pushdecl
)
6552 (build_decl (TYPE_DECL
, get_identifier ("__ev64_opaque__"),
6553 opaque_V2SI_type_node
));
6555 /* Initialize irregular SPE builtins. */
6557 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
6558 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
6559 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
6560 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
6561 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
6562 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
6563 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
6564 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
6565 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
6566 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
6567 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
6568 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
6569 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
6570 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
6571 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
6572 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
6573 def_builtin (target_flags
, "__builtin_spe_evsplatfi", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATFI
);
6574 def_builtin (target_flags
, "__builtin_spe_evsplati", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATI
);
6577 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
6578 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
6579 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
6580 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
6581 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
6582 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
6583 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
6584 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
6585 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
6586 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
6587 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
6588 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
6589 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
6590 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
6591 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
6592 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
6593 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
6594 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
6595 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
6596 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
6597 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
6598 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
6601 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6602 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
6606 switch (insn_data
[d
->icode
].operand
[1].mode
)
6609 type
= int_ftype_int_v2si_v2si
;
6612 type
= int_ftype_int_v2sf_v2sf
;
6618 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6621 /* Evsel predicates. */
6622 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6623 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
6627 switch (insn_data
[d
->icode
].operand
[1].mode
)
6630 type
= v2si_ftype_4_v2si
;
6633 type
= v2sf_ftype_4_v2sf
;
6639 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6644 altivec_init_builtins (void)
6646 struct builtin_description
*d
;
6647 struct builtin_description_predicates
*dp
;
6649 tree pfloat_type_node
= build_pointer_type (float_type_node
);
6650 tree pint_type_node
= build_pointer_type (integer_type_node
);
6651 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
6652 tree pchar_type_node
= build_pointer_type (char_type_node
);
6654 tree pvoid_type_node
= build_pointer_type (void_type_node
);
6656 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
6657 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
6658 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
6659 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
6661 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
6663 tree int_ftype_int_v4si_v4si
6664 = build_function_type_list (integer_type_node
,
6665 integer_type_node
, V4SI_type_node
,
6666 V4SI_type_node
, NULL_TREE
);
6667 tree v4sf_ftype_pcfloat
6668 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
6669 tree void_ftype_pfloat_v4sf
6670 = build_function_type_list (void_type_node
,
6671 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
6672 tree v4si_ftype_pcint
6673 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
6674 tree void_ftype_pint_v4si
6675 = build_function_type_list (void_type_node
,
6676 pint_type_node
, V4SI_type_node
, NULL_TREE
);
6677 tree v8hi_ftype_pcshort
6678 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
6679 tree void_ftype_pshort_v8hi
6680 = build_function_type_list (void_type_node
,
6681 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
6682 tree v16qi_ftype_pcchar
6683 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
6684 tree void_ftype_pchar_v16qi
6685 = build_function_type_list (void_type_node
,
6686 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
6687 tree void_ftype_v4si
6688 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
6689 tree v8hi_ftype_void
6690 = build_function_type (V8HI_type_node
, void_list_node
);
6691 tree void_ftype_void
6692 = build_function_type (void_type_node
, void_list_node
);
6694 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
6696 tree v16qi_ftype_long_pcvoid
6697 = build_function_type_list (V16QI_type_node
,
6698 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6699 tree v8hi_ftype_long_pcvoid
6700 = build_function_type_list (V8HI_type_node
,
6701 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6702 tree v4si_ftype_long_pcvoid
6703 = build_function_type_list (V4SI_type_node
,
6704 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6706 tree void_ftype_v4si_long_pvoid
6707 = build_function_type_list (void_type_node
,
6708 V4SI_type_node
, long_integer_type_node
,
6709 pvoid_type_node
, NULL_TREE
);
6710 tree void_ftype_v16qi_long_pvoid
6711 = build_function_type_list (void_type_node
,
6712 V16QI_type_node
, long_integer_type_node
,
6713 pvoid_type_node
, NULL_TREE
);
6714 tree void_ftype_v8hi_long_pvoid
6715 = build_function_type_list (void_type_node
,
6716 V8HI_type_node
, long_integer_type_node
,
6717 pvoid_type_node
, NULL_TREE
);
6718 tree int_ftype_int_v8hi_v8hi
6719 = build_function_type_list (integer_type_node
,
6720 integer_type_node
, V8HI_type_node
,
6721 V8HI_type_node
, NULL_TREE
);
6722 tree int_ftype_int_v16qi_v16qi
6723 = build_function_type_list (integer_type_node
,
6724 integer_type_node
, V16QI_type_node
,
6725 V16QI_type_node
, NULL_TREE
);
6726 tree int_ftype_int_v4sf_v4sf
6727 = build_function_type_list (integer_type_node
,
6728 integer_type_node
, V4SF_type_node
,
6729 V4SF_type_node
, NULL_TREE
);
6730 tree v4si_ftype_v4si
6731 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6732 tree v8hi_ftype_v8hi
6733 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6734 tree v16qi_ftype_v16qi
6735 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6736 tree v4sf_ftype_v4sf
6737 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6738 tree void_ftype_pcvoid_int_char
6739 = build_function_type_list (void_type_node
,
6740 pcvoid_type_node
, integer_type_node
,
6741 char_type_node
, NULL_TREE
);
6743 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
6744 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
6745 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
6746 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
6747 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
6748 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
6749 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
6750 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
6751 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
6752 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
6753 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
6754 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
6755 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
6756 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
6757 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
6758 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
6759 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
6760 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
6761 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
6762 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
6763 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
6764 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
6765 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
6766 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
6767 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
6768 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
6769 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVX
);
6770 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVX
);
6771 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
6772 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVXL
);
6773 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
6774 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
6776 /* Add the DST variants. */
6777 d
= (struct builtin_description
*) bdesc_dst
;
6778 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6779 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
6781 /* Initialize the predicates. */
6782 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
6783 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
6785 enum machine_mode mode1
;
6788 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
6793 type
= int_ftype_int_v4si_v4si
;
6796 type
= int_ftype_int_v8hi_v8hi
;
6799 type
= int_ftype_int_v16qi_v16qi
;
6802 type
= int_ftype_int_v4sf_v4sf
;
6808 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
6811 /* Initialize the abs* operators. */
6812 d
= (struct builtin_description
*) bdesc_abs
;
6813 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
6815 enum machine_mode mode0
;
6818 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6823 type
= v4si_ftype_v4si
;
6826 type
= v8hi_ftype_v8hi
;
6829 type
= v16qi_ftype_v16qi
;
6832 type
= v4sf_ftype_v4sf
;
6838 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6843 rs6000_common_init_builtins (void)
6845 struct builtin_description
*d
;
6848 tree v4sf_ftype_v4sf_v4sf_v16qi
6849 = build_function_type_list (V4SF_type_node
,
6850 V4SF_type_node
, V4SF_type_node
,
6851 V16QI_type_node
, NULL_TREE
);
6852 tree v4si_ftype_v4si_v4si_v16qi
6853 = build_function_type_list (V4SI_type_node
,
6854 V4SI_type_node
, V4SI_type_node
,
6855 V16QI_type_node
, NULL_TREE
);
6856 tree v8hi_ftype_v8hi_v8hi_v16qi
6857 = build_function_type_list (V8HI_type_node
,
6858 V8HI_type_node
, V8HI_type_node
,
6859 V16QI_type_node
, NULL_TREE
);
6860 tree v16qi_ftype_v16qi_v16qi_v16qi
6861 = build_function_type_list (V16QI_type_node
,
6862 V16QI_type_node
, V16QI_type_node
,
6863 V16QI_type_node
, NULL_TREE
);
6864 tree v4si_ftype_char
6865 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
6866 tree v8hi_ftype_char
6867 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
6868 tree v16qi_ftype_char
6869 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
6870 tree v8hi_ftype_v16qi
6871 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
6872 tree v4sf_ftype_v4sf
6873 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6875 tree v2si_ftype_v2si_v2si
6876 = build_function_type_list (opaque_V2SI_type_node
,
6877 opaque_V2SI_type_node
,
6878 opaque_V2SI_type_node
, NULL_TREE
);
6880 tree v2sf_ftype_v2sf_v2sf
6881 = build_function_type_list (opaque_V2SF_type_node
,
6882 opaque_V2SF_type_node
,
6883 opaque_V2SF_type_node
, NULL_TREE
);
6885 tree v2si_ftype_int_int
6886 = build_function_type_list (opaque_V2SI_type_node
,
6887 integer_type_node
, integer_type_node
,
6890 tree v2si_ftype_v2si
6891 = build_function_type_list (opaque_V2SI_type_node
,
6892 opaque_V2SI_type_node
, NULL_TREE
);
6894 tree v2sf_ftype_v2sf
6895 = build_function_type_list (opaque_V2SF_type_node
,
6896 opaque_V2SF_type_node
, NULL_TREE
);
6898 tree v2sf_ftype_v2si
6899 = build_function_type_list (opaque_V2SF_type_node
,
6900 opaque_V2SI_type_node
, NULL_TREE
);
6902 tree v2si_ftype_v2sf
6903 = build_function_type_list (opaque_V2SI_type_node
,
6904 opaque_V2SF_type_node
, NULL_TREE
);
6906 tree v2si_ftype_v2si_char
6907 = build_function_type_list (opaque_V2SI_type_node
,
6908 opaque_V2SI_type_node
,
6909 char_type_node
, NULL_TREE
);
6911 tree v2si_ftype_int_char
6912 = build_function_type_list (opaque_V2SI_type_node
,
6913 integer_type_node
, char_type_node
, NULL_TREE
);
6915 tree v2si_ftype_char
6916 = build_function_type_list (opaque_V2SI_type_node
,
6917 char_type_node
, NULL_TREE
);
6919 tree int_ftype_int_int
6920 = build_function_type_list (integer_type_node
,
6921 integer_type_node
, integer_type_node
,
6924 tree v4si_ftype_v4si_v4si
6925 = build_function_type_list (V4SI_type_node
,
6926 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6927 tree v4sf_ftype_v4si_char
6928 = build_function_type_list (V4SF_type_node
,
6929 V4SI_type_node
, char_type_node
, NULL_TREE
);
6930 tree v4si_ftype_v4sf_char
6931 = build_function_type_list (V4SI_type_node
,
6932 V4SF_type_node
, char_type_node
, NULL_TREE
);
6933 tree v4si_ftype_v4si_char
6934 = build_function_type_list (V4SI_type_node
,
6935 V4SI_type_node
, char_type_node
, NULL_TREE
);
6936 tree v8hi_ftype_v8hi_char
6937 = build_function_type_list (V8HI_type_node
,
6938 V8HI_type_node
, char_type_node
, NULL_TREE
);
6939 tree v16qi_ftype_v16qi_char
6940 = build_function_type_list (V16QI_type_node
,
6941 V16QI_type_node
, char_type_node
, NULL_TREE
);
6942 tree v16qi_ftype_v16qi_v16qi_char
6943 = build_function_type_list (V16QI_type_node
,
6944 V16QI_type_node
, V16QI_type_node
,
6945 char_type_node
, NULL_TREE
);
6946 tree v8hi_ftype_v8hi_v8hi_char
6947 = build_function_type_list (V8HI_type_node
,
6948 V8HI_type_node
, V8HI_type_node
,
6949 char_type_node
, NULL_TREE
);
6950 tree v4si_ftype_v4si_v4si_char
6951 = build_function_type_list (V4SI_type_node
,
6952 V4SI_type_node
, V4SI_type_node
,
6953 char_type_node
, NULL_TREE
);
6954 tree v4sf_ftype_v4sf_v4sf_char
6955 = build_function_type_list (V4SF_type_node
,
6956 V4SF_type_node
, V4SF_type_node
,
6957 char_type_node
, NULL_TREE
);
6958 tree v4sf_ftype_v4sf_v4sf
6959 = build_function_type_list (V4SF_type_node
,
6960 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6961 tree v4sf_ftype_v4sf_v4sf_v4si
6962 = build_function_type_list (V4SF_type_node
,
6963 V4SF_type_node
, V4SF_type_node
,
6964 V4SI_type_node
, NULL_TREE
);
6965 tree v4sf_ftype_v4sf_v4sf_v4sf
6966 = build_function_type_list (V4SF_type_node
,
6967 V4SF_type_node
, V4SF_type_node
,
6968 V4SF_type_node
, NULL_TREE
);
6969 tree v4si_ftype_v4si_v4si_v4si
6970 = build_function_type_list (V4SI_type_node
,
6971 V4SI_type_node
, V4SI_type_node
,
6972 V4SI_type_node
, NULL_TREE
);
6973 tree v8hi_ftype_v8hi_v8hi
6974 = build_function_type_list (V8HI_type_node
,
6975 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6976 tree v8hi_ftype_v8hi_v8hi_v8hi
6977 = build_function_type_list (V8HI_type_node
,
6978 V8HI_type_node
, V8HI_type_node
,
6979 V8HI_type_node
, NULL_TREE
);
6980 tree v4si_ftype_v8hi_v8hi_v4si
6981 = build_function_type_list (V4SI_type_node
,
6982 V8HI_type_node
, V8HI_type_node
,
6983 V4SI_type_node
, NULL_TREE
);
6984 tree v4si_ftype_v16qi_v16qi_v4si
6985 = build_function_type_list (V4SI_type_node
,
6986 V16QI_type_node
, V16QI_type_node
,
6987 V4SI_type_node
, NULL_TREE
);
6988 tree v16qi_ftype_v16qi_v16qi
6989 = build_function_type_list (V16QI_type_node
,
6990 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6991 tree v4si_ftype_v4sf_v4sf
6992 = build_function_type_list (V4SI_type_node
,
6993 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6994 tree v8hi_ftype_v16qi_v16qi
6995 = build_function_type_list (V8HI_type_node
,
6996 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6997 tree v4si_ftype_v8hi_v8hi
6998 = build_function_type_list (V4SI_type_node
,
6999 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7000 tree v8hi_ftype_v4si_v4si
7001 = build_function_type_list (V8HI_type_node
,
7002 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7003 tree v16qi_ftype_v8hi_v8hi
7004 = build_function_type_list (V16QI_type_node
,
7005 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7006 tree v4si_ftype_v16qi_v4si
7007 = build_function_type_list (V4SI_type_node
,
7008 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
7009 tree v4si_ftype_v16qi_v16qi
7010 = build_function_type_list (V4SI_type_node
,
7011 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7012 tree v4si_ftype_v8hi_v4si
7013 = build_function_type_list (V4SI_type_node
,
7014 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
7015 tree v4si_ftype_v8hi
7016 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
7017 tree int_ftype_v4si_v4si
7018 = build_function_type_list (integer_type_node
,
7019 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7020 tree int_ftype_v4sf_v4sf
7021 = build_function_type_list (integer_type_node
,
7022 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7023 tree int_ftype_v16qi_v16qi
7024 = build_function_type_list (integer_type_node
,
7025 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7026 tree int_ftype_v8hi_v8hi
7027 = build_function_type_list (integer_type_node
,
7028 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7030 /* Add the simple ternary operators. */
7031 d
= (struct builtin_description
*) bdesc_3arg
;
7032 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
7035 enum machine_mode mode0
, mode1
, mode2
, mode3
;
7038 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
7041 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7042 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
7043 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
7044 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
7046 /* When all four are of the same mode. */
7047 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
7052 type
= v4si_ftype_v4si_v4si_v4si
;
7055 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
7058 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
7061 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
7067 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
7072 type
= v4si_ftype_v4si_v4si_v16qi
;
7075 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
7078 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
7081 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
7087 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
7088 && mode3
== V4SImode
)
7089 type
= v4si_ftype_v16qi_v16qi_v4si
;
7090 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
7091 && mode3
== V4SImode
)
7092 type
= v4si_ftype_v8hi_v8hi_v4si
;
7093 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
7094 && mode3
== V4SImode
)
7095 type
= v4sf_ftype_v4sf_v4sf_v4si
;
7097 /* vchar, vchar, vchar, 4 bit literal. */
7098 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
7100 type
= v16qi_ftype_v16qi_v16qi_char
;
7102 /* vshort, vshort, vshort, 4 bit literal. */
7103 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
7105 type
= v8hi_ftype_v8hi_v8hi_char
;
7107 /* vint, vint, vint, 4 bit literal. */
7108 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
7110 type
= v4si_ftype_v4si_v4si_char
;
7112 /* vfloat, vfloat, vfloat, 4 bit literal. */
7113 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
7115 type
= v4sf_ftype_v4sf_v4sf_char
;
7120 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7123 /* Add the simple binary operators. */
7124 d
= (struct builtin_description
*) bdesc_2arg
;
7125 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
7127 enum machine_mode mode0
, mode1
, mode2
;
7130 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
7133 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7134 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
7135 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
7137 /* When all three operands are of the same mode. */
7138 if (mode0
== mode1
&& mode1
== mode2
)
7143 type
= v4sf_ftype_v4sf_v4sf
;
7146 type
= v4si_ftype_v4si_v4si
;
7149 type
= v16qi_ftype_v16qi_v16qi
;
7152 type
= v8hi_ftype_v8hi_v8hi
;
7155 type
= v2si_ftype_v2si_v2si
;
7158 type
= v2sf_ftype_v2sf_v2sf
;
7161 type
= int_ftype_int_int
;
7168 /* A few other combos we really don't want to do manually. */
7170 /* vint, vfloat, vfloat. */
7171 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
7172 type
= v4si_ftype_v4sf_v4sf
;
7174 /* vshort, vchar, vchar. */
7175 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
7176 type
= v8hi_ftype_v16qi_v16qi
;
7178 /* vint, vshort, vshort. */
7179 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
7180 type
= v4si_ftype_v8hi_v8hi
;
7182 /* vshort, vint, vint. */
7183 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
7184 type
= v8hi_ftype_v4si_v4si
;
7186 /* vchar, vshort, vshort. */
7187 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
7188 type
= v16qi_ftype_v8hi_v8hi
;
7190 /* vint, vchar, vint. */
7191 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
7192 type
= v4si_ftype_v16qi_v4si
;
7194 /* vint, vchar, vchar. */
7195 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
7196 type
= v4si_ftype_v16qi_v16qi
;
7198 /* vint, vshort, vint. */
7199 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
7200 type
= v4si_ftype_v8hi_v4si
;
7202 /* vint, vint, 5 bit literal. */
7203 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
7204 type
= v4si_ftype_v4si_char
;
7206 /* vshort, vshort, 5 bit literal. */
7207 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
7208 type
= v8hi_ftype_v8hi_char
;
7210 /* vchar, vchar, 5 bit literal. */
7211 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
7212 type
= v16qi_ftype_v16qi_char
;
7214 /* vfloat, vint, 5 bit literal. */
7215 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
7216 type
= v4sf_ftype_v4si_char
;
7218 /* vint, vfloat, 5 bit literal. */
7219 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
7220 type
= v4si_ftype_v4sf_char
;
7222 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
7223 type
= v2si_ftype_int_int
;
7225 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
7226 type
= v2si_ftype_v2si_char
;
7228 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
7229 type
= v2si_ftype_int_char
;
7232 else if (mode0
== SImode
)
7237 type
= int_ftype_v4si_v4si
;
7240 type
= int_ftype_v4sf_v4sf
;
7243 type
= int_ftype_v16qi_v16qi
;
7246 type
= int_ftype_v8hi_v8hi
;
7256 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7259 /* Add the simple unary operators. */
7260 d
= (struct builtin_description
*) bdesc_1arg
;
7261 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
7263 enum machine_mode mode0
, mode1
;
7266 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
7269 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7270 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
7272 if (mode0
== V4SImode
&& mode1
== QImode
)
7273 type
= v4si_ftype_char
;
7274 else if (mode0
== V8HImode
&& mode1
== QImode
)
7275 type
= v8hi_ftype_char
;
7276 else if (mode0
== V16QImode
&& mode1
== QImode
)
7277 type
= v16qi_ftype_char
;
7278 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
7279 type
= v4sf_ftype_v4sf
;
7280 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
7281 type
= v8hi_ftype_v16qi
;
7282 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
7283 type
= v4si_ftype_v8hi
;
7284 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
7285 type
= v2si_ftype_v2si
;
7286 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
7287 type
= v2sf_ftype_v2sf
;
7288 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
7289 type
= v2sf_ftype_v2si
;
7290 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
7291 type
= v2si_ftype_v2sf
;
7292 else if (mode0
== V2SImode
&& mode1
== QImode
)
7293 type
= v2si_ftype_char
;
7297 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7302 rs6000_init_libfuncs (void)
7304 if (!TARGET_HARD_FLOAT
)
7307 if (DEFAULT_ABI
!= ABI_V4
)
7309 if (TARGET_XCOFF
&& ! TARGET_POWER2
&& ! TARGET_POWERPC
)
7311 /* AIX library routines for float->int conversion. */
7312 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, "__itrunc");
7313 set_conv_libfunc (ufix_optab
, SImode
, DFmode
, "__uitrunc");
7314 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_qitrunc");
7315 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_quitrunc");
7318 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7319 set_optab_libfunc (add_optab
, TFmode
, "_xlqadd");
7320 set_optab_libfunc (sub_optab
, TFmode
, "_xlqsub");
7321 set_optab_libfunc (smul_optab
, TFmode
, "_xlqmul");
7322 set_optab_libfunc (sdiv_optab
, TFmode
, "_xlqdiv");
7326 /* 32-bit SVR4 quad floating point routines. */
7328 set_optab_libfunc (add_optab
, TFmode
, "_q_add");
7329 set_optab_libfunc (sub_optab
, TFmode
, "_q_sub");
7330 set_optab_libfunc (neg_optab
, TFmode
, "_q_neg");
7331 set_optab_libfunc (smul_optab
, TFmode
, "_q_mul");
7332 set_optab_libfunc (sdiv_optab
, TFmode
, "_q_div");
7333 if (TARGET_PPC_GPOPT
|| TARGET_POWER2
)
7334 set_optab_libfunc (sqrt_optab
, TFmode
, "_q_sqrt");
7336 set_optab_libfunc (eq_optab
, TFmode
, "_q_feq");
7337 set_optab_libfunc (ne_optab
, TFmode
, "_q_fne");
7338 set_optab_libfunc (gt_optab
, TFmode
, "_q_fgt");
7339 set_optab_libfunc (ge_optab
, TFmode
, "_q_fge");
7340 set_optab_libfunc (lt_optab
, TFmode
, "_q_flt");
7341 set_optab_libfunc (le_optab
, TFmode
, "_q_fle");
7343 set_conv_libfunc (sext_optab
, TFmode
, SFmode
, "_q_stoq");
7344 set_conv_libfunc (sext_optab
, TFmode
, DFmode
, "_q_dtoq");
7345 set_conv_libfunc (trunc_optab
, SFmode
, TFmode
, "_q_qtos");
7346 set_conv_libfunc (trunc_optab
, DFmode
, TFmode
, "_q_qtod");
7347 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_q_qtoi");
7348 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_q_qtou");
7349 set_conv_libfunc (sfloat_optab
, TFmode
, SImode
, "_q_itoq");
7353 /* Expand a block move operation, and return 1 if successful. Return 0
7354 if we should let the compiler generate normal code.
7356 operands[0] is the destination
7357 operands[1] is the source
7358 operands[2] is the length
7359 operands[3] is the alignment */
7361 #define MAX_MOVE_REG 4
7364 expand_block_move (rtx operands
[])
7366 rtx orig_dest
= operands
[0];
7367 rtx orig_src
= operands
[1];
7368 rtx bytes_rtx
= operands
[2];
7369 rtx align_rtx
= operands
[3];
7370 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
7375 rtx stores
[MAX_MOVE_REG
];
7378 /* If this is not a fixed size move, just call memcpy */
7382 /* If this is not a fixed size alignment, abort */
7383 if (GET_CODE (align_rtx
) != CONST_INT
)
7385 align
= INTVAL (align_rtx
);
7387 /* Anything to move? */
7388 bytes
= INTVAL (bytes_rtx
);
7392 /* store_one_arg depends on expand_block_move to handle at least the size of
7393 reg_parm_stack_space. */
7394 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
7397 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
7400 rtx (*movstrsi
) (rtx
, rtx
, rtx
, rtx
);
7401 rtx (*mov
) (rtx
, rtx
);
7403 enum machine_mode mode
= BLKmode
;
7407 && bytes
> 24 /* move up to 32 bytes at a time */
7415 && ! fixed_regs
[12])
7417 move_bytes
= (bytes
> 32) ? 32 : bytes
;
7418 gen_func
.movstrsi
= gen_movstrsi_8reg
;
7420 else if (TARGET_STRING
7421 && bytes
> 16 /* move up to 24 bytes at a time */
7427 && ! fixed_regs
[10])
7429 move_bytes
= (bytes
> 24) ? 24 : bytes
;
7430 gen_func
.movstrsi
= gen_movstrsi_6reg
;
7432 else if (TARGET_STRING
7433 && bytes
> 8 /* move up to 16 bytes at a time */
7439 move_bytes
= (bytes
> 16) ? 16 : bytes
;
7440 gen_func
.movstrsi
= gen_movstrsi_4reg
;
7442 else if (bytes
>= 8 && TARGET_POWERPC64
7443 /* 64-bit loads and stores require word-aligned
7445 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
7449 gen_func
.mov
= gen_movdi
;
7451 else if (TARGET_STRING
&& bytes
> 4 && !TARGET_POWERPC64
)
7452 { /* move up to 8 bytes at a time */
7453 move_bytes
= (bytes
> 8) ? 8 : bytes
;
7454 gen_func
.movstrsi
= gen_movstrsi_2reg
;
7456 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
7457 { /* move 4 bytes */
7460 gen_func
.mov
= gen_movsi
;
7462 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
7463 { /* move 2 bytes */
7466 gen_func
.mov
= gen_movhi
;
7468 else if (TARGET_STRING
&& bytes
> 1)
7469 { /* move up to 4 bytes at a time */
7470 move_bytes
= (bytes
> 4) ? 4 : bytes
;
7471 gen_func
.movstrsi
= gen_movstrsi_1reg
;
7473 else /* move 1 byte at a time */
7477 gen_func
.mov
= gen_movqi
;
7480 src
= adjust_address (orig_src
, mode
, offset
);
7481 dest
= adjust_address (orig_dest
, mode
, offset
);
7483 if (mode
!= BLKmode
)
7485 rtx tmp_reg
= gen_reg_rtx (mode
);
7487 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
7488 stores
[num_reg
++] = (*gen_func
.mov
) (dest
, tmp_reg
);
7491 if (mode
== BLKmode
|| num_reg
>= MAX_MOVE_REG
|| bytes
== move_bytes
)
7494 for (i
= 0; i
< num_reg
; i
++)
7495 emit_insn (stores
[i
]);
7499 if (mode
== BLKmode
)
7501 /* Move the address into scratch registers. The movstrsi
7502 patterns require zero offset. */
7503 if (!REG_P (XEXP (src
, 0)))
7505 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
7506 src
= replace_equiv_address (src
, src_reg
);
7508 set_mem_size (src
, GEN_INT (move_bytes
));
7510 if (!REG_P (XEXP (dest
, 0)))
7512 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
7513 dest
= replace_equiv_address (dest
, dest_reg
);
7515 set_mem_size (dest
, GEN_INT (move_bytes
));
7517 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
7518 GEN_INT (move_bytes
& 31),
7527 /* Return 1 if OP is a load multiple operation. It is known to be a
7528 PARALLEL and the first section will be tested. */
7531 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7533 int count
= XVECLEN (op
, 0);
7534 unsigned int dest_regno
;
7538 /* Perform a quick check so we don't blow up below. */
7540 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7541 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7542 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7545 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7546 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7548 for (i
= 1; i
< count
; i
++)
7550 rtx elt
= XVECEXP (op
, 0, i
);
7552 if (GET_CODE (elt
) != SET
7553 || GET_CODE (SET_DEST (elt
)) != REG
7554 || GET_MODE (SET_DEST (elt
)) != SImode
7555 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7556 || GET_CODE (SET_SRC (elt
)) != MEM
7557 || GET_MODE (SET_SRC (elt
)) != SImode
7558 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
7559 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
7560 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
7561 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
7568 /* Similar, but tests for store multiple. Here, the second vector element
7569 is a CLOBBER. It will be tested later. */
7572 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7574 int count
= XVECLEN (op
, 0) - 1;
7575 unsigned int src_regno
;
7579 /* Perform a quick check so we don't blow up below. */
7581 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7582 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7583 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7586 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7587 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7589 for (i
= 1; i
< count
; i
++)
7591 rtx elt
= XVECEXP (op
, 0, i
+ 1);
7593 if (GET_CODE (elt
) != SET
7594 || GET_CODE (SET_SRC (elt
)) != REG
7595 || GET_MODE (SET_SRC (elt
)) != SImode
7596 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7597 || GET_CODE (SET_DEST (elt
)) != MEM
7598 || GET_MODE (SET_DEST (elt
)) != SImode
7599 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
7600 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
7601 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
7602 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
7609 /* Return a string to perform a load_multiple operation.
7610 operands[0] is the vector.
7611 operands[1] is the source address.
7612 operands[2] is the first destination register. */
7615 rs6000_output_load_multiple (rtx operands
[3])
7617 /* We have to handle the case where the pseudo used to contain the address
7618 is assigned to one of the output registers. */
7620 int words
= XVECLEN (operands
[0], 0);
7623 if (XVECLEN (operands
[0], 0) == 1)
7624 return "{l|lwz} %2,0(%1)";
7626 for (i
= 0; i
< words
; i
++)
7627 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
7628 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
7632 xop
[0] = GEN_INT (4 * (words
-1));
7633 xop
[1] = operands
[1];
7634 xop
[2] = operands
[2];
7635 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
7640 xop
[0] = GEN_INT (4 * (words
-1));
7641 xop
[1] = operands
[1];
7642 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
7643 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
7648 for (j
= 0; j
< words
; j
++)
7651 xop
[0] = GEN_INT (j
* 4);
7652 xop
[1] = operands
[1];
7653 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
7654 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
7656 xop
[0] = GEN_INT (i
* 4);
7657 xop
[1] = operands
[1];
7658 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
7663 return "{lsi|lswi} %2,%1,%N0";
7666 /* Return 1 for a parallel vrsave operation. */
7669 vrsave_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7671 int count
= XVECLEN (op
, 0);
7672 unsigned int dest_regno
, src_regno
;
7676 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7677 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7678 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
7681 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7682 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7684 if (dest_regno
!= VRSAVE_REGNO
7685 && src_regno
!= VRSAVE_REGNO
)
7688 for (i
= 1; i
< count
; i
++)
7690 rtx elt
= XVECEXP (op
, 0, i
);
7692 if (GET_CODE (elt
) != CLOBBER
7693 && GET_CODE (elt
) != SET
)
7700 /* Return 1 for an PARALLEL suitable for mfcr. */
7703 mfcr_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7705 int count
= XVECLEN (op
, 0);
7708 /* Perform a quick check so we don't blow up below. */
7710 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7711 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
7712 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
7715 for (i
= 0; i
< count
; i
++)
7717 rtx exp
= XVECEXP (op
, 0, i
);
7722 src_reg
= XVECEXP (SET_SRC (exp
), 0, 0);
7724 if (GET_CODE (src_reg
) != REG
7725 || GET_MODE (src_reg
) != CCmode
7726 || ! CR_REGNO_P (REGNO (src_reg
)))
7729 if (GET_CODE (exp
) != SET
7730 || GET_CODE (SET_DEST (exp
)) != REG
7731 || GET_MODE (SET_DEST (exp
)) != SImode
7732 || ! INT_REGNO_P (REGNO (SET_DEST (exp
))))
7734 unspec
= SET_SRC (exp
);
7735 maskval
= 1 << (MAX_CR_REGNO
- REGNO (src_reg
));
7737 if (GET_CODE (unspec
) != UNSPEC
7738 || XINT (unspec
, 1) != UNSPEC_MOVESI_FROM_CR
7739 || XVECLEN (unspec
, 0) != 2
7740 || XVECEXP (unspec
, 0, 0) != src_reg
7741 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
7742 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
7748 /* Return 1 for an PARALLEL suitable for mtcrf. */
7751 mtcrf_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7753 int count
= XVECLEN (op
, 0);
7757 /* Perform a quick check so we don't blow up below. */
7759 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7760 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
7761 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
7763 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
7765 if (GET_CODE (src_reg
) != REG
7766 || GET_MODE (src_reg
) != SImode
7767 || ! INT_REGNO_P (REGNO (src_reg
)))
7770 for (i
= 0; i
< count
; i
++)
7772 rtx exp
= XVECEXP (op
, 0, i
);
7776 if (GET_CODE (exp
) != SET
7777 || GET_CODE (SET_DEST (exp
)) != REG
7778 || GET_MODE (SET_DEST (exp
)) != CCmode
7779 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
7781 unspec
= SET_SRC (exp
);
7782 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
7784 if (GET_CODE (unspec
) != UNSPEC
7785 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
7786 || XVECLEN (unspec
, 0) != 2
7787 || XVECEXP (unspec
, 0, 0) != src_reg
7788 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
7789 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
7795 /* Return 1 for an PARALLEL suitable for lmw. */
7798 lmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7800 int count
= XVECLEN (op
, 0);
7801 unsigned int dest_regno
;
7803 unsigned int base_regno
;
7804 HOST_WIDE_INT offset
;
7807 /* Perform a quick check so we don't blow up below. */
7809 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7810 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7811 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7814 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7815 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7818 || count
!= 32 - (int) dest_regno
)
7821 if (legitimate_indirect_address_p (src_addr
, 0))
7824 base_regno
= REGNO (src_addr
);
7825 if (base_regno
== 0)
7828 else if (legitimate_offset_address_p (SImode
, src_addr
, 0))
7830 offset
= INTVAL (XEXP (src_addr
, 1));
7831 base_regno
= REGNO (XEXP (src_addr
, 0));
7836 for (i
= 0; i
< count
; i
++)
7838 rtx elt
= XVECEXP (op
, 0, i
);
7841 HOST_WIDE_INT newoffset
;
7843 if (GET_CODE (elt
) != SET
7844 || GET_CODE (SET_DEST (elt
)) != REG
7845 || GET_MODE (SET_DEST (elt
)) != SImode
7846 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7847 || GET_CODE (SET_SRC (elt
)) != MEM
7848 || GET_MODE (SET_SRC (elt
)) != SImode
)
7850 newaddr
= XEXP (SET_SRC (elt
), 0);
7851 if (legitimate_indirect_address_p (newaddr
, 0))
7856 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7858 addr_reg
= XEXP (newaddr
, 0);
7859 newoffset
= INTVAL (XEXP (newaddr
, 1));
7863 if (REGNO (addr_reg
) != base_regno
7864 || newoffset
!= offset
+ 4 * i
)
7871 /* Return 1 for an PARALLEL suitable for stmw. */
7874 stmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7876 int count
= XVECLEN (op
, 0);
7877 unsigned int src_regno
;
7879 unsigned int base_regno
;
7880 HOST_WIDE_INT offset
;
7883 /* Perform a quick check so we don't blow up below. */
7885 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7886 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7887 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7890 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7891 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7894 || count
!= 32 - (int) src_regno
)
7897 if (legitimate_indirect_address_p (dest_addr
, 0))
7900 base_regno
= REGNO (dest_addr
);
7901 if (base_regno
== 0)
7904 else if (legitimate_offset_address_p (SImode
, dest_addr
, 0))
7906 offset
= INTVAL (XEXP (dest_addr
, 1));
7907 base_regno
= REGNO (XEXP (dest_addr
, 0));
7912 for (i
= 0; i
< count
; i
++)
7914 rtx elt
= XVECEXP (op
, 0, i
);
7917 HOST_WIDE_INT newoffset
;
7919 if (GET_CODE (elt
) != SET
7920 || GET_CODE (SET_SRC (elt
)) != REG
7921 || GET_MODE (SET_SRC (elt
)) != SImode
7922 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7923 || GET_CODE (SET_DEST (elt
)) != MEM
7924 || GET_MODE (SET_DEST (elt
)) != SImode
)
7926 newaddr
= XEXP (SET_DEST (elt
), 0);
7927 if (legitimate_indirect_address_p (newaddr
, 0))
7932 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7934 addr_reg
= XEXP (newaddr
, 0);
7935 newoffset
= INTVAL (XEXP (newaddr
, 1));
7939 if (REGNO (addr_reg
) != base_regno
7940 || newoffset
!= offset
+ 4 * i
)
7947 /* A validation routine: say whether CODE, a condition code, and MODE
7948 match. The other alternatives either don't make sense or should
7949 never be generated. */
7952 validate_condition_mode (enum rtx_code code
, enum machine_mode mode
)
7954 if (GET_RTX_CLASS (code
) != '<'
7955 || GET_MODE_CLASS (mode
) != MODE_CC
)
7958 /* These don't make sense. */
7959 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
7960 && mode
== CCUNSmode
)
7963 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
7964 && mode
!= CCUNSmode
)
7967 if (mode
!= CCFPmode
7968 && (code
== ORDERED
|| code
== UNORDERED
7969 || code
== UNEQ
|| code
== LTGT
7970 || code
== UNGT
|| code
== UNLT
7971 || code
== UNGE
|| code
== UNLE
))
7974 /* These should never be generated except for
7975 flag_finite_math_only. */
7976 if (mode
== CCFPmode
7977 && ! flag_finite_math_only
7978 && (code
== LE
|| code
== GE
7979 || code
== UNEQ
|| code
== LTGT
7980 || code
== UNGT
|| code
== UNLT
))
7983 /* These are invalid; the information is not there. */
7984 if (mode
== CCEQmode
7985 && code
!= EQ
&& code
!= NE
)
7989 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7990 We only check the opcode against the mode of the CC value here. */
7993 branch_comparison_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
7995 enum rtx_code code
= GET_CODE (op
);
7996 enum machine_mode cc_mode
;
7998 if (GET_RTX_CLASS (code
) != '<')
8001 cc_mode
= GET_MODE (XEXP (op
, 0));
8002 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
8005 validate_condition_mode (code
, cc_mode
);
8010 /* Return 1 if OP is a comparison operation that is valid for a branch
8011 insn and which is true if the corresponding bit in the CC register
8015 branch_positive_comparison_operator (rtx op
, enum machine_mode mode
)
8019 if (! branch_comparison_operator (op
, mode
))
8022 code
= GET_CODE (op
);
8023 return (code
== EQ
|| code
== LT
|| code
== GT
8024 || (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
8025 || code
== LTU
|| code
== GTU
8026 || code
== UNORDERED
);
8029 /* Return 1 if OP is a comparison operation that is valid for an scc
8030 insn: it must be a positive comparison. */
8033 scc_comparison_operator (rtx op
, enum machine_mode mode
)
8035 return branch_positive_comparison_operator (op
, mode
);
8039 trap_comparison_operator (rtx op
, enum machine_mode mode
)
8041 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
8043 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
8047 boolean_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8049 enum rtx_code code
= GET_CODE (op
);
8050 return (code
== AND
|| code
== IOR
|| code
== XOR
);
8054 boolean_or_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8056 enum rtx_code code
= GET_CODE (op
);
8057 return (code
== IOR
|| code
== XOR
);
8061 min_max_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8063 enum rtx_code code
= GET_CODE (op
);
8064 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
8067 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8068 mask required to convert the result of a rotate insn into a shift
8069 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8072 includes_lshift_p (rtx shiftop
, rtx andop
)
8074 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
8076 shift_mask
<<= INTVAL (shiftop
);
8078 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
8081 /* Similar, but for right shift. */
8084 includes_rshift_p (rtx shiftop
, rtx andop
)
8086 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
8088 shift_mask
>>= INTVAL (shiftop
);
8090 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
8093 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8094 to perform a left shift. It must have exactly SHIFTOP least
8095 significant 0's, then one or more 1's, then zero or more 0's. */
8098 includes_rldic_lshift_p (rtx shiftop
, rtx andop
)
8100 if (GET_CODE (andop
) == CONST_INT
)
8102 HOST_WIDE_INT c
, lsb
, shift_mask
;
8105 if (c
== 0 || c
== ~0)
8109 shift_mask
<<= INTVAL (shiftop
);
8111 /* Find the least significant one bit. */
8114 /* It must coincide with the LSB of the shift mask. */
8115 if (-lsb
!= shift_mask
)
8118 /* Invert to look for the next transition (if any). */
8121 /* Remove the low group of ones (originally low group of zeros). */
8124 /* Again find the lsb, and check we have all 1's above. */
8128 else if (GET_CODE (andop
) == CONST_DOUBLE
8129 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
8131 HOST_WIDE_INT low
, high
, lsb
;
8132 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
8134 low
= CONST_DOUBLE_LOW (andop
);
8135 if (HOST_BITS_PER_WIDE_INT
< 64)
8136 high
= CONST_DOUBLE_HIGH (andop
);
8138 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
8139 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
8142 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
8144 shift_mask_high
= ~0;
8145 if (INTVAL (shiftop
) > 32)
8146 shift_mask_high
<<= INTVAL (shiftop
) - 32;
8150 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
8157 return high
== -lsb
;
8160 shift_mask_low
= ~0;
8161 shift_mask_low
<<= INTVAL (shiftop
);
8165 if (-lsb
!= shift_mask_low
)
8168 if (HOST_BITS_PER_WIDE_INT
< 64)
8173 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
8176 return high
== -lsb
;
8180 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
8186 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8187 to perform a left shift. It must have SHIFTOP or more least
8188 significant 0's, with the remainder of the word 1's. */
8191 includes_rldicr_lshift_p (rtx shiftop
, rtx andop
)
8193 if (GET_CODE (andop
) == CONST_INT
)
8195 HOST_WIDE_INT c
, lsb
, shift_mask
;
8198 shift_mask
<<= INTVAL (shiftop
);
8201 /* Find the least significant one bit. */
8204 /* It must be covered by the shift mask.
8205 This test also rejects c == 0. */
8206 if ((lsb
& shift_mask
) == 0)
8209 /* Check we have all 1's above the transition, and reject all 1's. */
8210 return c
== -lsb
&& lsb
!= 1;
8212 else if (GET_CODE (andop
) == CONST_DOUBLE
8213 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
8215 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
8217 low
= CONST_DOUBLE_LOW (andop
);
8219 if (HOST_BITS_PER_WIDE_INT
< 64)
8221 HOST_WIDE_INT high
, shift_mask_high
;
8223 high
= CONST_DOUBLE_HIGH (andop
);
8227 shift_mask_high
= ~0;
8228 if (INTVAL (shiftop
) > 32)
8229 shift_mask_high
<<= INTVAL (shiftop
) - 32;
8233 if ((lsb
& shift_mask_high
) == 0)
8236 return high
== -lsb
;
8242 shift_mask_low
= ~0;
8243 shift_mask_low
<<= INTVAL (shiftop
);
8247 if ((lsb
& shift_mask_low
) == 0)
8250 return low
== -lsb
&& lsb
!= 1;
8256 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8257 for lfq and stfq insns.
8259 Note reg1 and reg2 *must* be hard registers. To be sure we will
8260 abort if we are passed pseudo registers. */
8263 registers_ok_for_quad_peep (rtx reg1
, rtx reg2
)
8265 /* We might have been passed a SUBREG. */
8266 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
8269 return (REGNO (reg1
) == REGNO (reg2
) - 1);
8272 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8273 addr1 and addr2 must be in consecutive memory locations
8274 (addr2 == addr1 + 8). */
8277 addrs_ok_for_quad_peep (rtx addr1
, rtx addr2
)
8282 /* Extract an offset (if used) from the first addr. */
8283 if (GET_CODE (addr1
) == PLUS
)
8285 /* If not a REG, return zero. */
8286 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
8290 reg1
= REGNO (XEXP (addr1
, 0));
8291 /* The offset must be constant! */
8292 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
8294 offset1
= INTVAL (XEXP (addr1
, 1));
8297 else if (GET_CODE (addr1
) != REG
)
8301 reg1
= REGNO (addr1
);
8302 /* This was a simple (mem (reg)) expression. Offset is 0. */
8306 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8307 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8308 register as addr1. */
8309 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
8311 if (GET_CODE (addr2
) != PLUS
)
8314 if (GET_CODE (XEXP (addr2
, 0)) != REG
8315 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
8318 if (reg1
!= REGNO (XEXP (addr2
, 0)))
8321 /* The offset for the second addr must be 8 more than the first addr. */
8322 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
8325 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8330 /* Return the register class of a scratch register needed to copy IN into
8331 or out of a register in CLASS in MODE. If it can be done directly,
8332 NO_REGS is returned. */
8335 secondary_reload_class (enum reg_class
class,
8336 enum machine_mode mode ATTRIBUTE_UNUSED
,
8341 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
8343 && MACHOPIC_INDIRECT
8347 /* We cannot copy a symbolic operand directly into anything
8348 other than BASE_REGS for TARGET_ELF. So indicate that a
8349 register from BASE_REGS is needed as an intermediate
8352 On Darwin, pic addresses require a load from memory, which
8353 needs a base register. */
8354 if (class != BASE_REGS
8355 && (GET_CODE (in
) == SYMBOL_REF
8356 || GET_CODE (in
) == HIGH
8357 || GET_CODE (in
) == LABEL_REF
8358 || GET_CODE (in
) == CONST
))
8362 if (GET_CODE (in
) == REG
)
8365 if (regno
>= FIRST_PSEUDO_REGISTER
)
8367 regno
= true_regnum (in
);
8368 if (regno
>= FIRST_PSEUDO_REGISTER
)
8372 else if (GET_CODE (in
) == SUBREG
)
8374 regno
= true_regnum (in
);
8375 if (regno
>= FIRST_PSEUDO_REGISTER
)
8381 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8383 if (class == GENERAL_REGS
|| class == BASE_REGS
8384 || (regno
>= 0 && INT_REGNO_P (regno
)))
8387 /* Constants, memory, and FP registers can go into FP registers. */
8388 if ((regno
== -1 || FP_REGNO_P (regno
))
8389 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
8392 /* Memory, and AltiVec registers can go into AltiVec registers. */
8393 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
8394 && class == ALTIVEC_REGS
)
8397 /* We can copy among the CR registers. */
8398 if ((class == CR_REGS
|| class == CR0_REGS
)
8399 && regno
>= 0 && CR_REGNO_P (regno
))
8402 /* Otherwise, we need GENERAL_REGS. */
8403 return GENERAL_REGS
;
8406 /* Given a comparison operation, return the bit number in CCR to test. We
8407 know this is a valid comparison.
8409 SCC_P is 1 if this is for an scc. That means that %D will have been
8410 used instead of %C, so the bits will be in different places.
8412 Return -1 if OP isn't a valid comparison for some reason. */
8415 ccr_bit (rtx op
, int scc_p
)
8417 enum rtx_code code
= GET_CODE (op
);
8418 enum machine_mode cc_mode
;
8423 if (GET_RTX_CLASS (code
) != '<')
8428 if (GET_CODE (reg
) != REG
8429 || ! CR_REGNO_P (REGNO (reg
)))
8432 cc_mode
= GET_MODE (reg
);
8433 cc_regnum
= REGNO (reg
);
8434 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
8436 validate_condition_mode (code
, cc_mode
);
8438 /* When generating a sCOND operation, only positive conditions are
8440 if (scc_p
&& code
!= EQ
&& code
!= GT
&& code
!= LT
&& code
!= UNORDERED
8441 && code
!= GTU
&& code
!= LTU
)
8447 if (TARGET_E500
&& !TARGET_FPRS
8448 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8449 return base_bit
+ 1;
8450 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
8452 if (TARGET_E500
&& !TARGET_FPRS
8453 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8454 return base_bit
+ 1;
8455 return base_bit
+ 2;
8456 case GT
: case GTU
: case UNLE
:
8457 return base_bit
+ 1;
8458 case LT
: case LTU
: case UNGE
:
8460 case ORDERED
: case UNORDERED
:
8461 return base_bit
+ 3;
8464 /* If scc, we will have done a cror to put the bit in the
8465 unordered position. So test that bit. For integer, this is ! LT
8466 unless this is an scc insn. */
8467 return scc_p
? base_bit
+ 3 : base_bit
;
8470 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
8477 /* Return the GOT register. */
8480 rs6000_got_register (rtx value ATTRIBUTE_UNUSED
)
8482 /* The second flow pass currently (June 1999) can't update
8483 regs_ever_live without disturbing other parts of the compiler, so
8484 update it here to make the prolog/epilogue code happy. */
8485 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
8486 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
8488 current_function_uses_pic_offset_table
= 1;
8490 return pic_offset_table_rtx
;
8493 /* Function to init struct machine_function.
8494 This will be called, via a pointer variable,
8495 from push_function_context. */
8497 static struct machine_function
*
8498 rs6000_init_machine_status (void)
8500 return ggc_alloc_cleared (sizeof (machine_function
));
/* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8515 unsigned long val
= INT_LOWPART (op
);
8517 /* If the high bit is zero, the value is the first 1 bit we find
8519 if ((val
& 0x80000000) == 0)
8521 if ((val
& 0xffffffff) == 0)
8525 while (((val
<<= 1) & 0x80000000) == 0)
8530 /* If the high bit is set and the low bit is not, or the mask is all
8531 1's, the value is zero. */
8532 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
8535 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8538 while (((val
>>= 1) & 1) != 0)
8548 unsigned long val
= INT_LOWPART (op
);
8550 /* If the low bit is zero, the value is the first 1 bit we find from
8554 if ((val
& 0xffffffff) == 0)
8558 while (((val
>>= 1) & 1) == 0)
8564 /* If the low bit is set and the high bit is not, or the mask is all
8565 1's, the value is 31. */
8566 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
8569 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8572 while (((val
<<= 1) & 0x80000000) != 0)
8578 /* Locate some local-dynamic symbol still in use by this function
8579 so that we can print its name in some tls_ld pattern. */
8582 rs6000_get_some_local_dynamic_name (void)
8586 if (cfun
->machine
->some_ld_name
)
8587 return cfun
->machine
->some_ld_name
;
8589 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
8591 && for_each_rtx (&PATTERN (insn
),
8592 rs6000_get_some_local_dynamic_name_1
, 0))
8593 return cfun
->machine
->some_ld_name
;
8598 /* Helper function for rs6000_get_some_local_dynamic_name. */
8601 rs6000_get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
8605 if (GET_CODE (x
) == SYMBOL_REF
)
8607 const char *str
= XSTR (x
, 0);
8608 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
8610 cfun
->machine
->some_ld_name
= str
;
/* Print an operand.  Recognize special options, documented below.  */

#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
8629 print_operand (FILE *file
, rtx x
, int code
)
8633 unsigned HOST_WIDE_INT uval
;
8638 /* Write out an instruction after the call which may be replaced
8639 with glue code by the loader. This depends on the AIX version. */
8640 asm_fprintf (file
, RS6000_CALL_GLUE
);
8643 /* %a is output_address. */
8646 /* If X is a constant integer whose low-order 5 bits are zero,
8647 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8648 in the AIX assembler where "sri" with a zero shift count
8649 writes a trash instruction. */
8650 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
8657 /* If constant, low-order 16 bits of constant, unsigned.
8658 Otherwise, write normally. */
8660 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
8662 print_operand (file
, x
, 0);
8666 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8667 for 64-bit mask direction. */
8668 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
8671 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8675 /* X is a CR register. Print the number of the EQ bit of the CR */
8676 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8677 output_operand_lossage ("invalid %%E value");
8679 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
8683 /* X is a CR register. Print the shift count needed to move it
8684 to the high-order four bits. */
8685 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8686 output_operand_lossage ("invalid %%f value");
8688 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
8692 /* Similar, but print the count for the rotate in the opposite
8694 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8695 output_operand_lossage ("invalid %%F value");
8697 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
8701 /* X is a constant integer. If it is negative, print "m",
8702 otherwise print "z". This is to make an aze or ame insn. */
8703 if (GET_CODE (x
) != CONST_INT
)
8704 output_operand_lossage ("invalid %%G value");
8705 else if (INTVAL (x
) >= 0)
8712 /* If constant, output low-order five bits. Otherwise, write
8715 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
8717 print_operand (file
, x
, 0);
8721 /* If constant, output low-order six bits. Otherwise, write
8724 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
8726 print_operand (file
, x
, 0);
8730 /* Print `i' if this is a constant, else nothing. */
8736 /* Write the bit number in CCR for jump. */
8739 output_operand_lossage ("invalid %%j code");
8741 fprintf (file
, "%d", i
);
8745 /* Similar, but add one for shift count in rlinm for scc and pass
8746 scc flag to `ccr_bit'. */
8749 output_operand_lossage ("invalid %%J code");
8751 /* If we want bit 31, write a shift count of zero, not 32. */
8752 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8756 /* X must be a constant. Write the 1's complement of the
8759 output_operand_lossage ("invalid %%k value");
8761 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
8765 /* X must be a symbolic constant on ELF. Write an
8766 expression suitable for an 'addi' that adds in the low 16
8768 if (GET_CODE (x
) != CONST
)
8770 print_operand_address (file
, x
);
8775 if (GET_CODE (XEXP (x
, 0)) != PLUS
8776 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
8777 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
8778 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
8779 output_operand_lossage ("invalid %%K value");
8780 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
8782 /* For GNU as, there must be a non-alphanumeric character
8783 between 'l' and the number. The '-' is added by
8784 print_operand() already. */
8785 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
8787 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
8791 /* %l is output_asm_label. */
8794 /* Write second word of DImode or DFmode reference. Works on register
8795 or non-indexed memory only. */
8796 if (GET_CODE (x
) == REG
)
8797 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
8798 else if (GET_CODE (x
) == MEM
)
8800 /* Handle possible auto-increment. Since it is pre-increment and
8801 we have already done it, we can just use an offset of word. */
8802 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8803 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8804 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
8807 output_address (XEXP (adjust_address_nv (x
, SImode
,
8811 if (small_data_operand (x
, GET_MODE (x
)))
8812 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8813 reg_names
[SMALL_DATA_REG
]);
8818 /* MB value for a mask operand. */
8819 if (! mask_operand (x
, SImode
))
8820 output_operand_lossage ("invalid %%m value");
8822 fprintf (file
, "%d", extract_MB (x
));
8826 /* ME value for a mask operand. */
8827 if (! mask_operand (x
, SImode
))
8828 output_operand_lossage ("invalid %%M value");
8830 fprintf (file
, "%d", extract_ME (x
));
8833 /* %n outputs the negative of its operand. */
8836 /* Write the number of elements in the vector times 4. */
8837 if (GET_CODE (x
) != PARALLEL
)
8838 output_operand_lossage ("invalid %%N value");
8840 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
8844 /* Similar, but subtract 1 first. */
8845 if (GET_CODE (x
) != PARALLEL
)
8846 output_operand_lossage ("invalid %%O value");
8848 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
8852 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8854 || INT_LOWPART (x
) < 0
8855 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
8856 output_operand_lossage ("invalid %%p value");
8858 fprintf (file
, "%d", i
);
8862 /* The operand must be an indirect memory reference. The result
8863 is the register number. */
8864 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
8865 || REGNO (XEXP (x
, 0)) >= 32)
8866 output_operand_lossage ("invalid %%P value");
8868 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
8872 /* This outputs the logical code corresponding to a boolean
8873 expression. The expression may have one or both operands
8874 negated (if one, only the first one). For condition register
8875 logical operations, it will also treat the negated
8876 CR codes as NOTs, but not handle NOTs of them. */
8878 const char *const *t
= 0;
8880 enum rtx_code code
= GET_CODE (x
);
8881 static const char * const tbl
[3][3] = {
8882 { "and", "andc", "nor" },
8883 { "or", "orc", "nand" },
8884 { "xor", "eqv", "xor" } };
8888 else if (code
== IOR
)
8890 else if (code
== XOR
)
8893 output_operand_lossage ("invalid %%q value");
8895 if (GET_CODE (XEXP (x
, 0)) != NOT
)
8899 if (GET_CODE (XEXP (x
, 1)) == NOT
)
8917 /* X is a CR register. Print the mask for `mtcrf'. */
8918 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8919 output_operand_lossage ("invalid %%R value");
8921 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
8925 /* Low 5 bits of 32 - value */
8927 output_operand_lossage ("invalid %%s value");
8929 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
8933 /* PowerPC64 mask position. All 0's is excluded.
8934 CONST_INT 32-bit mask is considered sign-extended so any
8935 transition must occur within the CONST_INT, not on the boundary. */
8936 if (! mask64_operand (x
, DImode
))
8937 output_operand_lossage ("invalid %%S value");
8939 uval
= INT_LOWPART (x
);
8941 if (uval
& 1) /* Clear Left */
8943 #if HOST_BITS_PER_WIDE_INT > 64
8944 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8948 else /* Clear Right */
8951 #if HOST_BITS_PER_WIDE_INT > 64
8952 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8960 fprintf (file
, "%d", i
);
8964 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8965 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
8968 /* Bit 3 is OV bit. */
8969 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
8971 /* If we want bit 31, write a shift count of zero, not 32. */
8972 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8976 /* Print the symbolic name of a branch target register. */
8977 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
8978 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
8979 output_operand_lossage ("invalid %%T value");
8980 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
8981 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
8983 fputs ("ctr", file
);
8987 /* High-order 16 bits of constant for use in unsigned operand. */
8989 output_operand_lossage ("invalid %%u value");
8991 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8992 (INT_LOWPART (x
) >> 16) & 0xffff);
8996 /* High-order 16 bits of constant for use in signed operand. */
8998 output_operand_lossage ("invalid %%v value");
9000 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
9001 (INT_LOWPART (x
) >> 16) & 0xffff);
9005 /* Print `u' if this has an auto-increment or auto-decrement. */
9006 if (GET_CODE (x
) == MEM
9007 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
9008 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
9013 /* Print the trap code for this operand. */
9014 switch (GET_CODE (x
))
9017 fputs ("eq", file
); /* 4 */
9020 fputs ("ne", file
); /* 24 */
9023 fputs ("lt", file
); /* 16 */
9026 fputs ("le", file
); /* 20 */
9029 fputs ("gt", file
); /* 8 */
9032 fputs ("ge", file
); /* 12 */
9035 fputs ("llt", file
); /* 2 */
9038 fputs ("lle", file
); /* 6 */
9041 fputs ("lgt", file
); /* 1 */
9044 fputs ("lge", file
); /* 5 */
9052 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9055 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
9056 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
9058 print_operand (file
, x
, 0);
9062 /* MB value for a PowerPC64 rldic operand. */
9063 val
= (GET_CODE (x
) == CONST_INT
9064 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
9069 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
9070 if ((val
<<= 1) < 0)
9073 #if HOST_BITS_PER_WIDE_INT == 32
9074 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
9075 i
+= 32; /* zero-extend high-part was all 0's */
9076 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
9078 val
= CONST_DOUBLE_LOW (x
);
9085 for ( ; i
< 64; i
++)
9086 if ((val
<<= 1) < 0)
9091 fprintf (file
, "%d", i
+ 1);
9095 if (GET_CODE (x
) == MEM
9096 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
9101 /* Like 'L', for third word of TImode */
9102 if (GET_CODE (x
) == REG
)
9103 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
9104 else if (GET_CODE (x
) == MEM
)
9106 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
9107 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
9108 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
9110 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
9111 if (small_data_operand (x
, GET_MODE (x
)))
9112 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
9113 reg_names
[SMALL_DATA_REG
]);
9118 /* X is a SYMBOL_REF. Write out the name preceded by a
9119 period and without any trailing data in brackets. Used for function
9120 names. If we are configured for System V (or the embedded ABI) on
9121 the PowerPC, do not emit the period, since those systems do not use
9122 TOCs and the like. */
9123 if (GET_CODE (x
) != SYMBOL_REF
)
9126 if (XSTR (x
, 0)[0] != '.')
9128 switch (DEFAULT_ABI
)
9143 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
9145 assemble_name (file
, XSTR (x
, 0));
9149 /* Like 'L', for last word of TImode. */
9150 if (GET_CODE (x
) == REG
)
9151 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
9152 else if (GET_CODE (x
) == MEM
)
9154 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
9155 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
9156 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
9158 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
9159 if (small_data_operand (x
, GET_MODE (x
)))
9160 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
9161 reg_names
[SMALL_DATA_REG
]);
9165 /* Print AltiVec or SPE memory operand. */
9170 if (GET_CODE (x
) != MEM
)
9178 if (GET_CODE (tmp
) == REG
)
9180 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
9183 /* Handle [reg+UIMM]. */
9184 else if (GET_CODE (tmp
) == PLUS
&&
9185 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
9189 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
9192 x
= INTVAL (XEXP (tmp
, 1));
9193 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
9197 /* Fall through. Must be [reg+reg]. */
9199 if (GET_CODE (tmp
) == REG
)
9200 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
9201 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
9203 if (REGNO (XEXP (tmp
, 0)) == 0)
9204 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
9205 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
9207 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
9208 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
9216 if (GET_CODE (x
) == REG
)
9217 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
9218 else if (GET_CODE (x
) == MEM
)
9220 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9221 know the width from the mode. */
9222 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
9223 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
9224 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
9225 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
9226 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
9227 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
9229 output_address (XEXP (x
, 0));
9232 output_addr_const (file
, x
);
9236 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
9240 output_operand_lossage ("invalid %%xn code");
9244 /* Print the address of an operand. */
9247 print_operand_address (FILE *file
, rtx x
)
9249 if (GET_CODE (x
) == REG
)
9250 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
9251 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
9252 || GET_CODE (x
) == LABEL_REF
)
9254 output_addr_const (file
, x
);
9255 if (small_data_operand (x
, GET_MODE (x
)))
9256 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
9257 reg_names
[SMALL_DATA_REG
]);
9258 else if (TARGET_TOC
)
9261 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
9263 if (REGNO (XEXP (x
, 0)) == 0)
9264 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
9265 reg_names
[ REGNO (XEXP (x
, 0)) ]);
9267 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
9268 reg_names
[ REGNO (XEXP (x
, 1)) ]);
9270 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
9271 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
"(%s)",
9272 INTVAL (XEXP (x
, 1)), reg_names
[ REGNO (XEXP (x
, 0)) ]);
9274 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
9275 && CONSTANT_P (XEXP (x
, 1)))
9277 output_addr_const (file
, XEXP (x
, 1));
9278 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
9282 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
9283 && CONSTANT_P (XEXP (x
, 1)))
9285 fprintf (file
, "lo16(");
9286 output_addr_const (file
, XEXP (x
, 1));
9287 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
9290 else if (legitimate_constant_pool_address_p (x
))
9292 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
9294 rtx contains_minus
= XEXP (x
, 1);
9298 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9299 turn it into (sym) for output_addr_const. */
9300 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
9301 contains_minus
= XEXP (contains_minus
, 0);
9303 minus
= XEXP (contains_minus
, 0);
9304 symref
= XEXP (minus
, 0);
9305 XEXP (contains_minus
, 0) = symref
;
9310 name
= XSTR (symref
, 0);
9311 newname
= alloca (strlen (name
) + sizeof ("@toc"));
9312 strcpy (newname
, name
);
9313 strcat (newname
, "@toc");
9314 XSTR (symref
, 0) = newname
;
9316 output_addr_const (file
, XEXP (x
, 1));
9318 XSTR (symref
, 0) = name
;
9319 XEXP (contains_minus
, 0) = minus
;
9322 output_addr_const (file
, XEXP (x
, 1));
9324 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
9330 /* Target hook for assembling integer objects. The PowerPC version has
9331 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9332 is defined. It also needs to handle DI-mode objects on 64-bit
9336 rs6000_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
9338 #ifdef RELOCATABLE_NEEDS_FIXUP
9339 /* Special handling for SI values. */
9340 if (size
== 4 && aligned_p
)
9342 extern int in_toc_section (void);
9343 static int recurse
= 0;
9345 /* For -mrelocatable, we mark all addresses that need to be fixed up
9346 in the .fixup section. */
9347 if (TARGET_RELOCATABLE
9348 && !in_toc_section ()
9349 && !in_text_section ()
9351 && GET_CODE (x
) != CONST_INT
9352 && GET_CODE (x
) != CONST_DOUBLE
9358 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
9360 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
9361 fprintf (asm_out_file
, "\t.long\t(");
9362 output_addr_const (asm_out_file
, x
);
9363 fprintf (asm_out_file
, ")@fixup\n");
9364 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
9365 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
9366 fprintf (asm_out_file
, "\t.long\t");
9367 assemble_name (asm_out_file
, buf
);
9368 fprintf (asm_out_file
, "\n\t.previous\n");
9372 /* Remove initial .'s to turn a -mcall-aixdesc function
9373 address into the address of the descriptor, not the function
9375 else if (GET_CODE (x
) == SYMBOL_REF
9376 && XSTR (x
, 0)[0] == '.'
9377 && DEFAULT_ABI
== ABI_AIX
)
9379 const char *name
= XSTR (x
, 0);
9380 while (*name
== '.')
9383 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
9387 #endif /* RELOCATABLE_NEEDS_FIXUP */
9388 return default_assemble_integer (x
, size
, aligned_p
);
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the VISIBILITY_* enum; DEFAULT (0) needs no
	 directive, hence the NULL slot is never used.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Entry-point symbol (".name") and descriptor symbol ("name").  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
9421 rs6000_reverse_condition (enum machine_mode mode
, enum rtx_code code
)
9423 /* Reversal of FP compares takes care -- an ordered compare
9424 becomes an unordered compare and vice versa. */
9425 if (mode
== CCFPmode
9426 && (!flag_finite_math_only
9427 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
9428 || code
== UNEQ
|| code
== LTGT
))
9429 return reverse_condition_maybe_unordered (code
);
9431 return reverse_condition (code
);
9434 /* Generate a compare for CODE. Return a brand-new rtx that
9435 represents the result of the compare. */
9438 rs6000_generate_compare (enum rtx_code code
)
9440 enum machine_mode comp_mode
;
9443 if (rs6000_compare_fp_p
)
9444 comp_mode
= CCFPmode
;
9445 else if (code
== GTU
|| code
== LTU
9446 || code
== GEU
|| code
== LEU
)
9447 comp_mode
= CCUNSmode
;
9451 /* First, the compare. */
9452 compare_result
= gen_reg_rtx (comp_mode
);
9454 /* SPE FP compare instructions on the GPRs. Yuck! */
9455 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
9456 && rs6000_compare_fp_p
)
9458 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
9466 cmp
= flag_finite_math_only
9467 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
9469 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
9470 rs6000_compare_op1
);
9478 cmp
= flag_finite_math_only
9479 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
9481 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
9482 rs6000_compare_op1
);
9490 cmp
= flag_finite_math_only
9491 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
9493 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
9494 rs6000_compare_op1
);
9500 /* Synthesize LE and GE from LT/GT || EQ. */
9501 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
9503 /* Synthesize GE/LE frome GT/LT || EQ. */
9509 case LE
: code
= LT
; break;
9510 case GE
: code
= GT
; break;
9511 case LEU
: code
= LT
; break;
9512 case GEU
: code
= GT
; break;
9516 or1
= gen_reg_rtx (SImode
);
9517 or2
= gen_reg_rtx (SImode
);
9518 or_result
= gen_reg_rtx (CCEQmode
);
9519 compare_result2
= gen_reg_rtx (CCFPmode
);
9522 cmp
= flag_finite_math_only
9523 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9525 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9526 rs6000_compare_op1
);
9529 /* The MC8540 FP compare instructions set the CR bits
9530 differently than other PPC compare instructions. For
9531 that matter, there is no generic test instruction, but a
9532 testgt, testlt, and testeq. For a true condition, bit 2
9533 is set (x1xx) in the CR. Following the traditional CR
9539 ... bit 2 would be a GT CR alias, so later on we
9540 look in the GT bits for the branch instructions.
9541 However, we must be careful to emit correct RTL in
9542 the meantime, so optimizations don't get confused. */
9544 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
9545 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
9547 /* OR them together. */
9548 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
9549 gen_rtx_COMPARE (CCEQmode
,
9550 gen_rtx_IOR (SImode
, or1
, or2
),
9552 compare_result
= or_result
;
9557 /* We only care about 1 bit (x1xx), so map everything to NE to
9558 maintain rtl sanity. We'll get to the right bit (x1xx) at
9559 code output time. */
9560 if (code
== NE
|| code
== LTGT
)
9561 /* Do the inverse here because we have no cmpne
9562 instruction. We use the cmpeq instruction and expect
9563 to get a 0 instead. */
9572 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
9573 gen_rtx_COMPARE (comp_mode
,
9575 rs6000_compare_op1
)));
9577 /* Some kinds of FP comparisons need an OR operation;
9578 under flag_finite_math_only we don't bother. */
9579 if (rs6000_compare_fp_p
9580 && ! flag_finite_math_only
9581 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
9582 && (code
== LE
|| code
== GE
9583 || code
== UNEQ
|| code
== LTGT
9584 || code
== UNGT
|| code
== UNLT
))
9586 enum rtx_code or1
, or2
;
9587 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
9588 rtx or_result
= gen_reg_rtx (CCEQmode
);
9592 case LE
: or1
= LT
; or2
= EQ
; break;
9593 case GE
: or1
= GT
; or2
= EQ
; break;
9594 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
9595 case LTGT
: or1
= LT
; or2
= GT
; break;
9596 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
9597 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
9600 validate_condition_mode (or1
, comp_mode
);
9601 validate_condition_mode (or2
, comp_mode
);
9602 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
9603 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
9604 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
9605 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
9607 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
9609 compare_result
= or_result
;
9613 validate_condition_mode (code
, GET_MODE (compare_result
));
9615 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
9619 /* Emit the RTL for an sCOND pattern. */
9622 rs6000_emit_sCOND (enum rtx_code code
, rtx result
)
9625 enum machine_mode op_mode
;
9626 enum rtx_code cond_code
;
9628 condition_rtx
= rs6000_generate_compare (code
);
9629 cond_code
= GET_CODE (condition_rtx
);
9632 || cond_code
== GE
|| cond_code
== LE
9633 || cond_code
== GEU
|| cond_code
== LEU
9634 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
9636 rtx not_result
= gen_reg_rtx (CCEQmode
);
9637 rtx not_op
, rev_cond_rtx
;
9638 enum machine_mode cc_mode
;
9640 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
9642 rev_cond_rtx
= gen_rtx (rs6000_reverse_condition (cc_mode
, cond_code
),
9643 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
9644 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
9645 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
9646 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
9649 op_mode
= GET_MODE (rs6000_compare_op0
);
9650 if (op_mode
== VOIDmode
)
9651 op_mode
= GET_MODE (rs6000_compare_op1
);
9653 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
9655 PUT_MODE (condition_rtx
, DImode
);
9656 convert_move (result
, condition_rtx
, 0);
9660 PUT_MODE (condition_rtx
, SImode
);
9661 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
9665 /* Emit a branch of kind CODE to location LOC. */
9668 rs6000_emit_cbranch (enum rtx_code code
, rtx loc
)
9670 rtx condition_rtx
, loc_ref
;
9672 condition_rtx
= rs6000_generate_compare (code
);
9673 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
9674 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
9675 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
9679 /* Return the string to output a conditional branch to LABEL, which is
9680 the operand number of the label, or -1 if the branch is really a
9683 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9684 condition code register and its mode specifies what kind of
9687 REVERSED is nonzero if we should reverse the sense of the comparison.
9689 INSN is the insn. */
9692 output_cbranch (rtx op
, const char *label
, int reversed
, rtx insn
)
9694 static char string
[64];
9695 enum rtx_code code
= GET_CODE (op
);
9696 rtx cc_reg
= XEXP (op
, 0);
9697 enum machine_mode mode
= GET_MODE (cc_reg
);
9698 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
9699 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
9700 int really_reversed
= reversed
^ need_longbranch
;
9706 validate_condition_mode (code
, mode
);
9708 /* Work out which way this really branches. We could use
9709 reverse_condition_maybe_unordered here always but this
9710 makes the resulting assembler clearer. */
9711 if (really_reversed
)
9713 /* Reversal of FP compares takes care -- an ordered compare
9714 becomes an unordered compare and vice versa. */
9715 if (mode
== CCFPmode
)
9716 code
= reverse_condition_maybe_unordered (code
);
9718 code
= reverse_condition (code
);
9721 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
9723 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9726 /* Opposite of GT. */
9728 else if (code
== NE
)
9736 /* Not all of these are actually distinct opcodes, but
9737 we distinguish them for clarity of the resulting assembler. */
9739 ccode
= "ne"; break;
9741 ccode
= "eq"; break;
9743 ccode
= "ge"; break;
9744 case GT
: case GTU
: case UNGT
:
9745 ccode
= "gt"; break;
9747 ccode
= "le"; break;
9748 case LT
: case LTU
: case UNLT
:
9749 ccode
= "lt"; break;
9750 case UNORDERED
: ccode
= "un"; break;
9751 case ORDERED
: ccode
= "nu"; break;
9752 case UNGE
: ccode
= "nl"; break;
9753 case UNLE
: ccode
= "ng"; break;
9758 /* Maybe we have a guess as to how likely the branch is.
9759 The old mnemonics don't have a way to specify this information. */
9761 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
9762 if (note
!= NULL_RTX
)
9764 /* PROB is the difference from 50%. */
9765 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
9766 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
9768 /* Only hint for highly probable/improbable branches on newer
9769 cpus as static prediction overrides processor dynamic
9770 prediction. For older cpus we may as well always hint, but
9771 assume not taken for branches that are very close to 50% as a
9772 mispredicted taken branch is more expensive than a
9773 mispredicted not-taken branch. */
9775 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
9777 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
9778 && ((prob
> 0) ^ need_longbranch
))
9786 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
9788 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
9790 /* We need to escape any '%' characters in the reg_names string.
9791 Assume they'd only be the first character.... */
9792 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
9794 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
9798 /* If the branch distance was too far, we may have to use an
9799 unconditional branch to go the distance. */
9800 if (need_longbranch
)
9801 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
9803 s
+= sprintf (s
, ",%s", label
);
9809 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9810 operands of the last comparison is nonzero/true, FALSE_COND if it
9811 is zero/false. Return 0 if the hardware has no such operation. */
9814 rs6000_emit_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
9816 enum rtx_code code
= GET_CODE (op
);
9817 rtx op0
= rs6000_compare_op0
;
9818 rtx op1
= rs6000_compare_op1
;
9820 enum machine_mode compare_mode
= GET_MODE (op0
);
9821 enum machine_mode result_mode
= GET_MODE (dest
);
9824 /* These modes should always match. */
9825 if (GET_MODE (op1
) != compare_mode
9826 /* In the isel case however, we can use a compare immediate, so
9827 op1 may be a small constant. */
9828 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
9830 if (GET_MODE (true_cond
) != result_mode
)
9832 if (GET_MODE (false_cond
) != result_mode
)
9835 /* First, work out if the hardware can do this at all, or
9836 if it's too slow.... */
9837 if (! rs6000_compare_fp_p
)
9840 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
9844 /* Eliminate half of the comparisons by switching operands, this
9845 makes the remaining code simpler. */
9846 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
9847 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
9849 code
= reverse_condition_maybe_unordered (code
);
9851 true_cond
= false_cond
;
9855 /* UNEQ and LTGT take four instructions for a comparison with zero,
9856 it'll probably be faster to use a branch here too. */
9857 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
9860 if (GET_CODE (op1
) == CONST_DOUBLE
)
9861 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
9863 /* We're going to try to implement comparisons by performing
9864 a subtract, then comparing against zero. Unfortunately,
9865 Inf - Inf is NaN which is not zero, and so if we don't
9866 know that the operand is finite and the comparison
9867 would treat EQ different to UNORDERED, we can't do it. */
9868 if (HONOR_INFINITIES (compare_mode
)
9869 && code
!= GT
&& code
!= UNGE
9870 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
9871 /* Constructs of the form (a OP b ? a : b) are safe. */
9872 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
9873 || (! rtx_equal_p (op0
, true_cond
)
9874 && ! rtx_equal_p (op1
, true_cond
))))
9876 /* At this point we know we can use fsel. */
9878 /* Reduce the comparison to a comparison against zero. */
9879 temp
= gen_reg_rtx (compare_mode
);
9880 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9881 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
9883 op1
= CONST0_RTX (compare_mode
);
9885 /* If we don't care about NaNs we can reduce some of the comparisons
9886 down to faster ones. */
9887 if (! HONOR_NANS (compare_mode
))
9893 true_cond
= false_cond
;
9906 /* Now, reduce everything down to a GE. */
9913 temp
= gen_reg_rtx (compare_mode
);
9914 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9919 temp
= gen_reg_rtx (compare_mode
);
9920 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
9925 temp
= gen_reg_rtx (compare_mode
);
9926 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9927 gen_rtx_NEG (compare_mode
,
9928 gen_rtx_ABS (compare_mode
, op0
))));
9933 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9934 temp
= gen_reg_rtx (result_mode
);
9935 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9936 gen_rtx_IF_THEN_ELSE (result_mode
,
9937 gen_rtx_GE (VOIDmode
,
9939 true_cond
, false_cond
)));
9940 false_cond
= true_cond
;
9943 temp
= gen_reg_rtx (compare_mode
);
9944 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9949 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9950 temp
= gen_reg_rtx (result_mode
);
9951 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9952 gen_rtx_IF_THEN_ELSE (result_mode
,
9953 gen_rtx_GE (VOIDmode
,
9955 true_cond
, false_cond
)));
9956 true_cond
= false_cond
;
9959 temp
= gen_reg_rtx (compare_mode
);
9960 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9968 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
9969 gen_rtx_IF_THEN_ELSE (result_mode
,
9970 gen_rtx_GE (VOIDmode
,
9972 true_cond
, false_cond
)));
9976 /* Same as above, but for ints (isel). */
9979 rs6000_emit_int_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
9981 rtx condition_rtx
, cr
;
9983 /* All isel implementations thus far are 32-bits. */
9984 if (GET_MODE (rs6000_compare_op0
) != SImode
)
9987 /* We still have to do the compare, because isel doesn't do a
9988 compare, it just looks at the CRx bits set by a previous compare
9990 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
9991 cr
= XEXP (condition_rtx
, 0);
9993 if (GET_MODE (cr
) == CCmode
)
9994 emit_insn (gen_isel_signed (dest
, condition_rtx
,
9995 true_cond
, false_cond
, cr
));
9997 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
9998 true_cond
, false_cond
, cr
));
10004 output_isel (rtx
*operands
)
10006 enum rtx_code code
;
10008 code
= GET_CODE (operands
[1]);
10009 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
10011 PUT_CODE (operands
[1], reverse_condition (code
));
10012 return "isel %0,%3,%2,%j1";
10015 return "isel %0,%2,%3,%j1";
10019 rs6000_emit_minmax (rtx dest
, enum rtx_code code
, rtx op0
, rtx op1
)
10021 enum machine_mode mode
= GET_MODE (op0
);
10025 if (code
== SMAX
|| code
== SMIN
)
10030 if (code
== SMAX
|| code
== UMAX
)
10031 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
10032 op0
, op1
, mode
, 0);
10034 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
10035 op1
, op0
, mode
, 0);
10036 if (target
== NULL_RTX
)
10038 if (target
!= dest
)
10039 emit_move_insn (dest
, target
);
10042 /* Emit instructions to move SRC to DST. Called by splitters for
10043 multi-register moves. It will emit at most one instruction for
10044 each register that is accessed; that is, it won't emit li/lis pairs
10045 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10049 rs6000_split_multireg_move (rtx dst
, rtx src
)
10051 /* The register number of the first register being moved. */
10053 /* The mode that is to be moved. */
10054 enum machine_mode mode
;
10055 /* The mode that the move is being done in, and its size. */
10056 enum machine_mode reg_mode
;
10058 /* The number of registers that will be moved. */
10061 reg
= REG_P (dst
) ? REGNO (dst
) : REGNO (src
);
10062 mode
= GET_MODE (dst
);
10063 nregs
= HARD_REGNO_NREGS (reg
, mode
);
10064 if (FP_REGNO_P (reg
))
10066 else if (ALTIVEC_REGNO_P (reg
))
10067 reg_mode
= V16QImode
;
10069 reg_mode
= word_mode
;
10070 reg_mode_size
= GET_MODE_SIZE (reg_mode
);
10072 if (reg_mode_size
* nregs
!= GET_MODE_SIZE (mode
))
10075 if (REG_P (src
) && REG_P (dst
) && (REGNO (src
) < REGNO (dst
)))
10077 /* Move register range backwards, if we might have destructive
10080 for (i
= nregs
- 1; i
>= 0; i
--)
10081 emit_insn (gen_rtx_SET (VOIDmode
,
10082 simplify_gen_subreg (reg_mode
, dst
, mode
,
10083 i
* reg_mode_size
),
10084 simplify_gen_subreg (reg_mode
, src
, mode
,
10085 i
* reg_mode_size
)));
10091 bool used_update
= false;
10093 if (GET_CODE (src
) == MEM
&& INT_REGNO_P (reg
))
10097 if (GET_CODE (XEXP (src
, 0)) == PRE_INC
10098 || GET_CODE (XEXP (src
, 0)) == PRE_DEC
)
10101 breg
= XEXP (XEXP (src
, 0), 0);
10102 delta_rtx
= GET_CODE (XEXP (src
, 0)) == PRE_INC
10103 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src
)))
10104 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src
)));
10105 emit_insn (TARGET_32BIT
10106 ? gen_addsi3 (breg
, breg
, delta_rtx
)
10107 : gen_adddi3 (breg
, breg
, delta_rtx
));
10108 src
= gen_rtx_MEM (mode
, breg
);
10111 /* We have now address involving an base register only.
10112 If we use one of the registers to address memory,
10113 we have change that register last. */
10115 breg
= (GET_CODE (XEXP (src
, 0)) == PLUS
10116 ? XEXP (XEXP (src
, 0), 0)
10122 if (REGNO (breg
) >= REGNO (dst
)
10123 && REGNO (breg
) < REGNO (dst
) + nregs
)
10124 j
= REGNO (breg
) - REGNO (dst
);
10127 if (GET_CODE (dst
) == MEM
&& INT_REGNO_P (reg
))
10131 if (GET_CODE (XEXP (dst
, 0)) == PRE_INC
10132 || GET_CODE (XEXP (dst
, 0)) == PRE_DEC
)
10135 breg
= XEXP (XEXP (dst
, 0), 0);
10136 delta_rtx
= GET_CODE (XEXP (dst
, 0)) == PRE_INC
10137 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst
)))
10138 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst
)));
10140 /* We have to update the breg before doing the store.
10141 Use store with update, if available. */
10145 rtx nsrc
= simplify_gen_subreg (reg_mode
, src
, mode
, 0);
10146 emit_insn (TARGET_32BIT
10147 ? gen_movsi_update (breg
, breg
, delta_rtx
, nsrc
)
10148 : gen_movdi_update (breg
, breg
, delta_rtx
, nsrc
));
10149 used_update
= true;
10152 emit_insn (TARGET_32BIT
10153 ? gen_addsi3 (breg
, breg
, delta_rtx
)
10154 : gen_adddi3 (breg
, breg
, delta_rtx
));
10155 dst
= gen_rtx_MEM (mode
, breg
);
10159 for (i
= 0; i
< nregs
; i
++)
10161 /* Calculate index to next subword. */
10166 /* If compiler already emited move of first word by
10167 store with update, no need to do anything. */
10168 if (j
== 0 && used_update
)
10171 emit_insn (gen_rtx_SET (VOIDmode
,
10172 simplify_gen_subreg (reg_mode
, dst
, mode
,
10173 j
* reg_mode_size
),
10174 simplify_gen_subreg (reg_mode
, src
, mode
,
10175 j
* reg_mode_size
)));
10181 /* This page contains routines that are used to determine what the
10182 function prologue and epilogue code will do and write them out. */
10184 /* Return the first fixed-point register that is required to be
10185 saved. 32 if none. */
10188 first_reg_to_save (void)
10192 /* Find lowest numbered live register. */
10193 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
10194 if (regs_ever_live
[first_reg
]
10195 && (! call_used_regs
[first_reg
]
10196 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
10197 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10198 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
10203 && current_function_uses_pic_offset_table
10204 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
10205 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
10211 /* Similar, for FP regs. */
10214 first_fp_reg_to_save (void)
10218 /* Find lowest numbered live register. */
10219 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
10220 if (regs_ever_live
[first_reg
])
10226 /* Similar, for AltiVec regs. */
10229 first_altivec_reg_to_save (void)
10233 /* Stack frame remains as is unless we are in AltiVec ABI. */
10234 if (! TARGET_ALTIVEC_ABI
)
10235 return LAST_ALTIVEC_REGNO
+ 1;
10237 /* Find lowest numbered live register. */
10238 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10239 if (regs_ever_live
[i
])
10245 /* Return a 32-bit mask of the AltiVec registers we need to set in
10246 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10247 the 32-bit word is 0. */
10249 static unsigned int
10250 compute_vrsave_mask (void)
10252 unsigned int i
, mask
= 0;
10254 /* First, find out if we use _any_ altivec registers. */
10255 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10256 if (regs_ever_live
[i
])
10257 mask
|= ALTIVEC_REG_BIT (i
);
10262 /* Next, remove the argument registers from the set. These must
10263 be in the VRSAVE mask set by the caller, so we don't need to add
10264 them in again. More importantly, the mask we compute here is
10265 used to generate CLOBBERs in the set_vrsave insn, and we do not
10266 wish the argument registers to die. */
10267 for (i
= cfun
->args_info
.vregno
- 1; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
10268 mask
&= ~ALTIVEC_REG_BIT (i
);
10270 /* Similarly, remove the return value from the set. */
10273 diddle_return_value (is_altivec_return_reg
, &yes
);
10275 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
10282 is_altivec_return_reg (rtx reg
, void *xyes
)
10284 bool *yes
= (bool *) xyes
;
10285 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
10290 /* Calculate the stack information for the current function. This is
10291 complicated by having two separate calling sequences, the AIX calling
10292 sequence and the V.4 calling sequence.
10294 AIX (and Darwin/Mac OS X) stack frames look like:
10296 SP----> +---------------------------------------+
10297 | back chain to caller | 0 0
10298 +---------------------------------------+
10299 | saved CR | 4 8 (8-11)
10300 +---------------------------------------+
10302 +---------------------------------------+
10303 | reserved for compilers | 12 24
10304 +---------------------------------------+
10305 | reserved for binders | 16 32
10306 +---------------------------------------+
10307 | saved TOC pointer | 20 40
10308 +---------------------------------------+
10309 | Parameter save area (P) | 24 48
10310 +---------------------------------------+
10311 | Alloca space (A) | 24+P etc.
10312 +---------------------------------------+
10313 | Local variable space (L) | 24+P+A
10314 +---------------------------------------+
10315 | Float/int conversion temporary (X) | 24+P+A+L
10316 +---------------------------------------+
10317 | Save area for AltiVec registers (W) | 24+P+A+L+X
10318 +---------------------------------------+
10319 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10320 +---------------------------------------+
10321 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10322 +---------------------------------------+
10323 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10324 +---------------------------------------+
10325 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10326 +---------------------------------------+
10327 old SP->| back chain to caller's caller |
10328 +---------------------------------------+
10330 The required alignment for AIX configurations is two words (i.e., 8
10334 V.4 stack frames look like:
10336 SP----> +---------------------------------------+
10337 | back chain to caller | 0
10338 +---------------------------------------+
10339 | caller's saved LR | 4
10340 +---------------------------------------+
10341 | Parameter save area (P) | 8
10342 +---------------------------------------+
10343 | Alloca space (A) | 8+P
10344 +---------------------------------------+
10345 | Varargs save area (V) | 8+P+A
10346 +---------------------------------------+
10347 | Local variable space (L) | 8+P+A+V
10348 +---------------------------------------+
10349 | Float/int conversion temporary (X) | 8+P+A+V+L
10350 +---------------------------------------+
10351 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10352 +---------------------------------------+
10353 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10354 +---------------------------------------+
10355 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10356 +---------------------------------------+
10357 | SPE: area for 64-bit GP registers |
10358 +---------------------------------------+
10359 | SPE alignment padding |
10360 +---------------------------------------+
10361 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10362 +---------------------------------------+
10363 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10364 +---------------------------------------+
10365 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10366 +---------------------------------------+
10367 old SP->| back chain to caller's caller |
10368 +---------------------------------------+
10370 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10371 given. (But note below and in sysv4.h that we require only 8 and
10372 may round up the size of our stack frame anyways. The historical
10373 reason is early versions of powerpc-linux which didn't properly
10374 align the stack at program startup. A happy side-effect is that
10375 -mno-eabi libraries can be used with -meabi programs.)
10377 The EABI configuration defaults to the V.4 layout. However,
10378 the stack alignment requirements may differ. If -mno-eabi is not
10379 given, the required stack alignment is 8 bytes; if -mno-eabi is
10380 given, the required alignment is 16 bytes. (But see V.4 comment
10383 #ifndef ABI_STACK_BOUNDARY
10384 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10387 static rs6000_stack_t
*
10388 rs6000_stack_info (void)
10390 static rs6000_stack_t info
, zero_info
;
10391 rs6000_stack_t
*info_ptr
= &info
;
10392 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10394 HOST_WIDE_INT total_raw_size
;
10396 /* Zero all fields portably. */
10401 /* Cache value so we don't rescan instruction chain over and over. */
10402 if (cfun
->machine
->insn_chain_scanned_p
== 0)
10404 cfun
->machine
->insn_chain_scanned_p
= 1;
10405 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
10409 /* Select which calling sequence. */
10410 info_ptr
->abi
= DEFAULT_ABI
;
10412 /* Calculate which registers need to be saved & save area size. */
10413 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
10414 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10415 even if it currently looks like we won't. */
10416 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
10417 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
10418 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
10419 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
10420 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
10422 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
10424 /* For the SPE, we have an additional upper 32-bits on each GPR.
10425 Ideally we should save the entire 64-bits only when the upper
10426 half is used in SIMD instructions. Since we only record
10427 registers live (not the size they are used in), this proves
10428 difficult because we'd have to traverse the instruction chain at
10429 the right time, taking reload into account. This is a real pain,
10430 so we opt to save the GPRs in 64-bits always if but one register
10431 gets used in 64-bits. Otherwise, all the registers in the frame
10432 get saved in 32-bits.
10434 So... since when we save all GPRs (except the SP) in 64-bits, the
10435 traditional GP save area will be empty. */
10436 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
10437 info_ptr
->gp_size
= 0;
10439 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
10440 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
10442 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
10443 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
10444 - info_ptr
->first_altivec_reg_save
);
10446 /* Does this function call anything? */
10447 info_ptr
->calls_p
= (! current_function_is_leaf
10448 || cfun
->machine
->ra_needs_full_frame
);
10450 /* Determine if we need to save the link register. */
10451 if (rs6000_ra_ever_killed ()
10452 || (DEFAULT_ABI
== ABI_AIX
10453 && current_function_profile
10454 && !TARGET_PROFILE_KERNEL
)
10455 #ifdef TARGET_RELOCATABLE
10456 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
10458 || (info_ptr
->first_fp_reg_save
!= 64
10459 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
10460 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
10461 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
10462 || (DEFAULT_ABI
== ABI_DARWIN
10464 && current_function_uses_pic_offset_table
)
10465 || info_ptr
->calls_p
)
10467 info_ptr
->lr_save_p
= 1;
10468 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
10471 /* Determine if we need to save the condition code registers. */
10472 if (regs_ever_live
[CR2_REGNO
]
10473 || regs_ever_live
[CR3_REGNO
]
10474 || regs_ever_live
[CR4_REGNO
])
10476 info_ptr
->cr_save_p
= 1;
10477 if (DEFAULT_ABI
== ABI_V4
)
10478 info_ptr
->cr_size
= reg_size
;
10481 /* If the current function calls __builtin_eh_return, then we need
10482 to allocate stack space for registers that will hold data for
10483 the exception handler. */
10484 if (current_function_calls_eh_return
)
10487 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
10490 /* SPE saves EH registers in 64-bits. */
10491 ehrd_size
= i
* (TARGET_SPE_ABI
10492 && info_ptr
->spe_64bit_regs_used
!= 0
10493 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
10498 /* Determine various sizes. */
10499 info_ptr
->reg_size
= reg_size
;
10500 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
10501 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
10502 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
10503 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
10506 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
10507 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
10509 info_ptr
->spe_gp_size
= 0;
10511 if (TARGET_ALTIVEC_ABI
)
10512 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
10514 info_ptr
->vrsave_mask
= 0;
10516 if (TARGET_ALTIVEC_VRSAVE
&& info_ptr
->vrsave_mask
)
10517 info_ptr
->vrsave_size
= 4;
10519 info_ptr
->vrsave_size
= 0;
10521 /* Calculate the offsets. */
10522 switch (DEFAULT_ABI
)
10530 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
10531 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
10533 if (TARGET_ALTIVEC_ABI
)
10535 info_ptr
->vrsave_save_offset
10536 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
10538 /* Align stack so vector save area is on a quadword boundary. */
10539 if (info_ptr
->altivec_size
!= 0)
10540 info_ptr
->altivec_padding_size
10541 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
10543 info_ptr
->altivec_padding_size
= 0;
10545 info_ptr
->altivec_save_offset
10546 = info_ptr
->vrsave_save_offset
10547 - info_ptr
->altivec_padding_size
10548 - info_ptr
->altivec_size
;
10550 /* Adjust for AltiVec case. */
10551 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
10554 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
10555 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
10556 info_ptr
->lr_save_offset
= 2*reg_size
;
10560 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
10561 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
10562 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
10564 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
10566 /* Align stack so SPE GPR save area is aligned on a
10567 double-word boundary. */
10568 if (info_ptr
->spe_gp_size
!= 0)
10569 info_ptr
->spe_padding_size
10570 = 8 - (-info_ptr
->cr_save_offset
% 8);
10572 info_ptr
->spe_padding_size
= 0;
10574 info_ptr
->spe_gp_save_offset
10575 = info_ptr
->cr_save_offset
10576 - info_ptr
->spe_padding_size
10577 - info_ptr
->spe_gp_size
;
10579 /* Adjust for SPE case. */
10580 info_ptr
->toc_save_offset
10581 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
10583 else if (TARGET_ALTIVEC_ABI
)
10585 info_ptr
->vrsave_save_offset
10586 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
10588 /* Align stack so vector save area is on a quadword boundary. */
10589 if (info_ptr
->altivec_size
!= 0)
10590 info_ptr
->altivec_padding_size
10591 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
10593 info_ptr
->altivec_padding_size
= 0;
10595 info_ptr
->altivec_save_offset
10596 = info_ptr
->vrsave_save_offset
10597 - info_ptr
->altivec_padding_size
10598 - info_ptr
->altivec_size
;
10600 /* Adjust for AltiVec case. */
10601 info_ptr
->toc_save_offset
10602 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
10605 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
10606 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
10607 info_ptr
->lr_save_offset
= reg_size
;
10611 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
10612 + info_ptr
->gp_size
10613 + info_ptr
->altivec_size
10614 + info_ptr
->altivec_padding_size
10615 + info_ptr
->spe_gp_size
10616 + info_ptr
->spe_padding_size
10618 + info_ptr
->cr_size
10619 + info_ptr
->lr_size
10620 + info_ptr
->vrsave_size
10621 + info_ptr
->toc_size
,
10622 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
10625 total_raw_size
= (info_ptr
->vars_size
10626 + info_ptr
->parm_size
10627 + info_ptr
->save_size
10628 + info_ptr
->varargs_size
10629 + info_ptr
->fixed_size
);
10631 info_ptr
->total_size
=
10632 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
10634 /* Determine if we need to allocate any stack frame:
10636 For AIX we need to push the stack if a frame pointer is needed
10637 (because the stack might be dynamically adjusted), if we are
10638 debugging, if we make calls, or if the sum of fp_save, gp_save,
10639 and local variables are more than the space needed to save all
10640 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10641 + 18*8 = 288 (GPR13 reserved).
10643 For V.4 we don't have the stack cushion that AIX uses, but assume
10644 that the debugger can handle stackless frames. */
10646 if (info_ptr
->calls_p
)
10647 info_ptr
->push_p
= 1;
10649 else if (DEFAULT_ABI
== ABI_V4
)
10650 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
10652 else if (frame_pointer_needed
)
10653 info_ptr
->push_p
= 1;
10655 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
10656 info_ptr
->push_p
= 1;
10660 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
10662 /* Zero offsets if we're not saving those registers. */
10663 if (info_ptr
->fp_size
== 0)
10664 info_ptr
->fp_save_offset
= 0;
10666 if (info_ptr
->gp_size
== 0)
10667 info_ptr
->gp_save_offset
= 0;
10669 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
10670 info_ptr
->altivec_save_offset
= 0;
10672 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
10673 info_ptr
->vrsave_save_offset
= 0;
10675 if (! TARGET_SPE_ABI
10676 || info_ptr
->spe_64bit_regs_used
== 0
10677 || info_ptr
->spe_gp_size
== 0)
10678 info_ptr
->spe_gp_save_offset
= 0;
10680 if (! info_ptr
->lr_save_p
)
10681 info_ptr
->lr_save_offset
= 0;
10683 if (! info_ptr
->cr_save_p
)
10684 info_ptr
->cr_save_offset
= 0;
10686 if (! info_ptr
->toc_save_p
)
10687 info_ptr
->toc_save_offset
= 0;
10692 /* Return true if the current function uses any GPRs in 64-bit SIMD
10696 spe_func_has_64bit_regs_p (void)
10700 /* Functions that save and restore all the call-saved registers will
10701 need to save/restore the registers in 64-bits. */
10702 if (current_function_calls_eh_return
10703 || current_function_calls_setjmp
10704 || current_function_has_nonlocal_goto
)
10707 insns
= get_insns ();
10709 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10715 i
= PATTERN (insn
);
10716 if (GET_CODE (i
) == SET
10717 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
10726 debug_stack_info (rs6000_stack_t
*info
)
10728 const char *abi_string
;
10731 info
= rs6000_stack_info ();
10733 fprintf (stderr
, "\nStack information for function %s:\n",
10734 ((current_function_decl
&& DECL_NAME (current_function_decl
))
10735 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
10740 default: abi_string
= "Unknown"; break;
10741 case ABI_NONE
: abi_string
= "NONE"; break;
10742 case ABI_AIX
: abi_string
= "AIX"; break;
10743 case ABI_DARWIN
: abi_string
= "Darwin"; break;
10744 case ABI_V4
: abi_string
= "V.4"; break;
10747 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
10749 if (TARGET_ALTIVEC_ABI
)
10750 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
10752 if (TARGET_SPE_ABI
)
10753 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
10755 if (info
->first_gp_reg_save
!= 32)
10756 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
10758 if (info
->first_fp_reg_save
!= 64)
10759 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
10761 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
10762 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
10763 info
->first_altivec_reg_save
);
10765 if (info
->lr_save_p
)
10766 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
10768 if (info
->cr_save_p
)
10769 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
10771 if (info
->toc_save_p
)
10772 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
10774 if (info
->vrsave_mask
)
10775 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
10778 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
10781 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
10783 if (info
->gp_save_offset
)
10784 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
10786 if (info
->fp_save_offset
)
10787 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
10789 if (info
->altivec_save_offset
)
10790 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
10791 info
->altivec_save_offset
);
10793 if (info
->spe_gp_save_offset
)
10794 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
10795 info
->spe_gp_save_offset
);
10797 if (info
->vrsave_save_offset
)
10798 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
10799 info
->vrsave_save_offset
);
10801 if (info
->lr_save_offset
)
10802 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
10804 if (info
->cr_save_offset
)
10805 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
10807 if (info
->toc_save_offset
)
10808 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
10810 if (info
->varargs_save_offset
)
10811 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
10813 if (info
->total_size
)
10814 fprintf (stderr
, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
10817 if (info
->varargs_size
)
10818 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
10820 if (info
->vars_size
)
10821 fprintf (stderr
, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
10824 if (info
->parm_size
)
10825 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
10827 if (info
->fixed_size
)
10828 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
10831 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
10833 if (info
->spe_gp_size
)
10834 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
10837 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
10839 if (info
->altivec_size
)
10840 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
10842 if (info
->vrsave_size
)
10843 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
10845 if (info
->altivec_padding_size
)
10846 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
10847 info
->altivec_padding_size
);
10849 if (info
->spe_padding_size
)
10850 fprintf (stderr
, "\tspe_padding_size = %5d\n",
10851 info
->spe_padding_size
);
10854 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
10857 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
10859 if (info
->toc_size
)
10860 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
10862 if (info
->save_size
)
10863 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
10865 if (info
->reg_size
!= 4)
10866 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
10868 fprintf (stderr
, "\n");
10872 rs6000_return_addr (int count
, rtx frame
)
10874 /* Currently we don't optimize very well between prolog and body
10875 code and for PIC code the code can be actually quite bad, so
10876 don't try to be too clever here. */
10877 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
10879 cfun
->machine
->ra_needs_full_frame
= 1;
10886 plus_constant (copy_to_reg
10887 (gen_rtx_MEM (Pmode
,
10888 memory_address (Pmode
, frame
))),
10889 RETURN_ADDRESS_OFFSET
)));
10892 cfun
->machine
->ra_need_lr
= 1;
10893 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
10896 /* Say whether a function is a candidate for sibcall handling or not.
10897 We do not allow indirect calls to be optimized into sibling calls.
10898 Also, we can't do it if there are any vector parameters; there's
10899 nowhere to put the VRsave code so it works; note that functions with
10900 vector parameters are required to have a prototype, so the argument
10901 type info must be available here. (The tail recursion case can work
10902 with vector parameters, but there's no way to distinguish here.) */
10904 rs6000_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
10909 if (TARGET_ALTIVEC_VRSAVE
)
10911 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
10912 type
; type
= TREE_CHAIN (type
))
10914 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
10918 if (DEFAULT_ABI
== ABI_DARWIN
10919 || (*targetm
.binds_local_p
) (decl
))
10921 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
10923 if (!lookup_attribute ("longcall", attr_list
)
10924 || lookup_attribute ("shortcall", attr_list
))
10932 rs6000_ra_ever_killed (void)
10938 /* Irritatingly, there are two kinds of thunks -- those created with
10939 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10940 through the regular part of the compiler. This is a very hacky
10941 way to tell them apart. */
10942 if (current_function_is_thunk
&& !no_new_pseudos
)
10945 /* regs_ever_live has LR marked as used if any sibcalls are present,
10946 but this should not force saving and restoring in the
10947 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10948 clobbers LR, so that is inappropriate. */
10950 /* Also, the prologue can generate a store into LR that
10951 doesn't really count, like this:
10954 bcl to set PIC register
10958 When we're called from the epilogue, we need to avoid counting
10959 this as a store. */
10961 push_topmost_sequence ();
10962 top
= get_insns ();
10963 pop_topmost_sequence ();
10964 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10966 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10970 if (FIND_REG_INC_NOTE (insn
, reg
))
10972 else if (GET_CODE (insn
) == CALL_INSN
10973 && !SIBLING_CALL_P (insn
))
10975 else if (set_of (reg
, insn
) != NULL_RTX
10976 && !prologue_epilogue_contains (insn
))
10983 /* Add a REG_MAYBE_DEAD note to the insn. */
10985 rs6000_maybe_dead (rtx insn
)
10987 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
10992 /* Emit instructions needed to load the TOC register.
10993 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10994 a constant pool; or for SVR4 -fpic. */
10997 rs6000_emit_load_toc_table (int fromprolog
)
11000 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
11002 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
11004 rtx temp
= (fromprolog
11005 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
11006 : gen_reg_rtx (Pmode
));
11007 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
11009 rs6000_maybe_dead (insn
);
11010 insn
= emit_move_insn (dest
, temp
);
11012 rs6000_maybe_dead (insn
);
11014 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
11017 rtx tempLR
= (fromprolog
11018 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
11019 : gen_reg_rtx (Pmode
));
11020 rtx temp0
= (fromprolog
11021 ? gen_rtx_REG (Pmode
, 0)
11022 : gen_reg_rtx (Pmode
));
11025 /* possibly create the toc section */
11026 if (! toc_initialized
)
11029 function_section (current_function_decl
);
11036 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
11037 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
11039 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
11040 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
11042 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
11044 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
11045 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
11052 static int reload_toc_labelno
= 0;
11054 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
11056 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
11057 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
11059 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
11060 emit_move_insn (dest
, tempLR
);
11061 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
11063 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
11065 rs6000_maybe_dead (insn
);
11067 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
11069 /* This is for AIX code running in non-PIC ELF32. */
11072 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
11073 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
11075 insn
= emit_insn (gen_elf_high (dest
, realsym
));
11077 rs6000_maybe_dead (insn
);
11078 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
11080 rs6000_maybe_dead (insn
);
11082 else if (DEFAULT_ABI
== ABI_AIX
)
11085 insn
= emit_insn (gen_load_toc_aix_si (dest
));
11087 insn
= emit_insn (gen_load_toc_aix_di (dest
));
11089 rs6000_maybe_dead (insn
);
11095 /* Emit instructions to restore the link register after determining where
11096 its value has been stored. */
11099 rs6000_emit_eh_reg_restore (rtx source
, rtx scratch
)
11101 rs6000_stack_t
*info
= rs6000_stack_info ();
11104 operands
[0] = source
;
11105 operands
[1] = scratch
;
11107 if (info
->lr_save_p
)
11109 rtx frame_rtx
= stack_pointer_rtx
;
11110 HOST_WIDE_INT sp_offset
= 0;
11113 if (frame_pointer_needed
11114 || current_function_calls_alloca
11115 || info
->total_size
> 32767)
11117 emit_move_insn (operands
[1], gen_rtx_MEM (Pmode
, frame_rtx
));
11118 frame_rtx
= operands
[1];
11120 else if (info
->push_p
)
11121 sp_offset
= info
->total_size
;
11123 tmp
= plus_constant (frame_rtx
, info
->lr_save_offset
+ sp_offset
);
11124 tmp
= gen_rtx_MEM (Pmode
, tmp
);
11125 emit_move_insn (tmp
, operands
[0]);
11128 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
), operands
[0]);
11132 get_TOC_alias_set (void)
11134 static int set
= -1;
11136 set
= new_alias_set ();
11140 /* This returns nonzero if the current function uses the TOC. This is
11141 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11142 is generated by the ABI_V4 load_toc_* patterns. */
11149 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
11152 rtx pat
= PATTERN (insn
);
11155 if (GET_CODE (pat
) == PARALLEL
)
11156 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
11158 rtx sub
= XVECEXP (pat
, 0, i
);
11159 if (GET_CODE (sub
) == USE
)
11161 sub
= XEXP (sub
, 0);
11162 if (GET_CODE (sub
) == UNSPEC
11163 && XINT (sub
, 1) == UNSPEC_TOC
)
11172 create_TOC_reference (rtx symbol
)
11174 return gen_rtx_PLUS (Pmode
,
11175 gen_rtx_REG (Pmode
, TOC_REGISTER
),
11176 gen_rtx_CONST (Pmode
,
11177 gen_rtx_MINUS (Pmode
, symbol
,
11178 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
11181 /* If _Unwind_* has been called from within the same module,
11182 toc register is not guaranteed to be saved to 40(1) on function
11183 entry. Save it there in that case. */
11186 rs6000_aix_emit_builtin_unwind_init (void)
11189 rtx stack_top
= gen_reg_rtx (Pmode
);
11190 rtx opcode_addr
= gen_reg_rtx (Pmode
);
11191 rtx opcode
= gen_reg_rtx (SImode
);
11192 rtx tocompare
= gen_reg_rtx (SImode
);
11193 rtx no_toc_save_needed
= gen_label_rtx ();
11195 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
11196 emit_move_insn (stack_top
, mem
);
11198 mem
= gen_rtx_MEM (Pmode
,
11199 gen_rtx_PLUS (Pmode
, stack_top
,
11200 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
11201 emit_move_insn (opcode_addr
, mem
);
11202 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
11203 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
11204 : 0xE8410028, SImode
));
11206 do_compare_rtx_and_jump (opcode
, tocompare
, EQ
, 1,
11207 SImode
, NULL_RTX
, NULL_RTX
,
11208 no_toc_save_needed
);
11210 mem
= gen_rtx_MEM (Pmode
,
11211 gen_rtx_PLUS (Pmode
, stack_top
,
11212 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
11213 emit_move_insn (mem
, gen_rtx_REG (Pmode
, 2));
11214 emit_label (no_toc_save_needed
);
11217 /* This ties together stack memory (MEM with an alias set of
11218 rs6000_sr_alias_set) and the change to the stack pointer. */
11221 rs6000_emit_stack_tie (void)
11223 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
11225 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11226 emit_insn (gen_stack_tie (mem
));
11229 /* Emit the correct code for allocating stack space, as insns.
11230 If COPY_R12, make sure a copy of the old frame is left in r12.
11231 The generated code may use hard register 0 as a temporary. */
11234 rs6000_emit_allocate_stack (HOST_WIDE_INT size
, int copy_r12
)
11237 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
11238 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
11239 rtx todec
= GEN_INT (-size
);
11241 if (current_function_limit_stack
)
11243 if (REG_P (stack_limit_rtx
)
11244 && REGNO (stack_limit_rtx
) > 1
11245 && REGNO (stack_limit_rtx
) <= 31)
11247 emit_insn (TARGET_32BIT
11248 ? gen_addsi3 (tmp_reg
,
11251 : gen_adddi3 (tmp_reg
,
11255 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
11258 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
11260 && DEFAULT_ABI
== ABI_V4
)
11262 rtx toload
= gen_rtx_CONST (VOIDmode
,
11263 gen_rtx_PLUS (Pmode
,
11267 emit_insn (gen_elf_high (tmp_reg
, toload
));
11268 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
11269 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
11273 warning ("stack limit expression is not supported");
11276 if (copy_r12
|| ! TARGET_UPDATE
)
11277 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
11283 /* Need a note here so that try_split doesn't get confused. */
11284 if (get_last_insn() == NULL_RTX
)
11285 emit_note (NOTE_INSN_DELETED
);
11286 insn
= emit_move_insn (tmp_reg
, todec
);
11287 try_split (PATTERN (insn
), insn
, 0);
11291 insn
= emit_insn (TARGET_32BIT
11292 ? gen_movsi_update (stack_reg
, stack_reg
,
11294 : gen_movdi_update (stack_reg
, stack_reg
,
11295 todec
, stack_reg
));
11299 insn
= emit_insn (TARGET_32BIT
11300 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
11301 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
11302 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
11303 gen_rtx_REG (Pmode
, 12));
11306 RTX_FRAME_RELATED_P (insn
) = 1;
11308 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
11309 gen_rtx_SET (VOIDmode
, stack_reg
,
11310 gen_rtx_PLUS (Pmode
, stack_reg
,
11315 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11316 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11317 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11318 deduce these equivalences by itself so it wasn't necessary to hold
11319 its hand so much. */
11322 rs6000_frame_related (rtx insn
, rtx reg
, HOST_WIDE_INT val
,
11323 rtx reg2
, rtx rreg
)
11327 /* copy_rtx will not make unique copies of registers, so we need to
11328 ensure we don't have unwanted sharing here. */
11330 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
11333 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
11335 real
= copy_rtx (PATTERN (insn
));
11337 if (reg2
!= NULL_RTX
)
11338 real
= replace_rtx (real
, reg2
, rreg
);
11340 real
= replace_rtx (real
, reg
,
11341 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
11342 STACK_POINTER_REGNUM
),
11345 /* We expect that 'real' is either a SET or a PARALLEL containing
11346 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11347 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11349 if (GET_CODE (real
) == SET
)
11353 temp
= simplify_rtx (SET_SRC (set
));
11355 SET_SRC (set
) = temp
;
11356 temp
= simplify_rtx (SET_DEST (set
));
11358 SET_DEST (set
) = temp
;
11359 if (GET_CODE (SET_DEST (set
)) == MEM
)
11361 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
11363 XEXP (SET_DEST (set
), 0) = temp
;
11366 else if (GET_CODE (real
) == PARALLEL
)
11369 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
11370 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
11372 rtx set
= XVECEXP (real
, 0, i
);
11374 temp
= simplify_rtx (SET_SRC (set
));
11376 SET_SRC (set
) = temp
;
11377 temp
= simplify_rtx (SET_DEST (set
));
11379 SET_DEST (set
) = temp
;
11380 if (GET_CODE (SET_DEST (set
)) == MEM
)
11382 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
11384 XEXP (SET_DEST (set
), 0) = temp
;
11386 RTX_FRAME_RELATED_P (set
) = 1;
11393 real
= spe_synthesize_frame_save (real
);
11395 RTX_FRAME_RELATED_P (insn
) = 1;
11396 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
11401 /* Given an SPE frame note, return a PARALLEL of SETs with the
11402 original note, plus a synthetic register save. */
11405 spe_synthesize_frame_save (rtx real
)
11407 rtx synth
, offset
, reg
, real2
;
11409 if (GET_CODE (real
) != SET
11410 || GET_MODE (SET_SRC (real
)) != V2SImode
)
11413 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11414 frame related note. The parallel contains a set of the register
11415 being saved, and another set to a synthetic register (n+1200).
11416 This is so we can differentiate between 64-bit and 32-bit saves.
11417 Words cannot describe this nastiness. */
11419 if (GET_CODE (SET_DEST (real
)) != MEM
11420 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
11421 || GET_CODE (SET_SRC (real
)) != REG
)
11425 (set (mem (plus (reg x) (const y)))
11428 (set (mem (plus (reg x) (const y+4)))
11432 real2
= copy_rtx (real
);
11433 PUT_MODE (SET_DEST (real2
), SImode
);
11434 reg
= SET_SRC (real2
);
11435 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
11436 synth
= copy_rtx (real2
);
11438 if (BYTES_BIG_ENDIAN
)
11440 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
11441 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
11444 reg
= SET_SRC (synth
);
11446 synth
= replace_rtx (synth
, reg
,
11447 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
11449 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
11450 synth
= replace_rtx (synth
, offset
,
11451 GEN_INT (INTVAL (offset
)
11452 + (BYTES_BIG_ENDIAN
? 0 : 4)));
11454 RTX_FRAME_RELATED_P (synth
) = 1;
11455 RTX_FRAME_RELATED_P (real2
) = 1;
11456 if (BYTES_BIG_ENDIAN
)
11457 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
11459 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
11464 /* Returns an insn that has a vrsave set operation with the
11465 appropriate CLOBBERs. */
11468 generate_set_vrsave (rtx reg
, rs6000_stack_t
*info
, int epiloguep
)
11471 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
11472 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
11475 = gen_rtx_SET (VOIDmode
,
11477 gen_rtx_UNSPEC_VOLATILE (SImode
,
11478 gen_rtvec (2, reg
, vrsave
),
11483 /* We need to clobber the registers in the mask so the scheduler
11484 does not move sets to VRSAVE before sets of AltiVec registers.
11486 However, if the function receives nonlocal gotos, reload will set
11487 all call saved registers live. We will end up with:
11489 (set (reg 999) (mem))
11490 (parallel [ (set (reg vrsave) (unspec blah))
11491 (clobber (reg 999))])
11493 The clobber will cause the store into reg 999 to be dead, and
11494 flow will attempt to delete an epilogue insn. In this case, we
11495 need an unspec use/set of the register. */
11497 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11498 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
11500 if (!epiloguep
|| call_used_regs
[i
])
11501 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
11502 gen_rtx_REG (V4SImode
, i
));
11505 rtx reg
= gen_rtx_REG (V4SImode
, i
);
11508 = gen_rtx_SET (VOIDmode
,
11510 gen_rtx_UNSPEC (V4SImode
,
11511 gen_rtvec (1, reg
), 27));
11515 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
11517 for (i
= 0; i
< nclobs
; ++i
)
11518 XVECEXP (insn
, 0, i
) = clobs
[i
];
11523 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11524 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11527 emit_frame_save (rtx frame_reg
, rtx frame_ptr
, enum machine_mode mode
,
11528 unsigned int regno
, int offset
, HOST_WIDE_INT total_size
)
11530 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
11531 rtx replacea
, replaceb
;
11533 int_rtx
= GEN_INT (offset
);
11535 /* Some cases that need register indexed addressing. */
11536 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
11538 && SPE_VECTOR_MODE (mode
)
11539 && !SPE_CONST_OFFSET_OK (offset
)))
11541 /* Whomever calls us must make sure r11 is available in the
11542 flow path of instructions in the prologue. */
11543 offset_rtx
= gen_rtx_REG (Pmode
, 11);
11544 emit_move_insn (offset_rtx
, int_rtx
);
11546 replacea
= offset_rtx
;
11547 replaceb
= int_rtx
;
11551 offset_rtx
= int_rtx
;
11552 replacea
= NULL_RTX
;
11553 replaceb
= NULL_RTX
;
11556 reg
= gen_rtx_REG (mode
, regno
);
11557 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
11558 mem
= gen_rtx_MEM (mode
, addr
);
11559 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11561 insn
= emit_move_insn (mem
, reg
);
11563 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
11566 /* Emit an offset memory reference suitable for a frame store, while
11567 converting to a valid addressing mode. */
11570 gen_frame_mem_offset (enum machine_mode mode
, rtx reg
, int offset
)
11572 rtx int_rtx
, offset_rtx
;
11574 int_rtx
= GEN_INT (offset
);
11576 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
11578 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11579 emit_move_insn (offset_rtx
, int_rtx
);
11582 offset_rtx
= int_rtx
;
11584 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
11587 /* Emit function prologue as insns. */
11590 rs6000_emit_prologue (void)
11592 rs6000_stack_t
*info
= rs6000_stack_info ();
11593 enum machine_mode reg_mode
= Pmode
;
11594 int reg_size
= UNITS_PER_WORD
;
11595 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
11596 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
11597 rtx frame_reg_rtx
= sp_reg_rtx
;
11598 rtx cr_save_rtx
= NULL_RTX
;
11600 int saving_FPRs_inline
;
11601 int using_store_multiple
;
11602 HOST_WIDE_INT sp_offset
= 0;
11604 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11606 reg_mode
= V2SImode
;
11610 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11611 && (!TARGET_SPE_ABI
11612 || info
->spe_64bit_regs_used
== 0)
11613 && info
->first_gp_reg_save
< 31);
11614 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
11615 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
11616 || current_function_calls_eh_return
11617 || cfun
->machine
->ra_need_lr
);
11619 /* For V.4, update stack before we do any saving and set back pointer. */
11621 && (DEFAULT_ABI
== ABI_V4
11622 || current_function_calls_eh_return
))
11624 if (info
->total_size
< 32767)
11625 sp_offset
= info
->total_size
;
11627 frame_reg_rtx
= frame_ptr_rtx
;
11628 rs6000_emit_allocate_stack (info
->total_size
,
11629 (frame_reg_rtx
!= sp_reg_rtx
11630 && (info
->cr_save_p
11632 || info
->first_fp_reg_save
< 64
11633 || info
->first_gp_reg_save
< 32
11635 if (frame_reg_rtx
!= sp_reg_rtx
)
11636 rs6000_emit_stack_tie ();
11639 /* Save AltiVec registers if needed. */
11640 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11644 /* There should be a non inline version of this, for when we
11645 are saving lots of vector registers. */
11646 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11647 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11649 rtx areg
, savereg
, mem
;
11652 offset
= info
->altivec_save_offset
+ sp_offset
11653 + 16 * (i
- info
->first_altivec_reg_save
);
11655 savereg
= gen_rtx_REG (V4SImode
, i
);
11657 areg
= gen_rtx_REG (Pmode
, 0);
11658 emit_move_insn (areg
, GEN_INT (offset
));
11660 /* AltiVec addressing mode is [reg+reg]. */
11661 mem
= gen_rtx_MEM (V4SImode
,
11662 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
11664 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11666 insn
= emit_move_insn (mem
, savereg
);
11668 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11669 areg
, GEN_INT (offset
));
11673 /* VRSAVE is a bit vector representing which AltiVec registers
11674 are used. The OS uses this to determine which vector
11675 registers to save on a context switch. We need to save
11676 VRSAVE on the stack frame, add whatever AltiVec registers we
11677 used in this function, and do the corresponding magic in the
11680 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
11681 && info
->vrsave_mask
!= 0)
11683 rtx reg
, mem
, vrsave
;
11686 /* Get VRSAVE onto a GPR. */
11687 reg
= gen_rtx_REG (SImode
, 12);
11688 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
11690 emit_insn (gen_get_vrsave_internal (reg
));
11692 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
11695 offset
= info
->vrsave_save_offset
+ sp_offset
;
11697 = gen_rtx_MEM (SImode
,
11698 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
11699 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11700 insn
= emit_move_insn (mem
, reg
);
11702 /* Include the registers in the mask. */
11703 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
11705 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
11708 /* If we use the link register, get it into r0. */
11709 if (info
->lr_save_p
)
11710 emit_move_insn (gen_rtx_REG (Pmode
, 0),
11711 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11713 /* If we need to save CR, put it into r12. */
11714 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
11716 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
11717 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
11720 /* Do any required saving of fpr's. If only one or two to save, do
11721 it ourselves. Otherwise, call function. */
11722 if (saving_FPRs_inline
)
11725 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11726 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11727 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11728 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
11729 info
->first_fp_reg_save
+ i
,
11730 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
11733 else if (info
->first_fp_reg_save
!= 64)
11737 const char *alloc_rname
;
11739 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
11741 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
11742 gen_rtx_REG (Pmode
,
11743 LINK_REGISTER_REGNUM
));
11744 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
11745 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
11746 alloc_rname
= ggc_strdup (rname
);
11747 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11748 gen_rtx_SYMBOL_REF (Pmode
,
11750 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11752 rtx addr
, reg
, mem
;
11753 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
11754 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11755 GEN_INT (info
->fp_save_offset
11756 + sp_offset
+ 8*i
));
11757 mem
= gen_rtx_MEM (DFmode
, addr
);
11758 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11760 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11762 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11763 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11764 NULL_RTX
, NULL_RTX
);
11767 /* Save GPRs. This is done as a PARALLEL if we are using
11768 the store-multiple instructions. */
11769 if (using_store_multiple
)
11773 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11774 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11776 rtx addr
, reg
, mem
;
11777 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11778 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11779 GEN_INT (info
->gp_save_offset
11782 mem
= gen_rtx_MEM (reg_mode
, addr
);
11783 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11785 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11787 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11788 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11789 NULL_RTX
, NULL_RTX
);
11794 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11795 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11796 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11797 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11798 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11799 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11801 rtx addr
, reg
, mem
;
11802 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11804 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11806 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11809 if (!SPE_CONST_OFFSET_OK (offset
))
11811 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11812 emit_move_insn (b
, GEN_INT (offset
));
11815 b
= GEN_INT (offset
);
11817 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11818 mem
= gen_rtx_MEM (V2SImode
, addr
);
11819 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11820 insn
= emit_move_insn (mem
, reg
);
11822 if (GET_CODE (b
) == CONST_INT
)
11823 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11824 NULL_RTX
, NULL_RTX
);
11826 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11827 b
, GEN_INT (offset
));
11831 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11832 GEN_INT (info
->gp_save_offset
11835 mem
= gen_rtx_MEM (reg_mode
, addr
);
11836 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11838 insn
= emit_move_insn (mem
, reg
);
11839 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11840 NULL_RTX
, NULL_RTX
);
11845 /* ??? There's no need to emit actual instructions here, but it's the
11846 easiest way to get the frame unwind information emitted. */
11847 if (current_function_calls_eh_return
)
11849 unsigned int i
, regno
;
11851 /* In AIX ABI we need to pretend we save r2 here. */
11854 rtx addr
, reg
, mem
;
11856 reg
= gen_rtx_REG (reg_mode
, 2);
11857 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11858 GEN_INT (sp_offset
+ 5 * reg_size
));
11859 mem
= gen_rtx_MEM (reg_mode
, addr
);
11860 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11862 insn
= emit_move_insn (mem
, reg
);
11863 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11864 NULL_RTX
, NULL_RTX
);
11865 PATTERN (insn
) = gen_blockage ();
11870 regno
= EH_RETURN_DATA_REGNO (i
);
11871 if (regno
== INVALID_REGNUM
)
11874 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
11875 info
->ehrd_offset
+ sp_offset
11876 + reg_size
* (int) i
,
11881 /* Save lr if we used it. */
11882 if (info
->lr_save_p
)
11884 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11885 GEN_INT (info
->lr_save_offset
+ sp_offset
));
11886 rtx reg
= gen_rtx_REG (Pmode
, 0);
11887 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
11888 /* This should not be of rs6000_sr_alias_set, because of
11889 __builtin_return_address. */
11891 insn
= emit_move_insn (mem
, reg
);
11892 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11893 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11896 /* Save CR if we use any that must be preserved. */
11897 if (info
->cr_save_p
)
11899 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11900 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11901 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11903 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11905 /* If r12 was used to hold the original sp, copy cr into r0 now
11907 if (REGNO (frame_reg_rtx
) == 12)
11909 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
11910 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
11912 insn
= emit_move_insn (mem
, cr_save_rtx
);
11914 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11915 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11916 But that's OK. All we have to do is specify that _one_ condition
11917 code register is saved in this stack slot. The thrower's epilogue
11918 will then restore all the call-saved registers.
11919 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11920 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11921 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
11924 /* Update stack and set back pointer unless this is V.4,
11925 for which it was done previously. */
11927 && !(DEFAULT_ABI
== ABI_V4
|| current_function_calls_eh_return
))
11928 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
11930 /* Set frame pointer, if needed. */
11931 if (frame_pointer_needed
)
11933 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
11935 RTX_FRAME_RELATED_P (insn
) = 1;
11938 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11939 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
11940 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
11941 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
11943 /* If emit_load_toc_table will use the link register, we need to save
11944 it. We use R12 for this purpose because emit_load_toc_table
11945 can use register 0. This allows us to use a plain 'blr' to return
11946 from the procedure more often. */
11947 int save_LR_around_toc_setup
= (TARGET_ELF
11948 && DEFAULT_ABI
!= ABI_AIX
11950 && ! info
->lr_save_p
11951 && EXIT_BLOCK_PTR
->pred
!= NULL
);
11952 if (save_LR_around_toc_setup
)
11954 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11955 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx
, lr
));
11956 rs6000_emit_load_toc_table (TRUE
);
11957 rs6000_maybe_dead (emit_move_insn (lr
, frame_ptr_rtx
));
11960 rs6000_emit_load_toc_table (TRUE
);
11964 if (DEFAULT_ABI
== ABI_DARWIN
11965 && flag_pic
&& current_function_uses_pic_offset_table
)
11967 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11968 const char *picbase
= machopic_function_base_name ();
11969 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
11971 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
11973 rs6000_maybe_dead (
11974 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
11975 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
11980 /* Write function prologue. */
11983 rs6000_output_function_prologue (FILE *file
,
11984 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
11986 rs6000_stack_t
*info
= rs6000_stack_info ();
11988 if (TARGET_DEBUG_STACK
)
11989 debug_stack_info (info
);
11991 /* Write .extern for any function we will call to save and restore
11993 if (info
->first_fp_reg_save
< 64
11994 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
11995 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11996 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
11997 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
11998 RESTORE_FP_SUFFIX
);
12000 /* Write .extern for AIX common mode routines, if needed. */
12001 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
12003 fputs ("\t.extern __mulh\n", file
);
12004 fputs ("\t.extern __mull\n", file
);
12005 fputs ("\t.extern __divss\n", file
);
12006 fputs ("\t.extern __divus\n", file
);
12007 fputs ("\t.extern __quoss\n", file
);
12008 fputs ("\t.extern __quous\n", file
);
12009 common_mode_defined
= 1;
12012 if (! HAVE_prologue
)
12016 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12017 the "toplevel" insn chain. */
12018 emit_note (NOTE_INSN_DELETED
);
12019 rs6000_emit_prologue ();
12020 emit_note (NOTE_INSN_DELETED
);
12022 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12026 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
12028 INSN_ADDRESSES_NEW (insn
, addr
);
12033 if (TARGET_DEBUG_STACK
)
12034 debug_rtx_list (get_insns (), 100);
12035 final (get_insns (), file
, FALSE
, FALSE
);
12039 rs6000_pic_labelno
++;
12042 /* Emit function epilogue as insns.
12044 At present, dwarf2out_frame_debug_expr doesn't understand
12045 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12046 anywhere in the epilogue. Most of the insns below would in any case
12047 need special notes to explain where r11 is in relation to the stack. */
12050 rs6000_emit_epilogue (int sibcall
)
12052 rs6000_stack_t
*info
;
12053 int restoring_FPRs_inline
;
12054 int using_load_multiple
;
12055 int using_mfcr_multiple
;
12056 int use_backchain_to_restore_sp
;
12058 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
12059 rtx frame_reg_rtx
= sp_reg_rtx
;
12060 enum machine_mode reg_mode
= Pmode
;
12061 int reg_size
= UNITS_PER_WORD
;
12064 info
= rs6000_stack_info ();
12066 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
12068 reg_mode
= V2SImode
;
12072 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
12073 && (!TARGET_SPE_ABI
12074 || info
->spe_64bit_regs_used
== 0)
12075 && info
->first_gp_reg_save
< 31);
12076 restoring_FPRs_inline
= (sibcall
12077 || current_function_calls_eh_return
12078 || info
->first_fp_reg_save
== 64
12079 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
12080 use_backchain_to_restore_sp
= (frame_pointer_needed
12081 || current_function_calls_alloca
12082 || info
->total_size
> 32767);
12083 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
12084 || rs6000_cpu
== PROCESSOR_PPC603
12085 || rs6000_cpu
== PROCESSOR_PPC750
12088 /* If we have a frame pointer, a call to alloca, or a large stack
12089 frame, restore the old stack pointer using the backchain. Otherwise,
12090 we know what size to update it with. */
12091 if (use_backchain_to_restore_sp
)
12093 /* Under V.4, don't reset the stack pointer until after we're done
12094 loading the saved registers. */
12095 if (DEFAULT_ABI
== ABI_V4
)
12096 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
12098 emit_move_insn (frame_reg_rtx
,
12099 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
12102 else if (info
->push_p
)
12104 if (DEFAULT_ABI
== ABI_V4
12105 || current_function_calls_eh_return
)
12106 sp_offset
= info
->total_size
;
12109 emit_insn (TARGET_32BIT
12110 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
12111 GEN_INT (info
->total_size
))
12112 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
12113 GEN_INT (info
->total_size
)));
12117 /* Restore AltiVec registers if needed. */
12118 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
12122 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
12123 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
12125 rtx addr
, areg
, mem
;
12127 areg
= gen_rtx_REG (Pmode
, 0);
12129 (areg
, GEN_INT (info
->altivec_save_offset
12131 + 16 * (i
- info
->first_altivec_reg_save
)));
12133 /* AltiVec addressing mode is [reg+reg]. */
12134 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
12135 mem
= gen_rtx_MEM (V4SImode
, addr
);
12136 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12138 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
12142 /* Restore VRSAVE if needed. */
12143 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
12144 && info
->vrsave_mask
!= 0)
12146 rtx addr
, mem
, reg
;
12148 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12149 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
12150 mem
= gen_rtx_MEM (SImode
, addr
);
12151 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12152 reg
= gen_rtx_REG (SImode
, 12);
12153 emit_move_insn (reg
, mem
);
12155 emit_insn (generate_set_vrsave (reg
, info
, 1));
12158 /* Get the old lr if we saved it. */
12159 if (info
->lr_save_p
)
12161 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
12162 info
->lr_save_offset
+ sp_offset
);
12164 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12166 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
12169 /* Get the old cr if we saved it. */
12170 if (info
->cr_save_p
)
12172 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12173 GEN_INT (info
->cr_save_offset
+ sp_offset
));
12174 rtx mem
= gen_rtx_MEM (SImode
, addr
);
12176 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12178 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
12181 /* Set LR here to try to overlap restores below. */
12182 if (info
->lr_save_p
)
12183 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
12184 gen_rtx_REG (Pmode
, 0));
12186 /* Load exception handler data registers, if needed. */
12187 if (current_function_calls_eh_return
)
12189 unsigned int i
, regno
;
12193 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12194 GEN_INT (sp_offset
+ 5 * reg_size
));
12195 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
12197 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12199 emit_move_insn (gen_rtx_REG (reg_mode
, 2), mem
);
12206 regno
= EH_RETURN_DATA_REGNO (i
);
12207 if (regno
== INVALID_REGNUM
)
12210 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
12211 info
->ehrd_offset
+ sp_offset
12212 + reg_size
* (int) i
);
12213 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12215 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
12219 /* Restore GPRs. This is done as a PARALLEL if we are using
12220 the load-multiple instructions. */
12221 if (using_load_multiple
)
12224 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
12225 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
12227 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12228 GEN_INT (info
->gp_save_offset
12231 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
12233 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12236 gen_rtx_SET (VOIDmode
,
12237 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
12240 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
12243 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
12244 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
12245 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
12246 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
12247 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
12248 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
12250 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12251 GEN_INT (info
->gp_save_offset
12254 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
12256 /* Restore 64-bit quantities for SPE. */
12257 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
12259 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
12262 if (!SPE_CONST_OFFSET_OK (offset
))
12264 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
12265 emit_move_insn (b
, GEN_INT (offset
));
12268 b
= GEN_INT (offset
);
12270 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
12271 mem
= gen_rtx_MEM (V2SImode
, addr
);
12274 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12276 emit_move_insn (gen_rtx_REG (reg_mode
,
12277 info
->first_gp_reg_save
+ i
), mem
);
12280 /* Restore fpr's if we need to do it without calling a function. */
12281 if (restoring_FPRs_inline
)
12282 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
12283 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
12284 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
12287 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
12288 GEN_INT (info
->fp_save_offset
12291 mem
= gen_rtx_MEM (DFmode
, addr
);
12292 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12294 emit_move_insn (gen_rtx_REG (DFmode
,
12295 info
->first_fp_reg_save
+ i
),
12299 /* If we saved cr, restore it here. Just those that were used. */
12300 if (info
->cr_save_p
)
12302 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
12305 if (using_mfcr_multiple
)
12307 for (i
= 0; i
< 8; i
++)
12308 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
12314 if (using_mfcr_multiple
&& count
> 1)
12319 p
= rtvec_alloc (count
);
12322 for (i
= 0; i
< 8; i
++)
12323 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
12325 rtvec r
= rtvec_alloc (2);
12326 RTVEC_ELT (r
, 0) = r12_rtx
;
12327 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
12328 RTVEC_ELT (p
, ndx
) =
12329 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
12330 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
12333 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
12338 for (i
= 0; i
< 8; i
++)
12339 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
12341 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
12347 /* If this is V.4, unwind the stack pointer after all of the loads
12348 have been done. We need to emit a block here so that sched
12349 doesn't decide to move the sp change before the register restores
12350 (which may not have any obvious dependency on the stack). This
12351 doesn't hurt performance, because there is no scheduling that can
12352 be done after this point. */
12353 if (DEFAULT_ABI
== ABI_V4
12354 || current_function_calls_eh_return
)
12356 if (frame_reg_rtx
!= sp_reg_rtx
)
12357 rs6000_emit_stack_tie ();
12359 if (use_backchain_to_restore_sp
)
12361 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
12363 else if (sp_offset
!= 0)
12365 emit_insn (TARGET_32BIT
12366 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
12367 GEN_INT (sp_offset
))
12368 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
12369 GEN_INT (sp_offset
)));
12373 if (current_function_calls_eh_return
)
12375 rtx sa
= EH_RETURN_STACKADJ_RTX
;
12376 emit_insn (TARGET_32BIT
12377 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
12378 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
12384 if (! restoring_FPRs_inline
)
12385 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
12387 p
= rtvec_alloc (2);
12389 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
12390 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
12391 gen_rtx_REG (Pmode
,
12392 LINK_REGISTER_REGNUM
));
12394 /* If we have to restore more than two FP registers, branch to the
12395 restore function. It will return to our caller. */
12396 if (! restoring_FPRs_inline
)
12400 const char *alloc_rname
;
12402 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
12403 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
12404 alloc_rname
= ggc_strdup (rname
);
12405 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
12406 gen_rtx_SYMBOL_REF (Pmode
,
12409 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
12412 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
12413 GEN_INT (info
->fp_save_offset
+ 8*i
));
12414 mem
= gen_rtx_MEM (DFmode
, addr
);
12415 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12417 RTVEC_ELT (p
, i
+3) =
12418 gen_rtx_SET (VOIDmode
,
12419 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
12424 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
12428 /* Write function epilogue. */
12431 rs6000_output_function_epilogue (FILE *file
,
12432 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
12434 rs6000_stack_t
*info
= rs6000_stack_info ();
12436 if (! HAVE_epilogue
)
12438 rtx insn
= get_last_insn ();
12439 /* If the last insn was a BARRIER, we don't have to write anything except
12440 the trace table. */
12441 if (GET_CODE (insn
) == NOTE
)
12442 insn
= prev_nonnote_insn (insn
);
12443 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
12445 /* This is slightly ugly, but at least we don't have two
12446 copies of the epilogue-emitting code. */
12449 /* A NOTE_INSN_DELETED is supposed to be at the start
12450 and end of the "toplevel" insn chain. */
12451 emit_note (NOTE_INSN_DELETED
);
12452 rs6000_emit_epilogue (FALSE
);
12453 emit_note (NOTE_INSN_DELETED
);
12455 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12459 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
12461 INSN_ADDRESSES_NEW (insn
, addr
);
12466 if (TARGET_DEBUG_STACK
)
12467 debug_rtx_list (get_insns (), 100);
12468 final (get_insns (), file
, FALSE
, FALSE
);
12474 macho_branch_islands ();
12475 /* Mach-O doesn't support labels at the end of objects, so if
12476 it looks like we might want one, insert a NOP. */
12478 rtx insn
= get_last_insn ();
12481 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED_LABEL
)
12482 insn
= PREV_INSN (insn
);
12486 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_DELETED_LABEL
)))
12487 fputs ("\tnop\n", file
);
12491 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12494 We don't output a traceback table if -finhibit-size-directive was
12495 used. The documentation for -finhibit-size-directive reads
12496 ``don't output a @code{.size} assembler directive, or anything
12497 else that would cause trouble if the function is split in the
12498 middle, and the two halves are placed at locations far apart in
12499 memory.'' The traceback table has this property, since it
12500 includes the offset from the start of the function to the
12501 traceback table itself.
12503 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12504 different traceback table. */
12505 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
12506 && rs6000_traceback
!= traceback_none
)
12508 const char *fname
= NULL
;
12509 const char *language_string
= lang_hooks
.name
;
12510 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
12512 int optional_tbtab
;
12514 if (rs6000_traceback
== traceback_full
)
12515 optional_tbtab
= 1;
12516 else if (rs6000_traceback
== traceback_part
)
12517 optional_tbtab
= 0;
12519 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
12521 if (optional_tbtab
)
12523 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
12524 while (*fname
== '.') /* V.4 encodes . in the name */
12527 /* Need label immediately before tbtab, so we can compute
12528 its offset from the function start. */
12529 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12530 ASM_OUTPUT_LABEL (file
, fname
);
12533 /* The .tbtab pseudo-op can only be used for the first eight
12534 expressions, since it can't handle the possibly variable
12535 length fields that follow. However, if you omit the optional
12536 fields, the assembler outputs zeros for all optional fields
12537 anyways, giving each variable length field is minimum length
12538 (as defined in sys/debug.h). Thus we can not use the .tbtab
12539 pseudo-op at all. */
12541 /* An all-zero word flags the start of the tbtab, for debuggers
12542 that have to find it by searching forward from the entry
12543 point or from the current pc. */
12544 fputs ("\t.long 0\n", file
);
12546 /* Tbtab format type. Use format type 0. */
12547 fputs ("\t.byte 0,", file
);
12549 /* Language type. Unfortunately, there does not seem to be any
12550 official way to discover the language being compiled, so we
12551 use language_string.
12552 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12553 Java is 13. Objective-C is 14. */
12554 if (! strcmp (language_string
, "GNU C"))
12556 else if (! strcmp (language_string
, "GNU F77"))
12558 else if (! strcmp (language_string
, "GNU Pascal"))
12560 else if (! strcmp (language_string
, "GNU Ada"))
12562 else if (! strcmp (language_string
, "GNU C++"))
12564 else if (! strcmp (language_string
, "GNU Java"))
12566 else if (! strcmp (language_string
, "GNU Objective-C"))
12570 fprintf (file
, "%d,", i
);
12572 /* 8 single bit fields: global linkage (not set for C extern linkage,
12573 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12574 from start of procedure stored in tbtab, internal function, function
12575 has controlled storage, function has no toc, function uses fp,
12576 function logs/aborts fp operations. */
12577 /* Assume that fp operations are used if any fp reg must be saved. */
12578 fprintf (file
, "%d,",
12579 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
12581 /* 6 bitfields: function is interrupt handler, name present in
12582 proc table, function calls alloca, on condition directives
12583 (controls stack walks, 3 bits), saves condition reg, saves
12585 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12586 set up as a frame pointer, even when there is no alloca call. */
12587 fprintf (file
, "%d,",
12588 ((optional_tbtab
<< 6)
12589 | ((optional_tbtab
& frame_pointer_needed
) << 5)
12590 | (info
->cr_save_p
<< 1)
12591 | (info
->lr_save_p
)));
12593 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12595 fprintf (file
, "%d,",
12596 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
12598 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12599 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
12601 if (optional_tbtab
)
12603 /* Compute the parameter info from the function decl argument
12606 int next_parm_info_bit
= 31;
12608 for (decl
= DECL_ARGUMENTS (current_function_decl
);
12609 decl
; decl
= TREE_CHAIN (decl
))
12611 rtx parameter
= DECL_INCOMING_RTL (decl
);
12612 enum machine_mode mode
= GET_MODE (parameter
);
12614 if (GET_CODE (parameter
) == REG
)
12616 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
12622 if (mode
== SFmode
)
12624 else if (mode
== DFmode
|| mode
== TFmode
)
12629 /* If only one bit will fit, don't or in this entry. */
12630 if (next_parm_info_bit
> 0)
12631 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
12632 next_parm_info_bit
-= 2;
12636 fixed_parms
+= ((GET_MODE_SIZE (mode
)
12637 + (UNITS_PER_WORD
- 1))
12639 next_parm_info_bit
-= 1;
12645 /* Number of fixed point parameters. */
12646 /* This is actually the number of words of fixed point parameters; thus
12647 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12648 fprintf (file
, "%d,", fixed_parms
);
12650 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12652 /* This is actually the number of fp registers that hold parameters;
12653 and thus the maximum value is 13. */
12654 /* Set parameters on stack bit if parameters are not in their original
12655 registers, regardless of whether they are on the stack? Xlc
12656 seems to set the bit when not optimizing. */
12657 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
12659 if (! optional_tbtab
)
12662 /* Optional fields follow. Some are variable length. */
12664 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12665 11 double float. */
12666 /* There is an entry for each parameter in a register, in the order that
12667 they occur in the parameter list. Any intervening arguments on the
12668 stack are ignored. If the list overflows a long (max possible length
12669 34 bits) then completely leave off all elements that don't fit. */
12670 /* Only emit this long if there was at least one parameter. */
12671 if (fixed_parms
|| float_parms
)
12672 fprintf (file
, "\t.long %d\n", parm_info
);
12674 /* Offset from start of code to tb table. */
12675 fputs ("\t.long ", file
);
12676 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12678 RS6000_OUTPUT_BASENAME (file
, fname
);
12680 assemble_name (file
, fname
);
12682 fputs ("-.", file
);
12684 RS6000_OUTPUT_BASENAME (file
, fname
);
12686 assemble_name (file
, fname
);
12690 /* Interrupt handler mask. */
12691 /* Omit this long, since we never set the interrupt handler bit
12694 /* Number of CTL (controlled storage) anchors. */
12695 /* Omit this long, since the has_ctl bit is never set above. */
12697 /* Displacement into stack of each CTL anchor. */
12698 /* Omit this list of longs, because there are no CTL anchors. */
12700 /* Length of function name. */
12703 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
12705 /* Function name. */
12706 assemble_string (fname
, strlen (fname
));
12708 /* Register for alloca automatic storage; this is always reg 31.
12709 Only emit this if the alloca bit was set above. */
12710 if (frame_pointer_needed
)
12711 fputs ("\t.byte 31\n", file
);
12713 fputs ("\t.align 2\n", file
);
12717 /* A C compound statement that outputs the assembler code for a thunk
12718 function, used to implement C++ virtual function calls with
12719 multiple inheritance. The thunk acts as a wrapper around a virtual
12720 function, adjusting the implicit object parameter before handing
12721 control off to the real function.
12723 First, emit code to add the integer DELTA to the location that
12724 contains the incoming first argument. Assume that this argument
12725 contains a pointer, and is the one used to pass the `this' pointer
12726 in C++. This is the incoming argument *before* the function
12727 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12728 values of all other incoming arguments.
12730 After the addition, emit code to jump to FUNCTION, which is a
12731 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12732 not touch the return address. Hence returning from FUNCTION will
12733 return to whoever called the current `thunk'.
12735 The effect must be as if FUNCTION had been called directly with the
12736 adjusted first argument. This macro is responsible for emitting
12737 all of the code for a thunk function; output_function_prologue()
12738 and output_function_epilogue() are not invoked.
12740 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12741 been extracted from it.) It might possibly be useful on some
12742 targets, but probably not.
12744 If you do not define this macro, the target-independent code in the
12745 C++ frontend will generate a less efficient heavyweight thunk that
12746 calls FUNCTION instead of jumping to it. The generic approach does
12747 not support varargs. */
12750 rs6000_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
12751 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
12754 rtx
this, insn
, funexp
;
12756 reload_completed
= 1;
12757 epilogue_completed
= 1;
12758 no_new_pseudos
= 1;
12760 /* Mark the end of the (empty) prologue. */
12761 emit_note (NOTE_INSN_PROLOGUE_END
);
12763 /* Find the "this" pointer. If the function returns a structure,
12764 the structure return pointer is in r3. */
12765 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
12766 this = gen_rtx_REG (Pmode
, 4);
12768 this = gen_rtx_REG (Pmode
, 3);
12770 /* Apply the constant offset, if required. */
12773 rtx delta_rtx
= GEN_INT (delta
);
12774 emit_insn (TARGET_32BIT
12775 ? gen_addsi3 (this, this, delta_rtx
)
12776 : gen_adddi3 (this, this, delta_rtx
));
12779 /* Apply the offset from the vtable, if required. */
12782 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
12783 rtx tmp
= gen_rtx_REG (Pmode
, 12);
12785 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
12786 if (((unsigned HOST_WIDE_INT
) vcall_offset
) + 0x8000 >= 0x10000)
12788 emit_insn (TARGET_32BIT
12789 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
12790 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
12791 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
12795 rtx loc
= gen_rtx_PLUS (Pmode
, tmp
, vcall_offset_rtx
);
12797 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, loc
));
12799 emit_insn (TARGET_32BIT
12800 ? gen_addsi3 (this, this, tmp
)
12801 : gen_adddi3 (this, this, tmp
));
12804 /* Generate a tail call to the target function. */
12805 if (!TREE_USED (function
))
12807 assemble_external (function
);
12808 TREE_USED (function
) = 1;
12810 funexp
= XEXP (DECL_RTL (function
), 0);
12811 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
12814 if (MACHOPIC_INDIRECT
)
12815 funexp
= machopic_indirect_call_target (funexp
);
12818 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12819 generate sibcall RTL explicitly to avoid constraint abort. */
12820 insn
= emit_call_insn (
12821 gen_rtx_PARALLEL (VOIDmode
,
12823 gen_rtx_CALL (VOIDmode
,
12824 funexp
, const0_rtx
),
12825 gen_rtx_USE (VOIDmode
, const0_rtx
),
12826 gen_rtx_USE (VOIDmode
,
12827 gen_rtx_REG (SImode
,
12828 LINK_REGISTER_REGNUM
)),
12829 gen_rtx_RETURN (VOIDmode
))));
12830 SIBLING_CALL_P (insn
) = 1;
12833 /* Run just enough of rest_of_compilation to get the insns emitted.
12834 There's not really enough bulk here to make other passes such as
12835 instruction scheduling worth while. Note that use_thunk calls
12836 assemble_start_function and assemble_end_function. */
12837 insn
= get_insns ();
12838 insn_locators_initialize ();
12839 shorten_branches (insn
);
12840 final_start_function (insn
, file
, 1);
12841 final (insn
, file
, 1, 0);
12842 final_end_function ();
12844 reload_completed
= 0;
12845 epilogue_completed
= 0;
12846 no_new_pseudos
= 0;
12849 /* A quick summary of the various types of 'constant-pool tables'
12852 Target Flags Name One table per
12853 AIX (none) AIX TOC object file
12854 AIX -mfull-toc AIX TOC object file
12855 AIX -mminimal-toc AIX minimal TOC translation unit
12856 SVR4/EABI (none) SVR4 SDATA object file
12857 SVR4/EABI -fpic SVR4 pic object file
12858 SVR4/EABI -fPIC SVR4 PIC translation unit
12859 SVR4/EABI -mrelocatable EABI TOC function
12860 SVR4/EABI -maix AIX TOC object file
12861 SVR4/EABI -maix -mminimal-toc
12862 AIX minimal TOC translation unit
12864 Name Reg. Set by entries contains:
12865 made by addrs? fp? sum?
12867 AIX TOC 2 crt0 as Y option option
12868 AIX minimal TOC 30 prolog gcc Y Y option
12869 SVR4 SDATA 13 crt0 gcc N Y N
12870 SVR4 pic 30 prolog ld Y not yet N
12871 SVR4 PIC 30 prolog gcc Y option option
12872 EABI TOC 30 prolog gcc Y option option
12876 /* Hash functions for the hash table. */
12879 rs6000_hash_constant (rtx k
)
12881 enum rtx_code code
= GET_CODE (k
);
12882 enum machine_mode mode
= GET_MODE (k
);
12883 unsigned result
= (code
<< 3) ^ mode
;
12884 const char *format
;
12887 format
= GET_RTX_FORMAT (code
);
12888 flen
= strlen (format
);
12894 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
12897 if (mode
!= VOIDmode
)
12898 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
12910 for (; fidx
< flen
; fidx
++)
12911 switch (format
[fidx
])
12916 const char *str
= XSTR (k
, fidx
);
12917 len
= strlen (str
);
12918 result
= result
* 613 + len
;
12919 for (i
= 0; i
< len
; i
++)
12920 result
= result
* 613 + (unsigned) str
[i
];
12925 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
12929 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
12932 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
12933 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
12937 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
12938 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
12952 toc_hash_function (const void *hash_entry
)
12954 const struct toc_hash_struct
*thc
=
12955 (const struct toc_hash_struct
*) hash_entry
;
12956 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
12959 /* Compare H1 and H2 for equivalence. */
12962 toc_hash_eq (const void *h1
, const void *h2
)
12964 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
12965 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
12967 if (((const struct toc_hash_struct
*) h1
)->key_mode
12968 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
12971 return rtx_equal_p (r1
, r2
);
12974 /* These are the names given by the C++ front-end to vtables, and
12975 vtable-like objects. Ideally, this logic should not be here;
12976 instead, there should be some programmatic way of inquiring as
12977 to whether or not an object is a vtable. */
12979 #define VTABLE_NAME_P(NAME) \
12980 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12981 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12982 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12983 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12986 rs6000_output_symbol_ref (FILE *file
, rtx x
)
12988 /* Currently C++ toc references to vtables can be emitted before it
12989 is decided whether the vtable is public or private. If this is
12990 the case, then the linker will eventually complain that there is
12991 a reference to an unknown section. Thus, for vtables only,
12992 we emit the TOC reference to reference the symbol and not the
12994 const char *name
= XSTR (x
, 0);
12996 if (VTABLE_NAME_P (name
))
12998 RS6000_OUTPUT_BASENAME (file
, name
);
13001 assemble_name (file
, name
);
13004 /* Output a TOC entry. We derive the entry name from what is being
13008 output_toc (FILE *file
, rtx x
, int labelno
, enum machine_mode mode
)
13011 const char *name
= buf
;
13012 const char *real_name
;
13019 /* When the linker won't eliminate them, don't output duplicate
13020 TOC entries (this happens on AIX if there is any kind of TOC,
13021 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13023 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
13025 struct toc_hash_struct
*h
;
13028 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13029 time because GGC is not initialized at that point. */
13030 if (toc_hash_table
== NULL
)
13031 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
13032 toc_hash_eq
, NULL
);
13034 h
= ggc_alloc (sizeof (*h
));
13036 h
->key_mode
= mode
;
13037 h
->labelno
= labelno
;
13039 found
= htab_find_slot (toc_hash_table
, h
, 1);
13040 if (*found
== NULL
)
13042 else /* This is indeed a duplicate.
13043 Set this label equal to that label. */
13045 fputs ("\t.set ", file
);
13046 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
13047 fprintf (file
, "%d,", labelno
);
13048 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
13049 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
13055 /* If we're going to put a double constant in the TOC, make sure it's
13056 aligned properly when strict alignment is on. */
13057 if (GET_CODE (x
) == CONST_DOUBLE
13058 && STRICT_ALIGNMENT
13059 && GET_MODE_BITSIZE (mode
) >= 64
13060 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
13061 ASM_OUTPUT_ALIGN (file
, 3);
13064 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
13066 /* Handle FP constants specially. Note that if we have a minimal
13067 TOC, things we put here aren't actually in the TOC, so we can allow
13069 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
13071 REAL_VALUE_TYPE rv
;
13074 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
13075 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
13079 if (TARGET_MINIMAL_TOC
)
13080 fputs (DOUBLE_INT_ASM_OP
, file
);
13082 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13083 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
13084 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
13085 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
13086 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
13087 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
13092 if (TARGET_MINIMAL_TOC
)
13093 fputs ("\t.long ", file
);
13095 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13096 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
13097 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
13098 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13099 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
13100 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
13104 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
13106 REAL_VALUE_TYPE rv
;
13109 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
13110 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
13114 if (TARGET_MINIMAL_TOC
)
13115 fputs (DOUBLE_INT_ASM_OP
, file
);
13117 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
13118 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
13119 fprintf (file
, "0x%lx%08lx\n",
13120 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
13125 if (TARGET_MINIMAL_TOC
)
13126 fputs ("\t.long ", file
);
13128 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
13129 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
13130 fprintf (file
, "0x%lx,0x%lx\n",
13131 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
13135 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
13137 REAL_VALUE_TYPE rv
;
13140 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
13141 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
13145 if (TARGET_MINIMAL_TOC
)
13146 fputs (DOUBLE_INT_ASM_OP
, file
);
13148 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
13149 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
13154 if (TARGET_MINIMAL_TOC
)
13155 fputs ("\t.long ", file
);
13157 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
13158 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
13162 else if (GET_MODE (x
) == VOIDmode
13163 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
13165 unsigned HOST_WIDE_INT low
;
13166 HOST_WIDE_INT high
;
13168 if (GET_CODE (x
) == CONST_DOUBLE
)
13170 low
= CONST_DOUBLE_LOW (x
);
13171 high
= CONST_DOUBLE_HIGH (x
);
13174 #if HOST_BITS_PER_WIDE_INT == 32
13177 high
= (low
& 0x80000000) ? ~0 : 0;
13181 low
= INTVAL (x
) & 0xffffffff;
13182 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
13186 /* TOC entries are always Pmode-sized, but since this
13187 is a bigendian machine then if we're putting smaller
13188 integer constants in the TOC we have to pad them.
13189 (This is still a win over putting the constants in
13190 a separate constant pool, because then we'd have
13191 to have both a TOC entry _and_ the actual constant.)
13193 For a 32-bit target, CONST_INT values are loaded and shifted
13194 entirely within `low' and can be stored in one TOC entry. */
13196 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
13197 abort ();/* It would be easy to make this work, but it doesn't now. */
13199 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
13201 #if HOST_BITS_PER_WIDE_INT == 32
13202 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
13203 POINTER_SIZE
, &low
, &high
, 0);
13206 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
13207 high
= (HOST_WIDE_INT
) low
>> 32;
13214 if (TARGET_MINIMAL_TOC
)
13215 fputs (DOUBLE_INT_ASM_OP
, file
);
13217 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
13218 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
13219 fprintf (file
, "0x%lx%08lx\n",
13220 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
13225 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
13227 if (TARGET_MINIMAL_TOC
)
13228 fputs ("\t.long ", file
);
13230 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
13231 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
13232 fprintf (file
, "0x%lx,0x%lx\n",
13233 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
13237 if (TARGET_MINIMAL_TOC
)
13238 fputs ("\t.long ", file
);
13240 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
13241 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
13247 if (GET_CODE (x
) == CONST
)
13249 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
13252 base
= XEXP (XEXP (x
, 0), 0);
13253 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
13256 if (GET_CODE (base
) == SYMBOL_REF
)
13257 name
= XSTR (base
, 0);
13258 else if (GET_CODE (base
) == LABEL_REF
)
13259 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
13260 else if (GET_CODE (base
) == CODE_LABEL
)
13261 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
13265 real_name
= (*targetm
.strip_name_encoding
) (name
);
13266 if (TARGET_MINIMAL_TOC
)
13267 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
13270 fprintf (file
, "\t.tc %s", real_name
);
13273 fprintf (file
, ".N%d", - offset
);
13275 fprintf (file
, ".P%d", offset
);
13277 fputs ("[TC],", file
);
13280 /* Currently C++ toc references to vtables can be emitted before it
13281 is decided whether the vtable is public or private. If this is
13282 the case, then the linker will eventually complain that there is
13283 a TOC reference to an unknown section. Thus, for vtables only,
13284 we emit the TOC reference to reference the symbol and not the
13286 if (VTABLE_NAME_P (name
))
13288 RS6000_OUTPUT_BASENAME (file
, name
);
13290 fprintf (file
, "%d", offset
);
13291 else if (offset
> 0)
13292 fprintf (file
, "+%d", offset
);
13295 output_addr_const (file
, x
);
13299 /* Output an assembler pseudo-op to write an ASCII string of N characters
13300 starting at P to FILE.
13302 On the RS/6000, we have to do this using the .byte operation and
13303 write out special characters outside the quoted string.
13304 Also, the assembler is broken; very long strings are truncated,
13305 so we must artificially break them up early. */
13308 output_ascii (FILE *file
, const char *p
, int n
)
13311 int i
, count_string
;
13312 const char *for_string
= "\t.byte \"";
13313 const char *for_decimal
= "\t.byte ";
13314 const char *to_close
= NULL
;
13317 for (i
= 0; i
< n
; i
++)
13320 if (c
>= ' ' && c
< 0177)
13323 fputs (for_string
, file
);
13326 /* Write two quotes to get one. */
13334 for_decimal
= "\"\n\t.byte ";
13338 if (count_string
>= 512)
13340 fputs (to_close
, file
);
13342 for_string
= "\t.byte \"";
13343 for_decimal
= "\t.byte ";
13351 fputs (for_decimal
, file
);
13352 fprintf (file
, "%d", c
);
13354 for_string
= "\n\t.byte \"";
13355 for_decimal
= ", ";
13361 /* Now close the string if we have written one. Then end the line. */
13363 fputs (to_close
, file
);
13366 /* Generate a unique section name for FILENAME for a section type
13367 represented by SECTION_DESC. Output goes into BUF.
13369 SECTION_DESC can be any string, as long as it is different for each
13370 possible section type.
13372 We name the section in the same manner as xlc. The name begins with an
13373 underscore followed by the filename (after stripping any leading directory
13374 names) with the last period replaced by the string SECTION_DESC. If
13375 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13379 rs6000_gen_section_name (char **buf
, const char *filename
,
13380 const char *section_desc
)
13382 const char *q
, *after_last_slash
, *last_period
= 0;
13386 after_last_slash
= filename
;
13387 for (q
= filename
; *q
; q
++)
13390 after_last_slash
= q
+ 1;
13391 else if (*q
== '.')
13395 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
13396 *buf
= (char *) xmalloc (len
);
13401 for (q
= after_last_slash
; *q
; q
++)
13403 if (q
== last_period
)
13405 strcpy (p
, section_desc
);
13406 p
+= strlen (section_desc
);
13410 else if (ISALNUM (*q
))
13414 if (last_period
== 0)
13415 strcpy (p
, section_desc
);
13420 /* Emit profile function. */
13423 output_profile_hook (int labelno ATTRIBUTE_UNUSED
)
13425 if (TARGET_PROFILE_KERNEL
)
13428 if (DEFAULT_ABI
== ABI_AIX
)
13430 #ifndef NO_PROFILE_COUNTERS
13431 # define NO_PROFILE_COUNTERS 0
13433 if (NO_PROFILE_COUNTERS
)
13434 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
13438 const char *label_name
;
13441 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
13442 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
13443 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
13445 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
13449 else if (DEFAULT_ABI
== ABI_DARWIN
)
13451 const char *mcount_name
= RS6000_MCOUNT
;
13452 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
13454 /* Be conservative and always set this, at least for now. */
13455 current_function_uses_pic_offset_table
= 1;
13458 /* For PIC code, set up a stub and collect the caller's address
13459 from r0, which is where the prologue puts it. */
13460 if (MACHOPIC_INDIRECT
)
13462 mcount_name
= machopic_stub_name (mcount_name
);
13463 if (current_function_uses_pic_offset_table
)
13464 caller_addr_regno
= 0;
13467 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
13469 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
13473 /* Write function profiler code. */
13476 output_function_profiler (FILE *file
, int labelno
)
13481 switch (DEFAULT_ABI
)
13490 warning ("no profiling of 64-bit code for this ABI");
13493 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
13494 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
13497 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
13498 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
13499 reg_names
[0], save_lr
, reg_names
[1]);
13500 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
13501 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
13502 assemble_name (file
, buf
);
13503 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
13505 else if (flag_pic
> 1)
13507 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
13508 reg_names
[0], save_lr
, reg_names
[1]);
13509 /* Now, we need to get the address of the label. */
13510 fputs ("\tbl 1f\n\t.long ", file
);
13511 assemble_name (file
, buf
);
13512 fputs ("-.\n1:", file
);
13513 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
13514 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
13515 reg_names
[0], reg_names
[11]);
13516 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
13517 reg_names
[0], reg_names
[0], reg_names
[11]);
13521 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
13522 assemble_name (file
, buf
);
13523 fputs ("@ha\n", file
);
13524 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
13525 reg_names
[0], save_lr
, reg_names
[1]);
13526 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
13527 assemble_name (file
, buf
);
13528 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
13531 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13532 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13537 if (!TARGET_PROFILE_KERNEL
)
13539 /* Don't do anything, done in output_profile_hook (). */
13546 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
13547 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
13549 if (current_function_needs_context
)
13551 asm_fprintf (file
, "\tstd %s,24(%s)\n",
13552 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
13553 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13554 asm_fprintf (file
, "\tld %s,24(%s)\n",
13555 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
13558 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
13566 rs6000_use_dfa_pipeline_interface (void)
13571 /* Power4 load update and store update instructions are cracked into a
13572 load or store and an integer insn which are executed in the same cycle.
13573 Branches have their own dispatch slot which does not count against the
13574 GCC issue rate, but it changes the program flow so there are no other
13575 instructions to issue in this cycle. */
13578 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED
,
13579 int verbose ATTRIBUTE_UNUSED
,
13580 rtx insn
, int more
)
13582 if (GET_CODE (PATTERN (insn
)) == USE
13583 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13586 if (rs6000_cpu
== PROCESSOR_POWER4
)
13588 if (is_microcoded_insn (insn
))
13590 else if (is_cracked_insn (insn
))
13591 return more
> 2 ? more
- 2 : 0;
13597 /* Adjust the cost of a scheduling dependency. Return the new cost of
13598 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13601 rs6000_adjust_cost (rtx insn
, rtx link
, rtx dep_insn ATTRIBUTE_UNUSED
,
13604 if (! recog_memoized (insn
))
13607 if (REG_NOTE_KIND (link
) != 0)
13610 if (REG_NOTE_KIND (link
) == 0)
13612 /* Data dependency; DEP_INSN writes a register that INSN reads
13613 some cycles later. */
13614 switch (get_attr_type (insn
))
13617 /* Tell the first scheduling pass about the latency between
13618 a mtctr and bctr (and mtlr and br/blr). The first
13619 scheduling pass will not know about this latency since
13620 the mtctr instruction, which has the latency associated
13621 to it, will be generated by reload. */
13622 return TARGET_POWER
? 5 : 4;
13624 /* Leave some extra cycles between a compare and its
13625 dependent branch, to inhibit expensive mispredicts. */
13626 if ((rs6000_cpu_attr
== CPU_PPC603
13627 || rs6000_cpu_attr
== CPU_PPC604
13628 || rs6000_cpu_attr
== CPU_PPC604E
13629 || rs6000_cpu_attr
== CPU_PPC620
13630 || rs6000_cpu_attr
== CPU_PPC630
13631 || rs6000_cpu_attr
== CPU_PPC750
13632 || rs6000_cpu_attr
== CPU_PPC7400
13633 || rs6000_cpu_attr
== CPU_PPC7450
13634 || rs6000_cpu_attr
== CPU_POWER4
)
13635 && recog_memoized (dep_insn
)
13636 && (INSN_CODE (dep_insn
) >= 0)
13637 && (get_attr_type (dep_insn
) == TYPE_CMP
13638 || get_attr_type (dep_insn
) == TYPE_COMPARE
13639 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
13640 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
13641 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
13642 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
13643 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
13644 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
13649 /* Fall out to return default cost. */
13655 /* The function returns a true if INSN is microcoded.
13656 Return false otherwise. */
13659 is_microcoded_insn (rtx insn
)
13661 if (!insn
|| !INSN_P (insn
)
13662 || GET_CODE (PATTERN (insn
)) == USE
13663 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13666 if (rs6000_cpu
== PROCESSOR_POWER4
)
13668 enum attr_type type
= get_attr_type (insn
);
13669 if (type
== TYPE_LOAD_EXT_U
13670 || type
== TYPE_LOAD_EXT_UX
13671 || type
== TYPE_LOAD_UX
13672 || type
== TYPE_STORE_UX
13673 || type
== TYPE_MFCR
)
13680 /* The function returns a nonzero value if INSN can be scheduled only
13681 as the first insn in a dispatch group ("dispatch-slot restricted").
13682 In this case, the returned value indicates how many dispatch slots
13683 the insn occupies (at the beginning of the group).
13684 Return 0 otherwise. */
13687 is_dispatch_slot_restricted (rtx insn
)
13689 enum attr_type type
;
13691 if (rs6000_cpu
!= PROCESSOR_POWER4
)
13695 || insn
== NULL_RTX
13696 || GET_CODE (insn
) == NOTE
13697 || GET_CODE (PATTERN (insn
)) == USE
13698 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13701 type
= get_attr_type (insn
);
13707 case TYPE_DELAYED_CR
:
13708 case TYPE_CR_LOGICAL
:
13720 /* The function returns true if INSN is cracked into 2 instructions
13721 by the processor (and therefore occupies 2 issue slots). */
13724 is_cracked_insn (rtx insn
)
13726 if (!insn
|| !INSN_P (insn
)
13727 || GET_CODE (PATTERN (insn
)) == USE
13728 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13731 if (rs6000_cpu
== PROCESSOR_POWER4
)
13733 enum attr_type type
= get_attr_type (insn
);
13734 if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
13735 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
13736 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
13737 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
13738 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
13739 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
13740 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
13741 || type
== TYPE_INSERT_WORD
)
13748 /* The function returns true if INSN can be issued only from
13749 the branch slot. */
13752 is_branch_slot_insn (rtx insn
)
13754 if (!insn
|| !INSN_P (insn
)
13755 || GET_CODE (PATTERN (insn
)) == USE
13756 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
13759 if (rs6000_cpu
== PROCESSOR_POWER4
)
13761 enum attr_type type
= get_attr_type (insn
);
13762 if (type
== TYPE_BRANCH
|| type
== TYPE_JMPREG
)
13770 /* A C statement (sans semicolon) to update the integer scheduling
13771 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13772 INSN earlier, reduce the priority to execute INSN later. Do not
13773 define this macro if you do not need to adjust the scheduling
13774 priorities of insns. */
13777 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
13779 /* On machines (like the 750) which have asymmetric integer units,
13780 where one integer unit can do multiply and divides and the other
13781 can't, reduce the priority of multiply/divide so it is scheduled
13782 before other integer operations. */
13785 if (! INSN_P (insn
))
13788 if (GET_CODE (PATTERN (insn
)) == USE
)
13791 switch (rs6000_cpu_attr
) {
13793 switch (get_attr_type (insn
))
13800 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
13801 priority
, priority
);
13802 if (priority
>= 0 && priority
< 0x01000000)
13809 if (is_dispatch_slot_restricted (insn
)
13810 && reload_completed
13811 && current_sched_info
->sched_max_insns_priority
13812 && rs6000_sched_restricted_insns_priority
)
13815 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13816 if (rs6000_sched_restricted_insns_priority
== 1)
13817 /* Attach highest priority to insn. This means that in
13818 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13819 precede 'priority' (critical path) considerations. */
13820 return current_sched_info
->sched_max_insns_priority
;
13821 else if (rs6000_sched_restricted_insns_priority
== 2)
13822 /* Increase priority of insn by a minimal amount. This means that in
13823 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13824 precede dispatch-slot restriction considerations. */
13825 return (priority
+ 1);
13831 /* Return how many instructions the machine can issue per cycle. */
13834 rs6000_issue_rate (void)
13836 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13837 if (!reload_completed
)
13840 switch (rs6000_cpu_attr
) {
13841 case CPU_RIOS1
: /* ? */
13843 case CPU_PPC601
: /* ? */
13865 /* Return how many instructions to look ahead for better insn
13869 rs6000_use_sched_lookahead (void)
13871 if (rs6000_cpu_attr
== CPU_PPC8540
)
13876 /* Determine if PAT refers to memory. */
13879 is_mem_ref (rtx pat
)
13885 if (GET_CODE (pat
) == MEM
)
13888 /* Recursively process the pattern. */
13889 fmt
= GET_RTX_FORMAT (GET_CODE (pat
));
13891 for (i
= GET_RTX_LENGTH (GET_CODE (pat
)) - 1; i
>= 0 && !ret
; i
--)
13894 ret
|= is_mem_ref (XEXP (pat
, i
));
13895 else if (fmt
[i
] == 'E')
13896 for (j
= XVECLEN (pat
, i
) - 1; j
>= 0; j
--)
13897 ret
|= is_mem_ref (XVECEXP (pat
, i
, j
));
13903 /* Determine if PAT is a PATTERN of a load insn. */
13906 is_load_insn1 (rtx pat
)
13908 if (!pat
|| pat
== NULL_RTX
)
13911 if (GET_CODE (pat
) == SET
)
13912 return is_mem_ref (SET_SRC (pat
));
13914 if (GET_CODE (pat
) == PARALLEL
)
13918 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
13919 if (is_load_insn1 (XVECEXP (pat
, 0, i
)))
13926 /* Determine if INSN loads from memory. */
13929 is_load_insn (rtx insn
)
13931 if (!insn
|| !INSN_P (insn
))
13934 if (GET_CODE (insn
) == CALL_INSN
)
13937 return is_load_insn1 (PATTERN (insn
));
13940 /* Determine if PAT is a PATTERN of a store insn. */
13943 is_store_insn1 (rtx pat
)
13945 if (!pat
|| pat
== NULL_RTX
)
13948 if (GET_CODE (pat
) == SET
)
13949 return is_mem_ref (SET_DEST (pat
));
13951 if (GET_CODE (pat
) == PARALLEL
)
13955 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
13956 if (is_store_insn1 (XVECEXP (pat
, 0, i
)))
13963 /* Determine if INSN stores to memory. */
13966 is_store_insn (rtx insn
)
13968 if (!insn
|| !INSN_P (insn
))
13971 return is_store_insn1 (PATTERN (insn
));
13974 /* Returns whether the dependence between INSN and NEXT is considered
13975 costly by the given target. */
13978 rs6000_is_costly_dependence (rtx insn
, rtx next
, rtx link
, int cost
, int distance
)
13980 /* If the flag is not enabled - no dependence is considered costly;
13981 allow all dependent insns in the same group.
13982 This is the most aggressive option. */
13983 if (rs6000_sched_costly_dep
== no_dep_costly
)
13986 /* If the flag is set to 1 - a dependence is always considered costly;
13987 do not allow dependent instructions in the same group.
13988 This is the most conservative option. */
13989 if (rs6000_sched_costly_dep
== all_deps_costly
)
13992 if (rs6000_sched_costly_dep
== store_to_load_dep_costly
13993 && is_load_insn (next
)
13994 && is_store_insn (insn
))
13995 /* Prevent load after store in the same group. */
13998 if (rs6000_sched_costly_dep
== true_store_to_load_dep_costly
13999 && is_load_insn (next
)
14000 && is_store_insn (insn
)
14001 && (!link
|| (int) REG_NOTE_KIND (link
) == 0))
14002 /* Prevent load after store in the same group if it is a true dependence. */
14005 /* The flag is set to X; dependences with latency >= X are considered costly,
14006 and will not be scheduled in the same group. */
14007 if (rs6000_sched_costly_dep
<= max_dep_latency
14008 && ((cost
- distance
) >= (int)rs6000_sched_costly_dep
))
14014 /* Return the next insn after INSN that is found before TAIL is reached,
14015 skipping any "non-active" insns - insns that will not actually occupy
14016 an issue slot. Return NULL_RTX if such an insn is not found. */
14019 get_next_active_insn (rtx insn
, rtx tail
)
14023 if (!insn
|| insn
== tail
)
14026 next_insn
= NEXT_INSN (insn
);
14029 && next_insn
!= tail
14030 && (GET_CODE(next_insn
) == NOTE
14031 || GET_CODE (PATTERN (next_insn
)) == USE
14032 || GET_CODE (PATTERN (next_insn
)) == CLOBBER
))
14034 next_insn
= NEXT_INSN (next_insn
);
14037 if (!next_insn
|| next_insn
== tail
)
14043 /* Return whether the presence of INSN causes a dispatch group termination
14044 of group WHICH_GROUP.
14046 If WHICH_GROUP == current_group, this function will return true if INSN
14047 causes the termination of the current group (i.e, the dispatch group to
14048 which INSN belongs). This means that INSN will be the last insn in the
14049 group it belongs to.
14051 If WHICH_GROUP == previous_group, this function will return true if INSN
14052 causes the termination of the previous group (i.e, the dispatch group that
14053 precedes the group to which INSN belongs). This means that INSN will be
14054 the first insn in the group it belongs to). */
14057 insn_terminates_group_p (rtx insn
, enum group_termination which_group
)
14059 enum attr_type type
;
14064 type
= get_attr_type (insn
);
14066 if (is_microcoded_insn (insn
))
14069 if (which_group
== current_group
)
14071 if (is_branch_slot_insn (insn
))
14075 else if (which_group
== previous_group
)
14077 if (is_dispatch_slot_restricted (insn
))
14085 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14086 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14089 is_costly_group (rtx
*group_insns
, rtx next_insn
)
14094 int issue_rate
= rs6000_issue_rate ();
14096 for (i
= 0; i
< issue_rate
; i
++)
14098 rtx insn
= group_insns
[i
];
14101 for (link
= INSN_DEPEND (insn
); link
!= 0; link
= XEXP (link
, 1))
14103 rtx next
= XEXP (link
, 0);
14104 if (next
== next_insn
)
14106 cost
= insn_cost (insn
, link
, next_insn
);
14107 if (rs6000_is_costly_dependence (insn
, next_insn
, link
, cost
, 0))
14116 /* Utility of the function redefine_groups.
14117 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14118 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14119 to keep it "far" (in a separate group) from GROUP_INSNS, following
14120 one of the following schemes, depending on the value of the flag
14121 -minsert_sched_nops = X:
14122 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14123 in order to force NEXT_INSN into a separate group.
14124 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14125 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14126 insertion (has a group just ended, how many vacant issue slots remain in the
14127 last group, and how many dispatch groups were encountered so far). */
14130 force_new_group (int sched_verbose
, FILE *dump
, rtx
*group_insns
, rtx next_insn
,
14131 bool *group_end
, int can_issue_more
, int *group_count
)
14135 int issue_rate
= rs6000_issue_rate ();
14136 bool end
= *group_end
;
14139 if (next_insn
== NULL_RTX
)
14140 return can_issue_more
;
14142 if (rs6000_sched_insert_nops
> sched_finish_regroup_exact
)
14143 return can_issue_more
;
14145 force
= is_costly_group (group_insns
, next_insn
);
14147 return can_issue_more
;
14149 if (sched_verbose
> 6)
14150 fprintf (dump
,"force: group count = %d, can_issue_more = %d\n",
14151 *group_count
,can_issue_more
);
14153 if (rs6000_sched_insert_nops
== sched_finish_regroup_exact
)
14156 can_issue_more
= 0;
14158 /* Since only a branch can be issued in the last issue_slot, it is
14159 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14160 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14161 in this case the last nop will start a new group and the branch will be
14162 forced to the new group. */
14163 if (can_issue_more
&& !is_branch_slot_insn (next_insn
))
14166 while (can_issue_more
> 0)
14169 emit_insn_before (nop
, next_insn
);
14177 if (rs6000_sched_insert_nops
< sched_finish_regroup_exact
)
14179 int n_nops
= rs6000_sched_insert_nops
;
14181 /* Nops can't be issued from the branch slot, so the effective
14182 issue_rate for nops is 'issue_rate - 1'. */
14183 if (can_issue_more
== 0)
14184 can_issue_more
= issue_rate
;
14186 if (can_issue_more
== 0)
14188 can_issue_more
= issue_rate
- 1;
14191 for (i
= 0; i
< issue_rate
; i
++)
14193 group_insns
[i
] = 0;
14200 emit_insn_before (nop
, next_insn
);
14201 if (can_issue_more
== issue_rate
- 1) /* new group begins */
14204 if (can_issue_more
== 0)
14206 can_issue_more
= issue_rate
- 1;
14209 for (i
= 0; i
< issue_rate
; i
++)
14211 group_insns
[i
] = 0;
14217 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14220 *group_end
= /* Is next_insn going to start a new group? */
14222 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
14223 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
14224 || (can_issue_more
< issue_rate
&&
14225 insn_terminates_group_p (next_insn
, previous_group
)));
14226 if (*group_end
&& end
)
14229 if (sched_verbose
> 6)
14230 fprintf (dump
, "done force: group count = %d, can_issue_more = %d\n",
14231 *group_count
, can_issue_more
);
14232 return can_issue_more
;
14235 return can_issue_more
;
14238 /* This function tries to synch the dispatch groups that the compiler "sees"
14239 with the dispatch groups that the processor dispatcher is expected to
14240 form in practice. It tries to achieve this synchronization by forcing the
14241 estimated processor grouping on the compiler (as opposed to the function
14242 'pad_groups' which tries to force the scheduler's grouping on the processor).
14244 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14245 examines the (estimated) dispatch groups that will be formed by the processor
14246 dispatcher. It marks these group boundaries to reflect the estimated
14247 processor grouping, overriding the grouping that the scheduler had marked.
14248 Depending on the value of the flag '-minsert-sched-nops' this function can
14249 force certain insns into separate groups or force a certain distance between
14250 them by inserting nops, for example, if there exists a "costly dependence"
14253 The function estimates the group boundaries that the processor will form as
14254 follows: It keeps track of how many vacant issue slots are available after
14255 each insn. A subsequent insn will start a new group if one of the following
14257 - no more vacant issue slots remain in the current dispatch group.
14258 - only the last issue slot, which is the branch slot, is vacant, but the next
14259 insn is not a branch.
14260 - only the last 2 or less issue slots, including the branch slot, are vacant,
14261 which means that a cracked insn (which occupies two issue slots) can't be
14262 issued in this group.
14263 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14264 start a new group. */
static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;		/* vacant issue slots left in current group */
  int slot, i;
  bool group_end;
  int group_count = 0;		/* number of completed dispatch groups */
  rtx *group_insns;		/* insns placed in the current group, by slot */

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    group_insns[i] = 0;
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in the slot it will occupy, then account for the
	 issue slots it consumes.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      group_end = /* Is next_insn going to start a new group?  */
	(can_issue_more == 0
	 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	 || (can_issue_more < issue_rate
	     && insn_terminates_group_p (next_insn, previous_group)));

      /* Depending on -minsert-sched-nops, this may insert nops or mark a
	 boundary to separate INSN and NEXT_INSN; it updates GROUP_END and
	 GROUP_COUNT accordingly and returns the remaining issue slots.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    group_insns[i] = 0;
	}

      /* Keep the insn's mode in sync with the decision just made:
	 TImode marks the first insn of a dispatch group.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE(next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
14335 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14336 dispatch group boundaries that the scheduler had marked. Pad with nops
14337 any dispatch groups which have vacant issue slots, in order to force the
14338 scheduler's grouping on the processor dispatcher. The function
14339 returns the number of dispatch groups found. */
static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;		/* vacant issue slots left in current group */
  int group_end;
  int group_count = 0;		/* number of dispatch groups seen */

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* TImode on NEXT_INSN marks the boundary the scheduler chose.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      /* A branch can still go in the branch slot; don't pad it.  */
	      if (!is_branch_slot_insn (next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
14401 /* The following function is called at the end of scheduling BB.
14402 After reload, it inserts nops at insn group bundling. */
14405 rs6000_sched_finish (FILE *dump
, int sched_verbose
)
14410 fprintf (dump
, "=== Finishing schedule.\n");
14412 if (reload_completed
&& rs6000_cpu
== PROCESSOR_POWER4
)
14414 if (rs6000_sched_insert_nops
== sched_finish_none
)
14417 if (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
14418 n_groups
= pad_groups (dump
, sched_verbose
,
14419 current_sched_info
->prev_head
,
14420 current_sched_info
->next_tail
);
14422 n_groups
= redefine_groups (dump
, sched_verbose
,
14423 current_sched_info
->prev_head
,
14424 current_sched_info
->next_tail
);
14426 if (sched_verbose
>= 6)
14428 fprintf (dump
, "ngroups = %d\n", n_groups
);
14429 print_rtl (dump
, current_sched_info
->prev_head
);
14430 fprintf (dump
, "Done finish_sched\n");
14435 /* Length in units of the trampoline for entering a nested function. */
14438 rs6000_trampoline_size (void)
14442 switch (DEFAULT_ABI
)
14448 ret
= (TARGET_32BIT
) ? 12 : 24;
14453 ret
= (TARGET_32BIT
) ? 40 : 48;
14460 /* Emit RTL insns to initialize the variable parts of a trampoline.
14461 FNADDR is an RTX for the address of the function's pure code.
14462 CXT is an RTX for the static chain value for the function. */
void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* word size of a pointer slot */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor:
       entry point, TOC pointer, static chain.  */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
/* Table of valid machine attributes.  Both "longcall" and "shortcall"
   share a single validation handler; they apply to function types only
   and take no arguments.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
14520 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14521 struct attribute_spec.handler. */
14524 rs6000_handle_longcall_attribute (tree
*node
, tree name
,
14525 tree args ATTRIBUTE_UNUSED
,
14526 int flags ATTRIBUTE_UNUSED
,
14527 bool *no_add_attrs
)
14529 if (TREE_CODE (*node
) != FUNCTION_TYPE
14530 && TREE_CODE (*node
) != FIELD_DECL
14531 && TREE_CODE (*node
) != TYPE_DECL
)
14533 warning ("`%s' attribute only applies to functions",
14534 IDENTIFIER_POINTER (name
));
14535 *no_add_attrs
= true;
14541 /* Set longcall attributes on all functions declared when
14542 rs6000_default_long_calls is true. */
14544 rs6000_set_default_type_attributes (tree type
)
14546 if (rs6000_default_long_calls
14547 && (TREE_CODE (type
) == FUNCTION_TYPE
14548 || TREE_CODE (type
) == METHOD_TYPE
))
14549 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
14551 TYPE_ATTRIBUTES (type
));
14554 /* Return a reference suitable for calling a function with the
14555 longcall attribute. */
14558 rs6000_longcall_ref (rtx call_ref
)
14560 const char *call_name
;
14563 if (GET_CODE (call_ref
) != SYMBOL_REF
)
14566 /* System V adds '.' to the internal name, so skip them. */
14567 call_name
= XSTR (call_ref
, 0);
14568 if (*call_name
== '.')
14570 while (*call_name
== '.')
14573 node
= get_identifier (call_name
);
14574 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
14577 return force_reg (Pmode
, call_ref
);
14580 #ifdef USING_ELFOS_H
14582 /* A C statement or statements to switch to the appropriate section
14583 for output of RTX in mode MODE. You can assume that RTX is some
14584 kind of constant in RTL. The argument MODE is redundant except in
14585 the case of a `const_int' rtx. Select the section by calling
14586 `text_section' or one of the alternatives for other sections.
14588 Do not define this macro if you put all constants in the read-only
14592 rs6000_elf_select_rtx_section (enum machine_mode mode
, rtx x
,
14593 unsigned HOST_WIDE_INT align
)
14595 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
14598 default_elf_select_rtx_section (mode
, x
, align
);
14601 /* A C statement or statements to switch to the appropriate
14602 section for output of DECL. DECL is either a `VAR_DECL' node
14603 or a constant of some sort. RELOC indicates whether forming
14604 the initial value of DECL requires link-time relocations. */
14607 rs6000_elf_select_section (tree decl
, int reloc
,
14608 unsigned HOST_WIDE_INT align
)
14610 /* Pretend that we're always building for a shared library when
14611 ABI_AIX, because otherwise we end up with dynamic relocations
14612 in read-only sections. This happens for function pointers,
14613 references to vtables in typeinfo, and probably other cases. */
14614 default_elf_select_section_1 (decl
, reloc
, align
,
14615 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
14618 /* A C statement to build up a unique section name, expressed as a
14619 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14620 RELOC indicates whether the initial value of EXP requires
14621 link-time relocations. If you do not define this macro, GCC will use
14622 the symbol name prefixed by `.' as the section name. Note - this
14623 macro can now be called for uninitialized data items as well as
14624 initialized data and functions. */
14627 rs6000_elf_unique_section (tree decl
, int reloc
)
14629 /* As above, pretend that we're always building for a shared library
14630 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14631 default_unique_section_1 (decl
, reloc
,
14632 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
14635 /* For a SYMBOL_REF, set generic flags and then perform some
14636 target-specific processing.
14638 When the AIX ABI is requested on a non-AIX system, replace the
14639 function name with the real name (with a leading .) rather than the
14640 function descriptor name. This saves a lot of overriding code to
14641 read the prefixes. */
static void
rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  /* When the AIX ABI is requested on a non-AIX system, prefix the
     function's symbol with '.' so references use the code entry point
     rather than the function descriptor.  */
  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      char *str = alloca (len + 2);	/* '.' + name + NUL */
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}
14663 rs6000_elf_in_small_data_p (tree decl
)
14665 if (rs6000_sdata
== SDATA_NONE
)
14668 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
14670 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
14671 if (strcmp (section
, ".sdata") == 0
14672 || strcmp (section
, ".sdata2") == 0
14673 || strcmp (section
, ".sbss") == 0
14674 || strcmp (section
, ".sbss2") == 0
14675 || strcmp (section
, ".PPC.EMB.sdata0") == 0
14676 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
14681 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
14684 && (unsigned HOST_WIDE_INT
) size
<= g_switch_value
14685 /* If it's not public, and we're not going to reference it there,
14686 there's no need to put it in the small data section. */
14687 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
14694 #endif /* USING_ELFOS_H */
14697 /* Return a REG that occurs in ADDR with coefficient 1.
14698 ADDR can be effectively incremented by incrementing REG.
14700 r0 is special and we must not select it as an address
14701 register by this routine since our caller will try to
14702 increment the returned register via an "la" instruction. */
14705 find_addr_reg (rtx addr
)
14707 while (GET_CODE (addr
) == PLUS
)
14709 if (GET_CODE (XEXP (addr
, 0)) == REG
14710 && REGNO (XEXP (addr
, 0)) != 0)
14711 addr
= XEXP (addr
, 0);
14712 else if (GET_CODE (XEXP (addr
, 1)) == REG
14713 && REGNO (XEXP (addr
, 1)) != 0)
14714 addr
= XEXP (addr
, 1);
14715 else if (CONSTANT_P (XEXP (addr
, 0)))
14716 addr
= XEXP (addr
, 1);
14717 else if (CONSTANT_P (XEXP (addr
, 1)))
14718 addr
= XEXP (addr
, 0);
14722 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
14728 rs6000_fatal_bad_address (rtx op
)
14730 fatal_insn ("bad address", op
);
14736 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14737 reference and a constant. */
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

static int
symbolic_operand (rtx op)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* Either a bare symbol, or (plus (symbol-or-label) (const_int)).
	 Note && binds tighter than ||, giving the intended grouping.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
14761 static tree branch_island_list
= 0;
14763 /* Remember to generate a branch island for far calls to the given
14767 add_compiler_branch_island (tree label_name
, tree function_name
, int line_number
)
14769 tree branch_island
= build_tree_list (function_name
, label_name
);
14770 TREE_TYPE (branch_island
) = build_int_2 (line_number
, 0);
14771 TREE_CHAIN (branch_island
) = branch_island_list
;
14772 branch_island_list
= branch_island
;
/* Accessors for a branch-island TREE_LIST entry: the island's label
   lives in TREE_VALUE, the target function's name in TREE_PURPOSE, and
   the source line number in TREE_TYPE (as an INTEGER_CST).  */
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
  TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
14780 /* Generate far-jump branch islands for everything on the
14781 branch_island_list. Invoked immediately after the last instruction
14782 of the epilogue has been emitted; the branch-islands must be
14783 appended to, and contiguous with, the function body. Mach-O stubs
14784 are generated in machopic_output_stub(). */
static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	darwin_strip_name_encoding (
	  IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      /* Build the island as one multi-line asm string, then emit it.  */
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER(branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: compute the target PC-relatively via r11/r12.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute address into r12.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  branch_island_list = 0;
}
14860 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14861 already there or not. */
14864 no_previous_def (tree function_name
)
14866 tree branch_island
;
14867 for (branch_island
= branch_island_list
;
14869 branch_island
= TREE_CHAIN (branch_island
))
14870 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
14875 /* GET_PREV_LABEL gets the label name from the previous definition of
14879 get_prev_label (tree function_name
)
14881 tree branch_island
;
14882 for (branch_island
= branch_island_list
;
14884 branch_island
= TREE_CHAIN (branch_island
))
14885 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
14886 return BRANCH_ISLAND_LABEL_NAME (branch_island
);
14890 /* INSN is either a function call or a millicode call. It may have an
14891 unconditional jump in its delay slot.
14893 CALL_DEST is the routine we are calling. */
static char *
output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
{
  static char buf[256];
  /* A far (CALL_LONG) call to a symbol needs a "jbsr" with a branch
     island; anything else is a plain "bl".  */
  if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  /* First far call to this function: create a fresh island label
	     and record it, along with the nearest source line number.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the closest NOTE to pick up a line number.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
14935 #endif /* TARGET_MACHO */
14937 /* Generate PIC and indirect symbol stubs. */
void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;		/* uniquifies the per-stub local label */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  /* flag_pic == 2 selects the fully PIC stub flavor.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: locate the lazy pointer PC-relatively via bcl/mflr.  */
      local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: address the lazy pointer absolutely.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* Emit the lazy pointer itself, initially pointing at the dyld
     binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
14998 /* Legitimize PIC addresses. If the address is already
14999 position-independent, we return ORIG. Newly generated
15000 position-independent addresses go into a reg. This is REG if non
15001 zero, otherwise we allocate register(s) as necessary. */
/* True when X is a CONST_INT that fits in a signed 16-bit field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already PIC-relative: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Use a different reg for the intermediate value, as
	     it will be marked UNCHANGING.  */
	  rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);

	  /* Legitimize both halves of the sum independently.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg_temp);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);

	  if (GET_CODE (offset) == CONST_INT)
	    {
	      if (SMALL_INT (offset))
		return plus_constant (base, INTVAL (offset));
	      else if (! reload_in_progress && ! reload_completed)
		offset = force_reg (Pmode, offset);
	      else
		{
		  /* During/after reload we cannot make new pseudos;
		     spill the whole constant to memory instead.  */
		  rtx mem = force_const_mem (Pmode, orig);
		  return machopic_legitimize_pic_address (mem, Pmode, reg);
		}
	    }
	  return gen_rtx (PLUS, Pmode, base, offset);
	}
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
15055 /* This is just a placeholder to make linking work without having to
15056 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15057 ever needed for Darwin (not too likely!) this would have to get a
15058 real definition. */
15065 #endif /* TARGET_MACHO */
15068 static unsigned int
15069 rs6000_elf_section_type_flags (tree decl
, const char *name
, int reloc
)
15071 return default_section_type_flags_1 (decl
, name
, reloc
,
15072 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
15075 /* Record an element in the table of global constructors. SYMBOL is
15076 a SYMBOL_REF of the function to be called; PRIORITY is a number
15077 between 0 and MAX_INIT_PRIORITY.
15079 This differs from default_named_section_asm_out_constructor in
15080 that we have special handling for -mrelocatable. */
15083 rs6000_elf_asm_out_constructor (rtx symbol
, int priority
)
15085 const char *section
= ".ctors";
15088 if (priority
!= DEFAULT_INIT_PRIORITY
)
15090 sprintf (buf
, ".ctors.%.5u",
15091 /* Invert the numbering so the linker puts us in the proper
15092 order; constructors are run from right to left, and the
15093 linker sorts in increasing order. */
15094 MAX_INIT_PRIORITY
- priority
);
15098 named_section_flags (section
, SECTION_WRITE
);
15099 assemble_align (POINTER_SIZE
);
15101 if (TARGET_RELOCATABLE
)
15103 fputs ("\t.long (", asm_out_file
);
15104 output_addr_const (asm_out_file
, symbol
);
15105 fputs (")@fixup\n", asm_out_file
);
15108 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
15112 rs6000_elf_asm_out_destructor (rtx symbol
, int priority
)
15114 const char *section
= ".dtors";
15117 if (priority
!= DEFAULT_INIT_PRIORITY
)
15119 sprintf (buf
, ".dtors.%.5u",
15120 /* Invert the numbering so the linker puts us in the proper
15121 order; constructors are run from right to left, and the
15122 linker sorts in increasing order. */
15123 MAX_INIT_PRIORITY
- priority
);
15127 named_section_flags (section
, SECTION_WRITE
);
15128 assemble_align (POINTER_SIZE
);
15130 if (TARGET_RELOCATABLE
)
15132 fputs ("\t.long (", asm_out_file
);
15133 output_addr_const (asm_out_file
, symbol
);
15134 fputs (")@fixup\n", asm_out_file
);
15137 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
static void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  /* 64-bit ELF: emit the three-doubleword function descriptor in .opd
     and use a '.'-prefixed label for the actual code entry point.  */
  if (TARGET_64BIT)
    {
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      putc ('.', file);
      assemble_name (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
      assemble_name (file, name);
      fputs (",24\n\t.type\t.", file);
      assemble_name (file, name);
      fputs (",@function\n", file);
      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	{
	  fputs ("\t.globl\t.", file);
	  assemble_name (file, name);
	  putc ('\n', file);
	}
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      putc ('.', file);
      ASM_OUTPUT_LABEL (file, name);
      return;
    }

  /* -mrelocatable with a constant pool: emit the offset from LCL to
     the TOC label so the startup code can relocate the TOC pointer.  */
  if (TARGET_RELOCATABLE
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      /* The descriptor name is the code name without leading dots.  */
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test is redundant under the outer
	 DEFAULT_ABI == ABI_AIX check — looks like a remnant of an older,
	 broader outer condition.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
15213 rs6000_xcoff_asm_globalize_label (FILE *stream
, const char *name
)
15215 fputs (GLOBAL_ASM_OP
, stream
);
15216 RS6000_OUTPUT_BASENAME (stream
, name
);
15217 putc ('\n', stream
);
15221 rs6000_xcoff_asm_named_section (const char *name
, unsigned int flags
)
15224 static const char * const suffix
[3] = { "PR", "RO", "RW" };
15226 if (flags
& SECTION_CODE
)
15228 else if (flags
& SECTION_WRITE
)
15233 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
15234 (flags
& SECTION_CODE
) ? "." : "",
15235 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
15239 rs6000_xcoff_select_section (tree decl
, int reloc
,
15240 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
15242 if (decl_readonly_section_1 (decl
, reloc
, 1))
15244 if (TREE_PUBLIC (decl
))
15245 read_only_data_section ();
15247 read_only_private_data_section ();
15251 if (TREE_PUBLIC (decl
))
15254 private_data_section ();
15259 rs6000_xcoff_unique_section (tree decl
, int reloc ATTRIBUTE_UNUSED
)
15263 /* Use select_section for private and uninitialized data. */
15264 if (!TREE_PUBLIC (decl
)
15265 || DECL_COMMON (decl
)
15266 || DECL_INITIAL (decl
) == NULL_TREE
15267 || DECL_INITIAL (decl
) == error_mark_node
15268 || (flag_zero_initialized_in_bss
15269 && initializer_zerop (DECL_INITIAL (decl
))))
15272 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
15273 name
= (*targetm
.strip_name_encoding
) (name
);
15274 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
15277 /* Select section for constant in constant pool.
15279 On RS/6000, all constants are in the private read-only data area.
15280 However, if this is being placed in the TOC it must be output as a
15284 rs6000_xcoff_select_rtx_section (enum machine_mode mode
, rtx x
,
15285 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
15287 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
15290 read_only_private_data_section ();
15293 /* Remove any trailing [DS] or the like from the symbol name. */
/* Remove any trailing [DS] or the like from the symbol name, after
   skipping an optional leading '*'.  Guard against an empty name so
   name[len - 1] never reads before the start of the string (the
   original indexed unconditionally).  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* A mapping-class suffix such as "[DS]" is 4 characters long.  */
  if (len != 0 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
15308 /* Section attributes. AIX is always PIC. */
/* Section attributes.  AIX is always PIC.  */

static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* The alignment's log2 is smuggled through the ENTSIZE bits.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
15328 /* Output at beginning of assembler file.
15330 Initialize the section names for the RS/6000 at this point.
15332 Specify filename, including full path, to assembler.
15334 We want to go into the TOC section so at least one .toc will be emitted.
15335 Also, in order to output proper .bs/.es pairs, we need at least one static
15336 [RW] section emitted.
15338 Finally, declare mcount when profiling to make the assembler happy. */
static void
rs6000_xcoff_file_start (void)
{
  /* Derive per-file bss/data/rodata section names.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the TOC so at least one .toc is emitted; touching a static
     [RW] section lets proper .bs/.es pairs be output when debugging.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  /* Declare mcount when profiling to make the assembler happy.  */
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
15362 /* Output at end of assembler file.
15363 On the RS/6000, referencing data should automatically pull in text. */
static void
rs6000_xcoff_file_end (void)
{
  /* Referencing data should automatically pull in text: emit a label
     in .text and a pointer to it from the data section.  */
  text_section ();
  fputs ("_section_.text:\n", asm_out_file);
  data_section ();
  fputs (TARGET_32BIT
	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
	 asm_out_file);
}
15378 /* Cross-module name binding. Darwin does not support overriding
15379 functions at dynamic-link time. */
15382 rs6000_binds_local_p (tree decl
)
15384 return default_binds_local_p_1 (decl
, 0);
15388 /* Compute a (partial) cost for rtx X. Return true if the complete
15389 cost has been computed, and false if subexpressions should be
15390 scanned. In either case, *TOTAL contains the cost result. */
15393 rs6000_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
,
15398 /* On the RS/6000, if it is valid in the insn, it is free.
15399 So this always returns 0. */
15410 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
15411 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
15412 + 0x8000) >= 0x10000)
15413 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
15414 ? COSTS_N_INSNS (2)
15415 : COSTS_N_INSNS (1));
15421 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
15422 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
15423 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
15424 ? COSTS_N_INSNS (2)
15425 : COSTS_N_INSNS (1));
15431 *total
= COSTS_N_INSNS (2);
15434 switch (rs6000_cpu
)
15436 case PROCESSOR_RIOS1
:
15437 case PROCESSOR_PPC405
:
15438 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15439 ? COSTS_N_INSNS (5)
15440 : (INTVAL (XEXP (x
, 1)) >= -256
15441 && INTVAL (XEXP (x
, 1)) <= 255)
15442 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15445 case PROCESSOR_PPC440
:
15446 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15447 ? COSTS_N_INSNS (3)
15448 : COSTS_N_INSNS (2));
15451 case PROCESSOR_RS64A
:
15452 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15453 ? GET_MODE (XEXP (x
, 1)) != DImode
15454 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15455 : (INTVAL (XEXP (x
, 1)) >= -256
15456 && INTVAL (XEXP (x
, 1)) <= 255)
15457 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15460 case PROCESSOR_RIOS2
:
15461 case PROCESSOR_MPCCORE
:
15462 case PROCESSOR_PPC604e
:
15463 *total
= COSTS_N_INSNS (2);
15466 case PROCESSOR_PPC601
:
15467 *total
= COSTS_N_INSNS (5);
15470 case PROCESSOR_PPC603
:
15471 case PROCESSOR_PPC7400
:
15472 case PROCESSOR_PPC750
:
15473 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15474 ? COSTS_N_INSNS (5)
15475 : (INTVAL (XEXP (x
, 1)) >= -256
15476 && INTVAL (XEXP (x
, 1)) <= 255)
15477 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15480 case PROCESSOR_PPC7450
:
15481 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15482 ? COSTS_N_INSNS (4)
15483 : COSTS_N_INSNS (3));
15486 case PROCESSOR_PPC403
:
15487 case PROCESSOR_PPC604
:
15488 case PROCESSOR_PPC8540
:
15489 *total
= COSTS_N_INSNS (4);
15492 case PROCESSOR_PPC620
:
15493 case PROCESSOR_PPC630
:
15494 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15495 ? GET_MODE (XEXP (x
, 1)) != DImode
15496 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15497 : (INTVAL (XEXP (x
, 1)) >= -256
15498 && INTVAL (XEXP (x
, 1)) <= 255)
15499 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15502 case PROCESSOR_POWER4
:
15503 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
15504 ? GET_MODE (XEXP (x
, 1)) != DImode
15505 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15506 : COSTS_N_INSNS (2));
15515 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
15516 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
15518 *total
= COSTS_N_INSNS (2);
15525 switch (rs6000_cpu
)
15527 case PROCESSOR_RIOS1
:
15528 *total
= COSTS_N_INSNS (19);
15531 case PROCESSOR_RIOS2
:
15532 *total
= COSTS_N_INSNS (13);
15535 case PROCESSOR_RS64A
:
15536 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
15537 ? COSTS_N_INSNS (65)
15538 : COSTS_N_INSNS (67));
15541 case PROCESSOR_MPCCORE
:
15542 *total
= COSTS_N_INSNS (6);
15545 case PROCESSOR_PPC403
:
15546 *total
= COSTS_N_INSNS (33);
15549 case PROCESSOR_PPC405
:
15550 *total
= COSTS_N_INSNS (35);
15553 case PROCESSOR_PPC440
:
15554 *total
= COSTS_N_INSNS (34);
15557 case PROCESSOR_PPC601
:
15558 *total
= COSTS_N_INSNS (36);
15561 case PROCESSOR_PPC603
:
15562 *total
= COSTS_N_INSNS (37);
15565 case PROCESSOR_PPC604
:
15566 case PROCESSOR_PPC604e
:
15567 *total
= COSTS_N_INSNS (20);
15570 case PROCESSOR_PPC620
:
15571 case PROCESSOR_PPC630
:
15572 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
15573 ? COSTS_N_INSNS (21)
15574 : COSTS_N_INSNS (37));
15577 case PROCESSOR_PPC750
:
15578 case PROCESSOR_PPC8540
:
15579 case PROCESSOR_PPC7400
:
15580 *total
= COSTS_N_INSNS (19);
15583 case PROCESSOR_PPC7450
:
15584 *total
= COSTS_N_INSNS (23);
15587 case PROCESSOR_POWER4
:
15588 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
15589 ? COSTS_N_INSNS (18)
15590 : COSTS_N_INSNS (34));
15598 *total
= COSTS_N_INSNS (4);
15602 /* MEM should be slightly more expensive than (plus (reg) (const)). */
15611 /* A C expression returning the cost of moving data from a register of class
15612 CLASS1 to one of CLASS2. */
15615 rs6000_register_move_cost (enum machine_mode mode
,
15616 enum reg_class from
, enum reg_class to
)
15618 /* Moves from/to GENERAL_REGS. */
15619 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
15620 || reg_classes_intersect_p (from
, GENERAL_REGS
))
15622 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
15625 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
15626 return (rs6000_memory_move_cost (mode
, from
, 0)
15627 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
15629 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
15630 else if (from
== CR_REGS
)
15634 /* A move will cost one instruction per GPR moved. */
15635 return 2 * HARD_REGNO_NREGS (0, mode
);
15638 /* Moving between two similar registers is just one instruction. */
15639 else if (reg_classes_intersect_p (to
, from
))
15640 return mode
== TFmode
? 4 : 2;
15642 /* Everything else has to go through GENERAL_REGS. */
15644 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
15645 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
15648 /* A C expressions returning the cost of moving data of MODE from a register to
15652 rs6000_memory_move_cost (enum machine_mode mode
, enum reg_class
class,
15653 int in ATTRIBUTE_UNUSED
)
15655 if (reg_classes_intersect_p (class, GENERAL_REGS
))
15656 return 4 * HARD_REGNO_NREGS (0, mode
);
15657 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
15658 return 4 * HARD_REGNO_NREGS (32, mode
);
15659 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
15660 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
15662 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
15665 /* Return an RTX representing where to find the function value of a
15666 function returning MODE. */
15668 rs6000_complex_function_value (enum machine_mode mode
)
15670 unsigned int regno
;
15672 enum machine_mode inner
= GET_MODE_INNER (mode
);
15674 if (FLOAT_MODE_P (mode
))
15675 regno
= FP_ARG_RETURN
;
15678 regno
= GP_ARG_RETURN
;
15680 /* 32-bit is OK since it'll go in r3/r4. */
15682 && GET_MODE_BITSIZE (inner
) >= 32)
15683 return gen_rtx_REG (mode
, regno
);
15686 r1
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
),
15688 r2
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
+ 1),
15689 GEN_INT (GET_MODE_UNIT_SIZE (inner
)));
15690 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
15693 /* Define how to find the value returned by a function.
15694 VALTYPE is the data type of the value (as a tree).
15695 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15696 otherwise, FUNC is 0.
15698 On the SPE, both FPs and vectors are returned in r3.
15700 On RS/6000 an integer value is in r3 and a floating-point value is in
15701 fp1, unless -msoft-float. */
15704 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
15706 enum machine_mode mode
;
15707 unsigned int regno
;
15709 if (TARGET_32BIT
&& TARGET_POWERPC64
&& TYPE_MODE (valtype
) == DImode
)
15711 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15712 return gen_rtx_PARALLEL (DImode
,
15714 gen_rtx_EXPR_LIST (VOIDmode
,
15715 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
15717 gen_rtx_EXPR_LIST (VOIDmode
,
15718 gen_rtx_REG (SImode
,
15719 GP_ARG_RETURN
+ 1),
15723 if ((INTEGRAL_TYPE_P (valtype
)
15724 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
15725 || POINTER_TYPE_P (valtype
))
15726 mode
= TARGET_32BIT
? SImode
: DImode
;
15728 mode
= TYPE_MODE (valtype
);
15730 if (TREE_CODE (valtype
) == REAL_TYPE
&& TARGET_HARD_FLOAT
&& TARGET_FPRS
)
15731 regno
= FP_ARG_RETURN
;
15732 else if (TREE_CODE (valtype
) == COMPLEX_TYPE
15733 && TARGET_HARD_FLOAT
15734 && SPLIT_COMPLEX_ARGS
)
15735 return rs6000_complex_function_value (mode
);
15736 else if (TREE_CODE (valtype
) == VECTOR_TYPE
&& TARGET_ALTIVEC
)
15737 regno
= ALTIVEC_ARG_RETURN
;
15739 regno
= GP_ARG_RETURN
;
15741 return gen_rtx_REG (mode
, regno
);
15744 /* Define how to find the value returned by a library function
15745 assuming the value has mode MODE. */
15747 rs6000_libcall_value (enum machine_mode mode
)
15749 unsigned int regno
;
15751 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
15752 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
15753 regno
= FP_ARG_RETURN
;
15754 else if (ALTIVEC_VECTOR_MODE (mode
))
15755 regno
= ALTIVEC_ARG_RETURN
;
15756 else if (COMPLEX_MODE_P (mode
) && SPLIT_COMPLEX_ARGS
)
15757 return rs6000_complex_function_value (mode
);
15759 regno
= GP_ARG_RETURN
;
15761 return gen_rtx_REG (mode
, regno
);
15764 /* Define the offset between two registers, FROM to be eliminated and its
15765 replacement TO, at the start of a routine. */
15767 rs6000_initial_elimination_offset (int from
, int to
)
15769 rs6000_stack_t
*info
= rs6000_stack_info ();
15770 HOST_WIDE_INT offset
;
15772 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
15773 offset
= info
->push_p
? 0 : -info
->total_size
;
15774 else if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
15775 offset
= info
->total_size
;
15776 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
15777 offset
= info
->push_p
? info
->total_size
: 0;
15778 else if (from
== RS6000_PIC_OFFSET_TABLE_REGNUM
)
15786 /* Return true if TYPE is of type __ev64_opaque__. */
15789 is_ev64_opaque_type (tree type
)
15792 && (type
== opaque_V2SI_type_node
15793 || type
== opaque_V2SF_type_node
15794 || type
== opaque_p_V2SI_type_node
));
15798 rs6000_dwarf_register_span (rtx reg
)
15802 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
15805 regno
= REGNO (reg
);
15807 /* The duality of the SPE register size wreaks all kinds of havoc.
15808 This is a way of distinguishing r0 in 32-bits from r0 in
15811 gen_rtx_PARALLEL (VOIDmode
,
15814 gen_rtx_REG (SImode
, regno
+ 1200),
15815 gen_rtx_REG (SImode
, regno
))
15817 gen_rtx_REG (SImode
, regno
),
15818 gen_rtx_REG (SImode
, regno
+ 1200)));
15821 /* Map internal gcc register numbers to DWARF2 register numbers. */
15824 rs6000_dbx_register_number (unsigned int regno
)
15826 if (regno
<= 63 || write_symbols
!= DWARF2_DEBUG
)
15828 if (regno
== MQ_REGNO
)
15830 if (regno
== LINK_REGISTER_REGNUM
)
15832 if (regno
== COUNT_REGISTER_REGNUM
)
15834 if (CR_REGNO_P (regno
))
15835 return regno
- CR0_REGNO
+ 86;
15836 if (regno
== XER_REGNO
)
15838 if (ALTIVEC_REGNO_P (regno
))
15839 return regno
- FIRST_ALTIVEC_REGNO
+ 1124;
15840 if (regno
== VRSAVE_REGNO
)
15842 if (regno
== VSCR_REGNO
)
15844 if (regno
== SPE_ACC_REGNO
)
15846 if (regno
== SPEFSCR_REGNO
)
15848 /* SPE high reg number. We get these values of regno from
15849 rs6000_dwarf_register_span. */
15850 if (regno
>= 1200 && regno
< 1232)
15856 #include "gt-rs6000.h"