1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
/* NOTE(review): the matching #endif (original line 56) was lost in
   extraction.  */
/* True when the literal N fits in a 5-bit signed field [-16,15] and
   every vector element matches -- loadable with a single splat.  */
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
/* NOTE(review): a continuation line of this macro (between original
   lines 61 and 63) was lost in extraction; the condition as shown
   may be incomplete -- confirm against the upstream source.  */
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && easy_vector_same (x, y))
/* Local helpers; A and B are evaluated twice, so avoid side effects.  */
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
/* Processor type chosen by the -mcpu=/-mtune= handling in
   rs6000_override_options.  */
70 enum processor_type rs6000_cpu
;
/* Table driving --with-cpu=/-mcpu=/-mtune= processing.  Entry 0's
   string is filled in with the configure-time default cpu; the two
   trailing flags select whether the switch sets tune and/or arch.
   NOTE(review): the `= {' opener and closing `};' of this initializer
   (original lines 72, 77) were lost in extraction.  */
71 struct rs6000_cpu_select rs6000_select
[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
/* Raw text from -mlong-double-<n>; parsed in rs6000_override_options.  */
80 const char *rs6000_long_double_size_string
;
/* Width of long double in bits: 64 or 128.  */
81 int rs6000_long_double_type_size
;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi
;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave
;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string
;
/* NOTE(review): the variable definitions belonging to the next four
   comments (original lines 93-102) were lost in extraction.  */
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Whether SPE simd instructions should be generated. */
101 /* Nonzero if we have FPRs. */
104 /* String from -misel=. */
105 const char *rs6000_isel_string
;
107 /* String from -mspe=. */
108 const char *rs6000_spe_string
;
110 /* Set to nonzero once AIX common-mode calls have been defined. */
111 static GTY(()) int common_mode_defined
;
113 /* Save information from a "cmpxx" operation until the branch or scc is
115 rtx rs6000_compare_op0
, rs6000_compare_op1
;
116 int rs6000_compare_fp_p
;
118 /* Label number of label created for -mrelocatable, to call to so we can
119 get the address of the GOT section */
120 int rs6000_pic_labelno
;
123 /* Which abi to adhere to */
/* Defaults to RS6000_ABI_NAME from the subtarget headers.  */
124 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
126 /* Semantics of the small data area */
127 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
129 /* Which small data model to use */
/* NULL until a -msdata= option supplies a model name.  */
130 const char *rs6000_sdata_name
= (char *)0;
132 /* Counter for labels which are to be placed in .fixup. */
133 int fixuplabelno
= 0;
136 /* ABI enumeration available for subtarget to use. */
137 enum rs6000_abi rs6000_current_abi
;
139 /* ABI string from -mabi= option. */
140 const char *rs6000_abi_string
;
/* String from -mdebug=; parsed into the two flags below.  */
143 const char *rs6000_debug_name
;
144 int rs6000_debug_stack
; /* debug stack applications */
145 int rs6000_debug_arg
; /* debug argument handling */
/* String from -mtraceback=; compared against "full"/"part"/"no"
   in rs6000_override_options.  */
147 const char *rs6000_traceback_name
;
/* NOTE(review): enumerator fragment -- the enclosing traceback enum
   and its other members (original lines 148, 150-154) were lost in
   extraction.  */
149 traceback_default
= 0,
155 /* Flag to say the TOC is initialized */
/* Buffer for the internal label naming the TOC anchor; filled by
   ASM_GENERATE_INTERNAL_LABEL in rs6000_override_options.  */
157 char toc_label_name
[10];
159 /* Alias set for saves and restores from the rs6000 stack. */
160 static int rs6000_sr_alias_set
;
162 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
163 The only place that looks at this is rs6000_set_default_type_attributes;
164 everywhere else should rely on the presence or absence of a longcall
165 attribute on the function declaration. */
166 int rs6000_default_long_calls
;
/* Text of the last -m(no-)longcall option seen, if any; scanned
   backwards for the "no-" prefix in rs6000_override_options.  */
167 const char *rs6000_longcall_switch
;
/* Describes one RS/6000 builtin for the expander tables below.
   NOTE(review): the struct's braces and its `mask' member (original
   lines 170, 174, 178-179) were lost in extraction; only three of
   the const members are visible here.  */
169 struct builtin_description
171 /* mask is not const because we're going to alter it below. This
172 nonsense will go away when we rewrite the -march infrastructure
173 to give us more target flag bits. */
175 const enum insn_code icode
;
176 const char *const name
;
177 const enum rs6000_builtins code
;
180 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
181 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
182 static void validate_condition_mode
183 PARAMS ((enum rtx_code
, enum machine_mode
));
184 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
185 static void rs6000_maybe_dead
PARAMS ((rtx
));
186 static void rs6000_emit_stack_tie
PARAMS ((void));
187 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
188 static rtx spe_synthesize_frame_save
PARAMS ((rtx
));
189 static bool spe_func_has_64bit_regs_p
PARAMS ((void));
190 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
191 unsigned int, int, int));
192 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
193 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
194 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
195 static unsigned toc_hash_function
PARAMS ((const void *));
196 static int toc_hash_eq
PARAMS ((const void *, const void *));
197 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
198 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
199 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
200 #ifdef HAVE_GAS_HIDDEN
201 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
203 static int rs6000_ra_ever_killed
PARAMS ((void));
204 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
205 const struct attribute_spec rs6000_attribute_table
[];
206 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
207 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
208 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
209 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
210 HOST_WIDE_INT
, tree
));
211 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
212 HOST_WIDE_INT
, HOST_WIDE_INT
));
214 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
216 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
217 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
218 static void rs6000_elf_select_section
PARAMS ((tree
, int,
219 unsigned HOST_WIDE_INT
));
220 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
221 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
222 unsigned HOST_WIDE_INT
));
223 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int))
225 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
226 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
229 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
230 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
231 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
232 unsigned HOST_WIDE_INT
));
233 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
234 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
235 unsigned HOST_WIDE_INT
));
236 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
237 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
238 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
242 static bool rs6000_binds_local_p
PARAMS ((tree
));
244 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
245 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
246 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
247 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
248 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
249 static int rs6000_issue_rate
PARAMS ((void));
250 static int rs6000_use_sched_lookahead
PARAMS ((void));
252 static void rs6000_init_builtins
PARAMS ((void));
253 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
254 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
255 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
256 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
257 static void altivec_init_builtins
PARAMS ((void));
258 static void rs6000_common_init_builtins
PARAMS ((void));
260 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
261 int, enum rs6000_builtins
,
262 enum rs6000_builtins
));
263 static void spe_init_builtins
PARAMS ((void));
264 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
265 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
266 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
267 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
269 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
270 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
271 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
272 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
273 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
274 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
275 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
276 static void rs6000_parse_abi_options
PARAMS ((void));
277 static void rs6000_parse_vrsave_option
PARAMS ((void));
278 static void rs6000_parse_isel_option
PARAMS ((void));
279 static void rs6000_parse_spe_option (void);
280 static int first_altivec_reg_to_save
PARAMS ((void));
281 static unsigned int compute_vrsave_mask
PARAMS ((void));
282 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
283 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
284 int easy_vector_constant
PARAMS ((rtx
, enum machine_mode
));
285 static int easy_vector_same
PARAMS ((rtx
, enum machine_mode
));
286 static bool is_ev64_opaque_type
PARAMS ((tree
));
287 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
289 /* Hash table stuff for keeping track of TOC entries. */
/* NOTE(review): the struct's braces and its `key' and label-number
   members (original lines 292, 295, 297-299) were lost in
   extraction.  */
291 struct toc_hash_struct
GTY(())
293 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
294 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
296 enum machine_mode key_mode
;
/* Garbage-collected hash table of already-emitted TOC entries,
   keyed by constant and mode.  */
300 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
302 /* Default register names. */
/* Writable (not const) because the TARGET_REGNAMES code in
   rs6000_override_options may memcpy alt_reg_names over it.
   NOTE(review): the `= {' opener, the cr/xer/spe rows and the
   closing `};' (original lines 304, 315, 321-325) were lost in
   extraction.  */
303 char rs6000_reg_names
[][8] =
/* GPRs 0-31.  */
305 "0", "1", "2", "3", "4", "5", "6", "7",
306 "8", "9", "10", "11", "12", "13", "14", "15",
307 "16", "17", "18", "19", "20", "21", "22", "23",
308 "24", "25", "26", "27", "28", "29", "30", "31",
/* FPRs 0-31.  */
309 "0", "1", "2", "3", "4", "5", "6", "7",
310 "8", "9", "10", "11", "12", "13", "14", "15",
311 "16", "17", "18", "19", "20", "21", "22", "23",
312 "24", "25", "26", "27", "28", "29", "30", "31",
/* Special registers, then (presumably) the CR fields -- the xer row
   that should follow (original line 315) is missing; confirm against
   the upstream source.  */
313 "mq", "lr", "ctr","ap",
314 "0", "1", "2", "3", "4", "5", "6", "7",
316 /* AltiVec registers. */
317 "0", "1", "2", "3", "4", "5", "6", "7",
318 "8", "9", "10", "11", "12", "13", "14", "15",
319 "16", "17", "18", "19", "20", "21", "22", "23",
320 "24", "25", "26", "27", "28", "29", "30", "31",
326 #ifdef TARGET_REGNAMES
/* Alternate %-prefixed register names, copied over rs6000_reg_names
   when the user asks for them (see memcpy in rs6000_override_options).
   NOTE(review): the `= {' opener, the xer/spe rows, the closing `};'
   and the #endif (original lines 328, 339, 345-350) were lost in
   extraction.  */
327 static const char alt_reg_names
[][8] =
329 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
330 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
331 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
332 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
333 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
334 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
335 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
336 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
337 "mq", "lr", "ctr", "ap",
338 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
340 /* AltiVec registers. */
341 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
342 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
343 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
344 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
/* Zero defaults so the uses below compile on subtargets that do not
   define these; the matching #endif lines were lost in extraction.  */
351 #ifndef MASK_STRICT_ALIGN
352 #define MASK_STRICT_ALIGN 0
354 #ifndef TARGET_PROFILE_KERNEL
355 #define TARGET_PROFILE_KERNEL 0
358 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
359 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
361 /* Initialize the GCC target structure. */
362 #undef TARGET_ATTRIBUTE_TABLE
363 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
364 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
365 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
367 #undef TARGET_ASM_ALIGNED_DI_OP
368 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
370 /* Default unaligned ops are only provided for ELF. Find the ops needed
371 for non-ELF systems. */
372 #ifndef OBJECT_FORMAT_ELF
374 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
376 #undef TARGET_ASM_UNALIGNED_HI_OP
377 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
378 #undef TARGET_ASM_UNALIGNED_SI_OP
379 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
380 #undef TARGET_ASM_UNALIGNED_DI_OP
381 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
384 #undef TARGET_ASM_UNALIGNED_HI_OP
385 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
386 #undef TARGET_ASM_UNALIGNED_SI_OP
387 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
391 /* This hook deals with fixups for relocatable code and DI-mode objects
393 #undef TARGET_ASM_INTEGER
394 #define TARGET_ASM_INTEGER rs6000_assemble_integer
396 #ifdef HAVE_GAS_HIDDEN
397 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
398 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
401 #undef TARGET_ASM_FUNCTION_PROLOGUE
402 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
403 #undef TARGET_ASM_FUNCTION_EPILOGUE
404 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
406 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
407 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
408 #undef TARGET_SCHED_VARIABLE_ISSUE
409 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
411 #undef TARGET_SCHED_ISSUE_RATE
412 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
413 #undef TARGET_SCHED_ADJUST_COST
414 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
415 #undef TARGET_SCHED_ADJUST_PRIORITY
416 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
418 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
419 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
421 #undef TARGET_INIT_BUILTINS
422 #define TARGET_INIT_BUILTINS rs6000_init_builtins
424 #undef TARGET_EXPAND_BUILTIN
425 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
428 #undef TARGET_BINDS_LOCAL_P
429 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
432 #undef TARGET_ASM_OUTPUT_MI_THUNK
433 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
435 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
436 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
438 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
439 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
441 #undef TARGET_RTX_COSTS
442 #define TARGET_RTX_COSTS rs6000_rtx_costs
443 #undef TARGET_ADDRESS_COST
444 #define TARGET_ADDRESS_COST hook_int_rtx_0
446 #undef TARGET_VECTOR_OPAQUE_P
447 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
449 #undef TARGET_DWARF_REGISTER_SPAN
450 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
452 struct gcc_target targetm
= TARGET_INITIALIZER
;
454 /* Override command line options. Mostly we process the processor
455 type and sometimes adjust other TARGET_ options. */
458 rs6000_override_options (default_cpu
)
459 const char *default_cpu
;
462 struct rs6000_cpu_select
*ptr
;
464 /* Simplify the entries below by making a mask for any POWER
465 variant and any PowerPC variant. */
467 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
468 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
469 | MASK_PPC_GFXOPT | MASK_POWERPC64)
470 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
474 const char *const name
; /* Canonical processor name. */
475 const enum processor_type processor
; /* Processor type enum value. */
476 const int target_enable
; /* Target flags to enable. */
477 const int target_disable
; /* Target flags to disable. */
478 } const processor_target_table
[]
479 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
480 POWER_MASKS
| POWERPC_MASKS
},
481 {"power", PROCESSOR_POWER
,
482 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
483 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
484 {"power2", PROCESSOR_POWER
,
485 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
486 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
487 {"power3", PROCESSOR_PPC630
,
488 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
490 {"power4", PROCESSOR_POWER4
,
491 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
493 {"powerpc", PROCESSOR_POWERPC
,
494 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
495 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
496 {"powerpc64", PROCESSOR_POWERPC64
,
497 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
498 POWER_MASKS
| POWERPC_OPT_MASKS
},
499 {"rios", PROCESSOR_RIOS1
,
500 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
501 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
502 {"rios1", PROCESSOR_RIOS1
,
503 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
504 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
505 {"rsc", PROCESSOR_PPC601
,
506 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
507 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
508 {"rsc1", PROCESSOR_PPC601
,
509 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
510 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
511 {"rios2", PROCESSOR_RIOS2
,
512 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
513 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
514 {"rs64a", PROCESSOR_RS64A
,
515 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
516 POWER_MASKS
| POWERPC_OPT_MASKS
},
517 {"401", PROCESSOR_PPC403
,
518 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
519 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
520 {"403", PROCESSOR_PPC403
,
521 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
522 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
523 {"405", PROCESSOR_PPC405
,
524 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
525 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
526 {"405f", PROCESSOR_PPC405
,
527 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
528 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
529 {"505", PROCESSOR_MPCCORE
,
530 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
531 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
532 {"601", PROCESSOR_PPC601
,
533 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
534 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
535 {"602", PROCESSOR_PPC603
,
536 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
537 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
538 {"603", PROCESSOR_PPC603
,
539 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
540 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
541 {"603e", PROCESSOR_PPC603
,
542 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
543 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
544 {"ec603e", PROCESSOR_PPC603
,
545 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
546 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
547 {"604", PROCESSOR_PPC604
,
548 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
549 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
550 {"604e", PROCESSOR_PPC604e
,
551 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
552 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
553 {"620", PROCESSOR_PPC620
,
554 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
556 {"630", PROCESSOR_PPC630
,
557 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
559 {"740", PROCESSOR_PPC750
,
560 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
561 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
562 {"750", PROCESSOR_PPC750
,
563 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
564 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
565 {"7400", PROCESSOR_PPC7400
,
566 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
567 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
568 {"7450", PROCESSOR_PPC7450
,
569 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
570 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
571 {"8540", PROCESSOR_PPC8540
,
572 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
573 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
574 {"801", PROCESSOR_MPCCORE
,
575 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
576 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
577 {"821", PROCESSOR_MPCCORE
,
578 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
579 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
580 {"823", PROCESSOR_MPCCORE
,
581 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
582 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
583 {"860", PROCESSOR_MPCCORE
,
584 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
585 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
587 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
589 /* Save current -mmultiple/-mno-multiple status. */
590 int multiple
= TARGET_MULTIPLE
;
591 /* Save current -mstring/-mno-string status. */
592 int string
= TARGET_STRING
;
594 /* Identify the processor type. */
595 rs6000_select
[0].string
= default_cpu
;
596 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
598 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
600 ptr
= &rs6000_select
[i
];
601 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
603 for (j
= 0; j
< ptt_size
; j
++)
604 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
607 rs6000_cpu
= processor_target_table
[j
].processor
;
611 target_flags
|= processor_target_table
[j
].target_enable
;
612 target_flags
&= ~processor_target_table
[j
].target_disable
;
618 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
625 /* If we are optimizing big endian systems for space, use the load/store
626 multiple and string instructions. */
627 if (BYTES_BIG_ENDIAN
&& optimize_size
)
628 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
630 /* If -mmultiple or -mno-multiple was explicitly used, don't
631 override with the processor default */
632 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
633 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
635 /* If -mstring or -mno-string was explicitly used, don't override
636 with the processor default. */
637 if ((target_flags_explicit
& MASK_STRING
) != 0)
638 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
640 /* Don't allow -mmultiple or -mstring on little endian systems
641 unless the cpu is a 750, because the hardware doesn't support the
642 instructions used in little endian mode, and causes an alignment
643 trap. The 750 does not cause an alignment trap (except when the
644 target is unaligned). */
646 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
650 target_flags
&= ~MASK_MULTIPLE
;
651 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
652 warning ("-mmultiple is not supported on little endian systems");
657 target_flags
&= ~MASK_STRING
;
658 if ((target_flags_explicit
& MASK_STRING
) != 0)
659 warning ("-mstring is not supported on little endian systems");
663 /* Set debug flags */
664 if (rs6000_debug_name
)
666 if (! strcmp (rs6000_debug_name
, "all"))
667 rs6000_debug_stack
= rs6000_debug_arg
= 1;
668 else if (! strcmp (rs6000_debug_name
, "stack"))
669 rs6000_debug_stack
= 1;
670 else if (! strcmp (rs6000_debug_name
, "arg"))
671 rs6000_debug_arg
= 1;
673 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
676 if (rs6000_traceback_name
)
678 if (! strncmp (rs6000_traceback_name
, "full", 4))
679 rs6000_traceback
= traceback_full
;
680 else if (! strncmp (rs6000_traceback_name
, "part", 4))
681 rs6000_traceback
= traceback_part
;
682 else if (! strncmp (rs6000_traceback_name
, "no", 2))
683 rs6000_traceback
= traceback_none
;
685 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
686 rs6000_traceback_name
);
689 /* Set size of long double */
690 rs6000_long_double_type_size
= 64;
691 if (rs6000_long_double_size_string
)
694 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
695 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
696 error ("Unknown switch -mlong-double-%s",
697 rs6000_long_double_size_string
);
699 rs6000_long_double_type_size
= size
;
702 /* Handle -mabi= options. */
703 rs6000_parse_abi_options ();
705 /* Handle -mvrsave= option. */
706 rs6000_parse_vrsave_option ();
708 /* Handle -misel= option. */
709 rs6000_parse_isel_option ();
711 /* Handle -mspe= option. */
712 rs6000_parse_spe_option ();
714 #ifdef SUBTARGET_OVERRIDE_OPTIONS
715 SUBTARGET_OVERRIDE_OPTIONS
;
717 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
718 SUBSUBTARGET_OVERRIDE_OPTIONS
;
721 /* The e500 does not have string instructions, and we set
722 MASK_STRING above when optimizing for size. */
723 if (rs6000_cpu
== PROCESSOR_PPC8540
&& (target_flags
& MASK_STRING
) != 0)
724 target_flags
= target_flags
& ~MASK_STRING
;
726 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
727 using TARGET_OPTIONS to handle a toggle switch, but we're out of
728 bits in target_flags so TARGET_SWITCHES cannot be used.
729 Assumption here is that rs6000_longcall_switch points into the
730 text of the complete option, rather than being a copy, so we can
731 scan back for the presence or absence of the no- modifier. */
732 if (rs6000_longcall_switch
)
734 const char *base
= rs6000_longcall_switch
;
735 while (base
[-1] != 'm') base
--;
737 if (*rs6000_longcall_switch
!= '\0')
738 error ("invalid option `%s'", base
);
739 rs6000_default_long_calls
= (base
[0] != 'n');
742 #ifdef TARGET_REGNAMES
743 /* If the user desires alternate register names, copy in the
744 alternate names now. */
746 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
749 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
750 If -maix-struct-return or -msvr4-struct-return was explicitly
751 used, don't override with the ABI default. */
752 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
754 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
755 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
757 target_flags
|= MASK_AIX_STRUCT_RET
;
760 if (TARGET_LONG_DOUBLE_128
761 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
762 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
764 /* Allocate an alias set for register saves & restores from stack. */
765 rs6000_sr_alias_set
= new_alias_set ();
768 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
770 /* We can only guarantee the availability of DI pseudo-ops when
771 assembling for 64-bit targets. */
774 targetm
.asm_out
.aligned_op
.di
= NULL
;
775 targetm
.asm_out
.unaligned_op
.di
= NULL
;
778 /* Set maximum branch target alignment at two instructions, eight bytes. */
779 align_jumps_max_skip
= 8;
780 align_loops_max_skip
= 8;
782 /* Arrange to save and restore machine status around nested functions. */
783 init_machine_status
= rs6000_init_machine_status
;
786 /* Handle -misel= option. */
788 rs6000_parse_isel_option ()
790 if (rs6000_isel_string
== 0)
792 else if (! strcmp (rs6000_isel_string
, "yes"))
794 else if (! strcmp (rs6000_isel_string
, "no"))
797 error ("unknown -misel= option specified: '%s'",
801 /* Handle -mspe= option. */
803 rs6000_parse_spe_option (void)
805 if (rs6000_spe_string
== 0)
807 else if (!strcmp (rs6000_spe_string
, "yes"))
809 else if (!strcmp (rs6000_spe_string
, "no"))
812 error ("unknown -mspe= option specified: '%s'", rs6000_spe_string
);
815 /* Handle -mvrsave= options. */
817 rs6000_parse_vrsave_option ()
819 /* Generate VRSAVE instructions by default. */
820 if (rs6000_altivec_vrsave_string
== 0
821 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
822 rs6000_altivec_vrsave
= 1;
823 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
824 rs6000_altivec_vrsave
= 0;
826 error ("unknown -mvrsave= option specified: '%s'",
827 rs6000_altivec_vrsave_string
);
830 /* Handle -mabi= options. */
832 rs6000_parse_abi_options ()
834 if (rs6000_abi_string
== 0)
836 else if (! strcmp (rs6000_abi_string
, "altivec"))
837 rs6000_altivec_abi
= 1;
838 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
839 rs6000_altivec_abi
= 0;
840 else if (! strcmp (rs6000_abi_string
, "spe"))
844 error ("not configured for ABI: '%s'", rs6000_abi_string
);
847 else if (! strcmp (rs6000_abi_string
, "no-spe"))
850 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
854 optimization_options (level
, size
)
855 int level ATTRIBUTE_UNUSED
;
856 int size ATTRIBUTE_UNUSED
;
860 /* Do anything needed at the start of the asm file. */
863 rs6000_file_start (file
, default_cpu
)
865 const char *default_cpu
;
869 const char *start
= buffer
;
870 struct rs6000_cpu_select
*ptr
;
872 if (flag_verbose_asm
)
874 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
875 rs6000_select
[0].string
= default_cpu
;
877 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
879 ptr
= &rs6000_select
[i
];
880 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
882 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
888 switch (rs6000_sdata
)
890 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
891 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
892 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
893 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
896 if (rs6000_sdata
&& g_switch_value
)
898 fprintf (file
, "%s -G %d", start
, g_switch_value
);
908 /* Return nonzero if this function is known to have a null epilogue. */
913 if (reload_completed
)
915 rs6000_stack_t
*info
= rs6000_stack_info ();
917 if (info
->first_gp_reg_save
== 32
918 && info
->first_fp_reg_save
== 64
919 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
922 && info
->vrsave_mask
== 0
930 /* Returns 1 always. */
933 any_operand (op
, mode
)
934 rtx op ATTRIBUTE_UNUSED
;
935 enum machine_mode mode ATTRIBUTE_UNUSED
;
940 /* Returns 1 if op is the count register. */
942 count_register_operand (op
, mode
)
944 enum machine_mode mode ATTRIBUTE_UNUSED
;
946 if (GET_CODE (op
) != REG
)
949 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
952 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
958 /* Returns 1 if op is an altivec register. */
960 altivec_register_operand (op
, mode
)
962 enum machine_mode mode ATTRIBUTE_UNUSED
;
965 return (register_operand (op
, mode
)
966 && (GET_CODE (op
) != REG
967 || REGNO (op
) > FIRST_PSEUDO_REGISTER
968 || ALTIVEC_REGNO_P (REGNO (op
))));
972 xer_operand (op
, mode
)
974 enum machine_mode mode ATTRIBUTE_UNUSED
;
976 if (GET_CODE (op
) != REG
)
979 if (XER_REGNO_P (REGNO (op
)))
985 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
986 by such constants completes more quickly. */
989 s8bit_cint_operand (op
, mode
)
991 enum machine_mode mode ATTRIBUTE_UNUSED
;
993 return ( GET_CODE (op
) == CONST_INT
994 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
997 /* Return 1 if OP is a constant that can fit in a D field. */
1000 short_cint_operand (op
, mode
)
1002 enum machine_mode mode ATTRIBUTE_UNUSED
;
1004 return (GET_CODE (op
) == CONST_INT
1005 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1008 /* Similar for an unsigned D field. */
1011 u_short_cint_operand (op
, mode
)
1013 enum machine_mode mode ATTRIBUTE_UNUSED
;
1015 return (GET_CODE (op
) == CONST_INT
1016 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1019 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1022 non_short_cint_operand (op
, mode
)
1024 enum machine_mode mode ATTRIBUTE_UNUSED
;
1026 return (GET_CODE (op
) == CONST_INT
1027 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1030 /* Returns 1 if OP is a CONST_INT that is a positive value
1031 and an exact power of 2. */
1034 exact_log2_cint_operand (op
, mode
)
1036 enum machine_mode mode ATTRIBUTE_UNUSED
;
1038 return (GET_CODE (op
) == CONST_INT
1040 && exact_log2 (INTVAL (op
)) >= 0);
1043 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1047 gpc_reg_operand (op
, mode
)
1049 enum machine_mode mode
;
1051 return (register_operand (op
, mode
)
1052 && (GET_CODE (op
) != REG
1053 || (REGNO (op
) >= ARG_POINTER_REGNUM
1054 && !XER_REGNO_P (REGNO (op
)))
1055 || REGNO (op
) < MQ_REGNO
));
1058 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1062 cc_reg_operand (op
, mode
)
1064 enum machine_mode mode
;
1066 return (register_operand (op
, mode
)
1067 && (GET_CODE (op
) != REG
1068 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1069 || CR_REGNO_P (REGNO (op
))));
1072 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1073 CR field that isn't CR0. */
1076 cc_reg_not_cr0_operand (op
, mode
)
1078 enum machine_mode mode
;
1080 return (register_operand (op
, mode
)
1081 && (GET_CODE (op
) != REG
1082 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1083 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1086 /* Returns 1 if OP is either a constant integer valid for a D-field or
1087 a non-special register. If a register, it must be in the proper
1088 mode unless MODE is VOIDmode. */
1091 reg_or_short_operand (op
, mode
)
1093 enum machine_mode mode
;
1095 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1098 /* Similar, except check if the negation of the constant would be
1099 valid for a D-field. */
1102 reg_or_neg_short_operand (op
, mode
)
1104 enum machine_mode mode
;
1106 if (GET_CODE (op
) == CONST_INT
)
1107 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1109 return gpc_reg_operand (op
, mode
);
1112 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1113 a non-special register. If a register, it must be in the proper
1114 mode unless MODE is VOIDmode. */
1117 reg_or_aligned_short_operand (op
, mode
)
1119 enum machine_mode mode
;
1121 if (gpc_reg_operand (op
, mode
))
1123 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1130 /* Return 1 if the operand is either a register or an integer whose
1131 high-order 16 bits are zero. */
1134 reg_or_u_short_operand (op
, mode
)
1136 enum machine_mode mode
;
1138 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1141 /* Return 1 is the operand is either a non-special register or ANY
1142 constant integer. */
1145 reg_or_cint_operand (op
, mode
)
1147 enum machine_mode mode
;
1149 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1152 /* Return 1 is the operand is either a non-special register or ANY
1153 32-bit signed constant integer. */
1156 reg_or_arith_cint_operand (op
, mode
)
1158 enum machine_mode mode
;
1160 return (gpc_reg_operand (op
, mode
)
1161 || (GET_CODE (op
) == CONST_INT
1162 #if HOST_BITS_PER_WIDE_INT != 32
1163 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1164 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1169 /* Return 1 is the operand is either a non-special register or a 32-bit
1170 signed constant integer valid for 64-bit addition. */
1173 reg_or_add_cint64_operand (op
, mode
)
1175 enum machine_mode mode
;
1177 return (gpc_reg_operand (op
, mode
)
1178 || (GET_CODE (op
) == CONST_INT
1179 #if HOST_BITS_PER_WIDE_INT == 32
1180 && INTVAL (op
) < 0x7fff8000
1182 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1188 /* Return 1 is the operand is either a non-special register or a 32-bit
1189 signed constant integer valid for 64-bit subtraction. */
1192 reg_or_sub_cint64_operand (op
, mode
)
1194 enum machine_mode mode
;
1196 return (gpc_reg_operand (op
, mode
)
1197 || (GET_CODE (op
) == CONST_INT
1198 #if HOST_BITS_PER_WIDE_INT == 32
1199 && (- INTVAL (op
)) < 0x7fff8000
1201 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1207 /* Return 1 is the operand is either a non-special register or ANY
1208 32-bit unsigned constant integer. */
1211 reg_or_logical_cint_operand (op
, mode
)
1213 enum machine_mode mode
;
1215 if (GET_CODE (op
) == CONST_INT
)
1217 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1219 if (GET_MODE_BITSIZE (mode
) <= 32)
1222 if (INTVAL (op
) < 0)
1226 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1227 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1229 else if (GET_CODE (op
) == CONST_DOUBLE
)
1231 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1235 return CONST_DOUBLE_HIGH (op
) == 0;
1238 return gpc_reg_operand (op
, mode
);
1241 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1244 got_operand (op
, mode
)
1246 enum machine_mode mode ATTRIBUTE_UNUSED
;
1248 return (GET_CODE (op
) == SYMBOL_REF
1249 || GET_CODE (op
) == CONST
1250 || GET_CODE (op
) == LABEL_REF
);
1253 /* Return 1 if the operand is a simple references that can be loaded via
1254 the GOT (labels involving addition aren't allowed). */
1257 got_no_const_operand (op
, mode
)
1259 enum machine_mode mode ATTRIBUTE_UNUSED
;
1261 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1264 /* Return the number of instructions it takes to form a constant in an
1265 integer register. */
1268 num_insns_constant_wide (value
)
1269 HOST_WIDE_INT value
;
1271 /* signed constant loadable with {cal|addi} */
1272 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1275 /* constant loadable with {cau|addis} */
1276 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1279 #if HOST_BITS_PER_WIDE_INT == 64
1280 else if (TARGET_POWERPC64
)
1282 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1283 HOST_WIDE_INT high
= value
>> 31;
1285 if (high
== 0 || high
== -1)
1291 return num_insns_constant_wide (high
) + 1;
1293 return (num_insns_constant_wide (high
)
1294 + num_insns_constant_wide (low
) + 1);
1303 num_insns_constant (op
, mode
)
1305 enum machine_mode mode
;
1307 if (GET_CODE (op
) == CONST_INT
)
1309 #if HOST_BITS_PER_WIDE_INT == 64
1310 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1311 && mask64_operand (op
, mode
))
1315 return num_insns_constant_wide (INTVAL (op
));
1318 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1323 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1324 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1325 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1328 else if (GET_CODE (op
) == CONST_DOUBLE
)
1334 int endian
= (WORDS_BIG_ENDIAN
== 0);
1336 if (mode
== VOIDmode
|| mode
== DImode
)
1338 high
= CONST_DOUBLE_HIGH (op
);
1339 low
= CONST_DOUBLE_LOW (op
);
1343 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1344 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1346 low
= l
[1 - endian
];
1350 return (num_insns_constant_wide (low
)
1351 + num_insns_constant_wide (high
));
1355 if (high
== 0 && low
>= 0)
1356 return num_insns_constant_wide (low
);
1358 else if (high
== -1 && low
< 0)
1359 return num_insns_constant_wide (low
);
1361 else if (mask64_operand (op
, mode
))
1365 return num_insns_constant_wide (high
) + 1;
1368 return (num_insns_constant_wide (high
)
1369 + num_insns_constant_wide (low
) + 1);
1377 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1378 register with one instruction per word. We only do this if we can
1379 safely read CONST_DOUBLE_{LOW,HIGH}. */
1382 easy_fp_constant (op
, mode
)
1384 enum machine_mode mode
;
1386 if (GET_CODE (op
) != CONST_DOUBLE
1387 || GET_MODE (op
) != mode
1388 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1391 /* Consider all constants with -msoft-float to be easy. */
1392 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1396 /* If we are using V.4 style PIC, consider all constants to be hard. */
1397 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1400 #ifdef TARGET_RELOCATABLE
1401 /* Similarly if we are using -mrelocatable, consider all constants
1403 if (TARGET_RELOCATABLE
)
1412 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1413 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1415 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1416 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1417 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1418 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1421 else if (mode
== DFmode
)
1426 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1427 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1429 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1430 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1433 else if (mode
== SFmode
)
1438 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1439 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1441 return num_insns_constant_wide (l
) == 1;
1444 else if (mode
== DImode
)
1445 return ((TARGET_POWERPC64
1446 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1447 || (num_insns_constant (op
, DImode
) <= 2));
1449 else if (mode
== SImode
)
1455 /* Return non zero if all elements of a vector have the same value. */
1458 easy_vector_same (op
, mode
)
1460 enum machine_mode mode ATTRIBUTE_UNUSED
;
1464 units
= CONST_VECTOR_NUNITS (op
);
1466 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1467 for (i
= 1; i
< units
; ++i
)
1468 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
1475 /* Return 1 if the operand is a CONST_INT and can be put into a
1476 register without using memory. */
1479 easy_vector_constant (op
, mode
)
1481 enum machine_mode mode
;
1485 if (GET_CODE (op
) != CONST_VECTOR
1490 if (zero_constant (op
, mode
)
1491 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
1492 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
1495 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
1498 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1499 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
1501 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1503 evmergelo r0, r0, r0
1506 I don't know how efficient it would be to allow bigger constants,
1507 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1508 instructions is better than a 64-bit memory load, but I don't
1509 have the e500 timing specs. */
1510 if (TARGET_SPE
&& mode
== V2SImode
1511 && cst
>= -0x7fff && cst
<= 0x7fff
1512 && cst2
>= -0x7fff && cst
<= 0x7fff)
1515 if (TARGET_ALTIVEC
&& EASY_VECTOR_15 (cst
, op
, mode
))
1518 if (TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
))
1524 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1527 easy_vector_constant_add_self (op
, mode
)
1529 enum machine_mode mode
;
1533 if (!easy_vector_constant (op
, mode
))
1536 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1538 return TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
);
1542 output_vec_const_move (operands
)
1546 enum machine_mode mode
;
1552 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
1553 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
1554 mode
= GET_MODE (dest
);
1558 if (zero_constant (vec
, mode
))
1559 return "vxor %0,%0,%0";
1560 else if (EASY_VECTOR_15 (cst
, vec
, mode
))
1562 operands
[1] = GEN_INT (cst
);
1566 return "vspltisw %0,%1";
1568 return "vspltish %0,%1";
1570 return "vspltisb %0,%1";
1575 else if (EASY_VECTOR_15_ADD_SELF (cst
, vec
, mode
))
1583 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1584 pattern of V1DI, V4HI, and V2SF.
1586 FIXME: We should probabl return # and add post reload
1587 splitters for these, but this way is so easy ;-).
1589 operands
[1] = GEN_INT (cst
);
1590 operands
[2] = GEN_INT (cst2
);
1592 return "li %0,%1\n\tevmergelo %0,%0,%0";
1594 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1600 /* Return 1 if the operand is the constant 0. This works for scalars
1601 as well as vectors. */
1603 zero_constant (op
, mode
)
1605 enum machine_mode mode
;
1607 return op
== CONST0_RTX (mode
);
1610 /* Return 1 if the operand is 0.0. */
1612 zero_fp_constant (op
, mode
)
1614 enum machine_mode mode
;
1616 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1619 /* Return 1 if the operand is in volatile memory. Note that during
1620 the RTL generation phase, memory_operand does not return TRUE for
1621 volatile memory references. So this function allows us to
1622 recognize volatile references where its safe. */
1625 volatile_mem_operand (op
, mode
)
1627 enum machine_mode mode
;
1629 if (GET_CODE (op
) != MEM
)
1632 if (!MEM_VOLATILE_P (op
))
1635 if (mode
!= GET_MODE (op
))
1638 if (reload_completed
)
1639 return memory_operand (op
, mode
);
1641 if (reload_in_progress
)
1642 return strict_memory_address_p (mode
, XEXP (op
, 0));
1644 return memory_address_p (mode
, XEXP (op
, 0));
1647 /* Return 1 if the operand is an offsettable memory operand. */
1650 offsettable_mem_operand (op
, mode
)
1652 enum machine_mode mode
;
1654 return ((GET_CODE (op
) == MEM
)
1655 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1656 mode
, XEXP (op
, 0)));
1659 /* Return 1 if the operand is either an easy FP constant (see above) or
1663 mem_or_easy_const_operand (op
, mode
)
1665 enum machine_mode mode
;
1667 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1670 /* Return 1 if the operand is either a non-special register or an item
1671 that can be used as the operand of a `mode' add insn. */
1674 add_operand (op
, mode
)
1676 enum machine_mode mode
;
1678 if (GET_CODE (op
) == CONST_INT
)
1679 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1680 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1682 return gpc_reg_operand (op
, mode
);
1685 /* Return 1 if OP is a constant but not a valid add_operand. */
1688 non_add_cint_operand (op
, mode
)
1690 enum machine_mode mode ATTRIBUTE_UNUSED
;
1692 return (GET_CODE (op
) == CONST_INT
1693 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1694 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1697 /* Return 1 if the operand is a non-special register or a constant that
1698 can be used as the operand of an OR or XOR insn on the RS/6000. */
1701 logical_operand (op
, mode
)
1703 enum machine_mode mode
;
1705 HOST_WIDE_INT opl
, oph
;
1707 if (gpc_reg_operand (op
, mode
))
1710 if (GET_CODE (op
) == CONST_INT
)
1712 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1714 #if HOST_BITS_PER_WIDE_INT <= 32
1715 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1719 else if (GET_CODE (op
) == CONST_DOUBLE
)
1721 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1724 opl
= CONST_DOUBLE_LOW (op
);
1725 oph
= CONST_DOUBLE_HIGH (op
);
1732 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1733 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1736 /* Return 1 if C is a constant that is not a logical operand (as
1737 above), but could be split into one. */
1740 non_logical_cint_operand (op
, mode
)
1742 enum machine_mode mode
;
1744 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1745 && ! logical_operand (op
, mode
)
1746 && reg_or_logical_cint_operand (op
, mode
));
1749 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1750 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1751 Reject all ones and all zeros, since these should have been optimized
1752 away and confuse the making of MB and ME. */
1755 mask_operand (op
, mode
)
1757 enum machine_mode mode ATTRIBUTE_UNUSED
;
1759 HOST_WIDE_INT c
, lsb
;
1761 if (GET_CODE (op
) != CONST_INT
)
1766 /* Fail in 64-bit mode if the mask wraps around because the upper
1767 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1768 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1771 /* We don't change the number of transitions by inverting,
1772 so make sure we start with the LS bit zero. */
1776 /* Reject all zeros or all ones. */
1780 /* Find the first transition. */
1783 /* Invert to look for a second transition. */
1786 /* Erase first transition. */
1789 /* Find the second transition (if any). */
1792 /* Match if all the bits above are 1's (or c is zero). */
1796 /* Return 1 for the PowerPC64 rlwinm corner case. */
1799 mask_operand_wrap (op
, mode
)
1801 enum machine_mode mode ATTRIBUTE_UNUSED
;
1803 HOST_WIDE_INT c
, lsb
;
1805 if (GET_CODE (op
) != CONST_INT
)
1810 if ((c
& 0x80000001) != 0x80000001)
1824 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1825 It is if there are no more than one 1->0 or 0->1 transitions.
1826 Reject all zeros, since zero should have been optimized away and
1827 confuses the making of MB and ME. */
1830 mask64_operand (op
, mode
)
1832 enum machine_mode mode ATTRIBUTE_UNUSED
;
1834 if (GET_CODE (op
) == CONST_INT
)
1836 HOST_WIDE_INT c
, lsb
;
1840 /* Reject all zeros. */
1844 /* We don't change the number of transitions by inverting,
1845 so make sure we start with the LS bit zero. */
1849 /* Find the transition, and check that all bits above are 1's. */
1852 /* Match if all the bits above are 1's (or c is zero). */
1858 /* Like mask64_operand, but allow up to three transitions. This
1859 predicate is used by insn patterns that generate two rldicl or
1860 rldicr machine insns. */
1863 mask64_2_operand (op
, mode
)
1865 enum machine_mode mode ATTRIBUTE_UNUSED
;
1867 if (GET_CODE (op
) == CONST_INT
)
1869 HOST_WIDE_INT c
, lsb
;
1873 /* Disallow all zeros. */
1877 /* We don't change the number of transitions by inverting,
1878 so make sure we start with the LS bit zero. */
1882 /* Find the first transition. */
1885 /* Invert to look for a second transition. */
1888 /* Erase first transition. */
1891 /* Find the second transition. */
1894 /* Invert to look for a third transition. */
1897 /* Erase second transition. */
1900 /* Find the third transition (if any). */
1903 /* Match if all the bits above are 1's (or c is zero). */
1909 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1910 implement ANDing by the mask IN. */
1912 build_mask64_2_operands (in
, out
)
1916 #if HOST_BITS_PER_WIDE_INT >= 64
1917 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1920 if (GET_CODE (in
) != CONST_INT
)
1926 /* Assume c initially something like 0x00fff000000fffff. The idea
1927 is to rotate the word so that the middle ^^^^^^ group of zeros
1928 is at the MS end and can be cleared with an rldicl mask. We then
1929 rotate back and clear off the MS ^^ group of zeros with a
1931 c
= ~c
; /* c == 0xff000ffffff00000 */
1932 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1933 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1934 c
= ~c
; /* c == 0x00fff000000fffff */
1935 c
&= -lsb
; /* c == 0x00fff00000000000 */
1936 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1937 c
= ~c
; /* c == 0xff000fffffffffff */
1938 c
&= -lsb
; /* c == 0xff00000000000000 */
1940 while ((lsb
>>= 1) != 0)
1941 shift
++; /* shift == 44 on exit from loop */
1942 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1943 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1944 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1948 /* Assume c initially something like 0xff000f0000000000. The idea
1949 is to rotate the word so that the ^^^ middle group of zeros
1950 is at the LS end and can be cleared with an rldicr mask. We then
1951 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1953 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1954 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1955 c
= ~c
; /* c == 0x00fff0ffffffffff */
1956 c
&= -lsb
; /* c == 0x00fff00000000000 */
1957 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1958 c
= ~c
; /* c == 0xff000fffffffffff */
1959 c
&= -lsb
; /* c == 0xff00000000000000 */
1961 while ((lsb
>>= 1) != 0)
1962 shift
++; /* shift == 44 on exit from loop */
1963 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1964 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1965 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1968 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1969 masks will be all 1's. We are guaranteed more than one transition. */
1970 out
[0] = GEN_INT (64 - shift
);
1971 out
[1] = GEN_INT (m1
);
1972 out
[2] = GEN_INT (shift
);
1973 out
[3] = GEN_INT (m2
);
1981 /* Return 1 if the operand is either a non-special register or a constant
1982 that can be used as the operand of a PowerPC64 logical AND insn. */
1985 and64_operand (op
, mode
)
1987 enum machine_mode mode
;
1989 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1990 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1992 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1995 /* Like the above, but also match constants that can be implemented
1996 with two rldicl or rldicr insns. */
1999 and64_2_operand (op
, mode
)
2001 enum machine_mode mode
;
2003 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2004 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2006 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2009 /* Return 1 if the operand is either a non-special register or a
2010 constant that can be used as the operand of an RS/6000 logical AND insn. */
2013 and_operand (op
, mode
)
2015 enum machine_mode mode
;
2017 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2018 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2020 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2023 /* Return 1 if the operand is a general register or memory operand. */
2026 reg_or_mem_operand (op
, mode
)
2028 enum machine_mode mode
;
2030 return (gpc_reg_operand (op
, mode
)
2031 || memory_operand (op
, mode
)
2032 || volatile_mem_operand (op
, mode
));
2035 /* Return 1 if the operand is a general register or memory operand without
2036 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2040 lwa_operand (op
, mode
)
2042 enum machine_mode mode
;
2046 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2047 inner
= SUBREG_REG (inner
);
2049 return gpc_reg_operand (inner
, mode
)
2050 || (memory_operand (inner
, mode
)
2051 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2052 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2053 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2054 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2055 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2058 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2061 symbol_ref_operand (op
, mode
)
2063 enum machine_mode mode
;
2065 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2068 return (GET_CODE (op
) == SYMBOL_REF
);
2071 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2072 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2075 call_operand (op
, mode
)
2077 enum machine_mode mode
;
2079 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2082 return (GET_CODE (op
) == SYMBOL_REF
2083 || (GET_CODE (op
) == REG
2084 && (REGNO (op
) == LINK_REGISTER_REGNUM
2085 || REGNO (op
) == COUNT_REGISTER_REGNUM
2086 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2089 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2090 this file and the function is not weakly defined. */
2093 current_file_function_operand (op
, mode
)
2095 enum machine_mode mode ATTRIBUTE_UNUSED
;
2097 return (GET_CODE (op
) == SYMBOL_REF
2098 && (SYMBOL_REF_FLAG (op
)
2099 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
2100 && ! DECL_WEAK (current_function_decl
))));
2103 /* Return 1 if this operand is a valid input for a move insn. */
2106 input_operand (op
, mode
)
2108 enum machine_mode mode
;
2110 /* Memory is always valid. */
2111 if (memory_operand (op
, mode
))
2114 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2115 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2118 /* For floating-point, easy constants are valid. */
2119 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2121 && easy_fp_constant (op
, mode
))
2124 /* Allow any integer constant. */
2125 if (GET_MODE_CLASS (mode
) == MODE_INT
2126 && (GET_CODE (op
) == CONST_INT
2127 || GET_CODE (op
) == CONST_DOUBLE
))
2130 /* Allow easy vector constants. */
2131 if (GET_CODE (op
) == CONST_VECTOR
2132 && easy_vector_constant (op
, mode
))
2135 /* For floating-point or multi-word mode, the only remaining valid type
2137 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2138 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2139 return register_operand (op
, mode
);
2141 /* The only cases left are integral modes one word or smaller (we
2142 do not get called for MODE_CC values). These can be in any
2144 if (register_operand (op
, mode
))
2147 /* A SYMBOL_REF referring to the TOC is valid. */
2148 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
2151 /* A constant pool expression (relative to the TOC) is valid */
2152 if (TOC_RELATIVE_EXPR_P (op
))
2155 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2157 if (DEFAULT_ABI
== ABI_V4
2158 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2159 && small_data_operand (op
, Pmode
))
2165 /* Return 1 for an operand in small memory on V.4/eabi. */
2168 small_data_operand (op
, mode
)
2169 rtx op ATTRIBUTE_UNUSED
;
2170 enum machine_mode mode ATTRIBUTE_UNUSED
;
2175 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2178 if (DEFAULT_ABI
!= ABI_V4
)
2181 if (GET_CODE (op
) == SYMBOL_REF
)
2184 else if (GET_CODE (op
) != CONST
2185 || GET_CODE (XEXP (op
, 0)) != PLUS
2186 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2187 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2192 rtx sum
= XEXP (op
, 0);
2193 HOST_WIDE_INT summand
;
2195 /* We have to be careful here, because it is the referenced address
2196 that must be 32k from _SDA_BASE_, not just the symbol. */
2197 summand
= INTVAL (XEXP (sum
, 1));
2198 if (summand
< 0 || summand
> g_switch_value
)
2201 sym_ref
= XEXP (sum
, 0);
2204 if (*XSTR (sym_ref
, 0) != '@')
2215 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2220 switch (GET_CODE(op
))
2223 if (CONSTANT_POOL_ADDRESS_P (op
))
2225 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2233 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2242 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2243 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2245 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2254 constant_pool_expr_p (op
)
2259 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2263 toc_relative_expr_p (op
)
2268 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2271 /* Try machine-dependent ways of modifying an illegitimate address
2272 to be legitimate. If we find one, return the new, valid address.
2273 This is used from only one place: `memory_address' in explow.c.
2275 OLDX is the address as it was before break_out_memory_refs was
2276 called. In some cases it is useful to look at this to decide what
2279 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2281 It is always safe for this function to do nothing. It exists to
2282 recognize opportunities to optimize the output.
2284 On RS/6000, first check for the sum of a register with a constant
2285 integer that is out of range. If so, generate code to add the
2286 constant with the low-order 16 bits masked to the register and force
2287 this result into another register (this can be done with `cau').
2288 Then generate an address of REG+(CONST&0xffff), allowing for the
2289 possibility of bit 16 being a one.
2291 Then check for the sum of a register and something not constant, try to
2292 load the other things into a register and return the sum. */
2294 rs6000_legitimize_address (x
, oldx
, mode
)
2296 rtx oldx ATTRIBUTE_UNUSED
;
2297 enum machine_mode mode
;
2299 if (GET_CODE (x
) == PLUS
2300 && GET_CODE (XEXP (x
, 0)) == REG
2301 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2302 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2304 HOST_WIDE_INT high_int
, low_int
;
2306 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2307 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2308 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2309 GEN_INT (high_int
)), 0);
2310 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2312 else if (GET_CODE (x
) == PLUS
2313 && GET_CODE (XEXP (x
, 0)) == REG
2314 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2315 && GET_MODE_NUNITS (mode
) == 1
2316 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2318 || (mode
!= DFmode
&& mode
!= TFmode
))
2319 && (TARGET_POWERPC64
|| mode
!= DImode
)
2322 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2323 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2325 else if (ALTIVEC_VECTOR_MODE (mode
))
2329 /* Make sure both operands are registers. */
2330 if (GET_CODE (x
) == PLUS
)
2331 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2332 force_reg (Pmode
, XEXP (x
, 1)));
2334 reg
= force_reg (Pmode
, x
);
2337 else if (SPE_VECTOR_MODE (mode
))
2339 /* We accept [reg + reg] and [reg + OFFSET]. */
2341 if (GET_CODE (x
) == PLUS
)
2343 rtx op1
= XEXP (x
, 0);
2344 rtx op2
= XEXP (x
, 1);
2346 op1
= force_reg (Pmode
, op1
);
2348 if (GET_CODE (op2
) != REG
2349 && (GET_CODE (op2
) != CONST_INT
2350 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2351 op2
= force_reg (Pmode
, op2
);
2353 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2356 return force_reg (Pmode
, x
);
2362 && GET_CODE (x
) != CONST_INT
2363 && GET_CODE (x
) != CONST_DOUBLE
2365 && GET_MODE_NUNITS (mode
) == 1
2366 && (GET_MODE_BITSIZE (mode
) <= 32
2367 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2369 rtx reg
= gen_reg_rtx (Pmode
);
2370 emit_insn (gen_elf_high (reg
, (x
)));
2371 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2373 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2376 && ! MACHO_DYNAMIC_NO_PIC_P
2378 && GET_CODE (x
) != CONST_INT
2379 && GET_CODE (x
) != CONST_DOUBLE
2381 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2385 rtx reg
= gen_reg_rtx (Pmode
);
2386 emit_insn (gen_macho_high (reg
, (x
)));
2387 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2390 && CONSTANT_POOL_EXPR_P (x
)
2391 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2393 return create_TOC_reference (x
);
2399 /* The convention appears to be to define this wherever it is used.
2400 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2401 is now used here. */
2402 #ifndef REG_MODE_OK_FOR_BASE_P
2403 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2406 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2407 replace the input X, or the original X if no replacement is called for.
2408 The output parameter *WIN is 1 if the calling macro should goto WIN,
2411 For RS/6000, we wish to handle large displacements off a base
2412 register by splitting the addend across an addiu/addis and the mem insn.
2413 This cuts number of extra insns needed from 3 to 1.
2415 On Darwin, we use this to generate code for floating point constants.
2416 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2417 The Darwin code is inside #if TARGET_MACHO because only then is
2418 machopic_function_base_name() defined. */
2420 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2422 enum machine_mode mode
;
2425 int ind_levels ATTRIBUTE_UNUSED
;
2428 /* We must recognize output that we have already generated ourselves. */
2429 if (GET_CODE (x
) == PLUS
2430 && GET_CODE (XEXP (x
, 0)) == PLUS
2431 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2432 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2433 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2435 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2436 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2437 opnum
, (enum reload_type
)type
);
2443 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2444 && GET_CODE (x
) == LO_SUM
2445 && GET_CODE (XEXP (x
, 0)) == PLUS
2446 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2447 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2448 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2449 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2450 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2451 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2452 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2454 /* Result of previous invocation of this function on Darwin
2455 floating point constant. */
2456 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2457 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2458 opnum
, (enum reload_type
)type
);
2463 if (GET_CODE (x
) == PLUS
2464 && GET_CODE (XEXP (x
, 0)) == REG
2465 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2466 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2467 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2468 && !SPE_VECTOR_MODE (mode
)
2469 && !ALTIVEC_VECTOR_MODE (mode
))
2471 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2472 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2474 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2476 /* Check for 32-bit overflow. */
2477 if (high
+ low
!= val
)
2483 /* Reload the high part into a base reg; leave the low part
2484 in the mem directly. */
2486 x
= gen_rtx_PLUS (GET_MODE (x
),
2487 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2491 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2492 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2493 opnum
, (enum reload_type
)type
);
2498 if (GET_CODE (x
) == SYMBOL_REF
2499 && DEFAULT_ABI
== ABI_DARWIN
2500 && !ALTIVEC_VECTOR_MODE (mode
)
2503 /* Darwin load of floating point constant. */
2504 rtx offset
= gen_rtx (CONST
, Pmode
,
2505 gen_rtx (MINUS
, Pmode
, x
,
2506 gen_rtx (SYMBOL_REF
, Pmode
,
2507 machopic_function_base_name ())));
2508 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2509 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2510 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2511 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2512 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2513 opnum
, (enum reload_type
)type
);
2517 if (GET_CODE (x
) == SYMBOL_REF
2518 && DEFAULT_ABI
== ABI_DARWIN
2519 && !ALTIVEC_VECTOR_MODE (mode
)
2520 && MACHO_DYNAMIC_NO_PIC_P
)
2522 /* Darwin load of floating point constant. */
2523 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2524 gen_rtx (HIGH
, Pmode
, x
), x
);
2525 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2526 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2527 opnum
, (enum reload_type
)type
);
2533 && CONSTANT_POOL_EXPR_P (x
)
2534 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2536 (x
) = create_TOC_reference (x
);
2544 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2545 that is a valid memory address for an instruction.
2546 The MODE argument is the machine mode for the MEM expression
2547 that wants to use this address.
2549 On the RS/6000, there are four valid address: a SYMBOL_REF that
2550 refers to a constant pool entry of an address (or the sum of it
2551 plus a constant), a short (16-bit signed) constant plus a register,
2552 the sum of two registers, or a register indirect, possibly with an
2553 auto-increment. For DFmode and DImode with a constant plus register,
2554 we must ensure that both words are addressable or PowerPC64 with offset
2557 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2558 32-bit DImode, TImode), indexed addressing cannot be used because
2559 adjacent memory cells are accessed by adding word-sized offsets
2560 during assembly output. */
2562 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2563 enum machine_mode mode
;
2567 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2569 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2570 && !ALTIVEC_VECTOR_MODE (mode
)
2571 && !SPE_VECTOR_MODE (mode
)
2573 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2575 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2577 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2579 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2581 && GET_CODE (x
) == PLUS
2582 && GET_CODE (XEXP (x
, 0)) == REG
2583 && XEXP (x
, 0) == virtual_stack_vars_rtx
2584 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2586 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2589 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2591 || (mode
!= DFmode
&& mode
!= TFmode
))
2592 && (TARGET_POWERPC64
|| mode
!= DImode
)
2593 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2595 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2600 /* Try to output insns to set TARGET equal to the constant C if it can
2601 be done in less than N insns. Do all computations in MODE.
2602 Returns the place where the output has been placed if it can be
2603 done and the insns have been emitted. If it would take more than N
2604 insns, zero is returned and no insns and emitted. */
2607 rs6000_emit_set_const (dest
, mode
, source
, n
)
2609 enum machine_mode mode
;
2610 int n ATTRIBUTE_UNUSED
;
2612 rtx result
, insn
, set
;
2613 HOST_WIDE_INT c0
, c1
;
2615 if (mode
== QImode
|| mode
== HImode
)
2618 dest
= gen_reg_rtx (mode
);
2619 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2622 else if (mode
== SImode
)
2624 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2626 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2627 GEN_INT (INTVAL (source
)
2628 & (~ (HOST_WIDE_INT
) 0xffff))));
2629 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2630 gen_rtx_IOR (SImode
, result
,
2631 GEN_INT (INTVAL (source
) & 0xffff))));
2634 else if (mode
== DImode
)
2636 if (GET_CODE (source
) == CONST_INT
)
2638 c0
= INTVAL (source
);
2641 else if (GET_CODE (source
) == CONST_DOUBLE
)
2643 #if HOST_BITS_PER_WIDE_INT >= 64
2644 c0
= CONST_DOUBLE_LOW (source
);
2647 c0
= CONST_DOUBLE_LOW (source
);
2648 c1
= CONST_DOUBLE_HIGH (source
);
2654 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2659 insn
= get_last_insn ();
2660 set
= single_set (insn
);
2661 if (! CONSTANT_P (SET_SRC (set
)))
2662 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2667 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2668 fall back to a straight forward decomposition. We do this to avoid
2669 exponential run times encountered when looking for longer sequences
2670 with rs6000_emit_set_const. */
2672 rs6000_emit_set_long_const (dest
, c1
, c2
)
2674 HOST_WIDE_INT c1
, c2
;
2676 if (!TARGET_POWERPC64
)
2678 rtx operand1
, operand2
;
2680 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2682 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2684 emit_move_insn (operand1
, GEN_INT (c1
));
2685 emit_move_insn (operand2
, GEN_INT (c2
));
2689 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2692 ud2
= (c1
& 0xffff0000) >> 16;
2693 #if HOST_BITS_PER_WIDE_INT >= 64
2697 ud4
= (c2
& 0xffff0000) >> 16;
2699 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2700 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2703 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2705 emit_move_insn (dest
, GEN_INT (ud1
));
2708 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2709 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2712 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2715 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2717 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2719 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2720 || (ud4
== 0 && ! (ud3
& 0x8000)))
2723 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2726 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2729 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2730 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2732 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2737 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2740 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2743 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2745 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2747 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2748 GEN_INT (ud2
<< 16)));
2750 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2756 /* Emit a move from SOURCE to DEST in mode MODE. */
2758 rs6000_emit_move (dest
, source
, mode
)
2761 enum machine_mode mode
;
2765 operands
[1] = source
;
2767 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2768 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2769 && ! FLOAT_MODE_P (mode
)
2770 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2772 /* FIXME. This should never happen. */
2773 /* Since it seems that it does, do the safe thing and convert
2775 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2777 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2778 && ! FLOAT_MODE_P (mode
)
2779 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2780 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2781 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2782 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2785 /* Check if GCC is setting up a block move that will end up using FP
2786 registers as temporaries. We must make sure this is acceptable. */
2787 if (GET_CODE (operands
[0]) == MEM
2788 && GET_CODE (operands
[1]) == MEM
2790 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2791 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2792 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2793 ? 32 : MEM_ALIGN (operands
[0])))
2794 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2796 : MEM_ALIGN (operands
[1]))))
2797 && ! MEM_VOLATILE_P (operands
[0])
2798 && ! MEM_VOLATILE_P (operands
[1]))
2800 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2801 adjust_address (operands
[1], SImode
, 0));
2802 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2803 adjust_address (operands
[1], SImode
, 4));
2807 if (!no_new_pseudos
)
2809 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
2810 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2811 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2813 rtx reg
= gen_reg_rtx (word_mode
);
2815 emit_insn (gen_rtx_SET (word_mode
, reg
,
2816 gen_rtx_ZERO_EXTEND (word_mode
,
2818 operands
[1] = gen_lowpart (mode
, reg
);
2820 if (GET_CODE (operands
[0]) != REG
)
2821 operands
[1] = force_reg (mode
, operands
[1]);
2824 if (mode
== SFmode
&& ! TARGET_POWERPC
2825 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2826 && GET_CODE (operands
[0]) == MEM
)
2830 if (reload_in_progress
|| reload_completed
)
2831 regnum
= true_regnum (operands
[1]);
2832 else if (GET_CODE (operands
[1]) == REG
)
2833 regnum
= REGNO (operands
[1]);
2837 /* If operands[1] is a register, on POWER it may have
2838 double-precision data in it, so truncate it to single
2840 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2843 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2844 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2845 operands
[1] = newreg
;
2849 /* Handle the case where reload calls us with an invalid address;
2850 and the case of CONSTANT_P_RTX. */
2851 if (!ALTIVEC_VECTOR_MODE (mode
)
2852 && (! general_operand (operands
[1], mode
)
2853 || ! nonimmediate_operand (operands
[0], mode
)
2854 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2856 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2860 /* FIXME: In the long term, this switch statement should go away
2861 and be replaced by a sequence of tests based on things like
2867 if (CONSTANT_P (operands
[1])
2868 && GET_CODE (operands
[1]) != CONST_INT
)
2869 operands
[1] = force_const_mem (mode
, operands
[1]);
2875 if (CONSTANT_P (operands
[1])
2876 && ! easy_fp_constant (operands
[1], mode
))
2877 operands
[1] = force_const_mem (mode
, operands
[1]);
2888 if (CONSTANT_P (operands
[1])
2889 && !easy_vector_constant (operands
[1], mode
))
2890 operands
[1] = force_const_mem (mode
, operands
[1]);
2895 /* Use default pattern for address of ELF small data */
2898 && DEFAULT_ABI
== ABI_V4
2899 && (GET_CODE (operands
[1]) == SYMBOL_REF
2900 || GET_CODE (operands
[1]) == CONST
)
2901 && small_data_operand (operands
[1], mode
))
2903 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2907 if (DEFAULT_ABI
== ABI_V4
2908 && mode
== Pmode
&& mode
== SImode
2909 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2911 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2915 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2919 && CONSTANT_P (operands
[1])
2920 && GET_CODE (operands
[1]) != HIGH
2921 && GET_CODE (operands
[1]) != CONST_INT
)
2923 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2925 /* If this is a function address on -mcall-aixdesc,
2926 convert it to the address of the descriptor. */
2927 if (DEFAULT_ABI
== ABI_AIX
2928 && GET_CODE (operands
[1]) == SYMBOL_REF
2929 && XSTR (operands
[1], 0)[0] == '.')
2931 const char *name
= XSTR (operands
[1], 0);
2933 while (*name
== '.')
2935 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2936 CONSTANT_POOL_ADDRESS_P (new_ref
)
2937 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2938 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2939 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2940 operands
[1] = new_ref
;
2943 if (DEFAULT_ABI
== ABI_DARWIN
)
2946 if (MACHO_DYNAMIC_NO_PIC_P
)
2948 /* Take care of any required data indirection. */
2949 operands
[1] = rs6000_machopic_legitimize_pic_address (
2950 operands
[1], mode
, operands
[0]);
2951 if (operands
[0] != operands
[1])
2952 emit_insn (gen_rtx_SET (VOIDmode
,
2953 operands
[0], operands
[1]));
2957 emit_insn (gen_macho_high (target
, operands
[1]));
2958 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2962 emit_insn (gen_elf_high (target
, operands
[1]));
2963 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2967 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2968 and we have put it in the TOC, we just need to make a TOC-relative
2971 && GET_CODE (operands
[1]) == SYMBOL_REF
2972 && CONSTANT_POOL_EXPR_P (operands
[1])
2973 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2974 get_pool_mode (operands
[1])))
2976 operands
[1] = create_TOC_reference (operands
[1]);
2978 else if (mode
== Pmode
2979 && CONSTANT_P (operands
[1])
2980 && ((GET_CODE (operands
[1]) != CONST_INT
2981 && ! easy_fp_constant (operands
[1], mode
))
2982 || (GET_CODE (operands
[1]) == CONST_INT
2983 && num_insns_constant (operands
[1], mode
) > 2)
2984 || (GET_CODE (operands
[0]) == REG
2985 && FP_REGNO_P (REGNO (operands
[0]))))
2986 && GET_CODE (operands
[1]) != HIGH
2987 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2988 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2990 /* Emit a USE operation so that the constant isn't deleted if
2991 expensive optimizations are turned on because nobody
2992 references it. This should only be done for operands that
2993 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2994 This should not be done for operands that contain LABEL_REFs.
2995 For now, we just handle the obvious case. */
2996 if (GET_CODE (operands
[1]) != LABEL_REF
)
2997 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3000 /* Darwin uses a special PIC legitimizer. */
3001 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3004 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3006 if (operands
[0] != operands
[1])
3007 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3012 /* If we are to limit the number of things we put in the TOC and
3013 this is a symbol plus a constant we can add in one insn,
3014 just put the symbol in the TOC and add the constant. Don't do
3015 this if reload is in progress. */
3016 if (GET_CODE (operands
[1]) == CONST
3017 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3018 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3019 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3020 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3021 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3022 && ! side_effects_p (operands
[0]))
3025 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3026 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3028 sym
= force_reg (mode
, sym
);
3030 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3032 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3036 operands
[1] = force_const_mem (mode
, operands
[1]);
3039 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
3040 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3041 get_pool_constant (XEXP (operands
[1], 0)),
3042 get_pool_mode (XEXP (operands
[1], 0))))
3045 = gen_rtx_MEM (mode
,
3046 create_TOC_reference (XEXP (operands
[1], 0)));
3047 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3048 RTX_UNCHANGING_P (operands
[1]) = 1;
3054 if (GET_CODE (operands
[0]) == MEM
3055 && GET_CODE (XEXP (operands
[0], 0)) != REG
3056 && ! reload_in_progress
)
3058 = replace_equiv_address (operands
[0],
3059 copy_addr_to_reg (XEXP (operands
[0], 0)));
3061 if (GET_CODE (operands
[1]) == MEM
3062 && GET_CODE (XEXP (operands
[1], 0)) != REG
3063 && ! reload_in_progress
)
3065 = replace_equiv_address (operands
[1],
3066 copy_addr_to_reg (XEXP (operands
[1], 0)));
3069 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3071 gen_rtx_SET (VOIDmode
,
3072 operands
[0], operands
[1]),
3073 gen_rtx_CLOBBER (VOIDmode
,
3074 gen_rtx_SCRATCH (SImode
)))));
3083 /* Above, we may have called force_const_mem which may have returned
3084 an invalid address. If we can, fix this up; otherwise, reload will
3085 have to deal with it. */
3086 if (GET_CODE (operands
[1]) == MEM
3087 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
3088 && ! reload_in_progress
)
3089 operands
[1] = adjust_address (operands
[1], mode
, 0);
3091 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3095 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3096 for a call to a function whose data type is FNTYPE.
3097 For a library call, FNTYPE is 0.
3099 For incoming args we set the number of arguments in the prototype large
3100 so we never return a PARALLEL. */
3103 init_cumulative_args (cum
, fntype
, libname
, incoming
)
3104 CUMULATIVE_ARGS
*cum
;
3106 rtx libname ATTRIBUTE_UNUSED
;
3109 static CUMULATIVE_ARGS zero_cumulative
;
3111 *cum
= zero_cumulative
;
3113 cum
->fregno
= FP_ARG_MIN_REG
;
3114 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
3115 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
3116 cum
->call_cookie
= CALL_NORMAL
;
3117 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
3120 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
3122 else if (cum
->prototype
)
3123 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
3124 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
3125 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
3128 cum
->nargs_prototype
= 0;
3130 cum
->orig_nargs
= cum
->nargs_prototype
;
3132 /* Check for a longcall attribute. */
3134 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
3135 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
3136 cum
->call_cookie
= CALL_LONG
;
3138 if (TARGET_DEBUG_ARG
)
3140 fprintf (stderr
, "\ninit_cumulative_args:");
3143 tree ret_type
= TREE_TYPE (fntype
);
3144 fprintf (stderr
, " ret code = %s,",
3145 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3148 if (cum
->call_cookie
& CALL_LONG
)
3149 fprintf (stderr
, " longcall,");
3151 fprintf (stderr
, " proto = %d, nargs = %d\n",
3152 cum
->prototype
, cum
->nargs_prototype
);
3156 /* If defined, a C expression which determines whether, and in which
3157 direction, to pad out an argument with extra space. The value
3158 should be of type `enum direction': either `upward' to pad above
3159 the argument, `downward' to pad below, or `none' to inhibit
3162 For the AIX ABI structs are always stored left shifted in their
3166 function_arg_padding (mode
, type
)
3167 enum machine_mode mode
;
3170 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3173 /* This is the default definition. */
3174 return (! BYTES_BIG_ENDIAN
3177 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3178 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3179 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3180 ? downward
: upward
));
3183 /* If defined, a C expression that gives the alignment boundary, in bits,
3184 of an argument with the specified mode and type. If it is not defined,
3185 PARM_BOUNDARY is used for all arguments.
3187 V.4 wants long longs to be double word aligned. */
3190 function_arg_boundary (mode
, type
)
3191 enum machine_mode mode
;
3192 tree type ATTRIBUTE_UNUSED
;
3194 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3196 else if (SPE_VECTOR_MODE (mode
))
3198 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3201 return PARM_BOUNDARY
;
3204 /* Update the data in CUM to advance over an argument
3205 of mode MODE and data type TYPE.
3206 (TYPE is null for libcalls where that information may not be available.) */
3209 function_arg_advance (cum
, mode
, type
, named
)
3210 CUMULATIVE_ARGS
*cum
;
3211 enum machine_mode mode
;
3215 cum
->nargs_prototype
--;
3217 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3219 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3222 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3224 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3225 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3227 else if (DEFAULT_ABI
== ABI_V4
)
3229 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3230 && (mode
== SFmode
|| mode
== DFmode
))
3232 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3237 cum
->words
+= cum
->words
& 1;
3238 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3244 int gregno
= cum
->sysv_gregno
;
3246 /* Aggregates and IEEE quad get passed by reference. */
3247 if ((type
&& AGGREGATE_TYPE_P (type
))
3251 n_words
= RS6000_ARG_SIZE (mode
, type
);
3253 /* Long long and SPE vectors are put in odd registers. */
3254 if (n_words
== 2 && (gregno
& 1) == 0)
3257 /* Long long and SPE vectors are not split between registers
3259 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3261 /* Long long is aligned on the stack. */
3263 cum
->words
+= cum
->words
& 1;
3264 cum
->words
+= n_words
;
3267 /* Note: continuing to accumulate gregno past when we've started
3268 spilling to the stack indicates the fact that we've started
3269 spilling to the stack to expand_builtin_saveregs. */
3270 cum
->sysv_gregno
= gregno
+ n_words
;
3273 if (TARGET_DEBUG_ARG
)
3275 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3276 cum
->words
, cum
->fregno
);
3277 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3278 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3279 fprintf (stderr
, "mode = %4s, named = %d\n",
3280 GET_MODE_NAME (mode
), named
);
3285 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3286 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3288 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3290 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3291 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3292 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3294 if (TARGET_DEBUG_ARG
)
3296 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3297 cum
->words
, cum
->fregno
);
3298 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3299 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3300 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3305 /* Determine where to put an argument to a function.
3306 Value is zero to push the argument on the stack,
3307 or a hard register in which to store the argument.
3309 MODE is the argument's machine mode.
3310 TYPE is the data type of the argument (as a tree).
3311 This is null for libcalls where that information may
3313 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3314 the preceding args and about the function being called.
3315 NAMED is nonzero if this argument is a named parameter
3316 (otherwise it is an extra parameter matching an ellipsis).
3318 On RS/6000 the first eight words of non-FP are normally in registers
3319 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3320 Under V.4, the first 8 FP args are in registers.
3322 If this is floating-point and no prototype is specified, we use
3323 both an FP and integer register (or possibly FP reg and stack). Library
3324 functions (when TYPE is zero) always have the proper types for args,
3325 so we can pass the FP value just in one register. emit_library_function
3326 doesn't support PARALLEL anyway. */
3329 function_arg (cum
, mode
, type
, named
)
3330 CUMULATIVE_ARGS
*cum
;
3331 enum machine_mode mode
;
3335 enum rs6000_abi abi
= DEFAULT_ABI
;
3337 /* Return a marker to indicate whether CR1 needs to set or clear the
3338 bit that V.4 uses to say fp args were passed in registers.
3339 Assume that we don't need the marker for software floating point,
3340 or compiler generated library calls. */
3341 if (mode
== VOIDmode
)
3344 && cum
->nargs_prototype
< 0
3345 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3347 /* For the SPE, we need to crxor CR6 always. */
3349 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3350 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3351 return GEN_INT (cum
->call_cookie
3352 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3353 ? CALL_V4_SET_FP_ARGS
3354 : CALL_V4_CLEAR_FP_ARGS
));
3357 return GEN_INT (cum
->call_cookie
);
3360 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3362 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3363 return gen_rtx_REG (mode
, cum
->vregno
);
3367 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3369 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3370 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3374 else if (abi
== ABI_V4
)
3376 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3377 && (mode
== SFmode
|| mode
== DFmode
))
3379 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3380 return gen_rtx_REG (mode
, cum
->fregno
);
3387 int gregno
= cum
->sysv_gregno
;
3389 /* Aggregates and IEEE quad get passed by reference. */
3390 if ((type
&& AGGREGATE_TYPE_P (type
))
3394 n_words
= RS6000_ARG_SIZE (mode
, type
);
3396 /* Long long and SPE vectors are put in odd registers. */
3397 if (n_words
== 2 && (gregno
& 1) == 0)
3400 /* Long long and SPE vectors are not split between registers
3402 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3404 /* SPE vectors in ... get split into 2 registers. */
3405 if (TARGET_SPE
&& TARGET_SPE_ABI
3406 && SPE_VECTOR_MODE (mode
) && !named
)
3409 enum machine_mode m
= SImode
;
3411 r1
= gen_rtx_REG (m
, gregno
);
3412 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3413 r2
= gen_rtx_REG (m
, gregno
+ 1);
3414 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3415 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3417 return gen_rtx_REG (mode
, gregno
);
3425 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3426 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3427 int align_words
= cum
->words
+ align
;
3429 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3432 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3435 || ((cum
->nargs_prototype
> 0)
3436 /* IBM AIX extended its linkage convention definition always
3437 to require FP args after register save area hole on the
3439 && (DEFAULT_ABI
!= ABI_AIX
3441 || (align_words
< GP_ARG_NUM_REG
))))
3442 return gen_rtx_REG (mode
, cum
->fregno
);
3444 return gen_rtx_PARALLEL (mode
,
3446 gen_rtx_EXPR_LIST (VOIDmode
,
3447 ((align_words
>= GP_ARG_NUM_REG
)
3450 + RS6000_ARG_SIZE (mode
, type
)
3452 /* If this is partially on the stack, then
3453 we only include the portion actually
3454 in registers here. */
3455 ? gen_rtx_REG (SImode
,
3456 GP_ARG_MIN_REG
+ align_words
)
3457 : gen_rtx_REG (mode
,
3458 GP_ARG_MIN_REG
+ align_words
))),
3460 gen_rtx_EXPR_LIST (VOIDmode
,
3461 gen_rtx_REG (mode
, cum
->fregno
),
3464 else if (align_words
< GP_ARG_NUM_REG
)
3465 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3471 /* For an arg passed partly in registers and partly in memory,
3472 this is the number of registers used.
3473 For args passed entirely in registers or entirely in memory, zero. */
3476 function_arg_partial_nregs (cum
, mode
, type
, named
)
3477 CUMULATIVE_ARGS
*cum
;
3478 enum machine_mode mode
;
3480 int named ATTRIBUTE_UNUSED
;
3482 if (DEFAULT_ABI
== ABI_V4
)
3485 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3486 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3488 if (cum
->nargs_prototype
>= 0)
3492 if (cum
->words
< GP_ARG_NUM_REG
3493 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3495 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3496 if (ret
&& TARGET_DEBUG_ARG
)
3497 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3505 /* A C expression that indicates when an argument must be passed by
3506 reference. If nonzero for an argument, a copy of that argument is
3507 made in memory and a pointer to the argument is passed instead of
3508 the argument itself. The pointer is passed in whatever way is
3509 appropriate for passing a pointer to that type.
3511 Under V.4, structures and unions are passed by reference.
3513 As an extension to all ABIs, variable sized types are passed by
3517 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3518 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3519 enum machine_mode mode ATTRIBUTE_UNUSED
;
3521 int named ATTRIBUTE_UNUSED
;
3523 if (DEFAULT_ABI
== ABI_V4
3524 && ((type
&& AGGREGATE_TYPE_P (type
))
3527 if (TARGET_DEBUG_ARG
)
3528 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3532 return type
&& int_size_in_bytes (type
) <= 0;
3535 /* Perform any needed actions needed for a function that is receiving a
3536 variable number of arguments.
3540 MODE and TYPE are the mode and type of the current parameter.
3542 PRETEND_SIZE is a variable that should be set to the amount of stack
3543 that must be pushed by the prolog to pretend that our caller pushed
3546 Normally, this macro will push all remaining incoming registers on the
3547 stack and set PRETEND_SIZE to the length of the registers pushed. */
3550 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3551 CUMULATIVE_ARGS
*cum
;
3552 enum machine_mode mode
;
3554 int *pretend_size ATTRIBUTE_UNUSED
;
3558 CUMULATIVE_ARGS next_cum
;
3559 int reg_size
= TARGET_32BIT
? 4 : 8;
3560 rtx save_area
= NULL_RTX
, mem
;
3561 int first_reg_offset
, set
;
3565 fntype
= TREE_TYPE (current_function_decl
);
3566 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3567 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3568 != void_type_node
));
3570 /* For varargs, we do not want to skip the dummy va_dcl argument.
3571 For stdargs, we do want to skip the last named argument. */
3574 function_arg_advance (&next_cum
, mode
, type
, 1);
3576 if (DEFAULT_ABI
== ABI_V4
)
3578 /* Indicate to allocate space on the stack for varargs save area. */
3579 cfun
->machine
->sysv_varargs_p
= 1;
3581 save_area
= plus_constant (virtual_stack_vars_rtx
,
3582 - RS6000_VARARGS_SIZE
);
3584 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3588 first_reg_offset
= next_cum
.words
;
3589 save_area
= virtual_incoming_args_rtx
;
3590 cfun
->machine
->sysv_varargs_p
= 0;
3592 if (MUST_PASS_IN_STACK (mode
, type
))
3593 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3596 set
= get_varargs_alias_set ();
3597 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3599 mem
= gen_rtx_MEM (BLKmode
,
3600 plus_constant (save_area
,
3601 first_reg_offset
* reg_size
)),
3602 set_mem_alias_set (mem
, set
);
3603 set_mem_align (mem
, BITS_PER_WORD
);
3606 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3607 GP_ARG_NUM_REG
- first_reg_offset
,
3608 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3611 /* Save FP registers if needed. */
3612 if (DEFAULT_ABI
== ABI_V4
3613 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3615 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3617 int fregno
= next_cum
.fregno
;
3618 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3619 rtx lab
= gen_label_rtx ();
3620 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3622 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3624 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3625 gen_rtx_NE (VOIDmode
, cr1
,
3627 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3630 while (fregno
<= FP_ARG_V4_MAX_REG
)
3632 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3633 set_mem_alias_set (mem
, set
);
3634 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3643 /* Create the va_list data type. */
3646 rs6000_build_va_list ()
3648 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3650 /* For AIX, prefer 'char *' because that's what the system
3651 header files like. */
3652 if (DEFAULT_ABI
!= ABI_V4
)
3653 return build_pointer_type (char_type_node
);
3655 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3656 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3658 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3659 unsigned_char_type_node
);
3660 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3661 unsigned_char_type_node
);
3662 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3664 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3667 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3668 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3669 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3670 DECL_FIELD_CONTEXT (f_sav
) = record
;
3672 TREE_CHAIN (record
) = type_decl
;
3673 TYPE_NAME (record
) = type_decl
;
3674 TYPE_FIELDS (record
) = f_gpr
;
3675 TREE_CHAIN (f_gpr
) = f_fpr
;
3676 TREE_CHAIN (f_fpr
) = f_ovf
;
3677 TREE_CHAIN (f_ovf
) = f_sav
;
3679 layout_type (record
);
3681 /* The correct type is an array type of one element. */
3682 return build_array_type (record
, build_index_type (size_zero_node
));
3685 /* Implement va_start. */
3688 rs6000_va_start (valist
, nextarg
)
3692 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3693 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3694 tree gpr
, fpr
, ovf
, sav
, t
;
3696 /* Only SVR4 needs something special. */
3697 if (DEFAULT_ABI
!= ABI_V4
)
3699 std_expand_builtin_va_start (valist
, nextarg
);
3703 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3704 f_fpr
= TREE_CHAIN (f_gpr
);
3705 f_ovf
= TREE_CHAIN (f_fpr
);
3706 f_sav
= TREE_CHAIN (f_ovf
);
3708 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3709 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3710 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3711 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3712 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3714 /* Count number of gp and fp argument registers used. */
3715 words
= current_function_args_info
.words
;
3716 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3717 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3719 if (TARGET_DEBUG_ARG
)
3721 fputs ("va_start: words = ", stderr
);
3722 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3723 fputs (", n_gpr = ", stderr
);
3724 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3725 fputs (", n_fpr = ", stderr
);
3726 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3727 putc ('\n', stderr
);
3730 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3731 TREE_SIDE_EFFECTS (t
) = 1;
3732 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3734 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3735 TREE_SIDE_EFFECTS (t
) = 1;
3736 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3738 /* Find the overflow area. */
3739 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3741 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3742 build_int_2 (words
* UNITS_PER_WORD
, 0));
3743 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3744 TREE_SIDE_EFFECTS (t
) = 1;
3745 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3747 /* Find the register save area. */
3748 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3749 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3750 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3751 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3752 TREE_SIDE_EFFECTS (t
) = 1;
3753 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3756 /* Implement va_arg. */
3759 rs6000_va_arg (valist
, type
)
3762 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3763 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3764 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3765 rtx lab_false
, lab_over
, addr_rtx
, r
;
3767 if (DEFAULT_ABI
!= ABI_V4
)
3769 /* Variable sized types are passed by reference. */
3770 if (int_size_in_bytes (type
) <= 0)
3772 u
= build_pointer_type (type
);
3774 /* Args grow upward. */
3775 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
3776 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
3777 TREE_SIDE_EFFECTS (t
) = 1;
3779 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
3780 TREE_SIDE_EFFECTS (t
) = 1;
3782 t
= build1 (INDIRECT_REF
, u
, t
);
3783 TREE_SIDE_EFFECTS (t
) = 1;
3785 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
3788 return std_expand_builtin_va_arg (valist
, type
);
3791 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3792 f_fpr
= TREE_CHAIN (f_gpr
);
3793 f_ovf
= TREE_CHAIN (f_fpr
);
3794 f_sav
= TREE_CHAIN (f_ovf
);
3796 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3797 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3798 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3799 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3800 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3802 size
= int_size_in_bytes (type
);
3803 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3805 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3807 /* Aggregates and long doubles are passed by reference. */
3813 size
= UNITS_PER_WORD
;
3816 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3818 /* FP args go in FP registers, if present. */
3827 /* Otherwise into GP registers. */
3835 /* Pull the value out of the saved registers ... */
3837 lab_false
= gen_label_rtx ();
3838 lab_over
= gen_label_rtx ();
3839 addr_rtx
= gen_reg_rtx (Pmode
);
3841 /* AltiVec vectors never go in registers. */
3842 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3844 TREE_THIS_VOLATILE (reg
) = 1;
3845 emit_cmp_and_jump_insns
3846 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3847 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3850 /* Long long is aligned in the registers. */
3853 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3854 build_int_2 (n_reg
- 1, 0));
3855 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3856 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3857 TREE_SIDE_EFFECTS (u
) = 1;
3858 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3862 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3866 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3867 build_int_2 (n_reg
, 0));
3868 TREE_SIDE_EFFECTS (u
) = 1;
3870 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3871 TREE_SIDE_EFFECTS (u
) = 1;
3873 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3874 TREE_SIDE_EFFECTS (u
) = 1;
3876 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3877 TREE_SIDE_EFFECTS (t
) = 1;
3879 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3881 emit_move_insn (addr_rtx
, r
);
3883 emit_jump_insn (gen_jump (lab_over
));
3887 emit_label (lab_false
);
3889 /* ... otherwise out of the overflow area. */
3891 /* Make sure we don't find reg 7 for the next int arg.
3893 All AltiVec vectors go in the overflow area. So in the AltiVec
3894 case we need to get the vectors from the overflow area, but
3895 remember where the GPRs and FPRs are. */
3896 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3897 || !TARGET_ALTIVEC
))
3899 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3900 TREE_SIDE_EFFECTS (t
) = 1;
3901 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3904 /* Care for on-stack alignment if needed. */
3911 /* AltiVec vectors are 16 byte aligned. */
3912 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3917 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3918 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3922 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3924 emit_move_insn (addr_rtx
, r
);
3926 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3927 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3928 TREE_SIDE_EFFECTS (t
) = 1;
3929 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3931 emit_label (lab_over
);
3935 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3936 set_mem_alias_set (r
, get_varargs_alias_set ());
3937 emit_move_insn (addr_rtx
, r
);
/* Register builtin NAME, but only when its MASK bit is set in
   target_flags.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3952 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3954 static const struct builtin_description bdesc_3arg
[] =
3956 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3957 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3958 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3959 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3960 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3961 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3962 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3963 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3964 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3965 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3966 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3967 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3968 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3969 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3970 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3971 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3972 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3973 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3974 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3975 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3976 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3977 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3978 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3981 /* DST operations: void foo (void *, const int, const char). */
3983 static const struct builtin_description bdesc_dst
[] =
3985 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3986 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3987 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3988 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3991 /* Simple binary operations: VECc = foo (VECa, VECb). */
3993 static struct builtin_description bdesc_2arg
[] =
3995 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3996 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3997 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3998 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3999 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
4000 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
4001 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
4002 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
4003 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
4004 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
4005 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
4006 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
4007 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
4008 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
4009 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
4010 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
4011 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
4012 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
4013 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
4014 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
4015 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
4016 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
4017 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
4018 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
4019 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
4020 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
4021 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
4022 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
4023 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
4024 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
4025 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
4026 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
4027 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
4028 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
4029 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
4030 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
4031 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
4032 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
4033 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
4034 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
4035 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
4036 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
4037 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
4038 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
4039 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
4040 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
4041 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
4042 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
4043 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
4044 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
4045 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
4046 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
4047 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
4048 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
4049 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
4050 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
4051 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
4052 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
4053 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
4054 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
4055 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
4056 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
4057 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
4058 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
4059 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
4060 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
4061 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
4062 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
4063 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
4064 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
4065 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
4066 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
4067 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
4068 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
4069 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
4070 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
4071 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
4072 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
4073 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
4074 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
4075 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
4076 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
4077 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
4078 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
4079 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
4080 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
4081 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
4082 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
4083 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
4084 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
4085 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
4086 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
4087 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
4088 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
4089 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
4090 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
4091 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
4092 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
4093 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
4094 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
4095 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
4096 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
4097 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
4098 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
4099 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
4100 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
4101 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
4102 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
4103 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
4104 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
4105 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
4106 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
4107 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
4109 /* Place holder, leave as first spe builtin. */
4110 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
4111 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
4112 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
4113 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
4114 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
4115 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
4116 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
4117 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
4118 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
4119 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
4120 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
4121 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
4122 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
4123 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
4124 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
4125 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
4126 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
4127 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
4128 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
4129 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
4130 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
4131 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
4132 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
4133 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
4134 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
4135 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
4136 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
4137 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
4138 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
4139 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
4140 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
4141 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
4142 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
4143 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
4144 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
4145 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
4146 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
4147 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
4148 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
4149 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
4150 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
4151 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
4152 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
4153 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
4154 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
4155 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
4156 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
4157 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
4158 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
4159 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
4160 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
4161 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
4162 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
4163 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
4164 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
4165 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
4166 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
4167 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
4168 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
4169 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
4170 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
4171 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
4172 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
4173 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
4174 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4175 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4176 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4177 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4178 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4179 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4180 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4181 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4182 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4183 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4184 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4185 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4186 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4187 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4188 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4189 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4190 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4191 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4192 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4193 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4194 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4195 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4196 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4197 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4198 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4199 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4200 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4201 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4202 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4203 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4204 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4205 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4206 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4207 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4208 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4209 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4210 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4211 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4212 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4213 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4214 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4215 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4216 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4217 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4218 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4220 /* SPE binary operations expecting a 5-bit unsigned literal. */
4221 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4223 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4224 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4225 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4226 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4227 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4228 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4229 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4230 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4231 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4232 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4233 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4234 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4235 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4236 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4237 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4238 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4239 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4240 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4241 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4242 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4243 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4244 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4245 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4246 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4247 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4248 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4250 /* Place-holder. Leave as last binary SPE builtin. */
4251 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4254 /* AltiVec predicates. */
4256 struct builtin_description_predicates
4258 const unsigned int mask
;
4259 const enum insn_code icode
;
4261 const char *const name
;
4262 const enum rs6000_builtins code
;
4265 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4267 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4268 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4269 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4270 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4271 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4272 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4273 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4274 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4275 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4276 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4277 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4278 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4279 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4282 /* SPE predicates. */
4283 static struct builtin_description bdesc_spe_predicates
[] =
4285 /* Place-holder. Leave as first. */
4286 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4287 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4288 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4289 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4290 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4291 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4292 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4293 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4294 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4295 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4296 /* Place-holder. Leave as last. */
4297 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4300 /* SPE evsel predicates. */
4301 static struct builtin_description bdesc_spe_evsel
[] =
4303 /* Place-holder. Leave as first. */
4304 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4305 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4306 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4307 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4308 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4309 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4310 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4311 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4312 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4313 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4314 /* Place-holder. Leave as last. */
4315 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4318 /* ABS* operations. */
4320 static const struct builtin_description bdesc_abs
[] =
4322 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4323 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4324 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4325 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4326 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4327 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4328 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4331 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4334 static struct builtin_description bdesc_1arg
[] =
4336 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4337 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4338 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4339 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4340 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4341 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4342 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4343 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4344 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4345 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4346 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4347 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4348 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4349 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4350 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4351 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4352 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4354 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4355 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4356 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4357 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4358 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4359 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4360 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4361 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4362 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4363 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4364 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4365 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4366 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4367 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4368 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4369 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4370 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4371 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4372 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4373 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4374 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4375 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4376 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4377 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4378 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4379 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4380 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4381 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4382 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4383 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4384 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4385 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4387 /* Place-holder. Leave as last unary SPE builtin. */
4388 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4392 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4393 enum insn_code icode
;
4398 tree arg0
= TREE_VALUE (arglist
);
4399 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4400 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4401 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4403 if (icode
== CODE_FOR_nothing
)
4404 /* Builtin not supported on this processor. */
4407 /* If we got invalid arguments bail out before generating bad rtl. */
4408 if (arg0
== error_mark_node
)
4411 if (icode
== CODE_FOR_altivec_vspltisb
4412 || icode
== CODE_FOR_altivec_vspltish
4413 || icode
== CODE_FOR_altivec_vspltisw
4414 || icode
== CODE_FOR_spe_evsplatfi
4415 || icode
== CODE_FOR_spe_evsplati
)
4417 /* Only allow 5-bit *signed* literals. */
4418 if (GET_CODE (op0
) != CONST_INT
4419 || INTVAL (op0
) > 0x1f
4420 || INTVAL (op0
) < -0x1f)
4422 error ("argument 1 must be a 5-bit signed literal");
4428 || GET_MODE (target
) != tmode
4429 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4430 target
= gen_reg_rtx (tmode
);
4432 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4433 op0
= copy_to_mode_reg (mode0
, op0
);
4435 pat
= GEN_FCN (icode
) (target
, op0
);
4444 altivec_expand_abs_builtin (icode
, arglist
, target
)
4445 enum insn_code icode
;
4449 rtx pat
, scratch1
, scratch2
;
4450 tree arg0
= TREE_VALUE (arglist
);
4451 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4452 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4453 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4455 /* If we have invalid arguments, bail out before generating bad rtl. */
4456 if (arg0
== error_mark_node
)
4460 || GET_MODE (target
) != tmode
4461 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4462 target
= gen_reg_rtx (tmode
);
4464 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4465 op0
= copy_to_mode_reg (mode0
, op0
);
4467 scratch1
= gen_reg_rtx (mode0
);
4468 scratch2
= gen_reg_rtx (mode0
);
4470 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4479 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4480 enum insn_code icode
;
4485 tree arg0
= TREE_VALUE (arglist
);
4486 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4487 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4488 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4489 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4490 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4491 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4493 if (icode
== CODE_FOR_nothing
)
4494 /* Builtin not supported on this processor. */
4497 /* If we got invalid arguments bail out before generating bad rtl. */
4498 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4501 if (icode
== CODE_FOR_altivec_vcfux
4502 || icode
== CODE_FOR_altivec_vcfsx
4503 || icode
== CODE_FOR_altivec_vctsxs
4504 || icode
== CODE_FOR_altivec_vctuxs
4505 || icode
== CODE_FOR_altivec_vspltb
4506 || icode
== CODE_FOR_altivec_vsplth
4507 || icode
== CODE_FOR_altivec_vspltw
4508 || icode
== CODE_FOR_spe_evaddiw
4509 || icode
== CODE_FOR_spe_evldd
4510 || icode
== CODE_FOR_spe_evldh
4511 || icode
== CODE_FOR_spe_evldw
4512 || icode
== CODE_FOR_spe_evlhhesplat
4513 || icode
== CODE_FOR_spe_evlhhossplat
4514 || icode
== CODE_FOR_spe_evlhhousplat
4515 || icode
== CODE_FOR_spe_evlwhe
4516 || icode
== CODE_FOR_spe_evlwhos
4517 || icode
== CODE_FOR_spe_evlwhou
4518 || icode
== CODE_FOR_spe_evlwhsplat
4519 || icode
== CODE_FOR_spe_evlwwsplat
4520 || icode
== CODE_FOR_spe_evrlwi
4521 || icode
== CODE_FOR_spe_evslwi
4522 || icode
== CODE_FOR_spe_evsrwis
4523 || icode
== CODE_FOR_spe_evsrwiu
)
4525 /* Only allow 5-bit unsigned literals. */
4526 if (TREE_CODE (arg1
) != INTEGER_CST
4527 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4529 error ("argument 2 must be a 5-bit unsigned literal");
4535 || GET_MODE (target
) != tmode
4536 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4537 target
= gen_reg_rtx (tmode
);
4539 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4540 op0
= copy_to_mode_reg (mode0
, op0
);
4541 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4542 op1
= copy_to_mode_reg (mode1
, op1
);
4544 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4553 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4554 enum insn_code icode
;
4560 tree cr6_form
= TREE_VALUE (arglist
);
4561 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4562 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4563 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4564 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4565 enum machine_mode tmode
= SImode
;
4566 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4567 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4570 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4572 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4576 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4581 /* If we have invalid arguments, bail out before generating bad rtl. */
4582 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4586 || GET_MODE (target
) != tmode
4587 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4588 target
= gen_reg_rtx (tmode
);
4590 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4591 op0
= copy_to_mode_reg (mode0
, op0
);
4592 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4593 op1
= copy_to_mode_reg (mode1
, op1
);
4595 scratch
= gen_reg_rtx (mode0
);
4597 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4598 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4603 /* The vec_any* and vec_all* predicates use the same opcodes for two
4604 different operations, but the bits in CR6 will be different
4605 depending on what information we want. So we have to play tricks
4606 with CR6 to get the right bits out.
4608 If you think this is disgusting, look at the specs for the
4609 AltiVec predicates. */
4611 switch (cr6_form_int
)
4614 emit_insn (gen_cr6_test_for_zero (target
));
4617 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4620 emit_insn (gen_cr6_test_for_lt (target
));
4623 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4626 error ("argument 1 of __builtin_altivec_predicate is out of range");
4634 altivec_expand_stv_builtin (icode
, arglist
)
4635 enum insn_code icode
;
4638 tree arg0
= TREE_VALUE (arglist
);
4639 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4640 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4641 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4642 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4643 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4645 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4646 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4647 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4649 /* Invalid arguments. Bail before doing anything stoopid! */
4650 if (arg0
== error_mark_node
4651 || arg1
== error_mark_node
4652 || arg2
== error_mark_node
)
4655 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4656 op0
= copy_to_mode_reg (mode2
, op0
);
4657 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4658 op1
= copy_to_mode_reg (mode0
, op1
);
4659 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4660 op2
= copy_to_mode_reg (mode1
, op2
);
4662 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4669 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4670 enum insn_code icode
;
4675 tree arg0
= TREE_VALUE (arglist
);
4676 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4677 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4678 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4679 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4680 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4681 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4682 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4683 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4684 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4686 if (icode
== CODE_FOR_nothing
)
4687 /* Builtin not supported on this processor. */
4690 /* If we got invalid arguments bail out before generating bad rtl. */
4691 if (arg0
== error_mark_node
4692 || arg1
== error_mark_node
4693 || arg2
== error_mark_node
)
4696 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4697 || icode
== CODE_FOR_altivec_vsldoi_4si
4698 || icode
== CODE_FOR_altivec_vsldoi_8hi
4699 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4701 /* Only allow 4-bit unsigned literals. */
4702 if (TREE_CODE (arg2
) != INTEGER_CST
4703 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4705 error ("argument 3 must be a 4-bit unsigned literal");
4711 || GET_MODE (target
) != tmode
4712 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4713 target
= gen_reg_rtx (tmode
);
4715 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4716 op0
= copy_to_mode_reg (mode0
, op0
);
4717 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4718 op1
= copy_to_mode_reg (mode1
, op1
);
4719 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4720 op2
= copy_to_mode_reg (mode2
, op2
);
4722 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4730 /* Expand the lvx builtins. */
4732 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4737 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4738 tree arglist
= TREE_OPERAND (exp
, 1);
4739 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4741 enum machine_mode tmode
, mode0
;
4743 enum insn_code icode
;
4747 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4748 icode
= CODE_FOR_altivec_lvx_16qi
;
4750 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4751 icode
= CODE_FOR_altivec_lvx_8hi
;
4753 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4754 icode
= CODE_FOR_altivec_lvx_4si
;
4756 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4757 icode
= CODE_FOR_altivec_lvx_4sf
;
4766 arg0
= TREE_VALUE (arglist
);
4767 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4768 tmode
= insn_data
[icode
].operand
[0].mode
;
4769 mode0
= insn_data
[icode
].operand
[1].mode
;
4772 || GET_MODE (target
) != tmode
4773 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4774 target
= gen_reg_rtx (tmode
);
4776 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4777 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4779 pat
= GEN_FCN (icode
) (target
, op0
);
4786 /* Expand the stvx builtins. */
4788 altivec_expand_st_builtin (exp
, target
, expandedp
)
4790 rtx target ATTRIBUTE_UNUSED
;
4793 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4794 tree arglist
= TREE_OPERAND (exp
, 1);
4795 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4797 enum machine_mode mode0
, mode1
;
4799 enum insn_code icode
;
4803 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4804 icode
= CODE_FOR_altivec_stvx_16qi
;
4806 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4807 icode
= CODE_FOR_altivec_stvx_8hi
;
4809 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4810 icode
= CODE_FOR_altivec_stvx_4si
;
4812 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4813 icode
= CODE_FOR_altivec_stvx_4sf
;
4820 arg0
= TREE_VALUE (arglist
);
4821 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4822 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4823 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4824 mode0
= insn_data
[icode
].operand
[0].mode
;
4825 mode1
= insn_data
[icode
].operand
[1].mode
;
4827 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4828 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4829 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4830 op1
= copy_to_mode_reg (mode1
, op1
);
4832 pat
= GEN_FCN (icode
) (op0
, op1
);
4840 /* Expand the dst builtins. */
4842 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4844 rtx target ATTRIBUTE_UNUSED
;
4847 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4848 tree arglist
= TREE_OPERAND (exp
, 1);
4849 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4850 tree arg0
, arg1
, arg2
;
4851 enum machine_mode mode0
, mode1
, mode2
;
4852 rtx pat
, op0
, op1
, op2
;
4853 struct builtin_description
*d
;
4858 /* Handle DST variants. */
4859 d
= (struct builtin_description
*) bdesc_dst
;
4860 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4861 if (d
->code
== fcode
)
4863 arg0
= TREE_VALUE (arglist
);
4864 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4865 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4866 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4867 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4868 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4869 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4870 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4871 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4873 /* Invalid arguments, bail out before generating bad rtl. */
4874 if (arg0
== error_mark_node
4875 || arg1
== error_mark_node
4876 || arg2
== error_mark_node
)
4879 if (TREE_CODE (arg2
) != INTEGER_CST
4880 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4882 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4886 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4887 op0
= copy_to_mode_reg (mode0
, op0
);
4888 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4889 op1
= copy_to_mode_reg (mode1
, op1
);
4891 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4902 /* Expand the builtin in EXP and store the result in TARGET. Store
4903 true in *EXPANDEDP if we found a builtin to expand. */
4905 altivec_expand_builtin (exp
, target
, expandedp
)
4910 struct builtin_description
*d
;
4911 struct builtin_description_predicates
*dp
;
4913 enum insn_code icode
;
4914 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4915 tree arglist
= TREE_OPERAND (exp
, 1);
4918 enum machine_mode tmode
, mode0
;
4919 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4921 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4925 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4929 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4937 case ALTIVEC_BUILTIN_STVX
:
4938 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4939 case ALTIVEC_BUILTIN_STVEBX
:
4940 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4941 case ALTIVEC_BUILTIN_STVEHX
:
4942 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4943 case ALTIVEC_BUILTIN_STVEWX
:
4944 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4945 case ALTIVEC_BUILTIN_STVXL
:
4946 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4948 case ALTIVEC_BUILTIN_MFVSCR
:
4949 icode
= CODE_FOR_altivec_mfvscr
;
4950 tmode
= insn_data
[icode
].operand
[0].mode
;
4953 || GET_MODE (target
) != tmode
4954 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4955 target
= gen_reg_rtx (tmode
);
4957 pat
= GEN_FCN (icode
) (target
);
4963 case ALTIVEC_BUILTIN_MTVSCR
:
4964 icode
= CODE_FOR_altivec_mtvscr
;
4965 arg0
= TREE_VALUE (arglist
);
4966 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4967 mode0
= insn_data
[icode
].operand
[0].mode
;
4969 /* If we got invalid arguments bail out before generating bad rtl. */
4970 if (arg0
== error_mark_node
)
4973 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4974 op0
= copy_to_mode_reg (mode0
, op0
);
4976 pat
= GEN_FCN (icode
) (op0
);
4981 case ALTIVEC_BUILTIN_DSSALL
:
4982 emit_insn (gen_altivec_dssall ());
4985 case ALTIVEC_BUILTIN_DSS
:
4986 icode
= CODE_FOR_altivec_dss
;
4987 arg0
= TREE_VALUE (arglist
);
4988 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4989 mode0
= insn_data
[icode
].operand
[0].mode
;
4991 /* If we got invalid arguments bail out before generating bad rtl. */
4992 if (arg0
== error_mark_node
)
4995 if (TREE_CODE (arg0
) != INTEGER_CST
4996 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4998 error ("argument to dss must be a 2-bit unsigned literal");
5002 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5003 op0
= copy_to_mode_reg (mode0
, op0
);
5005 emit_insn (gen_altivec_dss (op0
));
5009 /* Expand abs* operations. */
5010 d
= (struct builtin_description
*) bdesc_abs
;
5011 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5012 if (d
->code
== fcode
)
5013 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
5015 /* Expand the AltiVec predicates. */
5016 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5017 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5018 if (dp
->code
== fcode
)
5019 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
5021 /* LV* are funky. We initialized them differently. */
5024 case ALTIVEC_BUILTIN_LVSL
:
5025 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
5027 case ALTIVEC_BUILTIN_LVSR
:
5028 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
5030 case ALTIVEC_BUILTIN_LVEBX
:
5031 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
5033 case ALTIVEC_BUILTIN_LVEHX
:
5034 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
5036 case ALTIVEC_BUILTIN_LVEWX
:
5037 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
5039 case ALTIVEC_BUILTIN_LVXL
:
5040 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
5042 case ALTIVEC_BUILTIN_LVX
:
5043 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
5054 /* Binops that need to be initialized manually, but can be expanded
5055 automagically by rs6000_expand_binop_builtin. */
5056 static struct builtin_description bdesc_2arg_spe
[] =
5058 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
5059 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
5060 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
5061 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
5062 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
5063 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
5064 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
5065 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
5066 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
5067 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
5068 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
5069 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
5070 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
5071 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
5072 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
5073 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
5074 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
5075 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
5076 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
5077 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
5078 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
5079 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
5082 /* Expand the builtin in EXP and store the result in TARGET. Store
5083 true in *EXPANDEDP if we found a builtin to expand.
5085 This expands the SPE builtins that are not simple unary and binary
5088 spe_expand_builtin (exp
, target
, expandedp
)
5093 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5094 tree arglist
= TREE_OPERAND (exp
, 1);
5096 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5097 enum insn_code icode
;
5098 enum machine_mode tmode
, mode0
;
5100 struct builtin_description
*d
;
5105 /* Syntax check for a 5-bit unsigned immediate. */
5108 case SPE_BUILTIN_EVSTDD
:
5109 case SPE_BUILTIN_EVSTDH
:
5110 case SPE_BUILTIN_EVSTDW
:
5111 case SPE_BUILTIN_EVSTWHE
:
5112 case SPE_BUILTIN_EVSTWHO
:
5113 case SPE_BUILTIN_EVSTWWE
:
5114 case SPE_BUILTIN_EVSTWWO
:
5115 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5116 if (TREE_CODE (arg1
) != INTEGER_CST
5117 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5119 error ("argument 2 must be a 5-bit unsigned literal");
5127 d
= (struct builtin_description
*) bdesc_2arg_spe
;
5128 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
5129 if (d
->code
== fcode
)
5130 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5132 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5133 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
5134 if (d
->code
== fcode
)
5135 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
5137 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5138 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
5139 if (d
->code
== fcode
)
5140 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
5144 case SPE_BUILTIN_EVSTDDX
:
5145 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
5146 case SPE_BUILTIN_EVSTDHX
:
5147 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
5148 case SPE_BUILTIN_EVSTDWX
:
5149 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
5150 case SPE_BUILTIN_EVSTWHEX
:
5151 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
5152 case SPE_BUILTIN_EVSTWHOX
:
5153 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
5154 case SPE_BUILTIN_EVSTWWEX
:
5155 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
5156 case SPE_BUILTIN_EVSTWWOX
:
5157 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
5158 case SPE_BUILTIN_EVSTDD
:
5159 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
5160 case SPE_BUILTIN_EVSTDH
:
5161 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
5162 case SPE_BUILTIN_EVSTDW
:
5163 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
5164 case SPE_BUILTIN_EVSTWHE
:
5165 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
5166 case SPE_BUILTIN_EVSTWHO
:
5167 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
5168 case SPE_BUILTIN_EVSTWWE
:
5169 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
5170 case SPE_BUILTIN_EVSTWWO
:
5171 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
5172 case SPE_BUILTIN_MFSPEFSCR
:
5173 icode
= CODE_FOR_spe_mfspefscr
;
5174 tmode
= insn_data
[icode
].operand
[0].mode
;
5177 || GET_MODE (target
) != tmode
5178 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5179 target
= gen_reg_rtx (tmode
);
5181 pat
= GEN_FCN (icode
) (target
);
5186 case SPE_BUILTIN_MTSPEFSCR
:
5187 icode
= CODE_FOR_spe_mtspefscr
;
5188 arg0
= TREE_VALUE (arglist
);
5189 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5190 mode0
= insn_data
[icode
].operand
[0].mode
;
5192 if (arg0
== error_mark_node
)
5195 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5196 op0
= copy_to_mode_reg (mode0
, op0
);
5198 pat
= GEN_FCN (icode
) (op0
);
5211 spe_expand_predicate_builtin (icode
, arglist
, target
)
5212 enum insn_code icode
;
5216 rtx pat
, scratch
, tmp
;
5217 tree form
= TREE_VALUE (arglist
);
5218 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5219 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5220 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5221 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5222 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5223 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5227 if (TREE_CODE (form
) != INTEGER_CST
)
5229 error ("argument 1 of __builtin_spe_predicate must be a constant");
5233 form_int
= TREE_INT_CST_LOW (form
);
5238 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5242 || GET_MODE (target
) != SImode
5243 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5244 target
= gen_reg_rtx (SImode
);
5246 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5247 op0
= copy_to_mode_reg (mode0
, op0
);
5248 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5249 op1
= copy_to_mode_reg (mode1
, op1
);
5251 scratch
= gen_reg_rtx (CCmode
);
5253 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5258 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5259 _lower_. We use one compare, but look in different bits of the
5260 CR for each variant.
5262 There are 2 elements in each SPE simd type (upper/lower). The CR
5263 bits are set as follows:
5265 BIT0 | BIT 1 | BIT 2 | BIT 3
5266 U | L | (U | L) | (U & L)
5268 So, for an "all" relationship, BIT 3 would be set.
5269 For an "any" relationship, BIT 2 would be set. Etc.
5271 Following traditional nomenclature, these bits map to:
5273 BIT0 | BIT 1 | BIT 2 | BIT 3
5276 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5281 /* All variant. OV bit. */
5283 /* We need to get to the OV bit, which is the ORDERED bit. We
5284 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5285 that's ugly and will trigger a validate_condition_mode abort.
5286 So let's just use another pattern. */
5287 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5289 /* Any variant. EQ bit. */
5293 /* Upper variant. LT bit. */
5297 /* Lower variant. GT bit. */
5302 error ("argument 1 of __builtin_spe_predicate is out of range");
5306 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5307 emit_move_insn (target
, tmp
);
5312 /* The evsel builtins look like this:
5314 e = __builtin_spe_evsel_OP (a, b, c, d);
5318 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5319 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5323 spe_expand_evsel_builtin (icode
, arglist
, target
)
5324 enum insn_code icode
;
5329 tree arg0
= TREE_VALUE (arglist
);
5330 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5331 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5332 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5333 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5334 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5335 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5336 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5337 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5338 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5343 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5344 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5348 || GET_MODE (target
) != mode0
5349 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5350 target
= gen_reg_rtx (mode0
);
5352 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5353 op0
= copy_to_mode_reg (mode0
, op0
);
5354 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5355 op1
= copy_to_mode_reg (mode0
, op1
);
5356 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5357 op2
= copy_to_mode_reg (mode0
, op2
);
5358 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5359 op3
= copy_to_mode_reg (mode0
, op3
);
5361 /* Generate the compare. */
5362 scratch
= gen_reg_rtx (CCmode
);
5363 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5368 if (mode0
== V2SImode
)
5369 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5371 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5376 /* Expand an expression EXP that calls a built-in function,
5377 with result going to TARGET if that's convenient
5378 (and in mode MODE if that's convenient).
5379 SUBTARGET may be used as the target for computing one of EXP's operands.
5380 IGNORE is nonzero if the value is to be ignored. */
5383 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5386 rtx subtarget ATTRIBUTE_UNUSED
;
5387 enum machine_mode mode ATTRIBUTE_UNUSED
;
5388 int ignore ATTRIBUTE_UNUSED
;
5390 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5391 tree arglist
= TREE_OPERAND (exp
, 1);
5392 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5393 struct builtin_description
*d
;
5400 ret
= altivec_expand_builtin (exp
, target
, &success
);
5407 ret
= spe_expand_builtin (exp
, target
, &success
);
5413 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5415 /* Handle simple unary operations. */
5416 d
= (struct builtin_description
*) bdesc_1arg
;
5417 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5418 if (d
->code
== fcode
)
5419 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5421 /* Handle simple binary operations. */
5422 d
= (struct builtin_description
*) bdesc_2arg
;
5423 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5424 if (d
->code
== fcode
)
5425 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5427 /* Handle simple ternary operations. */
5428 d
= (struct builtin_description
*) bdesc_3arg
;
5429 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5430 if (d
->code
== fcode
)
5431 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5439 rs6000_init_builtins ()
5442 spe_init_builtins ();
5444 altivec_init_builtins ();
5445 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5446 rs6000_common_init_builtins ();
5449 /* Search through a set of builtins and enable the mask bits.
5450 DESC is an array of builtins.
5451 SIZE is the total number of builtins.
5452 START is the builtin enum at which to start.
5453 END is the builtin enum at which to end. */
5455 enable_mask_for_builtins (desc
, size
, start
, end
)
5456 struct builtin_description
*desc
;
5458 enum rs6000_builtins start
, end
;
5462 for (i
= 0; i
< size
; ++i
)
5463 if (desc
[i
].code
== start
)
5469 for (; i
< size
; ++i
)
5471 /* Flip all the bits on. */
5472 desc
[i
].mask
= target_flags
;
5473 if (desc
[i
].code
== end
)
5479 spe_init_builtins ()
5481 tree endlink
= void_list_node
;
5482 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5483 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5484 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5485 struct builtin_description
*d
;
5488 tree v2si_ftype_4_v2si
5489 = build_function_type
5491 tree_cons (NULL_TREE
, V2SI_type_node
,
5492 tree_cons (NULL_TREE
, V2SI_type_node
,
5493 tree_cons (NULL_TREE
, V2SI_type_node
,
5494 tree_cons (NULL_TREE
, V2SI_type_node
,
5497 tree v2sf_ftype_4_v2sf
5498 = build_function_type
5500 tree_cons (NULL_TREE
, V2SF_type_node
,
5501 tree_cons (NULL_TREE
, V2SF_type_node
,
5502 tree_cons (NULL_TREE
, V2SF_type_node
,
5503 tree_cons (NULL_TREE
, V2SF_type_node
,
5506 tree int_ftype_int_v2si_v2si
5507 = build_function_type
5509 tree_cons (NULL_TREE
, integer_type_node
,
5510 tree_cons (NULL_TREE
, V2SI_type_node
,
5511 tree_cons (NULL_TREE
, V2SI_type_node
,
5514 tree int_ftype_int_v2sf_v2sf
5515 = build_function_type
5517 tree_cons (NULL_TREE
, integer_type_node
,
5518 tree_cons (NULL_TREE
, V2SF_type_node
,
5519 tree_cons (NULL_TREE
, V2SF_type_node
,
5522 tree void_ftype_v2si_puint_int
5523 = build_function_type (void_type_node
,
5524 tree_cons (NULL_TREE
, V2SI_type_node
,
5525 tree_cons (NULL_TREE
, puint_type_node
,
5526 tree_cons (NULL_TREE
,
5530 tree void_ftype_v2si_puint_char
5531 = build_function_type (void_type_node
,
5532 tree_cons (NULL_TREE
, V2SI_type_node
,
5533 tree_cons (NULL_TREE
, puint_type_node
,
5534 tree_cons (NULL_TREE
,
5538 tree void_ftype_v2si_pv2si_int
5539 = build_function_type (void_type_node
,
5540 tree_cons (NULL_TREE
, V2SI_type_node
,
5541 tree_cons (NULL_TREE
, pv2si_type_node
,
5542 tree_cons (NULL_TREE
,
5546 tree void_ftype_v2si_pv2si_char
5547 = build_function_type (void_type_node
,
5548 tree_cons (NULL_TREE
, V2SI_type_node
,
5549 tree_cons (NULL_TREE
, pv2si_type_node
,
5550 tree_cons (NULL_TREE
,
5555 = build_function_type (void_type_node
,
5556 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5559 = build_function_type (integer_type_node
,
5560 tree_cons (NULL_TREE
, void_type_node
, endlink
));
5562 tree v2si_ftype_pv2si_int
5563 = build_function_type (V2SI_type_node
,
5564 tree_cons (NULL_TREE
, pv2si_type_node
,
5565 tree_cons (NULL_TREE
, integer_type_node
,
5568 tree v2si_ftype_puint_int
5569 = build_function_type (V2SI_type_node
,
5570 tree_cons (NULL_TREE
, puint_type_node
,
5571 tree_cons (NULL_TREE
, integer_type_node
,
5574 tree v2si_ftype_pushort_int
5575 = build_function_type (V2SI_type_node
,
5576 tree_cons (NULL_TREE
, pushort_type_node
,
5577 tree_cons (NULL_TREE
, integer_type_node
,
5580 /* The initialization of the simple binary and unary builtins is
5581 done in rs6000_common_init_builtins, but we have to enable the
5582 mask bits here manually because we have run out of `target_flags'
5583 bits. We really need to redesign this mask business. */
5585 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5586 ARRAY_SIZE (bdesc_2arg
),
5589 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5590 ARRAY_SIZE (bdesc_1arg
),
5592 SPE_BUILTIN_EVSUBFUSIAAW
);
5593 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5594 ARRAY_SIZE (bdesc_spe_predicates
),
5595 SPE_BUILTIN_EVCMPEQ
,
5596 SPE_BUILTIN_EVFSTSTLT
);
5597 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5598 ARRAY_SIZE (bdesc_spe_evsel
),
5599 SPE_BUILTIN_EVSEL_CMPGTS
,
5600 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5602 /* Initialize irregular SPE builtins. */
5604 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5605 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
5606 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5607 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5608 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5609 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5610 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5611 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5612 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
5613 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5614 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5615 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5616 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5617 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5618 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5619 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
5622 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5623 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5624 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5625 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5626 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5627 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5628 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5629 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5630 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5631 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5632 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
5633 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5634 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5635 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5636 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5637 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5638 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5639 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5640 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5641 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5642 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5643 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
5646 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5647 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5651 switch (insn_data
[d
->icode
].operand
[1].mode
)
5654 type
= int_ftype_int_v2si_v2si
;
5657 type
= int_ftype_int_v2sf_v2sf
;
5663 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5666 /* Evsel predicates. */
5667 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5668 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5672 switch (insn_data
[d
->icode
].operand
[1].mode
)
5675 type
= v2si_ftype_4_v2si
;
5678 type
= v2sf_ftype_4_v2sf
;
5684 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5689 altivec_init_builtins ()
5691 struct builtin_description
*d
;
5692 struct builtin_description_predicates
*dp
;
5694 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5695 tree pint_type_node
= build_pointer_type (integer_type_node
);
5696 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5697 tree pchar_type_node
= build_pointer_type (char_type_node
);
5699 tree pvoid_type_node
= build_pointer_type (void_type_node
);
5701 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
5702 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
5703 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
5704 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
5706 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
5708 tree int_ftype_int_v4si_v4si
5709 = build_function_type_list (integer_type_node
,
5710 integer_type_node
, V4SI_type_node
,
5711 V4SI_type_node
, NULL_TREE
);
5712 tree v4sf_ftype_pcfloat
5713 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
5714 tree void_ftype_pfloat_v4sf
5715 = build_function_type_list (void_type_node
,
5716 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5717 tree v4si_ftype_pcint
5718 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
5719 tree void_ftype_pint_v4si
5720 = build_function_type_list (void_type_node
,
5721 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5722 tree v8hi_ftype_pcshort
5723 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
5724 tree void_ftype_pshort_v8hi
5725 = build_function_type_list (void_type_node
,
5726 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5727 tree v16qi_ftype_pcchar
5728 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
5729 tree void_ftype_pchar_v16qi
5730 = build_function_type_list (void_type_node
,
5731 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5732 tree void_ftype_v4si
5733 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5734 tree v8hi_ftype_void
5735 = build_function_type (V8HI_type_node
, void_list_node
);
5736 tree void_ftype_void
5737 = build_function_type (void_type_node
, void_list_node
);
5739 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5741 tree v16qi_ftype_int_pcvoid
5742 = build_function_type_list (V16QI_type_node
,
5743 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5744 tree v8hi_ftype_int_pcvoid
5745 = build_function_type_list (V8HI_type_node
,
5746 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5747 tree v4si_ftype_int_pcvoid
5748 = build_function_type_list (V4SI_type_node
,
5749 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
5751 tree void_ftype_v4si_int_pvoid
5752 = build_function_type_list (void_type_node
,
5753 V4SI_type_node
, integer_type_node
,
5754 pvoid_type_node
, NULL_TREE
);
5755 tree void_ftype_v16qi_int_pvoid
5756 = build_function_type_list (void_type_node
,
5757 V16QI_type_node
, integer_type_node
,
5758 pvoid_type_node
, NULL_TREE
);
5759 tree void_ftype_v8hi_int_pvoid
5760 = build_function_type_list (void_type_node
,
5761 V8HI_type_node
, integer_type_node
,
5762 pvoid_type_node
, NULL_TREE
);
5763 tree int_ftype_int_v8hi_v8hi
5764 = build_function_type_list (integer_type_node
,
5765 integer_type_node
, V8HI_type_node
,
5766 V8HI_type_node
, NULL_TREE
);
5767 tree int_ftype_int_v16qi_v16qi
5768 = build_function_type_list (integer_type_node
,
5769 integer_type_node
, V16QI_type_node
,
5770 V16QI_type_node
, NULL_TREE
);
5771 tree int_ftype_int_v4sf_v4sf
5772 = build_function_type_list (integer_type_node
,
5773 integer_type_node
, V4SF_type_node
,
5774 V4SF_type_node
, NULL_TREE
);
5775 tree v4si_ftype_v4si
5776 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5777 tree v8hi_ftype_v8hi
5778 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5779 tree v16qi_ftype_v16qi
5780 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5781 tree v4sf_ftype_v4sf
5782 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5783 tree void_ftype_pcvoid_int_char
5784 = build_function_type_list (void_type_node
,
5785 pcvoid_type_node
, integer_type_node
,
5786 char_type_node
, NULL_TREE
);
5788 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
5789 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5790 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
5791 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5792 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
5793 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5794 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
5795 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5796 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
5797 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5798 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
5799 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5800 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
5801 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5802 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
5803 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5804 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5805 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5806 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5807 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5808 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
5809 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
5810 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
5811 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
5812 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
5813 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
5814 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
5815 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5816 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5817 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5818 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5819 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5821 /* Add the DST variants. */
5822 d
= (struct builtin_description
*) bdesc_dst
;
5823 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5824 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
5826 /* Initialize the predicates. */
5827 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5828 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5830 enum machine_mode mode1
;
5833 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5838 type
= int_ftype_int_v4si_v4si
;
5841 type
= int_ftype_int_v8hi_v8hi
;
5844 type
= int_ftype_int_v16qi_v16qi
;
5847 type
= int_ftype_int_v4sf_v4sf
;
5853 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5856 /* Initialize the abs* operators. */
5857 d
= (struct builtin_description
*) bdesc_abs
;
5858 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5860 enum machine_mode mode0
;
5863 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5868 type
= v4si_ftype_v4si
;
5871 type
= v8hi_ftype_v8hi
;
5874 type
= v16qi_ftype_v16qi
;
5877 type
= v4sf_ftype_v4sf
;
5883 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
/* Register the builtin function types and definitions that are shared
   between the AltiVec and SPE vector extensions.  Walks the three
   builtin description tables (ternary, binary, unary), derives each
   builtin's C function type from the modes of its insn pattern's
   operands, and registers it with def_builtin.  */

static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  /* Function types, one per distinct (return, args) mode signature.
     "char" argument slots are the small literal operands of the
     underlying instructions.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  /* V2SI/V2SF signatures are used by the SPE (e500) builtins.  */
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (V2SF_type_node,
				V2SF_type_node, NULL_TREE);
  
  tree v2sf_ftype_v2si
    = build_function_type_list (V2SF_type_node,
				V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (V2SI_type_node,
				V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si 
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Entries may be masked out (e.g. by target flags).  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;
      
      /* Operand 0 is the result; 1-3 are the arguments.  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;
      
      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;	      
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;	      
	    default:
	      abort();	      
	    }
	}
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
        {
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;	      
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;	      
	    default:
	      abort();	      
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode 
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode 
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode 
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;
      
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;
      
      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;
      
      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;
      
      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;
      
      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;
      
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
        type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
        type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
        type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

/* Number of pending stores buffered before being emitted, when string
   instructions are not available.  */
#define MAX_MOVE_REG 4

int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Either a multi-register string move or a single scalar
	     move is used for each chunk; the union selects which.  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register string moves clobber consecutive GPRs
	     starting at r5, so each is usable only when those regs
	     are not fixed.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      /* "& 31" because a count of 32 is encoded as 0 in the
		 string-move patterns.  */
	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else			/* string instructions not available */
    {
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  /* Loads are emitted immediately; stores are buffered so the
	     loads can be scheduled ahead of them.  */
	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
/* Return 1 if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  Accepts only the
   shape generated for lmw: element i loads SImode register
   dest_regno + i from src_addr + 4*i.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      /* Each subsequent element must be (set (reg:SI dest_regno+i)
	 (mem:SI (plus src_addr (const_int 4*i)))).  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
	return 0;
    }

  return 1;
}
/* Similar, but tests for store multiple.  Here, the second vector element
   is a CLOBBER.  It will be tested later.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* -1: the CLOBBER element is not one of the counted stores.  */
  int count = XVECLEN (op, 0) - 1;
  unsigned int src_regno;
  rtx dest_addr;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  for (i = 1; i < count; i++)
    {
      /* i + 1 skips over the CLOBBER at vector index 1.  */
      rtx elt = XVECEXP (op, 0, i + 1);

      /* Each element must be (set (mem:SI (plus dest_addr
	 (const_int 4*i))) (reg:SI src_regno+i)).  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
	return 0;
    }

  return 1;
}
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* Degenerate single-word "multiple": just one lwz.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* Address register is the LAST destination: load the other
	       words with lswi, then load the final word (which
	       overwrites the address) separately.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* Address register is the FIRST destination: bump the
	       address past word 0, lswi the rest, then load word 0
	       (the address register) last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* Address register is in the middle: fall back to
	       individual lwz's, loading the address register last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a plain lswi does the whole thing.  */
  return "{lsi|lswi} %2,%1,%N0";
}
/* Return 1 for a parallel vrsave operation: element 0 must set or read
   the VRSAVE register via an unspec_volatile, and the remaining
   elements must all be SETs or CLOBBERs.  */

int
vrsave_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno, src_regno;
  int i;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));

  /* VRSAVE must appear on at least one side of the first set.  */
  if (dest_regno != VRSAVE_REGNO
      && src_regno != VRSAVE_REGNO)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != CLOBBER
	  && GET_CODE (elt) != SET)
	return 0;
    }

  return 1;
}
/* Return 1 for a PARALLEL suitable for mtcrf: every element sets one CR
   field from an unspec of (source GPR, single-bit mask), where the mask
   bit selects the CR field being set.  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
  
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;
      
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* Mask bit for this CR field, numbered from the high end as in
	 the mtcrf FXM field.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
      
      /* NOTE(review): the literal 20 must match the unspec number used
	 for mtcrf in the machine description — verify against
	 rs6000.md.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
/* Return 1 for a PARALLEL suitable for lmw: loads of consecutive SImode
   registers dest_regno..31 from consecutive words at a common base
   address (register-indirect or register+offset).  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads through r31, so the element count is fixed by the
     first destination register.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      /* r0 reads as zero in a base-register position — not usable.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;

      /* Same base register, words at consecutive offsets.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
/* Return 1 for a PARALLEL suitable for stmw: stores of consecutive
   SImode registers src_regno..31 to consecutive words at a common base
   address (register-indirect or register+offset).  */

int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores through r31, so the element count is fixed by
     the first source register.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      /* r0 reads as zero in a base-register position — not usable.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;

      /* Same base register, words at consecutive offsets.  */
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
/* A validation routine: say whether CODE, a condition code, and MODE
   match.  The other alternatives either don't make sense or should
   never be generated.  Aborts on any invalid combination; returns
   normally otherwise.  */

void
validate_condition_mode (code, mode)
     enum rtx_code code;
     enum machine_mode mode;
{
  if (GET_RTX_CLASS (code) != '<'
      || GET_MODE_CLASS (mode) != MODE_CC)
    abort ();

  /* These don't make sense.  */
  if ((code == GT || code == LT || code == GE || code == LE)
      && mode == CCUNSmode)
    abort ();

  if ((code == GTU || code == LTU || code == GEU || code == LEU)
      && mode != CCUNSmode)
    abort ();

  /* IEEE-style unordered comparisons only exist in floating-point CC
     mode.  */
  if (mode != CCFPmode
      && (code == ORDERED || code == UNORDERED
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT
	  || code == UNGE || code == UNLE))
    abort ();
  
  /* These should never be generated except for 
     flag_unsafe_math_optimizations and flag_finite_math_only.  */
  if (mode == CCFPmode
      && ! flag_unsafe_math_optimizations
      && ! flag_finite_math_only
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    abort ();
  
  /* These are invalid; the information is not there.  */
  if (mode == CCEQmode 
      && code != EQ && code != NE)
    abort ();
}
/* Return 1 if OP is a comparison operation that is valid for a branch insn.
   We only check the opcode against the mode of the CC value here.  */

int
branch_comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  /* The first operand must be a CC register.  */
  cc_mode = GET_MODE (XEXP (op, 0));
  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
    return 0;

  /* Aborts (rather than returning 0) on a code/mode mismatch, since
     such a combination should never have been created.  */
  validate_condition_mode (code, cc_mode);

  return 1;
}
/* Return 1 if OP is a comparison operation that is valid for a branch
   insn and which is true if the corresponding bit in the CC register
   is set.  */

int
branch_positive_comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (! branch_comparison_operator (op, mode))
    return 0;

  /* EQ/LT/GT/LTU/GTU/UNORDERED each correspond directly to one CC bit
     being set; the E500 (no FPRs) additionally uses NE this way.  */
  code = GET_CODE (op);
  return (code == EQ || code == LT || code == GT
	  || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
	  || code == LTU || code == GTU
	  || code == UNORDERED);
}
/* Return 1 if OP is a comparison operation that is valid for an scc insn.
   We check the opcode against the mode of the CC value and disallow EQ or
   NE comparisons for integers.  */

int
scc_comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_RTX_CLASS (code) != '<')
    return 0;

  /* The first operand must be a CC register.  */
  cc_mode = GET_MODE (XEXP (op, 0));
  if (GET_MODE_CLASS (cc_mode) != MODE_CC)
    return 0;

  validate_condition_mode (code, cc_mode);

  /* Integer NE is not directly representable as an scc.  */
  if (code == NE && cc_mode != CCFPmode)
    return 0;

  return 1;
}
7072 trap_comparison_operator (op
, mode
)
7074 enum machine_mode mode
;
7076 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
7078 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
7082 boolean_operator (op
, mode
)
7084 enum machine_mode mode ATTRIBUTE_UNUSED
;
7086 enum rtx_code code
= GET_CODE (op
);
7087 return (code
== AND
|| code
== IOR
|| code
== XOR
);
7091 boolean_or_operator (op
, mode
)
7093 enum machine_mode mode ATTRIBUTE_UNUSED
;
7095 enum rtx_code code
= GET_CODE (op
);
7096 return (code
== IOR
|| code
== XOR
);
7100 min_max_operator (op
, mode
)
7102 enum machine_mode mode ATTRIBUTE_UNUSED
;
7104 enum rtx_code code
= GET_CODE (op
);
7105 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
7108 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7109 mask required to convert the result of a rotate insn into a shift
7110 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7113 includes_lshift_p (shiftop
, andop
)
7117 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7119 shift_mask
<<= INTVAL (shiftop
);
7121 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7124 /* Similar, but for right shift. */
7127 includes_rshift_p (shiftop
, andop
)
7131 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7133 shift_mask
>>= INTVAL (shiftop
);
7135 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.

   Uses the (x & -x) idiom throughout to isolate the least significant
   set bit, and x == -lsb to test "all ones from lsb upward".  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);

      /* Disallow all zeros and all ones: rldic needs a proper
	 transition.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit constant on a 32-bit host: the value is split into
	 low/high words; replay the same test piecewise.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lives entirely in the high word, so the shift
	     must be by at least 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   signifigant 0's, with the remainder of the word 1's.

   As above, (x & -x) isolates the least significant set bit and
   x == -lsb tests "all ones from lsb upward".  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least signifigant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit constant on a 32-bit host: split into low/high words.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask lives entirely in the high word.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }

	  /* Otherwise the high word must be all ones for the mask to
	     run contiguously to the top.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7305 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7306 for lfq and stfq insns.
7308 Note reg1 and reg2 *must* be hard registers. To be sure we will
7309 abort if we are passed pseudo registers. */
7312 registers_ok_for_quad_peep (reg1
, reg2
)
7315 /* We might have been passed a SUBREG. */
7316 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7319 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7322 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7323 addr1 and addr2 must be in consecutive memory locations
7324 (addr2 == addr1 + 8). */
7327 addrs_ok_for_quad_peep (addr1
, addr2
)
7334 /* Extract an offset (if used) from the first addr. */
7335 if (GET_CODE (addr1
) == PLUS
)
7337 /* If not a REG, return zero. */
7338 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7342 reg1
= REGNO (XEXP (addr1
, 0));
7343 /* The offset must be constant! */
7344 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7346 offset1
= INTVAL (XEXP (addr1
, 1));
7349 else if (GET_CODE (addr1
) != REG
)
7353 reg1
= REGNO (addr1
);
7354 /* This was a simple (mem (reg)) expression. Offset is 0. */
7358 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7359 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7360 register as addr1. */
7361 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
7363 if (GET_CODE (addr2
) != PLUS
)
7366 if (GET_CODE (XEXP (addr2
, 0)) != REG
7367 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7370 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7373 /* The offset for the second addr must be 8 more than the first addr. */
7374 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7377 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7382 /* Return the register class of a scratch register needed to copy IN into
7383 or out of a register in CLASS in MODE. If it can be done directly,
7384 NO_REGS is returned. */
7387 secondary_reload_class (class, mode
, in
)
7388 enum reg_class
class;
7389 enum machine_mode mode ATTRIBUTE_UNUSED
;
7394 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
7396 && MACHOPIC_INDIRECT
7400 /* We cannot copy a symbolic operand directly into anything
7401 other than BASE_REGS for TARGET_ELF. So indicate that a
7402 register from BASE_REGS is needed as an intermediate
7405 On Darwin, pic addresses require a load from memory, which
7406 needs a base register. */
7407 if (class != BASE_REGS
7408 && (GET_CODE (in
) == SYMBOL_REF
7409 || GET_CODE (in
) == HIGH
7410 || GET_CODE (in
) == LABEL_REF
7411 || GET_CODE (in
) == CONST
))
7415 if (GET_CODE (in
) == REG
)
7418 if (regno
>= FIRST_PSEUDO_REGISTER
)
7420 regno
= true_regnum (in
);
7421 if (regno
>= FIRST_PSEUDO_REGISTER
)
7425 else if (GET_CODE (in
) == SUBREG
)
7427 regno
= true_regnum (in
);
7428 if (regno
>= FIRST_PSEUDO_REGISTER
)
7434 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7436 if (class == GENERAL_REGS
|| class == BASE_REGS
7437 || (regno
>= 0 && INT_REGNO_P (regno
)))
7440 /* Constants, memory, and FP registers can go into FP registers. */
7441 if ((regno
== -1 || FP_REGNO_P (regno
))
7442 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7445 /* Memory, and AltiVec registers can go into AltiVec registers. */
7446 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7447 && class == ALTIVEC_REGS
)
7450 /* We can copy among the CR registers. */
7451 if ((class == CR_REGS
|| class == CR0_REGS
)
7452 && regno
>= 0 && CR_REGNO_P (regno
))
7455 /* Otherwise, we need GENERAL_REGS. */
7456 return GENERAL_REGS
;
7459 /* Given a comparison operation, return the bit number in CCR to test. We
7460 know this is a valid comparison.
7462 SCC_P is 1 if this is for an scc. That means that %D will have been
7463 used instead of %C, so the bits will be in different places.
7465 Return -1 if OP isn't a valid comparison for some reason. */
7472 enum rtx_code code
= GET_CODE (op
);
7473 enum machine_mode cc_mode
;
7478 if (GET_RTX_CLASS (code
) != '<')
7483 if (GET_CODE (reg
) != REG
7484 || ! CR_REGNO_P (REGNO (reg
)))
7487 cc_mode
= GET_MODE (reg
);
7488 cc_regnum
= REGNO (reg
);
7489 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7491 validate_condition_mode (code
, cc_mode
);
7496 if (TARGET_E500
&& !TARGET_FPRS
7497 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7498 return base_bit
+ 1;
7499 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7501 if (TARGET_E500
&& !TARGET_FPRS
7502 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7503 return base_bit
+ 1;
7504 return base_bit
+ 2;
7505 case GT
: case GTU
: case UNLE
:
7506 return base_bit
+ 1;
7507 case LT
: case LTU
: case UNGE
:
7509 case ORDERED
: case UNORDERED
:
7510 return base_bit
+ 3;
7513 /* If scc, we will have done a cror to put the bit in the
7514 unordered position. So test that bit. For integer, this is ! LT
7515 unless this is an scc insn. */
7516 return scc_p
? base_bit
+ 3 : base_bit
;
7519 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7526 /* Return the GOT register. */
7529 rs6000_got_register (value
)
7530 rtx value ATTRIBUTE_UNUSED
;
7532 /* The second flow pass currently (June 1999) can't update
7533 regs_ever_live without disturbing other parts of the compiler, so
7534 update it here to make the prolog/epilogue code happy. */
7535 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7536 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7538 current_function_uses_pic_offset_table
= 1;
7540 return pic_offset_table_rtx
;
7543 /* Function to init struct machine_function.
7544 This will be called, via a pointer variable,
7545 from push_function_context. */
7547 static struct machine_function
*
7548 rs6000_init_machine_status ()
7550 return ggc_alloc_cleared (sizeof (machine_function
));
7553 /* These macros test for integers and extract the low-order bits. */
7555 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7556 && GET_MODE (X) == VOIDmode)
7558 #define INT_LOWPART(X) \
7559 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7566 unsigned long val
= INT_LOWPART (op
);
7568 /* If the high bit is zero, the value is the first 1 bit we find
7570 if ((val
& 0x80000000) == 0)
7572 if ((val
& 0xffffffff) == 0)
7576 while (((val
<<= 1) & 0x80000000) == 0)
7581 /* If the high bit is set and the low bit is not, or the mask is all
7582 1's, the value is zero. */
7583 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7586 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7589 while (((val
>>= 1) & 1) != 0)
7600 unsigned long val
= INT_LOWPART (op
);
7602 /* If the low bit is zero, the value is the first 1 bit we find from
7606 if ((val
& 0xffffffff) == 0)
7610 while (((val
>>= 1) & 1) == 0)
7616 /* If the low bit is set and the high bit is not, or the mask is all
7617 1's, the value is 31. */
7618 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7621 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7624 while (((val
<<= 1) & 0x80000000) != 0)
7630 /* Print an operand. Recognize special options, documented below. */
7633 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7634 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7636 #define SMALL_DATA_RELOC "sda21"
7637 #define SMALL_DATA_REG 0
7641 print_operand (file
, x
, code
)
7648 unsigned HOST_WIDE_INT uval
;
7653 /* Write out an instruction after the call which may be replaced
7654 with glue code by the loader. This depends on the AIX version. */
7655 asm_fprintf (file
, RS6000_CALL_GLUE
);
7658 /* %a is output_address. */
7661 /* If X is a constant integer whose low-order 5 bits are zero,
7662 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7663 in the AIX assembler where "sri" with a zero shift count
7664 writes a trash instruction. */
7665 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7672 /* If constant, low-order 16 bits of constant, unsigned.
7673 Otherwise, write normally. */
7675 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7677 print_operand (file
, x
, 0);
7681 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7682 for 64-bit mask direction. */
7683 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7686 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7690 /* There used to be a comment for 'C' reading "This is an
7691 optional cror needed for certain floating-point
7692 comparisons. Otherwise write nothing." */
7694 /* Similar, except that this is for an scc, so we must be able to
7695 encode the test in a single bit that is one. We do the above
7696 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7697 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7698 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7700 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7702 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7704 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7707 else if (GET_CODE (x
) == NE
)
7709 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7711 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7712 base_bit
+ 2, base_bit
+ 2);
7714 else if (TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
7715 && GET_CODE (x
) == EQ
7716 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7718 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7720 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7721 base_bit
+ 1, base_bit
+ 1);
7726 /* X is a CR register. Print the number of the EQ bit of the CR */
7727 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7728 output_operand_lossage ("invalid %%E value");
7730 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7734 /* X is a CR register. Print the shift count needed to move it
7735 to the high-order four bits. */
7736 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7737 output_operand_lossage ("invalid %%f value");
7739 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7743 /* Similar, but print the count for the rotate in the opposite
7745 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7746 output_operand_lossage ("invalid %%F value");
7748 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7752 /* X is a constant integer. If it is negative, print "m",
7753 otherwise print "z". This is to make an aze or ame insn. */
7754 if (GET_CODE (x
) != CONST_INT
)
7755 output_operand_lossage ("invalid %%G value");
7756 else if (INTVAL (x
) >= 0)
7763 /* If constant, output low-order five bits. Otherwise, write
7766 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7768 print_operand (file
, x
, 0);
7772 /* If constant, output low-order six bits. Otherwise, write
7775 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7777 print_operand (file
, x
, 0);
7781 /* Print `i' if this is a constant, else nothing. */
7787 /* Write the bit number in CCR for jump. */
7790 output_operand_lossage ("invalid %%j code");
7792 fprintf (file
, "%d", i
);
7796 /* Similar, but add one for shift count in rlinm for scc and pass
7797 scc flag to `ccr_bit'. */
7800 output_operand_lossage ("invalid %%J code");
7802 /* If we want bit 31, write a shift count of zero, not 32. */
7803 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7807 /* X must be a constant. Write the 1's complement of the
7810 output_operand_lossage ("invalid %%k value");
7812 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7816 /* X must be a symbolic constant on ELF. Write an
7817 expression suitable for an 'addi' that adds in the low 16
7819 if (GET_CODE (x
) != CONST
)
7821 print_operand_address (file
, x
);
7826 if (GET_CODE (XEXP (x
, 0)) != PLUS
7827 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7828 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7829 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7830 output_operand_lossage ("invalid %%K value");
7831 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7833 /* For GNU as, there must be a non-alphanumeric character
7834 between 'l' and the number. The '-' is added by
7835 print_operand() already. */
7836 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7838 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7842 /* %l is output_asm_label. */
7845 /* Write second word of DImode or DFmode reference. Works on register
7846 or non-indexed memory only. */
7847 if (GET_CODE (x
) == REG
)
7848 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7849 else if (GET_CODE (x
) == MEM
)
7851 /* Handle possible auto-increment. Since it is pre-increment and
7852 we have already done it, we can just use an offset of word. */
7853 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7854 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7855 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7858 output_address (XEXP (adjust_address_nv (x
, SImode
,
7862 if (small_data_operand (x
, GET_MODE (x
)))
7863 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7864 reg_names
[SMALL_DATA_REG
]);
7869 /* MB value for a mask operand. */
7870 if (! mask_operand (x
, SImode
))
7871 output_operand_lossage ("invalid %%m value");
7873 fprintf (file
, "%d", extract_MB (x
));
7877 /* ME value for a mask operand. */
7878 if (! mask_operand (x
, SImode
))
7879 output_operand_lossage ("invalid %%M value");
7881 fprintf (file
, "%d", extract_ME (x
));
7884 /* %n outputs the negative of its operand. */
7887 /* Write the number of elements in the vector times 4. */
7888 if (GET_CODE (x
) != PARALLEL
)
7889 output_operand_lossage ("invalid %%N value");
7891 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7895 /* Similar, but subtract 1 first. */
7896 if (GET_CODE (x
) != PARALLEL
)
7897 output_operand_lossage ("invalid %%O value");
7899 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7903 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7905 || INT_LOWPART (x
) < 0
7906 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7907 output_operand_lossage ("invalid %%p value");
7909 fprintf (file
, "%d", i
);
7913 /* The operand must be an indirect memory reference. The result
7914 is the register number. */
7915 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7916 || REGNO (XEXP (x
, 0)) >= 32)
7917 output_operand_lossage ("invalid %%P value");
7919 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7923 /* This outputs the logical code corresponding to a boolean
7924 expression. The expression may have one or both operands
7925 negated (if one, only the first one). For condition register
7926 logical operations, it will also treat the negated
7927 CR codes as NOTs, but not handle NOTs of them. */
7929 const char *const *t
= 0;
7931 enum rtx_code code
= GET_CODE (x
);
7932 static const char * const tbl
[3][3] = {
7933 { "and", "andc", "nor" },
7934 { "or", "orc", "nand" },
7935 { "xor", "eqv", "xor" } };
7939 else if (code
== IOR
)
7941 else if (code
== XOR
)
7944 output_operand_lossage ("invalid %%q value");
7946 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7950 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7961 /* X is a CR register. Print the mask for `mtcrf'. */
7962 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7963 output_operand_lossage ("invalid %%R value");
7965 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7969 /* Low 5 bits of 32 - value */
7971 output_operand_lossage ("invalid %%s value");
7973 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7977 /* PowerPC64 mask position. All 0's is excluded.
7978 CONST_INT 32-bit mask is considered sign-extended so any
7979 transition must occur within the CONST_INT, not on the boundary. */
7980 if (! mask64_operand (x
, DImode
))
7981 output_operand_lossage ("invalid %%S value");
7983 uval
= INT_LOWPART (x
);
7985 if (uval
& 1) /* Clear Left */
7987 #if HOST_BITS_PER_WIDE_INT > 64
7988 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
7992 else /* Clear Right */
7995 #if HOST_BITS_PER_WIDE_INT > 64
7996 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8004 fprintf (file
, "%d", i
);
8008 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8009 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
8012 /* Bit 3 is OV bit. */
8013 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
8015 /* If we want bit 31, write a shift count of zero, not 32. */
8016 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8020 /* Print the symbolic name of a branch target register. */
8021 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
8022 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
8023 output_operand_lossage ("invalid %%T value");
8024 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
8025 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
8027 fputs ("ctr", file
);
8031 /* High-order 16 bits of constant for use in unsigned operand. */
8033 output_operand_lossage ("invalid %%u value");
8035 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8036 (INT_LOWPART (x
) >> 16) & 0xffff);
8040 /* High-order 16 bits of constant for use in signed operand. */
8042 output_operand_lossage ("invalid %%v value");
8044 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8045 (INT_LOWPART (x
) >> 16) & 0xffff);
8049 /* Print `u' if this has an auto-increment or auto-decrement. */
8050 if (GET_CODE (x
) == MEM
8051 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
8052 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
8057 /* Print the trap code for this operand. */
8058 switch (GET_CODE (x
))
8061 fputs ("eq", file
); /* 4 */
8064 fputs ("ne", file
); /* 24 */
8067 fputs ("lt", file
); /* 16 */
8070 fputs ("le", file
); /* 20 */
8073 fputs ("gt", file
); /* 8 */
8076 fputs ("ge", file
); /* 12 */
8079 fputs ("llt", file
); /* 2 */
8082 fputs ("lle", file
); /* 6 */
8085 fputs ("lgt", file
); /* 1 */
8088 fputs ("lge", file
); /* 5 */
8096 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8099 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
8100 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
8102 print_operand (file
, x
, 0);
8106 /* MB value for a PowerPC64 rldic operand. */
8107 val
= (GET_CODE (x
) == CONST_INT
8108 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
8113 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
8114 if ((val
<<= 1) < 0)
8117 #if HOST_BITS_PER_WIDE_INT == 32
8118 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
8119 i
+= 32; /* zero-extend high-part was all 0's */
8120 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
8122 val
= CONST_DOUBLE_LOW (x
);
8129 for ( ; i
< 64; i
++)
8130 if ((val
<<= 1) < 0)
8135 fprintf (file
, "%d", i
+ 1);
8139 if (GET_CODE (x
) == MEM
8140 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
8145 /* Like 'L', for third word of TImode */
8146 if (GET_CODE (x
) == REG
)
8147 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
8148 else if (GET_CODE (x
) == MEM
)
8150 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8151 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8152 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
8154 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
8155 if (small_data_operand (x
, GET_MODE (x
)))
8156 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8157 reg_names
[SMALL_DATA_REG
]);
8162 /* X is a SYMBOL_REF. Write out the name preceded by a
8163 period and without any trailing data in brackets. Used for function
8164 names. If we are configured for System V (or the embedded ABI) on
8165 the PowerPC, do not emit the period, since those systems do not use
8166 TOCs and the like. */
8167 if (GET_CODE (x
) != SYMBOL_REF
)
8170 if (XSTR (x
, 0)[0] != '.')
8172 switch (DEFAULT_ABI
)
8182 case ABI_AIX_NODESC
:
8188 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8190 assemble_name (file
, XSTR (x
, 0));
8195 /* Like 'L', for last word of TImode. */
8196 if (GET_CODE (x
) == REG
)
8197 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8198 else if (GET_CODE (x
) == MEM
)
8200 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8201 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8202 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8204 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8205 if (small_data_operand (x
, GET_MODE (x
)))
8206 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8207 reg_names
[SMALL_DATA_REG
]);
8211 /* Print AltiVec or SPE memory operand. */
8216 if (GET_CODE (x
) != MEM
)
8224 if (GET_CODE (tmp
) == REG
)
8226 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8229 /* Handle [reg+UIMM]. */
8230 else if (GET_CODE (tmp
) == PLUS
&&
8231 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8235 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8238 x
= INTVAL (XEXP (tmp
, 1));
8239 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8243 /* Fall through. Must be [reg+reg]. */
8245 if (GET_CODE (tmp
) == REG
)
8246 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8247 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8249 if (REGNO (XEXP (tmp
, 0)) == 0)
8250 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8251 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8253 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8254 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8262 if (GET_CODE (x
) == REG
)
8263 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8264 else if (GET_CODE (x
) == MEM
)
8266 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8267 know the width from the mode. */
8268 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8269 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8270 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8271 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8272 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8273 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8275 output_address (XEXP (x
, 0));
8278 output_addr_const (file
, x
);
8282 output_operand_lossage ("invalid %%xn code");
8286 /* Print the address of an operand. */
8289 print_operand_address (file
, x
)
8293 if (GET_CODE (x
) == REG
)
8294 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8295 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8296 || GET_CODE (x
) == LABEL_REF
)
8298 output_addr_const (file
, x
);
8299 if (small_data_operand (x
, GET_MODE (x
)))
8300 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8301 reg_names
[SMALL_DATA_REG
]);
8302 else if (TARGET_TOC
)
8305 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8307 if (REGNO (XEXP (x
, 0)) == 0)
8308 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8309 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8311 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8312 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8314 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8316 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8317 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8320 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8321 && CONSTANT_P (XEXP (x
, 1)))
8323 output_addr_const (file
, XEXP (x
, 1));
8324 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8328 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8329 && CONSTANT_P (XEXP (x
, 1)))
8331 fprintf (file
, "lo16(");
8332 output_addr_const (file
, XEXP (x
, 1));
8333 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8336 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8338 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8340 rtx contains_minus
= XEXP (x
, 1);
8344 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8345 turn it into (sym) for output_addr_const. */
8346 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8347 contains_minus
= XEXP (contains_minus
, 0);
8349 minus
= XEXP (contains_minus
, 0);
8350 symref
= XEXP (minus
, 0);
8351 XEXP (contains_minus
, 0) = symref
;
8356 name
= XSTR (symref
, 0);
8357 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8358 strcpy (newname
, name
);
8359 strcat (newname
, "@toc");
8360 XSTR (symref
, 0) = newname
;
8362 output_addr_const (file
, XEXP (x
, 1));
8364 XSTR (symref
, 0) = name
;
8365 XEXP (contains_minus
, 0) = minus
;
8368 output_addr_const (file
, XEXP (x
, 1));
8370 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8376 /* Target hook for assembling integer objects. The PowerPC version has
8377 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8378 is defined. It also needs to handle DI-mode objects on 64-bit
8382 rs6000_assemble_integer (x
, size
, aligned_p
)
8387 #ifdef RELOCATABLE_NEEDS_FIXUP
8388 /* Special handling for SI values. */
8389 if (size
== 4 && aligned_p
)
8391 extern int in_toc_section
PARAMS ((void));
8392 static int recurse
= 0;
8394 /* For -mrelocatable, we mark all addresses that need to be fixed up
8395 in the .fixup section. */
8396 if (TARGET_RELOCATABLE
8397 && !in_toc_section ()
8398 && !in_text_section ()
8400 && GET_CODE (x
) != CONST_INT
8401 && GET_CODE (x
) != CONST_DOUBLE
8407 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8409 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8410 fprintf (asm_out_file
, "\t.long\t(");
8411 output_addr_const (asm_out_file
, x
);
8412 fprintf (asm_out_file
, ")@fixup\n");
8413 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8414 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8415 fprintf (asm_out_file
, "\t.long\t");
8416 assemble_name (asm_out_file
, buf
);
8417 fprintf (asm_out_file
, "\n\t.previous\n");
8421 /* Remove initial .'s to turn a -mcall-aixdesc function
8422 address into the address of the descriptor, not the function
8424 else if (GET_CODE (x
) == SYMBOL_REF
8425 && XSTR (x
, 0)[0] == '.'
8426 && DEFAULT_ABI
== ABI_AIX
)
8428 const char *name
= XSTR (x
, 0);
8429 while (*name
== '.')
8432 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8436 #endif /* RELOCATABLE_NEEDS_FIXUP */
8437 return default_assemble_integer (x
, size
, aligned_p
);
8440 #ifdef HAVE_GAS_HIDDEN
8441 /* Emit an assembler directive to set symbol visibility for DECL to
8445 rs6000_assemble_visibility (decl
, vis
)
8449 /* Functions need to have their entry point symbol visibility set as
8450 well as their descriptor symbol visibility. */
8451 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
8453 static const char * const visibility_types
[] = {
8454 NULL
, "internal", "hidden", "protected"
8457 const char *name
, *type
;
8459 name
= ((* targetm
.strip_name_encoding
)
8460 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
8461 type
= visibility_types
[vis
];
8463 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
8464 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
8467 default_assemble_visibility (decl
, vis
);
8472 rs6000_reverse_condition (mode
, code
)
8473 enum machine_mode mode
;
8476 /* Reversal of FP compares takes care -- an ordered compare
8477 becomes an unordered compare and vice versa. */
8478 if (mode
== CCFPmode
&& !flag_unsafe_math_optimizations
)
8479 return reverse_condition_maybe_unordered (code
);
8481 return reverse_condition (code
);
8484 /* Generate a compare for CODE. Return a brand-new rtx that
8485 represents the result of the compare. */
8488 rs6000_generate_compare (code
)
8491 enum machine_mode comp_mode
;
8494 if (rs6000_compare_fp_p
)
8495 comp_mode
= CCFPmode
;
8496 else if (code
== GTU
|| code
== LTU
8497 || code
== GEU
|| code
== LEU
)
8498 comp_mode
= CCUNSmode
;
8502 /* First, the compare. */
8503 compare_result
= gen_reg_rtx (comp_mode
);
8505 /* SPE FP compare instructions on the GPRs. Yuck! */
8506 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
8507 && rs6000_compare_fp_p
)
8509 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8517 cmp
= flag_unsafe_math_optimizations
8518 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8520 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8521 rs6000_compare_op1
);
8529 cmp
= flag_unsafe_math_optimizations
8530 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8532 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8533 rs6000_compare_op1
);
8541 cmp
= flag_unsafe_math_optimizations
8542 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8544 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8545 rs6000_compare_op1
);
8551 /* Synthesize LE and GE from LT/GT || EQ. */
8552 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8554 /* Synthesize GE/LE frome GT/LT || EQ. */
8560 case LE
: code
= LT
; break;
8561 case GE
: code
= GT
; break;
8562 case LEU
: code
= LT
; break;
8563 case GEU
: code
= GT
; break;
8567 or1
= gen_reg_rtx (SImode
);
8568 or2
= gen_reg_rtx (SImode
);
8569 or_result
= gen_reg_rtx (CCEQmode
);
8570 compare_result2
= gen_reg_rtx (CCFPmode
);
8573 cmp
= flag_unsafe_math_optimizations
8574 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8576 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8577 rs6000_compare_op1
);
8580 /* The MC8540 FP compare instructions set the CR bits
8581 differently than other PPC compare instructions. For
8582 that matter, there is no generic test instruction, but a
8583 testgt, testlt, and testeq. For a true condition, bit 2
8584 is set (x1xx) in the CR. Following the traditional CR
8590 ... bit 2 would be a GT CR alias, so later on we
8591 look in the GT bits for the branch instructions.
8592 However, we must be careful to emit correct RTL in
8593 the meantime, so optimizations don't get confused. */
8595 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8596 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8598 /* OR them together. */
8599 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8600 gen_rtx_COMPARE (CCEQmode
,
8601 gen_rtx_IOR (SImode
, or1
, or2
),
8603 compare_result
= or_result
;
8608 /* We only care about 1 bit (x1xx), so map everything to NE to
8609 maintain rtl sanity. We'll get to the right bit (x1xx) at
8610 code output time. */
8611 if (code
== NE
|| code
== LTGT
)
8612 /* Do the inverse here because we have no cmpne
8613 instruction. We use the cmpeq instruction and expect
8614 to get a 0 instead. */
8623 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8624 gen_rtx_COMPARE (comp_mode
,
8626 rs6000_compare_op1
)));
8628 /* Some kinds of FP comparisons need an OR operation;
8629 except for flag_unsafe_math_optimizations we don't bother. */
8630 if (rs6000_compare_fp_p
8631 && ! flag_unsafe_math_optimizations
8632 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
8633 && (code
== LE
|| code
== GE
8634 || code
== UNEQ
|| code
== LTGT
8635 || code
== UNGT
|| code
== UNLT
))
8637 enum rtx_code or1
, or2
;
8638 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8639 rtx or_result
= gen_reg_rtx (CCEQmode
);
8643 case LE
: or1
= LT
; or2
= EQ
; break;
8644 case GE
: or1
= GT
; or2
= EQ
; break;
8645 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8646 case LTGT
: or1
= LT
; or2
= GT
; break;
8647 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8648 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8651 validate_condition_mode (or1
, comp_mode
);
8652 validate_condition_mode (or2
, comp_mode
);
8653 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8654 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8655 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8656 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8658 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8660 compare_result
= or_result
;
8664 validate_condition_mode (code
, GET_MODE (compare_result
));
8666 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8670 /* Emit the RTL for an sCOND pattern. */
8673 rs6000_emit_sCOND (code
, result
)
8678 enum machine_mode op_mode
;
8680 condition_rtx
= rs6000_generate_compare (code
);
8682 op_mode
= GET_MODE (rs6000_compare_op0
);
8683 if (op_mode
== VOIDmode
)
8684 op_mode
= GET_MODE (rs6000_compare_op1
);
8686 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8688 PUT_MODE (condition_rtx
, DImode
);
8689 convert_move (result
, condition_rtx
, 0);
8693 PUT_MODE (condition_rtx
, SImode
);
8694 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8698 /* Emit a branch of kind CODE to location LOC. */
8701 rs6000_emit_cbranch (code
, loc
)
8705 rtx condition_rtx
, loc_ref
;
8707 condition_rtx
= rs6000_generate_compare (code
);
8708 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8709 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8710 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8714 /* Return the string to output a conditional branch to LABEL, which is
8715 the operand number of the label, or -1 if the branch is really a
8718 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8719 condition code register and its mode specifies what kind of
8722 REVERSED is nonzero if we should reverse the sense of the comparison.
8724 INSN is the insn. */
8727 output_cbranch (op
, label
, reversed
, insn
)
8733 static char string
[64];
8734 enum rtx_code code
= GET_CODE (op
);
8735 rtx cc_reg
= XEXP (op
, 0);
8736 enum machine_mode mode
= GET_MODE (cc_reg
);
8737 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8738 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8739 int really_reversed
= reversed
^ need_longbranch
;
8745 validate_condition_mode (code
, mode
);
8747 /* Work out which way this really branches. We could use
8748 reverse_condition_maybe_unordered here always but this
8749 makes the resulting assembler clearer. */
8750 if (really_reversed
)
8752 /* Reversal of FP compares takes care -- an ordered compare
8753 becomes an unordered compare and vice versa. */
8754 if (mode
== CCFPmode
)
8755 code
= reverse_condition_maybe_unordered (code
);
8757 code
= reverse_condition (code
);
8760 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8762 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8765 /* Opposite of GT. */
8767 else if (code
== NE
)
8775 /* Not all of these are actually distinct opcodes, but
8776 we distinguish them for clarity of the resulting assembler. */
8778 ccode
= "ne"; break;
8780 ccode
= "eq"; break;
8782 ccode
= "ge"; break;
8783 case GT
: case GTU
: case UNGT
:
8784 ccode
= "gt"; break;
8786 ccode
= "le"; break;
8787 case LT
: case LTU
: case UNLT
:
8788 ccode
= "lt"; break;
8789 case UNORDERED
: ccode
= "un"; break;
8790 case ORDERED
: ccode
= "nu"; break;
8791 case UNGE
: ccode
= "nl"; break;
8792 case UNLE
: ccode
= "ng"; break;
8797 /* Maybe we have a guess as to how likely the branch is.
8798 The old mnemonics don't have a way to specify this information. */
8800 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8801 if (note
!= NULL_RTX
)
8803 /* PROB is the difference from 50%. */
8804 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8805 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8807 /* Only hint for highly probable/improbable branches on newer
8808 cpus as static prediction overrides processor dynamic
8809 prediction. For older cpus we may as well always hint, but
8810 assume not taken for branches that are very close to 50% as a
8811 mispredicted taken branch is more expensive than a
8812 mispredicted not-taken branch. */
8814 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8816 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8817 && ((prob
> 0) ^ need_longbranch
))
8825 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8827 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8829 /* We need to escape any '%' characters in the reg_names string.
8830 Assume they'd only be the first character... */
8831 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8833 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8837 /* If the branch distance was too far, we may have to use an
8838 unconditional branch to go the distance. */
8839 if (need_longbranch
)
8840 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8842 s
+= sprintf (s
, ",%s", label
);
8848 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8849 operands of the last comparison is nonzero/true, FALSE_COND if it
8850 is zero/false. Return 0 if the hardware has no such operation. */
8853 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8859 enum rtx_code code
= GET_CODE (op
);
8860 rtx op0
= rs6000_compare_op0
;
8861 rtx op1
= rs6000_compare_op1
;
8863 enum machine_mode compare_mode
= GET_MODE (op0
);
8864 enum machine_mode result_mode
= GET_MODE (dest
);
8867 /* These modes should always match. */
8868 if (GET_MODE (op1
) != compare_mode
8869 /* In the isel case however, we can use a compare immediate, so
8870 op1 may be a small constant. */
8871 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8873 if (GET_MODE (true_cond
) != result_mode
)
8875 if (GET_MODE (false_cond
) != result_mode
)
8878 /* First, work out if the hardware can do this at all, or
8879 if it's too slow... */
8880 if (! rs6000_compare_fp_p
)
8883 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8887 /* Eliminate half of the comparisons by switching operands, this
8888 makes the remaining code simpler. */
8889 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8890 || code
== LTGT
|| code
== LT
)
8892 code
= reverse_condition_maybe_unordered (code
);
8894 true_cond
= false_cond
;
8898 /* UNEQ and LTGT take four instructions for a comparison with zero,
8899 it'll probably be faster to use a branch here too. */
8903 if (GET_CODE (op1
) == CONST_DOUBLE
)
8904 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8906 /* We're going to try to implement comparisons by performing
8907 a subtract, then comparing against zero. Unfortunately,
8908 Inf - Inf is NaN which is not zero, and so if we don't
8909 know that the operand is finite and the comparison
8910 would treat EQ different to UNORDERED, we can't do it. */
8911 if (! flag_unsafe_math_optimizations
8912 && code
!= GT
&& code
!= UNGE
8913 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
8914 /* Constructs of the form (a OP b ? a : b) are safe. */
8915 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8916 || (! rtx_equal_p (op0
, true_cond
)
8917 && ! rtx_equal_p (op1
, true_cond
))))
8919 /* At this point we know we can use fsel. */
8921 /* Reduce the comparison to a comparison against zero. */
8922 temp
= gen_reg_rtx (compare_mode
);
8923 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8924 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8926 op1
= CONST0_RTX (compare_mode
);
8928 /* If we don't care about NaNs we can reduce some of the comparisons
8929 down to faster ones. */
8930 if (flag_unsafe_math_optimizations
)
8936 true_cond
= false_cond
;
8949 /* Now, reduce everything down to a GE. */
8956 temp
= gen_reg_rtx (compare_mode
);
8957 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8962 temp
= gen_reg_rtx (compare_mode
);
8963 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8968 temp
= gen_reg_rtx (compare_mode
);
8969 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8970 gen_rtx_NEG (compare_mode
,
8971 gen_rtx_ABS (compare_mode
, op0
))));
8976 temp
= gen_reg_rtx (result_mode
);
8977 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8978 gen_rtx_IF_THEN_ELSE (result_mode
,
8979 gen_rtx_GE (VOIDmode
,
8981 true_cond
, false_cond
)));
8983 true_cond
= false_cond
;
8985 temp
= gen_reg_rtx (compare_mode
);
8986 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8991 temp
= gen_reg_rtx (result_mode
);
8992 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8993 gen_rtx_IF_THEN_ELSE (result_mode
,
8994 gen_rtx_GE (VOIDmode
,
8996 true_cond
, false_cond
)));
8998 false_cond
= true_cond
;
9000 temp
= gen_reg_rtx (compare_mode
);
9001 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9009 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
9010 gen_rtx_IF_THEN_ELSE (result_mode
,
9011 gen_rtx_GE (VOIDmode
,
9013 true_cond
, false_cond
)));
9017 /* Same as above, but for ints (isel). */
9020 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
9026 rtx condition_rtx
, cr
;
9028 /* All isel implementations thus far are 32-bits. */
9029 if (GET_MODE (rs6000_compare_op0
) != SImode
)
9032 /* We still have to do the compare, because isel doesn't do a
9033 compare, it just looks at the CRx bits set by a previous compare
9035 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
9036 cr
= XEXP (condition_rtx
, 0);
9038 if (GET_MODE (cr
) == CCmode
)
9039 emit_insn (gen_isel_signed (dest
, condition_rtx
,
9040 true_cond
, false_cond
, cr
));
9042 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
9043 true_cond
, false_cond
, cr
));
9049 output_isel (operands
)
9054 code
= GET_CODE (operands
[1]);
9055 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
9057 PUT_CODE (operands
[1], reverse_condition (code
));
9058 return "isel %0,%3,%2,%j1";
9061 return "isel %0,%2,%3,%j1";
9065 rs6000_emit_minmax (dest
, code
, op0
, op1
)
9071 enum machine_mode mode
= GET_MODE (op0
);
9075 if (code
== SMAX
|| code
== SMIN
)
9080 if (code
== SMAX
|| code
== UMAX
)
9081 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9084 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9086 if (target
== NULL_RTX
)
9089 emit_move_insn (dest
, target
);
9092 /* This page contains routines that are used to determine what the
9093 function prologue and epilogue code will do and write them out. */
9095 /* Return the first fixed-point register that is required to be
9096 saved. 32 if none. */
9099 first_reg_to_save ()
9103 /* Find lowest numbered live register. */
9104 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
9105 if (regs_ever_live
[first_reg
]
9106 && (! call_used_regs
[first_reg
]
9107 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
9108 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
9109 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
9114 && current_function_uses_pic_offset_table
9115 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9116 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
9122 /* Similar, for FP regs. */
9125 first_fp_reg_to_save ()
9129 /* Find lowest numbered live register. */
9130 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
9131 if (regs_ever_live
[first_reg
])
9137 /* Similar, for AltiVec regs. */
9140 first_altivec_reg_to_save ()
9144 /* Stack frame remains as is unless we are in AltiVec ABI. */
9145 if (! TARGET_ALTIVEC_ABI
)
9146 return LAST_ALTIVEC_REGNO
+ 1;
9148 /* Find lowest numbered live register. */
9149 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9150 if (regs_ever_live
[i
])
9156 /* Return a 32-bit mask of the AltiVec registers we need to set in
9157 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9158 the 32-bit word is 0. */
9161 compute_vrsave_mask ()
9163 unsigned int i
, mask
= 0;
9165 /* First, find out if we use _any_ altivec registers. */
9166 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9167 if (regs_ever_live
[i
])
9168 mask
|= ALTIVEC_REG_BIT (i
);
9173 /* Next, remove the argument registers from the set. These must
9174 be in the VRSAVE mask set by the caller, so we don't need to add
9175 them in again. More importantly, the mask we compute here is
9176 used to generate CLOBBERs in the set_vrsave insn, and we do not
9177 wish the argument registers to die. */
9178 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9179 mask
&= ~ALTIVEC_REG_BIT (i
);
9181 /* Similarly, remove the return value from the set. */
9184 diddle_return_value (is_altivec_return_reg
, &yes
);
9186 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9193 is_altivec_return_reg (reg
, xyes
)
9197 bool *yes
= (bool *) xyes
;
9198 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9203 /* Calculate the stack information for the current function. This is
9204 complicated by having two separate calling sequences, the AIX calling
9205 sequence and the V.4 calling sequence.
9207 AIX (and Darwin/Mac OS X) stack frames look like:
9209 SP----> +---------------------------------------+
9210 | back chain to caller | 0 0
9211 +---------------------------------------+
9212 | saved CR | 4 8 (8-11)
9213 +---------------------------------------+
9215 +---------------------------------------+
9216 | reserved for compilers | 12 24
9217 +---------------------------------------+
9218 | reserved for binders | 16 32
9219 +---------------------------------------+
9220 | saved TOC pointer | 20 40
9221 +---------------------------------------+
9222 | Parameter save area (P) | 24 48
9223 +---------------------------------------+
9224 | Alloca space (A) | 24+P etc.
9225 +---------------------------------------+
9226 | Local variable space (L) | 24+P+A
9227 +---------------------------------------+
9228 | Float/int conversion temporary (X) | 24+P+A+L
9229 +---------------------------------------+
9230 | Save area for AltiVec registers (W) | 24+P+A+L+X
9231 +---------------------------------------+
9232 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9233 +---------------------------------------+
9234 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9235 +---------------------------------------+
9236 	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
9237 +---------------------------------------+
9238 	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9239 +---------------------------------------+
9240 old SP->| back chain to caller's caller |
9241 +---------------------------------------+
9243 The required alignment for AIX configurations is two words (i.e., 8
9247 V.4 stack frames look like:
9249 SP----> +---------------------------------------+
9250 | back chain to caller | 0
9251 +---------------------------------------+
9252 | caller's saved LR | 4
9253 +---------------------------------------+
9254 | Parameter save area (P) | 8
9255 +---------------------------------------+
9256 | Alloca space (A) | 8+P
9257 +---------------------------------------+
9258 | Varargs save area (V) | 8+P+A
9259 +---------------------------------------+
9260 | Local variable space (L) | 8+P+A+V
9261 +---------------------------------------+
9262 | Float/int conversion temporary (X) | 8+P+A+V+L
9263 +---------------------------------------+
9264 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9265 +---------------------------------------+
9266 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9267 +---------------------------------------+
9268 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9269 +---------------------------------------+
9270 | SPE: area for 64-bit GP registers |
9271 +---------------------------------------+
9272 | SPE alignment padding |
9273 +---------------------------------------+
9274 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9275 +---------------------------------------+
9276 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9277 +---------------------------------------+
9278 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9279 +---------------------------------------+
9280 old SP->| back chain to caller's caller |
9281 +---------------------------------------+
9283 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9284 given. (But note below and in sysv4.h that we require only 8 and
9285 may round up the size of our stack frame anyways. The historical
9286 reason is early versions of powerpc-linux which didn't properly
9287 align the stack at program startup. A happy side-effect is that
9288 -mno-eabi libraries can be used with -meabi programs.)
9290 The EABI configuration defaults to the V.4 layout, unless
9291 -mcall-aix is used, in which case the AIX layout is used. However,
9292 the stack alignment requirements may differ. If -mno-eabi is not
9293 given, the required stack alignment is 8 bytes; if -mno-eabi is
9294 given, the required alignment is 16 bytes. (But see V.4 comment
9297 #ifndef ABI_STACK_BOUNDARY
9298 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9302 rs6000_stack_info ()
9304 static rs6000_stack_t info
, zero_info
;
9305 rs6000_stack_t
*info_ptr
= &info
;
9306 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9310 /* Zero all fields portably. */
9315 /* Cache value so we don't rescan instruction chain over and over. */
9316 if (cfun
->machine
->insn_chain_scanned_p
== 0)
9318 cfun
->machine
->insn_chain_scanned_p
= 1;
9319 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
9323 /* Select which calling sequence. */
9324 info_ptr
->abi
= DEFAULT_ABI
;
9326 /* Calculate which registers need to be saved & save area size. */
9327 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9328 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9329 even if it currently looks like we won't. */
9330 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9331 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
9332 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
9333 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9334 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9336 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9338 /* For the SPE, we have an additional upper 32-bits on each GPR.
9339 Ideally we should save the entire 64-bits only when the upper
9340 half is used in SIMD instructions. Since we only record
9341 registers live (not the size they are used in), this proves
9342 difficult because we'd have to traverse the instruction chain at
9343 the right time, taking reload into account. This is a real pain,
9344 so we opt to save the GPRs in 64-bits always if but one register
9345 gets used in 64-bits. Otherwise, all the registers in the frame
9346 get saved in 32-bits.
9348 So... since when we save all GPRs (except the SP) in 64-bits, the
9349 traditional GP save area will be empty. */
9350 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9351 info_ptr
->gp_size
= 0;
9353 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9354 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9356 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9357 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9358 - info_ptr
->first_altivec_reg_save
);
9360 /* Does this function call anything? */
9361 info_ptr
->calls_p
= (! current_function_is_leaf
9362 || cfun
->machine
->ra_needs_full_frame
);
9364 /* Determine if we need to save the link register. */
9365 if (rs6000_ra_ever_killed ()
9366 || (DEFAULT_ABI
== ABI_AIX
9367 && current_function_profile
9368 && !TARGET_PROFILE_KERNEL
)
9369 #ifdef TARGET_RELOCATABLE
9370 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9372 || (info_ptr
->first_fp_reg_save
!= 64
9373 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9374 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9375 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
9376 || (DEFAULT_ABI
== ABI_DARWIN
9378 && current_function_uses_pic_offset_table
)
9379 || info_ptr
->calls_p
)
9381 info_ptr
->lr_save_p
= 1;
9382 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9385 /* Determine if we need to save the condition code registers. */
9386 if (regs_ever_live
[CR2_REGNO
]
9387 || regs_ever_live
[CR3_REGNO
]
9388 || regs_ever_live
[CR4_REGNO
])
9390 info_ptr
->cr_save_p
= 1;
9391 if (DEFAULT_ABI
== ABI_V4
)
9392 info_ptr
->cr_size
= reg_size
;
9395 /* If the current function calls __builtin_eh_return, then we need
9396 to allocate stack space for registers that will hold data for
9397 the exception handler. */
9398 if (current_function_calls_eh_return
)
9401 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9404 /* SPE saves EH registers in 64-bits. */
9405 ehrd_size
= i
* (TARGET_SPE_ABI
9406 && info_ptr
->spe_64bit_regs_used
!= 0
9407 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9412 /* Determine various sizes. */
9413 info_ptr
->reg_size
= reg_size
;
9414 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9415 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9416 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9417 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9420 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9421 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9423 info_ptr
->spe_gp_size
= 0;
9425 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9427 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9428 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9432 info_ptr
->vrsave_mask
= 0;
9433 info_ptr
->vrsave_size
= 0;
9436 /* Calculate the offsets. */
9437 switch (DEFAULT_ABI
)
9444 case ABI_AIX_NODESC
:
9446 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9447 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9449 if (TARGET_ALTIVEC_ABI
)
9451 info_ptr
->vrsave_save_offset
9452 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9454 /* Align stack so vector save area is on a quadword boundary. */
9455 if (info_ptr
->altivec_size
!= 0)
9456 info_ptr
->altivec_padding_size
9457 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9459 info_ptr
->altivec_padding_size
= 0;
9461 info_ptr
->altivec_save_offset
9462 = info_ptr
->vrsave_save_offset
9463 - info_ptr
->altivec_padding_size
9464 - info_ptr
->altivec_size
;
9466 /* Adjust for AltiVec case. */
9467 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9470 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9471 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9472 info_ptr
->lr_save_offset
= 2*reg_size
;
9476 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9477 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9478 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9480 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9482 /* Align stack so SPE GPR save area is aligned on a
9483 double-word boundary. */
9484 if (info_ptr
->spe_gp_size
!= 0)
9485 info_ptr
->spe_padding_size
9486 = 8 - (-info_ptr
->cr_save_offset
% 8);
9488 info_ptr
->spe_padding_size
= 0;
9490 info_ptr
->spe_gp_save_offset
9491 = info_ptr
->cr_save_offset
9492 - info_ptr
->spe_padding_size
9493 - info_ptr
->spe_gp_size
;
9495 /* Adjust for SPE case. */
9496 info_ptr
->toc_save_offset
9497 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9499 else if (TARGET_ALTIVEC_ABI
)
9501 info_ptr
->vrsave_save_offset
9502 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9504 /* Align stack so vector save area is on a quadword boundary. */
9505 if (info_ptr
->altivec_size
!= 0)
9506 info_ptr
->altivec_padding_size
9507 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9509 info_ptr
->altivec_padding_size
= 0;
9511 info_ptr
->altivec_save_offset
9512 = info_ptr
->vrsave_save_offset
9513 - info_ptr
->altivec_padding_size
9514 - info_ptr
->altivec_size
;
9516 /* Adjust for AltiVec case. */
9517 info_ptr
->toc_save_offset
9518 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9521 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9522 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9523 info_ptr
->lr_save_offset
= reg_size
;
9527 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9529 + info_ptr
->altivec_size
9530 + info_ptr
->altivec_padding_size
9531 + info_ptr
->vrsave_size
9532 + info_ptr
->spe_gp_size
9533 + info_ptr
->spe_padding_size
9537 + info_ptr
->vrsave_size
9538 + info_ptr
->toc_size
,
9539 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9542 total_raw_size
= (info_ptr
->vars_size
9543 + info_ptr
->parm_size
9544 + info_ptr
->save_size
9545 + info_ptr
->varargs_size
9546 + info_ptr
->fixed_size
);
9548 info_ptr
->total_size
=
9549 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9551 /* Determine if we need to allocate any stack frame:
9553 For AIX we need to push the stack if a frame pointer is needed
9554 (because the stack might be dynamically adjusted), if we are
9555 debugging, if we make calls, or if the sum of fp_save, gp_save,
9556 and local variables are more than the space needed to save all
9557 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9558 + 18*8 = 288 (GPR13 reserved).
9560 For V.4 we don't have the stack cushion that AIX uses, but assume
9561 that the debugger can handle stackless frames. */
9563 if (info_ptr
->calls_p
)
9564 info_ptr
->push_p
= 1;
9566 else if (DEFAULT_ABI
== ABI_V4
)
9567 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9569 else if (frame_pointer_needed
)
9570 info_ptr
->push_p
= 1;
9572 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
9573 info_ptr
->push_p
= 1;
9577 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
9579 /* Zero offsets if we're not saving those registers. */
9580 if (info_ptr
->fp_size
== 0)
9581 info_ptr
->fp_save_offset
= 0;
9583 if (info_ptr
->gp_size
== 0)
9584 info_ptr
->gp_save_offset
= 0;
9586 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9587 info_ptr
->altivec_save_offset
= 0;
9589 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9590 info_ptr
->vrsave_save_offset
= 0;
9592 if (! TARGET_SPE_ABI
9593 || info_ptr
->spe_64bit_regs_used
== 0
9594 || info_ptr
->spe_gp_size
== 0)
9595 info_ptr
->spe_gp_save_offset
= 0;
9597 if (! info_ptr
->lr_save_p
)
9598 info_ptr
->lr_save_offset
= 0;
9600 if (! info_ptr
->cr_save_p
)
9601 info_ptr
->cr_save_offset
= 0;
9603 if (! info_ptr
->toc_save_p
)
9604 info_ptr
->toc_save_offset
= 0;
9609 /* Return true if the current function uses any GPRs in 64-bit SIMD
9613 spe_func_has_64bit_regs_p ()
9617 /* Functions that save and restore all the call-saved registers will
9618 need to save/restore the registers in 64-bits. */
9619 if (current_function_calls_eh_return
9620 || current_function_calls_setjmp
9621 || current_function_has_nonlocal_goto
)
9624 insns
= get_insns ();
9626 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9633 if (GET_CODE (i
) == SET
9634 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
9643 debug_stack_info (info
)
9644 rs6000_stack_t
*info
;
9646 const char *abi_string
;
9649 info
= rs6000_stack_info ();
9651 fprintf (stderr
, "\nStack information for function %s:\n",
9652 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9653 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9658 default: abi_string
= "Unknown"; break;
9659 case ABI_NONE
: abi_string
= "NONE"; break;
9661 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9662 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9663 case ABI_V4
: abi_string
= "V.4"; break;
9666 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9668 if (TARGET_ALTIVEC_ABI
)
9669 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9672 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9674 if (info
->first_gp_reg_save
!= 32)
9675 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9677 if (info
->first_fp_reg_save
!= 64)
9678 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9680 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9681 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9682 info
->first_altivec_reg_save
);
9684 if (info
->lr_save_p
)
9685 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9687 if (info
->cr_save_p
)
9688 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9690 if (info
->toc_save_p
)
9691 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9693 if (info
->vrsave_mask
)
9694 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9697 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9700 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9702 if (info
->gp_save_offset
)
9703 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9705 if (info
->fp_save_offset
)
9706 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9708 if (info
->altivec_save_offset
)
9709 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9710 info
->altivec_save_offset
);
9712 if (info
->spe_gp_save_offset
)
9713 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9714 info
->spe_gp_save_offset
);
9716 if (info
->vrsave_save_offset
)
9717 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9718 info
->vrsave_save_offset
);
9720 if (info
->lr_save_offset
)
9721 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9723 if (info
->cr_save_offset
)
9724 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9726 if (info
->toc_save_offset
)
9727 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9729 if (info
->varargs_save_offset
)
9730 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9732 if (info
->total_size
)
9733 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9735 if (info
->varargs_size
)
9736 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9738 if (info
->vars_size
)
9739 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9741 if (info
->parm_size
)
9742 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9744 if (info
->fixed_size
)
9745 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9748 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9750 if (info
->spe_gp_size
)
9751 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9754 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9756 if (info
->altivec_size
)
9757 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9759 if (info
->vrsave_size
)
9760 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9762 if (info
->altivec_padding_size
)
9763 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9764 info
->altivec_padding_size
);
9766 if (info
->spe_padding_size
)
9767 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9768 info
->spe_padding_size
);
9771 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9774 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9777 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9779 if (info
->save_size
)
9780 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9782 if (info
->reg_size
!= 4)
9783 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9785 fprintf (stderr
, "\n");
9789 rs6000_return_addr (count
, frame
)
9793 /* Currently we don't optimize very well between prolog and body
9794 code and for PIC code the code can be actually quite bad, so
9795 don't try to be too clever here. */
9796 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
9798 cfun
->machine
->ra_needs_full_frame
= 1;
9805 plus_constant (copy_to_reg
9806 (gen_rtx_MEM (Pmode
,
9807 memory_address (Pmode
, frame
))),
9808 RETURN_ADDRESS_OFFSET
)));
9811 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9814 /* Say whether a function is a candidate for sibcall handling or not.
9815 We do not allow indirect calls to be optimized into sibling calls.
9816 Also, we can't do it if there are any vector parameters; there's
9817 nowhere to put the VRsave code so it works; note that functions with
9818 vector parameters are required to have a prototype, so the argument
9819 type info must be available here. (The tail recursion case can work
9820 with vector parameters, but there's no way to distinguish here.) */
/* DECL is the called function's FUNCTION_DECL; EXP is unused here.
   NOTE(review): lossy extraction -- the return statements and several
   conditions of this predicate are missing from the visible fragments.  */
9822 rs6000_function_ok_for_sibcall (decl
, exp
)
9824 tree exp ATTRIBUTE_UNUSED
;
/* Reject sibcalls when any argument is a vector type (VRsave issue
   described above) and VRsave handling is enabled.  */
9829 if (TARGET_ALTIVEC_VRSAVE
)
9831 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
9832 type
; type
= TREE_CHAIN (type
))
9834 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
/* On Darwin, or when the target binds locally, the call stays direct;
   check the longcall/shortcall attributes before allowing the sibcall.  */
9838 if (DEFAULT_ABI
== ABI_DARWIN
9839 || (*targetm
.binds_local_p
) (decl
))
9841 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
9843 if (!lookup_attribute ("longcall", attr_list
)
9844 || lookup_attribute ("shortcall", attr_list
))
/* rs6000_ra_ever_killed: scan the function's insns to decide whether the
   link register is ever clobbered outside of the prologue/epilogue (and
   outside of sibcalls, which only appear to clobber it).
   NOTE(review): lossy extraction -- the declarations and return
   statements of this function are missing from the visible fragments.  */
9852 rs6000_ra_ever_killed ()
9858 /* Irritatingly, there are two kinds of thunks -- those created with
9859 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9860 through the regular part of the compiler. This is a very hacky
9861 way to tell them apart. */
9862 if (current_function_is_thunk
&& !no_new_pseudos
)
9865 /* regs_ever_live has LR marked as used if any sibcalls are present,
9866 but this should not force saving and restoring in the
9867 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9868 clobbers LR, so that is inappropriate. */
9870 /* Also, the prologue can generate a store into LR that
9871 doesn't really count, like this:
9874 bcl to set PIC register
9878 When we're called from the epilogue, we need to avoid counting
/* Scan the topmost insn sequence so we see the whole function even if
   called while a nested sequence is open.  */
9881 push_topmost_sequence ();
9883 pop_topmost_sequence ();
9884 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9886 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
/* Auto-inc notes on LR, non-sibling calls, and explicit sets of LR
   outside the prologue/epilogue all count as "killed".  */
9890 if (FIND_REG_INC_NOTE (insn
, reg
))
9892 else if (GET_CODE (insn
) == CALL_INSN
9893 && !SIBLING_CALL_P (insn
))
9895 else if (set_of (reg
, insn
) != NULL_RTX
9896 && !prologue_epilogue_contains (insn
))
9903 /* Add a REG_MAYBE_DEAD note to the insn. */
/* Marks INSN so flow may delete it if its result turns out unused
   (used for PIC/TOC setup insns emitted speculatively).
   NOTE(review): the tail of this helper is missing from the extraction.  */
9905 rs6000_maybe_dead (insn
)
9908 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9913 /* Emit instructions needed to load the TOC register.
9914 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9915 a constant pool; or for SVR4 -fpic. */
/* FROMPROLOG is nonzero when called while emitting the prologue, in
   which case fixed hard registers (LR, r0) are used instead of fresh
   pseudos.  Each ABI/PIC combination below emits a different TOC-load
   sequence; insns are wrapped in rs6000_maybe_dead so flow can delete
   them if the TOC pointer ends up unused.  */
9918 rs6000_emit_load_toc_table (fromprolog
)
9922 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
/* Case 1: SVR4 -fpic (small PIC).  */
9924 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9926 rtx temp
= (fromprolog
9927 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9928 : gen_reg_rtx (Pmode
));
9929 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
9931 rs6000_maybe_dead (insn
);
9932 insn
= emit_move_insn (dest
, temp
);
9934 rs6000_maybe_dead (insn
);
/* Case 2: ELF -fPIC (large PIC), non-AIX.  */
9936 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
9939 rtx tempLR
= (fromprolog
9940 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9941 : gen_reg_rtx (Pmode
));
9942 rtx temp0
= (fromprolog
9943 ? gen_rtx_REG (Pmode
, 0)
9944 : gen_reg_rtx (Pmode
));
9947 /* possibly create the toc section */
9948 if (! toc_initialized
)
9951 function_section (current_function_decl
);
/* From the prologue we can use the LCF/LCL label pair for this
   function's PIC base...  */
9958 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9959 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9961 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9962 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9964 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9966 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9967 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
/* ...otherwise make up a fresh LCG label against the TOC base.  */
9974 static int reload_toc_labelno
= 0;
9976 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9978 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9979 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9981 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
9982 emit_move_insn (dest
, tempLR
);
9983 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
9985 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
9987 rs6000_maybe_dead (insn
);
/* Case 3: AIX-style code in non-PIC ELF32 with -mminimal-toc:
   materialize the LCTOC1 address with high/low relocations.  */
9989 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9991 /* This is for AIX code running in non-PIC ELF32. */
9994 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9995 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9997 insn
= emit_insn (gen_elf_high (dest
, realsym
));
9999 rs6000_maybe_dead (insn
);
10000 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
10002 rs6000_maybe_dead (insn
);
/* Case 4: AIX proper -- a single load_toc_aix_{si,di} pattern.  */
10004 else if (DEFAULT_ABI
== ABI_AIX
)
10007 insn
= emit_insn (gen_load_toc_aix_si (dest
));
10009 insn
= emit_insn (gen_load_toc_aix_di (dest
));
10011 rs6000_maybe_dead (insn
);
/* get_TOC_alias_set: return the alias set used for TOC references,
   creating it lazily on first use (cached in a local static).  */
10018 get_TOC_alias_set ()
10020 static int set
= -1;
10022 set
= new_alias_set ();
10026 /* This returns nonzero if the current function uses the TOC. This is
10027 determined by the presence of (unspec ... 7), which is generated by
10028 the various load_toc_* patterns. */
/* Body fragment of the TOC-usage predicate described in the comment
   above: scan every insn for an (unspec ... 7) inside a PARALLEL,
   which the load_toc_* patterns generate.
   NOTE(review): the function's signature and return statements are
   missing from this extraction.  */
10035 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
10038 rtx pat
= PATTERN (insn
);
10041 if (GET_CODE (pat
) == PARALLEL
)
10042 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
10043 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
10044 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
/* create_TOC_reference: build the RTL (toc_reg + (const (symbol -
   toc_label))) used to address SYMBOL through the TOC.  */
10051 create_TOC_reference (symbol
)
10054 return gen_rtx_PLUS (Pmode
,
10055 gen_rtx_REG (Pmode
, TOC_REGISTER
),
10056 gen_rtx_CONST (Pmode
,
10057 gen_rtx_MINUS (Pmode
, symbol
,
10058 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
10062 /* __throw will restore its own return address to be the same as the
10063 return address of the function that the throw is being made to.
10064 This is unfortunate, because we want to check the original
10065 return address to see if we need to restore the TOC.
10066 So we have to squirrel it away here.
10067 This is used only in compiling __throw and __rethrow.
10069 Most of this code should be removed by CSE. */
/* Holds the opcode at the caller's return address, captured by
   rs6000_aix_emit_builtin_unwind_init below.  */
10070 static rtx insn_after_throw
;
10072 /* This does the saving... */
/* Capture, at runtime, the instruction located two words past the
   caller's frame back-chain (the insn after the call), storing it in
   insn_after_throw for later comparison by rs6000_emit_eh_toc_restore.  */
10074 rs6000_aix_emit_builtin_unwind_init ()
10077 rtx stack_top
= gen_reg_rtx (Pmode
);
10078 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10080 insn_after_throw
= gen_reg_rtx (SImode
);
/* Load the caller's frame pointer (stack back-chain).  */
10082 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10083 emit_move_insn (stack_top
, mem
);
/* The saved return address lives 2 words into the caller's frame.  */
10085 mem
= gen_rtx_MEM (Pmode
,
10086 gen_rtx_PLUS (Pmode
, stack_top
,
10087 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10088 emit_move_insn (opcode_addr
, mem
);
10089 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
10092 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10093 in _eh.o). Only used on AIX.
10095 The idea is that on AIX, function calls look like this:
10096 bl somefunction-trampoline
10100 somefunction-trampoline:
10102 ... load function address in the count register ...
10104 or like this, if the linker determines that this is not a cross-module call
10105 and so the TOC need not be restored:
10108 or like this, if the compiler could determine that this is not a
10111 now, the tricky bit here is that register 2 is saved and restored
10112 by the _linker_, so we can't readily generate debugging information
10113 for it. So we need to go back up the call chain looking at the
10114 insns at return addresses to see which calls saved the TOC register
10115 and so see where it gets restored from.
10117 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10118 just before the actual epilogue.
10120 On the bright side, this incurs no space or time overhead unless an
10121 exception is thrown, except for the extra code in libgcc.a.
10123 The parameter STACKSIZE is a register containing (at runtime)
10124 the amount to be popped off the stack in addition to the stack frame
10125 of this routine (which will be __throw or __rethrow, and so is
10126 guaranteed to have a stack frame). */
10129 rs6000_emit_eh_toc_restore (stacksize
)
10133 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
10134 rtx tocompare
= gen_reg_rtx (SImode
);
10135 rtx opcode
= gen_reg_rtx (SImode
);
10136 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10138 rtx loop_start
= gen_label_rtx ();
10139 rtx no_toc_restore_needed
= gen_label_rtx ();
10140 rtx loop_exit
= gen_label_rtx ();
/* Start the walk one frame up, at our caller's back-chain.  */
10142 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10143 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10144 emit_move_insn (bottom_of_stack
, mem
);
10146 top_of_stack
= expand_binop (Pmode
, add_optab
,
10147 bottom_of_stack
, stacksize
,
10148 NULL_RTX
, 1, OPTAB_WIDEN
);
/* The opcode of the "restore r2" insn emitted after a cross-module
   call: lwz r2,20(r1) on 32-bit, ld r2,40(r1) on 64-bit.  */
10150 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
10151 : 0xE8410028, SImode
));
10153 if (insn_after_throw
== NULL_RTX
)
10155 emit_move_insn (opcode
, insn_after_throw
);
/* Loop up the call chain, restoring r2 whenever the insn at a return
   address is the TOC-restore opcode, until top_of_stack is reached.  */
10157 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
10158 emit_label (loop_start
);
10160 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
10161 SImode
, NULL_RTX
, NULL_RTX
,
10162 no_toc_restore_needed
);
/* TOC save slot is 5 words into the frame on this path.  */
10164 mem
= gen_rtx_MEM (Pmode
,
10165 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10166 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
10167 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
10169 emit_label (no_toc_restore_needed
);
10170 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
10171 Pmode
, NULL_RTX
, NULL_RTX
,
/* Advance to the next frame and fetch the opcode at its return
   address for the next iteration.  */
10174 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
10175 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10176 emit_move_insn (bottom_of_stack
, mem
);
10178 mem
= gen_rtx_MEM (Pmode
,
10179 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10180 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10181 emit_move_insn (opcode_addr
, mem
);
10182 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
10184 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
10185 emit_jump (loop_start
);
10186 emit_note (NULL
, NOTE_INSN_LOOP_END
);
10187 emit_label (loop_exit
);
10189 #endif /* TARGET_AIX */
10191 /* This ties together stack memory (MEM with an alias set of
10192 rs6000_sr_alias_set) and the change to the stack pointer. */
/* Emits a blockage (stack_tie) insn so the scheduler cannot move
   frame-memory accesses across a stack-pointer update.  */
10195 rs6000_emit_stack_tie ()
10197 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
10199 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10200 emit_insn (gen_stack_tie (mem
));
10203 /* Emit the correct code for allocating stack space, as insns.
10204 If COPY_R12, make sure a copy of the old frame is left in r12.
10205 The generated code may use hard register 0 as a temporary. */
/* SIZE is the number of bytes to allocate (subtracted from r1).  Also
   emits a -fstack-limit check (trap) when requested, and marks the
   final SP update RTX_FRAME_RELATED for unwind info.  */
10208 rs6000_emit_allocate_stack (size
, copy_r12
)
10209 HOST_WIDE_INT size
;
10213 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10214 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
10215 rtx todec
= GEN_INT (-size
);
/* -fstack-limit: trap if the new SP would fall below the limit.  */
10217 if (current_function_limit_stack
)
10219 if (REG_P (stack_limit_rtx
)
10220 && REGNO (stack_limit_rtx
) > 1
10221 && REGNO (stack_limit_rtx
) <= 31)
10223 emit_insn (TARGET_32BIT
10224 ? gen_addsi3 (tmp_reg
,
10227 : gen_adddi3 (tmp_reg
,
10231 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10234 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10236 && DEFAULT_ABI
== ABI_V4
)
/* Symbolic limit: materialize symbol+size with high/low relocs.  */
10238 rtx toload
= gen_rtx_CONST (VOIDmode
,
10239 gen_rtx_PLUS (Pmode
,
10243 emit_insn (gen_elf_high (tmp_reg
, toload
));
10244 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10245 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10249 warning ("stack limit expression is not supported");
10252 if (copy_r12
|| ! TARGET_UPDATE
)
10253 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10259 /* Need a note here so that try_split doesn't get confused. */
10260 if (get_last_insn() == NULL_RTX
)
10261 emit_note (0, NOTE_INSN_DELETED
);
10262 insn
= emit_move_insn (tmp_reg
, todec
);
10263 try_split (PATTERN (insn
), insn
, 0);
/* With -mupdate, use a single store-with-update (stwu/stdu) that both
   decrements SP and writes the back-chain...  */
10267 insn
= emit_insn (TARGET_32BIT
10268 ? gen_movsi_update (stack_reg
, stack_reg
,
10270 : gen_movdi_update (stack_reg
, stack_reg
,
10271 todec
, stack_reg
));
/* ...otherwise decrement SP and store the old SP (in r12) separately.  */
10275 insn
= emit_insn (TARGET_32BIT
10276 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10277 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10278 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10279 gen_rtx_REG (Pmode
, 12));
/* Describe the SP adjustment for the DWARF unwinder.  */
10282 RTX_FRAME_RELATED_P (insn
) = 1;
10284 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10285 gen_rtx_SET (VOIDmode
, stack_reg
,
10286 gen_rtx_PLUS (Pmode
, stack_reg
,
10291 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10292 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10293 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10294 deduce these equivalences by itself so it wasn't necessary to hold
10295 its hand so much. */
10298 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10307 /* copy_rtx will not make unique copies of registers, so we need to
10308 ensure we don't have unwanted sharing here. */
10310 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10313 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10315 real
= copy_rtx (PATTERN (insn
));
10317 if (reg2
!= NULL_RTX
)
10318 real
= replace_rtx (real
, reg2
, rreg
);
/* Rewrite REG as sp + VAL so the unwinder sees a frame-relative form.  */
10320 real
= replace_rtx (real
, reg
,
10321 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10322 STACK_POINTER_REGNUM
),
10325 /* We expect that 'real' is either a SET or a PARALLEL containing
10326 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10327 are important so they all have to be marked RTX_FRAME_RELATED_P. */
/* Single SET: simplify src, dest, and any MEM address in the dest.  */
10329 if (GET_CODE (real
) == SET
)
10333 temp
= simplify_rtx (SET_SRC (set
));
10335 SET_SRC (set
) = temp
;
10336 temp
= simplify_rtx (SET_DEST (set
));
10338 SET_DEST (set
) = temp
;
10339 if (GET_CODE (SET_DEST (set
)) == MEM
)
10341 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10343 XEXP (SET_DEST (set
), 0) = temp
;
/* PARALLEL: do the same for every SET element, and mark each one.  */
10346 else if (GET_CODE (real
) == PARALLEL
)
10349 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10350 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10352 rtx set
= XVECEXP (real
, 0, i
);
10354 temp
= simplify_rtx (SET_SRC (set
));
10356 SET_SRC (set
) = temp
;
10357 temp
= simplify_rtx (SET_DEST (set
));
10359 SET_DEST (set
) = temp
;
10360 if (GET_CODE (SET_DEST (set
)) == MEM
)
10362 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10364 XEXP (SET_DEST (set
), 0) = temp
;
10366 RTX_FRAME_RELATED_P (set
) = 1;
/* SPE 64-bit saves need the synthetic-register note (see below).  */
10373 real
= spe_synthesize_frame_save (real
);
10375 RTX_FRAME_RELATED_P (insn
) = 1;
10376 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10381 /* Given an SPE frame note, return a PARALLEL of SETs with the
10382 original note, plus a synthetic register save. */
/* REAL is the candidate frame note.  Non-V2SImode SETs (and malformed
   ones) are returned unchanged; otherwise the 64-bit save is split into
   two SImode SETs, the second using a synthetic regno (n + 1200).  */
10385 spe_synthesize_frame_save (real
)
10388 rtx synth
, offset
, reg
, real2
;
10390 if (GET_CODE (real
) != SET
10391 || GET_MODE (SET_SRC (real
)) != V2SImode
)
10394 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10395 frame related note. The parallel contains a set of the register
10396 being saved, and another set to a synthetic register (n+1200).
10397 This is so we can differentiate between 64-bit and 32-bit saves.
10398 Words cannot describe this nastiness. */
10400 if (GET_CODE (SET_DEST (real
)) != MEM
10401 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
10402 || GET_CODE (SET_SRC (real
)) != REG
)
10406 (set (mem (plus (reg x) (const y)))
10409 (set (mem (plus (reg x) (const y+4)))
/* Build the SImode copy of the original note.  */
10413 real2
= copy_rtx (real
);
10414 PUT_MODE (SET_DEST (real2
), SImode
);
10415 reg
= SET_SRC (real2
);
10416 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
10417 synth
= copy_rtx (real2
);
/* On big-endian, the low word of the 64-bit slot is at offset +4.  */
10419 if (BYTES_BIG_ENDIAN
)
10421 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
10422 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
/* The synthetic half: same slot, register number shifted by 1200.  */
10425 reg
= SET_SRC (synth
);
10427 synth
= replace_rtx (synth
, reg
,
10428 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
10430 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
10431 synth
= replace_rtx (synth
, offset
,
10432 GEN_INT (INTVAL (offset
)
10433 + (BYTES_BIG_ENDIAN
? 0 : 4)));
10435 RTX_FRAME_RELATED_P (synth
) = 1;
10436 RTX_FRAME_RELATED_P (real2
) = 1;
10437 if (BYTES_BIG_ENDIAN
)
10438 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
10440 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
10445 /* Returns an insn that has a vrsave set operation with the
10446 appropriate CLOBBERs. */
/* REG holds the new VRSAVE value; INFO describes the frame; EPILOGUEP
   is nonzero when called from the epilogue (call-saved AltiVec regs
   then get an unspec use/set instead of a plain clobber -- see the
   nonlocal-goto comment below).  */
10449 generate_set_vrsave (reg
, info
, epiloguep
)
10451 rs6000_stack_t
*info
;
10455 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10456 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10459 = gen_rtx_SET (VOIDmode
,
10461 gen_rtx_UNSPEC_VOLATILE (SImode
,
10462 gen_rtvec (2, reg
, vrsave
),
10467 /* We need to clobber the registers in the mask so the scheduler
10468 does not move sets to VRSAVE before sets of AltiVec registers.
10470 However, if the function receives nonlocal gotos, reload will set
10471 all call saved registers live. We will end up with:
10473 (set (reg 999) (mem))
10474 (parallel [ (set (reg vrsave) (unspec blah))
10475 (clobber (reg 999))])
10477 The clobber will cause the store into reg 999 to be dead, and
10478 flow will attempt to delete an epilogue insn. In this case, we
10479 need an unspec use/set of the register. */
10481 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10482 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10484 if (!epiloguep
|| call_used_regs
[i
])
10485 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10486 gen_rtx_REG (V4SImode
, i
));
/* Epilogue, call-saved reg: unspec use/set instead of a clobber.  */
10489 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10492 = gen_rtx_SET (VOIDmode
,
10494 gen_rtx_UNSPEC (V4SImode
,
10495 gen_rtvec (1, reg
), 27));
/* Bundle the VRSAVE set and all clobbers into one PARALLEL insn.  */
10499 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10501 for (i
= 0; i
< nclobs
; ++i
)
10502 XVECEXP (insn
, 0, i
) = clobs
[i
];
10507 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10508 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
/* TOTAL_SIZE is the full frame size, passed through to
   rs6000_frame_related so the unwind note is expressed relative to the
   stack pointer.  */
10511 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10514 enum machine_mode mode
;
10515 unsigned int regno
;
10519 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10520 rtx replacea
, replaceb
;
10522 int_rtx
= GEN_INT (offset
);
10524 /* Some cases that need register indexed addressing. */
/* AltiVec saves, and SPE saves whose offset does not fit the encoding,
   must use [reg+reg] addressing with the offset loaded into r11.  */
10525 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10527 && SPE_VECTOR_MODE (mode
)
10528 && !SPE_CONST_OFFSET_OK (offset
)))
10530 /* Whoever calls us must make sure r11 is available in the
10531 flow path of instructions in the prologue. */
10532 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10533 emit_move_insn (offset_rtx
, int_rtx
);
10535 replacea
= offset_rtx
;
10536 replaceb
= int_rtx
;
10540 offset_rtx
= int_rtx
;
10541 replacea
= NULL_RTX
;
10542 replaceb
= NULL_RTX
;
/* Emit the actual store and attach the unwind note.  */
10545 reg
= gen_rtx_REG (mode
, regno
);
10546 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10547 mem
= gen_rtx_MEM (mode
, addr
);
10548 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10550 insn
= emit_move_insn (mem
, reg
);
10552 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10555 /* Emit an offset memory reference suitable for a frame store, while
10556 converting to a valid addressing mode. */
/* Returns (mem:MODE (plus REG offset)); for SPE vector modes the
   offset is first loaded into FIXED_SCRATCH since the constant form
   may not encode.  */
10559 gen_frame_mem_offset (mode
, reg
, offset
)
10560 enum machine_mode mode
;
10564 rtx int_rtx
, offset_rtx
;
10566 int_rtx
= GEN_INT (offset
);
10568 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10570 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10571 emit_move_insn (offset_rtx
, int_rtx
);
10574 offset_rtx
= int_rtx
;
10576 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10579 /* Emit function prologue as insns. */
/* Emits the full RTL prologue: stack allocation, AltiVec/VRSAVE/FPR/
   GPR/LR/CR saves, EH data-register saves, frame pointer setup, and
   TOC/PIC register initialization.  Ordering differs between the V.4
   ABI (stack adjusted first) and the others (stack adjusted after the
   saves).  NOTE(review): lossy extraction -- some original lines are
   missing between the numbered fragments below.  */
10582 rs6000_emit_prologue ()
10584 rs6000_stack_t
*info
= rs6000_stack_info ();
10585 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10586 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10587 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10588 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10589 rtx frame_reg_rtx
= sp_reg_rtx
;
10590 rtx cr_save_rtx
= NULL
;
10592 int saving_FPRs_inline
;
10593 int using_store_multiple
;
10594 HOST_WIDE_INT sp_offset
= 0;
/* SPE with 64-bit registers in use: saves are done in V2SImode.  */
10596 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10598 reg_mode
= V2SImode
;
10602 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10603 && (!TARGET_SPE_ABI
10604 || info
->spe_64bit_regs_used
== 0)
10605 && info
->first_gp_reg_save
< 31);
10606 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10607 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10609 /* For V.4, update stack before we do any saving and set back pointer. */
10610 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10612 if (info
->total_size
< 32767)
10613 sp_offset
= info
->total_size
;
10615 frame_reg_rtx
= frame_ptr_rtx
;
10616 rs6000_emit_allocate_stack (info
->total_size
,
10617 (frame_reg_rtx
!= sp_reg_rtx
10618 && (info
->cr_save_p
10620 || info
->first_fp_reg_save
< 64
10621 || info
->first_gp_reg_save
< 32
10623 if (frame_reg_rtx
!= sp_reg_rtx
)
10624 rs6000_emit_stack_tie ();
10627 /* Save AltiVec registers if needed. */
10628 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10632 /* There should be a non inline version of this, for when we
10633 are saving lots of vector registers. */
10634 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10635 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10637 rtx areg
, savereg
, mem
;
10640 offset
= info
->altivec_save_offset
+ sp_offset
10641 + 16 * (i
- info
->first_altivec_reg_save
);
10643 savereg
= gen_rtx_REG (V4SImode
, i
);
10645 areg
= gen_rtx_REG (Pmode
, 0);
10646 emit_move_insn (areg
, GEN_INT (offset
));
10648 /* AltiVec addressing mode is [reg+reg]. */
10649 mem
= gen_rtx_MEM (V4SImode
,
10650 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10652 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10654 insn
= emit_move_insn (mem
, savereg
);
10656 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10657 areg
, GEN_INT (offset
));
10661 /* VRSAVE is a bit vector representing which AltiVec registers
10662 are used. The OS uses this to determine which vector
10663 registers to save on a context switch. We need to save
10664 VRSAVE on the stack frame, add whatever AltiVec registers we
10665 used in this function, and do the corresponding magic in the
10668 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10670 rtx reg
, mem
, vrsave
;
10673 /* Get VRSAVE onto a GPR. */
10674 reg
= gen_rtx_REG (SImode
, 12);
10675 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10677 emit_insn (gen_get_vrsave_internal (reg
));
10679 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
/* Save the old VRSAVE value to the frame.  */
10682 offset
= info
->vrsave_save_offset
+ sp_offset
;
10684 = gen_rtx_MEM (SImode
,
10685 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10686 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10687 insn
= emit_move_insn (mem
, reg
);
10689 /* Include the registers in the mask. */
10690 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10692 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10695 /* If we use the link register, get it into r0. */
10696 if (info
->lr_save_p
)
10697 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10698 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10700 /* If we need to save CR, put it into r12. */
10701 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10703 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10704 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10707 /* Do any required saving of fpr's. If only one or two to save, do
10708 it ourselves. Otherwise, call function. */
10709 if (saving_FPRs_inline
)
10712 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10713 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10714 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10715 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10716 info
->first_fp_reg_save
+ i
,
10717 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
/* Out-of-line FPR save: call the _savefN millicode routine via a
   PARALLEL that also clobbers LR.  */
10720 else if (info
->first_fp_reg_save
!= 64)
10724 const char *alloc_rname
;
10726 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10728 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10729 gen_rtx_REG (Pmode
,
10730 LINK_REGISTER_REGNUM
));
10731 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10732 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10733 alloc_rname
= ggc_strdup (rname
);
10734 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10735 gen_rtx_SYMBOL_REF (Pmode
,
10737 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10739 rtx addr
, reg
, mem
;
10740 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10741 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10742 GEN_INT (info
->fp_save_offset
10743 + sp_offset
+ 8*i
));
10744 mem
= gen_rtx_MEM (DFmode
, addr
);
10745 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10747 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10749 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10750 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10751 NULL_RTX
, NULL_RTX
);
10754 /* Save GPRs. This is done as a PARALLEL if we are using
10755 the store-multiple instructions. */
10756 if (using_store_multiple
)
10760 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10761 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10763 rtx addr
, reg
, mem
;
10764 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10765 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10766 GEN_INT (info
->gp_save_offset
10769 mem
= gen_rtx_MEM (reg_mode
, addr
);
10770 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10772 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10774 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10775 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10776 NULL_RTX
, NULL_RTX
);
/* Otherwise save each live call-saved GPR (and the PIC register when
   it is live for V.4 or Darwin PIC) individually.  */
10781 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10782 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10783 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10784 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10785 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
10786 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10788 rtx addr
, reg
, mem
;
10789 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10791 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
10793 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10796 if (!SPE_CONST_OFFSET_OK (offset
))
10798 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10799 emit_move_insn (b
, GEN_INT (offset
));
10802 b
= GEN_INT (offset
);
10804 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10805 mem
= gen_rtx_MEM (V2SImode
, addr
);
10806 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10807 insn
= emit_move_insn (mem
, reg
);
10809 if (GET_CODE (b
) == CONST_INT
)
10810 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10811 NULL_RTX
, NULL_RTX
);
10813 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10814 b
, GEN_INT (offset
));
10818 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10819 GEN_INT (info
->gp_save_offset
10822 mem
= gen_rtx_MEM (reg_mode
, addr
);
10823 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10825 insn
= emit_move_insn (mem
, reg
);
10826 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10827 NULL_RTX
, NULL_RTX
);
10832 /* ??? There's no need to emit actual instructions here, but it's the
10833 easiest way to get the frame unwind information emitted. */
10834 if (current_function_calls_eh_return
)
10836 unsigned int i
, regno
;
10840 regno
= EH_RETURN_DATA_REGNO (i
);
10841 if (regno
== INVALID_REGNUM
)
10844 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10845 info
->ehrd_offset
+ sp_offset
10846 + reg_size
* (int) i
,
10851 /* Save lr if we used it. */
10852 if (info
->lr_save_p
)
10854 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10855 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10856 rtx reg
= gen_rtx_REG (Pmode
, 0);
10857 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10858 /* This should not be of rs6000_sr_alias_set, because of
10859 __builtin_return_address. */
10861 insn
= emit_move_insn (mem
, reg
);
10862 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10863 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10866 /* Save CR if we use any that must be preserved. */
10867 if (info
->cr_save_p
)
10869 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10870 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10871 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10873 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10875 /* If r12 was used to hold the original sp, copy cr into r0 now
10877 if (REGNO (frame_reg_rtx
) == 12)
10879 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10880 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10882 insn
= emit_move_insn (mem
, cr_save_rtx
);
10884 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10885 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10886 OK. All we have to do is specify that _one_ condition code
10887 register is saved in this stack slot. The thrower's epilogue
10888 will then restore all the call-saved registers.
10889 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10890 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10891 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10894 /* Update stack and set back pointer unless this is V.4,
10895 for which it was done previously. */
10896 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10897 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10899 /* Set frame pointer, if needed. */
10900 if (frame_pointer_needed
)
10902 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10904 RTX_FRAME_RELATED_P (insn
) = 1;
10907 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10908 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10909 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10910 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10912 /* If emit_load_toc_table will use the link register, we need to save
10913 it. We use R11 for this purpose because emit_load_toc_table
10914 can use register 0. This allows us to use a plain 'blr' to return
10915 from the procedure more often. */
10916 int save_LR_around_toc_setup
= (TARGET_ELF
10917 && DEFAULT_ABI
!= ABI_AIX
10919 && ! info
->lr_save_p
10920 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10921 if (save_LR_around_toc_setup
)
10922 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10923 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10925 rs6000_emit_load_toc_table (TRUE
);
10927 if (save_LR_around_toc_setup
)
10928 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10929 gen_rtx_REG (Pmode
, 11));
/* Darwin PIC: load the picbase into LR, then copy to the PIC reg.  */
10933 if (DEFAULT_ABI
== ABI_DARWIN
10934 && flag_pic
&& current_function_uses_pic_offset_table
)
10936 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10937 const char *picbase
= machopic_function_base_name ();
10938 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
10940 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
10942 rs6000_maybe_dead (
10943 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10944 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10949 /* Write function prologue. */
/* Text-mode prologue hook: emits .extern directives for the FPR
   save/restore millicode and AIX common-mode routines, and -- when the
   target has no prologue insn pattern -- expands and prints the
   prologue RTL directly via final ().  */
10952 rs6000_output_function_prologue (file
, size
)
10954 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10956 rs6000_stack_t
*info
= rs6000_stack_info ();
10958 if (TARGET_DEBUG_STACK
)
10959 debug_stack_info (info
);
10961 /* Write .extern for any function we will call to save and restore
10963 if (info
->first_fp_reg_save
< 64
10964 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10965 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10966 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10967 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10968 RESTORE_FP_SUFFIX
);
10970 /* Write .extern for AIX common mode routines, if needed. */
10971 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10973 fputs ("\t.extern __mulh\n", file
);
10974 fputs ("\t.extern __mull\n", file
);
10975 fputs ("\t.extern __divss\n", file
);
10976 fputs ("\t.extern __divus\n", file
);
10977 fputs ("\t.extern __quoss\n", file
);
10978 fputs ("\t.extern __quous\n", file
);
10979 common_mode_defined
= 1;
/* No prologue insn pattern: emit the prologue RTL here and print it.  */
10982 if (! HAVE_prologue
)
10986 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10987 the "toplevel" insn chain. */
10988 emit_note (0, NOTE_INSN_DELETED
);
10989 rs6000_emit_prologue ();
10990 emit_note (0, NOTE_INSN_DELETED
);
10992 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10996 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10998 INSN_ADDRESSES_NEW (insn
, addr
);
11003 if (TARGET_DEBUG_STACK
)
11004 debug_rtx_list (get_insns (), 100);
11005 final (get_insns (), file
, FALSE
, FALSE
);
/* Advance the label counter used for the LCF/LCL PIC labels.  */
11009 rs6000_pic_labelno
++;
11012 /* Emit function epilogue as insns.
11014 At present, dwarf2out_frame_debug_expr doesn't understand
11015 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11016 anywhere in the epilogue. Most of the insns below would in any case
11017 need special notes to explain where r11 is in relation to the stack. */
11020 rs6000_emit_epilogue (sibcall
)
11023 rs6000_stack_t
*info
;
11024 int restoring_FPRs_inline
;
11025 int using_load_multiple
;
11026 int using_mfcr_multiple
;
11027 int use_backchain_to_restore_sp
;
11029 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
11030 rtx frame_reg_rtx
= sp_reg_rtx
;
11031 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11032 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11035 info
= rs6000_stack_info ();
11037 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11039 reg_mode
= V2SImode
;
11043 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11044 && (!TARGET_SPE_ABI
11045 || info
->spe_64bit_regs_used
== 0)
11046 && info
->first_gp_reg_save
< 31);
11047 restoring_FPRs_inline
= (sibcall
11048 || current_function_calls_eh_return
11049 || info
->first_fp_reg_save
== 64
11050 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11051 use_backchain_to_restore_sp
= (frame_pointer_needed
11052 || current_function_calls_alloca
11053 || info
->total_size
> 32767);
11054 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
11055 || rs6000_cpu
== PROCESSOR_PPC603
11056 || rs6000_cpu
== PROCESSOR_PPC750
11059 /* If we have a frame pointer, a call to alloca, or a large stack
11060 frame, restore the old stack pointer using the backchain. Otherwise,
11061 we know what size to update it with. */
11062 if (use_backchain_to_restore_sp
)
11064 /* Under V.4, don't reset the stack pointer until after we're done
11065 loading the saved registers. */
11066 if (DEFAULT_ABI
== ABI_V4
)
11067 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
11069 emit_move_insn (frame_reg_rtx
,
11070 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
11073 else if (info
->push_p
)
11075 if (DEFAULT_ABI
== ABI_V4
)
11076 sp_offset
= info
->total_size
;
11079 emit_insn (TARGET_32BIT
11080 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11081 GEN_INT (info
->total_size
))
11082 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11083 GEN_INT (info
->total_size
)));
11087 /* Restore AltiVec registers if needed. */
11088 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11092 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11093 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11095 rtx addr
, areg
, mem
;
11097 areg
= gen_rtx_REG (Pmode
, 0);
11099 (areg
, GEN_INT (info
->altivec_save_offset
11101 + 16 * (i
- info
->first_altivec_reg_save
)));
11103 /* AltiVec addressing mode is [reg+reg]. */
11104 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
11105 mem
= gen_rtx_MEM (V4SImode
, addr
);
11106 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11108 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
11112 /* Restore VRSAVE if needed. */
11113 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
11115 rtx addr
, mem
, reg
;
11117 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11118 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
11119 mem
= gen_rtx_MEM (SImode
, addr
);
11120 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11121 reg
= gen_rtx_REG (SImode
, 12);
11122 emit_move_insn (reg
, mem
);
11124 emit_insn (generate_set_vrsave (reg
, info
, 1));
11127 /* Get the old lr if we saved it. */
11128 if (info
->lr_save_p
)
11130 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
11131 info
->lr_save_offset
+ sp_offset
);
11133 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11135 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
11138 /* Get the old cr if we saved it. */
11139 if (info
->cr_save_p
)
11141 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11142 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11143 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11145 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11147 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
11150 /* Set LR here to try to overlap restores below. */
11151 if (info
->lr_save_p
)
11152 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
11153 gen_rtx_REG (Pmode
, 0));
11155 /* Load exception handler data registers, if needed. */
11156 if (current_function_calls_eh_return
)
11158 unsigned int i
, regno
;
11164 regno
= EH_RETURN_DATA_REGNO (i
);
11165 if (regno
== INVALID_REGNUM
)
11168 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
11169 info
->ehrd_offset
+ sp_offset
11170 + reg_size
* (int) i
);
11171 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11173 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
11177 /* Restore GPRs. This is done as a PARALLEL if we are using
11178 the load-multiple instructions. */
11179 if (using_load_multiple
)
11182 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11183 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11185 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11186 GEN_INT (info
->gp_save_offset
11189 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11191 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11194 gen_rtx_SET (VOIDmode
,
11195 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
11198 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11201 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11202 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11203 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11204 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11205 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11206 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11208 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11209 GEN_INT (info
->gp_save_offset
11212 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11214 /* Restore 64-bit quantities for SPE. */
11215 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11217 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11220 if (!SPE_CONST_OFFSET_OK (offset
))
11222 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11223 emit_move_insn (b
, GEN_INT (offset
));
11226 b
= GEN_INT (offset
);
11228 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11229 mem
= gen_rtx_MEM (V2SImode
, addr
);
11232 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11234 emit_move_insn (gen_rtx_REG (reg_mode
,
11235 info
->first_gp_reg_save
+ i
), mem
);
11238 /* Restore fpr's if we need to do it without calling a function. */
11239 if (restoring_FPRs_inline
)
11240 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11241 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11242 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11245 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11246 GEN_INT (info
->fp_save_offset
11249 mem
= gen_rtx_MEM (DFmode
, addr
);
11250 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11252 emit_move_insn (gen_rtx_REG (DFmode
,
11253 info
->first_fp_reg_save
+ i
),
11257 /* If we saved cr, restore it here. Just those that were used. */
11258 if (info
->cr_save_p
)
11260 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
11263 if (using_mfcr_multiple
)
11265 for (i
= 0; i
< 8; i
++)
11266 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11272 if (using_mfcr_multiple
&& count
> 1)
11277 p
= rtvec_alloc (count
);
11280 for (i
= 0; i
< 8; i
++)
11281 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11283 rtvec r
= rtvec_alloc (2);
11284 RTVEC_ELT (r
, 0) = r12_rtx
;
11285 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11286 RTVEC_ELT (p
, ndx
) =
11287 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11288 gen_rtx_UNSPEC (CCmode
, r
, 20));
11291 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11296 for (i
= 0; i
< 8; i
++)
11297 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11299 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11305 /* If this is V.4, unwind the stack pointer after all of the loads
11306 have been done. We need to emit a block here so that sched
11307 doesn't decide to move the sp change before the register restores
11308 (which may not have any obvious dependency on the stack). This
11309 doesn't hurt performance, because there is no scheduling that can
11310 be done after this point. */
11311 if (DEFAULT_ABI
== ABI_V4
)
11313 if (frame_reg_rtx
!= sp_reg_rtx
)
11314 rs6000_emit_stack_tie ();
11316 if (use_backchain_to_restore_sp
)
11318 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11320 else if (sp_offset
!= 0)
11322 emit_insn (TARGET_32BIT
11323 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11324 GEN_INT (sp_offset
))
11325 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11326 GEN_INT (sp_offset
)));
11330 if (current_function_calls_eh_return
)
11332 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11333 emit_insn (TARGET_32BIT
11334 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11335 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
11341 if (! restoring_FPRs_inline
)
11342 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11344 p
= rtvec_alloc (2);
11346 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11347 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11348 gen_rtx_REG (Pmode
,
11349 LINK_REGISTER_REGNUM
));
11351 /* If we have to restore more than two FP registers, branch to the
11352 restore function. It will return to our caller. */
11353 if (! restoring_FPRs_inline
)
11357 const char *alloc_rname
;
11359 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11360 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11361 alloc_rname
= ggc_strdup (rname
);
11362 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11363 gen_rtx_SYMBOL_REF (Pmode
,
11366 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11369 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11370 GEN_INT (info
->fp_save_offset
+ 8*i
));
11371 mem
= gen_rtx_MEM (DFmode
, addr
);
11372 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11374 RTVEC_ELT (p
, i
+3) =
11375 gen_rtx_SET (VOIDmode
,
11376 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11381 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11385 /* Write function epilogue. */
11388 rs6000_output_function_epilogue (file
, size
)
11390 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11392 rs6000_stack_t
*info
= rs6000_stack_info ();
11394 if (! HAVE_epilogue
)
11396 rtx insn
= get_last_insn ();
11397 /* If the last insn was a BARRIER, we don't have to write anything except
11398 the trace table. */
11399 if (GET_CODE (insn
) == NOTE
)
11400 insn
= prev_nonnote_insn (insn
);
11401 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11403 /* This is slightly ugly, but at least we don't have two
11404 copies of the epilogue-emitting code. */
11407 /* A NOTE_INSN_DELETED is supposed to be at the start
11408 and end of the "toplevel" insn chain. */
11409 emit_note (0, NOTE_INSN_DELETED
);
11410 rs6000_emit_epilogue (FALSE
);
11411 emit_note (0, NOTE_INSN_DELETED
);
11413 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11417 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11419 INSN_ADDRESSES_NEW (insn
, addr
);
11424 if (TARGET_DEBUG_STACK
)
11425 debug_rtx_list (get_insns (), 100);
11426 final (get_insns (), file
, FALSE
, FALSE
);
11431 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11434 We don't output a traceback table if -finhibit-size-directive was
11435 used. The documentation for -finhibit-size-directive reads
11436 ``don't output a @code{.size} assembler directive, or anything
11437 else that would cause trouble if the function is split in the
11438 middle, and the two halves are placed at locations far apart in
11439 memory.'' The traceback table has this property, since it
11440 includes the offset from the start of the function to the
11441 traceback table itself.
11443 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11444 different traceback table. */
11445 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11446 && rs6000_traceback
!= traceback_none
)
11448 const char *fname
= NULL
;
11449 const char *language_string
= lang_hooks
.name
;
11450 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11452 int optional_tbtab
;
11454 if (rs6000_traceback
== traceback_full
)
11455 optional_tbtab
= 1;
11456 else if (rs6000_traceback
== traceback_part
)
11457 optional_tbtab
= 0;
11459 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11461 if (optional_tbtab
)
11463 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11464 while (*fname
== '.') /* V.4 encodes . in the name */
11467 /* Need label immediately before tbtab, so we can compute
11468 its offset from the function start. */
11469 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11470 ASM_OUTPUT_LABEL (file
, fname
);
11473 /* The .tbtab pseudo-op can only be used for the first eight
11474 expressions, since it can't handle the possibly variable
11475 length fields that follow. However, if you omit the optional
11476 fields, the assembler outputs zeros for all optional fields
11477 anyways, giving each variable length field is minimum length
11478 (as defined in sys/debug.h). Thus we can not use the .tbtab
11479 pseudo-op at all. */
11481 /* An all-zero word flags the start of the tbtab, for debuggers
11482 that have to find it by searching forward from the entry
11483 point or from the current pc. */
11484 fputs ("\t.long 0\n", file
);
11486 /* Tbtab format type. Use format type 0. */
11487 fputs ("\t.byte 0,", file
);
11489 /* Language type. Unfortunately, there doesn't seem to be any
11490 official way to get this info, so we use language_string. C
11491 is 0. C++ is 9. No number defined for Obj-C, so use the
11492 value for C for now. There is no official value for Java,
11493 although IBM appears to be using 13. There is no official value
11494 for Chill, so we've chosen 44 pseudo-randomly. */
11495 if (! strcmp (language_string
, "GNU C")
11496 || ! strcmp (language_string
, "GNU Objective-C"))
11498 else if (! strcmp (language_string
, "GNU F77"))
11500 else if (! strcmp (language_string
, "GNU Ada"))
11502 else if (! strcmp (language_string
, "GNU Pascal"))
11504 else if (! strcmp (language_string
, "GNU C++"))
11506 else if (! strcmp (language_string
, "GNU Java"))
11508 else if (! strcmp (language_string
, "GNU CHILL"))
11512 fprintf (file
, "%d,", i
);
11514 /* 8 single bit fields: global linkage (not set for C extern linkage,
11515 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11516 from start of procedure stored in tbtab, internal function, function
11517 has controlled storage, function has no toc, function uses fp,
11518 function logs/aborts fp operations. */
11519 /* Assume that fp operations are used if any fp reg must be saved. */
11520 fprintf (file
, "%d,",
11521 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11523 /* 6 bitfields: function is interrupt handler, name present in
11524 proc table, function calls alloca, on condition directives
11525 (controls stack walks, 3 bits), saves condition reg, saves
11527 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11528 set up as a frame pointer, even when there is no alloca call. */
11529 fprintf (file
, "%d,",
11530 ((optional_tbtab
<< 6)
11531 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11532 | (info
->cr_save_p
<< 1)
11533 | (info
->lr_save_p
)));
11535 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11537 fprintf (file
, "%d,",
11538 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11540 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11541 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11543 if (optional_tbtab
)
11545 /* Compute the parameter info from the function decl argument
11548 int next_parm_info_bit
= 31;
11550 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11551 decl
; decl
= TREE_CHAIN (decl
))
11553 rtx parameter
= DECL_INCOMING_RTL (decl
);
11554 enum machine_mode mode
= GET_MODE (parameter
);
11556 if (GET_CODE (parameter
) == REG
)
11558 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11564 if (mode
== SFmode
)
11566 else if (mode
== DFmode
|| mode
== TFmode
)
11571 /* If only one bit will fit, don't or in this entry. */
11572 if (next_parm_info_bit
> 0)
11573 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11574 next_parm_info_bit
-= 2;
11578 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11579 + (UNITS_PER_WORD
- 1))
11581 next_parm_info_bit
-= 1;
11587 /* Number of fixed point parameters. */
11588 /* This is actually the number of words of fixed point parameters; thus
11589 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11590 fprintf (file
, "%d,", fixed_parms
);
11592 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11594 /* This is actually the number of fp registers that hold parameters;
11595 and thus the maximum value is 13. */
11596 /* Set parameters on stack bit if parameters are not in their original
11597 registers, regardless of whether they are on the stack? Xlc
11598 seems to set the bit when not optimizing. */
11599 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11601 if (! optional_tbtab
)
11604 /* Optional fields follow. Some are variable length. */
11606 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11607 11 double float. */
11608 /* There is an entry for each parameter in a register, in the order that
11609 they occur in the parameter list. Any intervening arguments on the
11610 stack are ignored. If the list overflows a long (max possible length
11611 34 bits) then completely leave off all elements that don't fit. */
11612 /* Only emit this long if there was at least one parameter. */
11613 if (fixed_parms
|| float_parms
)
11614 fprintf (file
, "\t.long %d\n", parm_info
);
11616 /* Offset from start of code to tb table. */
11617 fputs ("\t.long ", file
);
11618 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11620 RS6000_OUTPUT_BASENAME (file
, fname
);
11622 assemble_name (file
, fname
);
11624 fputs ("-.", file
);
11626 RS6000_OUTPUT_BASENAME (file
, fname
);
11628 assemble_name (file
, fname
);
11632 /* Interrupt handler mask. */
11633 /* Omit this long, since we never set the interrupt handler bit
11636 /* Number of CTL (controlled storage) anchors. */
11637 /* Omit this long, since the has_ctl bit is never set above. */
11639 /* Displacement into stack of each CTL anchor. */
11640 /* Omit this list of longs, because there are no CTL anchors. */
11642 /* Length of function name. */
11645 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11647 /* Function name. */
11648 assemble_string (fname
, strlen (fname
));
11650 /* Register for alloca automatic storage; this is always reg 31.
11651 Only emit this if the alloca bit was set above. */
11652 if (frame_pointer_needed
)
11653 fputs ("\t.byte 31\n", file
);
11655 fputs ("\t.align 2\n", file
);
11659 /* A C compound statement that outputs the assembler code for a thunk
11660 function, used to implement C++ virtual function calls with
11661 multiple inheritance. The thunk acts as a wrapper around a virtual
11662 function, adjusting the implicit object parameter before handing
11663 control off to the real function.
11665 First, emit code to add the integer DELTA to the location that
11666 contains the incoming first argument. Assume that this argument
11667 contains a pointer, and is the one used to pass the `this' pointer
11668 in C++. This is the incoming argument *before* the function
11669 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11670 values of all other incoming arguments.
11672 After the addition, emit code to jump to FUNCTION, which is a
11673 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11674 not touch the return address. Hence returning from FUNCTION will
11675 return to whoever called the current `thunk'.
11677 The effect must be as if FUNCTION had been called directly with the
11678 adjusted first argument. This macro is responsible for emitting
11679 all of the code for a thunk function; output_function_prologue()
11680 and output_function_epilogue() are not invoked.
11682 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11683 been extracted from it.) It might possibly be useful on some
11684 targets, but probably not.
11686 If you do not define this macro, the target-independent code in the
11687 C++ frontend will generate a less efficient heavyweight thunk that
11688 calls FUNCTION instead of jumping to it. The generic approach does
11689 not support varargs. */
11692 rs6000_output_mi_thunk (file
, thunk_fndecl
, delta
, vcall_offset
, function
)
11694 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11695 HOST_WIDE_INT delta
;
11696 HOST_WIDE_INT vcall_offset
;
11699 rtx
this, insn
, funexp
;
11701 reload_completed
= 1;
11702 no_new_pseudos
= 1;
11704 /* Mark the end of the (empty) prologue. */
11705 emit_note (NULL
, NOTE_INSN_PROLOGUE_END
);
11707 /* Find the "this" pointer. If the function returns a structure,
11708 the structure return pointer is in r3. */
11709 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))))
11710 this = gen_rtx_REG (Pmode
, 4);
11712 this = gen_rtx_REG (Pmode
, 3);
11714 /* Apply the constant offset, if required. */
11717 rtx delta_rtx
= GEN_INT (delta
);
11718 emit_insn (TARGET_32BIT
11719 ? gen_addsi3 (this, this, delta_rtx
)
11720 : gen_adddi3 (this, this, delta_rtx
));
11723 /* Apply the offset from the vtable, if required. */
11726 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
11727 rtx tmp
= gen_rtx_REG (Pmode
, 12);
11729 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
11730 emit_insn (TARGET_32BIT
11731 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
11732 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
11733 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
11734 emit_insn (TARGET_32BIT
11735 ? gen_addsi3 (this, this, tmp
)
11736 : gen_adddi3 (this, this, tmp
));
11739 /* Generate a tail call to the target function. */
11740 if (!TREE_USED (function
))
11742 assemble_external (function
);
11743 TREE_USED (function
) = 1;
11745 funexp
= XEXP (DECL_RTL (function
), 0);
11747 SYMBOL_REF_FLAG (funexp
) = 0;
11748 if (current_file_function_operand (funexp
, VOIDmode
)
11749 && (! lookup_attribute ("longcall",
11750 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11751 || lookup_attribute ("shortcall",
11752 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11753 SYMBOL_REF_FLAG (funexp
) = 1;
11755 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
11758 if (MACHOPIC_INDIRECT
)
11759 funexp
= machopic_indirect_call_target (funexp
);
11762 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11763 generate sibcall RTL explicitly to avoid constraint abort. */
11764 insn
= emit_call_insn (
11765 gen_rtx_PARALLEL (VOIDmode
,
11767 gen_rtx_CALL (VOIDmode
,
11768 funexp
, const0_rtx
),
11769 gen_rtx_USE (VOIDmode
, const0_rtx
),
11770 gen_rtx_USE (VOIDmode
,
11771 gen_rtx_REG (SImode
,
11772 LINK_REGISTER_REGNUM
)),
11773 gen_rtx_RETURN (VOIDmode
))));
11774 SIBLING_CALL_P (insn
) = 1;
11777 /* Run just enough of rest_of_compilation to get the insns emitted.
11778 There's not really enough bulk here to make other passes such as
11779 instruction scheduling worth while. Note that use_thunk calls
11780 assemble_start_function and assemble_end_function. */
11781 insn
= get_insns ();
11782 shorten_branches (insn
);
11783 final_start_function (insn
, file
, 1);
11784 final (insn
, file
, 1, 0);
11785 final_end_function ();
11787 reload_completed
= 0;
11788 no_new_pseudos
= 0;
11791 /* A quick summary of the various types of 'constant-pool tables'
11794 Target Flags Name One table per
11795 AIX (none) AIX TOC object file
11796 AIX -mfull-toc AIX TOC object file
11797 AIX -mminimal-toc AIX minimal TOC translation unit
11798 SVR4/EABI (none) SVR4 SDATA object file
11799 SVR4/EABI -fpic SVR4 pic object file
11800 SVR4/EABI -fPIC SVR4 PIC translation unit
11801 SVR4/EABI -mrelocatable EABI TOC function
11802 SVR4/EABI -maix AIX TOC object file
11803 SVR4/EABI -maix -mminimal-toc
11804 AIX minimal TOC translation unit
11806 Name Reg. Set by entries contains:
11807 made by addrs? fp? sum?
11809 AIX TOC 2 crt0 as Y option option
11810 AIX minimal TOC 30 prolog gcc Y Y option
11811 SVR4 SDATA 13 crt0 gcc N Y N
11812 SVR4 pic 30 prolog ld Y not yet N
11813 SVR4 PIC 30 prolog gcc Y option option
11814 EABI TOC 30 prolog gcc Y option option
11818 /* Hash functions for the hash table. */
11821 rs6000_hash_constant (k
)
11824 enum rtx_code code
= GET_CODE (k
);
11825 enum machine_mode mode
= GET_MODE (k
);
11826 unsigned result
= (code
<< 3) ^ mode
;
11827 const char *format
;
11830 format
= GET_RTX_FORMAT (code
);
11831 flen
= strlen (format
);
11837 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11840 if (mode
!= VOIDmode
)
11841 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
11853 for (; fidx
< flen
; fidx
++)
11854 switch (format
[fidx
])
11859 const char *str
= XSTR (k
, fidx
);
11860 len
= strlen (str
);
11861 result
= result
* 613 + len
;
11862 for (i
= 0; i
< len
; i
++)
11863 result
= result
* 613 + (unsigned) str
[i
];
11868 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11872 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11875 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11876 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11880 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11881 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11893 toc_hash_function (hash_entry
)
11894 const void * hash_entry
;
11896 const struct toc_hash_struct
*thc
=
11897 (const struct toc_hash_struct
*) hash_entry
;
11898 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11901 /* Compare H1 and H2 for equivalence. */
11904 toc_hash_eq (h1
, h2
)
11908 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11909 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11911 if (((const struct toc_hash_struct
*) h1
)->key_mode
11912 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11915 return rtx_equal_p (r1
, r2
);
11918 /* These are the names given by the C++ front-end to vtables, and
11919 vtable-like objects. Ideally, this logic should not be here;
11920 instead, there should be some programmatic way of inquiring as
11921 to whether or not an object is a vtable. */
11923 #define VTABLE_NAME_P(NAME) \
11924 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11925 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11926 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11927 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11930 rs6000_output_symbol_ref (file
, x
)
11934 /* Currently C++ toc references to vtables can be emitted before it
11935 is decided whether the vtable is public or private. If this is
11936 the case, then the linker will eventually complain that there is
11937 a reference to an unknown section. Thus, for vtables only,
11938 we emit the TOC reference to reference the symbol and not the
11940 const char *name
= XSTR (x
, 0);
11942 if (VTABLE_NAME_P (name
))
11944 RS6000_OUTPUT_BASENAME (file
, name
);
11947 assemble_name (file
, name
);
11950 /* Output a TOC entry. We derive the entry name from what is being
11954 output_toc (file
, x
, labelno
, mode
)
11958 enum machine_mode mode
;
11961 const char *name
= buf
;
11962 const char *real_name
;
11969 /* When the linker won't eliminate them, don't output duplicate
11970 TOC entries (this happens on AIX if there is any kind of TOC,
11971 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11973 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
11975 struct toc_hash_struct
*h
;
11978 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11979 time because GGC is not initialised at that point. */
11980 if (toc_hash_table
== NULL
)
11981 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
11982 toc_hash_eq
, NULL
);
11984 h
= ggc_alloc (sizeof (*h
));
11986 h
->key_mode
= mode
;
11987 h
->labelno
= labelno
;
11989 found
= htab_find_slot (toc_hash_table
, h
, 1);
11990 if (*found
== NULL
)
11992 else /* This is indeed a duplicate.
11993 Set this label equal to that label. */
11995 fputs ("\t.set ", file
);
11996 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11997 fprintf (file
, "%d,", labelno
);
11998 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11999 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
12005 /* If we're going to put a double constant in the TOC, make sure it's
12006 aligned properly when strict alignment is on. */
12007 if (GET_CODE (x
) == CONST_DOUBLE
12008 && STRICT_ALIGNMENT
12009 && GET_MODE_BITSIZE (mode
) >= 64
12010 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
12011 ASM_OUTPUT_ALIGN (file
, 3);
12014 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
12016 /* Handle FP constants specially. Note that if we have a minimal
12017 TOC, things we put here aren't actually in the TOC, so we can allow
12019 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
12021 REAL_VALUE_TYPE rv
;
12024 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12025 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
12029 if (TARGET_MINIMAL_TOC
)
12030 fputs (DOUBLE_INT_ASM_OP
, file
);
12032 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12033 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12034 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12035 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
12036 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12037 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12042 if (TARGET_MINIMAL_TOC
)
12043 fputs ("\t.long ", file
);
12045 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12046 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12047 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12048 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12049 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
12050 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
12054 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
12056 REAL_VALUE_TYPE rv
;
12059 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12060 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
12064 if (TARGET_MINIMAL_TOC
)
12065 fputs (DOUBLE_INT_ASM_OP
, file
);
12067 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12068 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12069 fprintf (file
, "0x%lx%08lx\n",
12070 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12075 if (TARGET_MINIMAL_TOC
)
12076 fputs ("\t.long ", file
);
12078 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
12079 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12080 fprintf (file
, "0x%lx,0x%lx\n",
12081 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
12085 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
12087 REAL_VALUE_TYPE rv
;
12090 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
12091 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
12095 if (TARGET_MINIMAL_TOC
)
12096 fputs (DOUBLE_INT_ASM_OP
, file
);
12098 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12099 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
12104 if (TARGET_MINIMAL_TOC
)
12105 fputs ("\t.long ", file
);
12107 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
12108 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
12112 else if (GET_MODE (x
) == VOIDmode
12113 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
12115 unsigned HOST_WIDE_INT low
;
12116 HOST_WIDE_INT high
;
12118 if (GET_CODE (x
) == CONST_DOUBLE
)
12120 low
= CONST_DOUBLE_LOW (x
);
12121 high
= CONST_DOUBLE_HIGH (x
);
12124 #if HOST_BITS_PER_WIDE_INT == 32
12127 high
= (low
& 0x80000000) ? ~0 : 0;
12131 low
= INTVAL (x
) & 0xffffffff;
12132 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
12136 /* TOC entries are always Pmode-sized, but since this
12137 is a bigendian machine then if we're putting smaller
12138 integer constants in the TOC we have to pad them.
12139 (This is still a win over putting the constants in
12140 a separate constant pool, because then we'd have
12141 to have both a TOC entry _and_ the actual constant.)
12143 For a 32-bit target, CONST_INT values are loaded and shifted
12144 entirely within `low' and can be stored in one TOC entry. */
12146 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12147 abort ();/* It would be easy to make this work, but it doesn't now. */
12149 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
12151 #if HOST_BITS_PER_WIDE_INT == 32
12152 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
12153 POINTER_SIZE
, &low
, &high
, 0);
12156 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
12157 high
= (HOST_WIDE_INT
) low
>> 32;
12164 if (TARGET_MINIMAL_TOC
)
12165 fputs (DOUBLE_INT_ASM_OP
, file
);
12167 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12168 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12169 fprintf (file
, "0x%lx%08lx\n",
12170 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12175 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
12177 if (TARGET_MINIMAL_TOC
)
12178 fputs ("\t.long ", file
);
12180 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
12181 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12182 fprintf (file
, "0x%lx,0x%lx\n",
12183 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
12187 if (TARGET_MINIMAL_TOC
)
12188 fputs ("\t.long ", file
);
12190 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
12191 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
12197 if (GET_CODE (x
) == CONST
)
12199 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
12202 base
= XEXP (XEXP (x
, 0), 0);
12203 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
12206 if (GET_CODE (base
) == SYMBOL_REF
)
12207 name
= XSTR (base
, 0);
12208 else if (GET_CODE (base
) == LABEL_REF
)
12209 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
12210 else if (GET_CODE (base
) == CODE_LABEL
)
12211 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
12215 real_name
= (*targetm
.strip_name_encoding
) (name
);
12216 if (TARGET_MINIMAL_TOC
)
12217 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
12220 fprintf (file
, "\t.tc %s", real_name
);
12223 fprintf (file
, ".N%d", - offset
);
12225 fprintf (file
, ".P%d", offset
);
12227 fputs ("[TC],", file
);
12230 /* Currently C++ toc references to vtables can be emitted before it
12231 is decided whether the vtable is public or private. If this is
12232 the case, then the linker will eventually complain that there is
12233 a TOC reference to an unknown section. Thus, for vtables only,
12234 we emit the TOC reference to reference the symbol and not the
12236 if (VTABLE_NAME_P (name
))
12238 RS6000_OUTPUT_BASENAME (file
, name
);
12240 fprintf (file
, "%d", offset
);
12241 else if (offset
> 0)
12242 fprintf (file
, "+%d", offset
);
12245 output_addr_const (file
, x
);
12249 /* Output an assembler pseudo-op to write an ASCII string of N characters
12250 starting at P to FILE.
12252 On the RS/6000, we have to do this using the .byte operation and
12253 write out special characters outside the quoted string.
12254 Also, the assembler is broken; very long strings are truncated,
12255 so we must artificially break them up early. */
12258 output_ascii (file
, p
, n
)
12264 int i
, count_string
;
12265 const char *for_string
= "\t.byte \"";
12266 const char *for_decimal
= "\t.byte ";
12267 const char *to_close
= NULL
;
12270 for (i
= 0; i
< n
; i
++)
12273 if (c
>= ' ' && c
< 0177)
12276 fputs (for_string
, file
);
12279 /* Write two quotes to get one. */
12287 for_decimal
= "\"\n\t.byte ";
12291 if (count_string
>= 512)
12293 fputs (to_close
, file
);
12295 for_string
= "\t.byte \"";
12296 for_decimal
= "\t.byte ";
12304 fputs (for_decimal
, file
);
12305 fprintf (file
, "%d", c
);
12307 for_string
= "\n\t.byte \"";
12308 for_decimal
= ", ";
12314 /* Now close the string if we have written one. Then end the line. */
12316 fputs (to_close
, file
);
12319 /* Generate a unique section name for FILENAME for a section type
12320 represented by SECTION_DESC. Output goes into BUF.
12322 SECTION_DESC can be any string, as long as it is different for each
12323 possible section type.
12325 We name the section in the same manner as xlc. The name begins with an
12326 underscore followed by the filename (after stripping any leading directory
12327 names) with the last period replaced by the string SECTION_DESC. If
12328 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12332 rs6000_gen_section_name (buf
, filename
, section_desc
)
12334 const char *filename
;
12335 const char *section_desc
;
12337 const char *q
, *after_last_slash
, *last_period
= 0;
12341 after_last_slash
= filename
;
12342 for (q
= filename
; *q
; q
++)
12345 after_last_slash
= q
+ 1;
12346 else if (*q
== '.')
12350 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
12351 *buf
= (char *) xmalloc (len
);
12356 for (q
= after_last_slash
; *q
; q
++)
12358 if (q
== last_period
)
12360 strcpy (p
, section_desc
);
12361 p
+= strlen (section_desc
);
12365 else if (ISALNUM (*q
))
12369 if (last_period
== 0)
12370 strcpy (p
, section_desc
);
12375 /* Emit profile function. */
12378 output_profile_hook (labelno
)
12379 int labelno ATTRIBUTE_UNUSED
;
12381 if (TARGET_PROFILE_KERNEL
)
12384 if (DEFAULT_ABI
== ABI_AIX
)
12386 #ifdef NO_PROFILE_COUNTERS
12387 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
12390 const char *label_name
;
12393 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12394 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
12395 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
12397 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12401 else if (DEFAULT_ABI
== ABI_DARWIN
)
12403 const char *mcount_name
= RS6000_MCOUNT
;
12404 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12406 /* Be conservative and always set this, at least for now. */
12407 current_function_uses_pic_offset_table
= 1;
12410 /* For PIC code, set up a stub and collect the caller's address
12411 from r0, which is where the prologue puts it. */
12412 if (MACHOPIC_INDIRECT
)
12414 mcount_name
= machopic_stub_name (mcount_name
);
12415 if (current_function_uses_pic_offset_table
)
12416 caller_addr_regno
= 0;
12419 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12421 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12425 /* Write function profiler code. */
12428 output_function_profiler (file
, labelno
)
12435 switch (DEFAULT_ABI
)
12442 /* Fall through. */
12444 case ABI_AIX_NODESC
:
12447 warning ("no profiling of 64-bit code for this ABI");
12450 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12451 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12454 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12455 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12456 reg_names
[0], save_lr
, reg_names
[1]);
12457 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12458 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12459 assemble_name (file
, buf
);
12460 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12462 else if (flag_pic
> 1)
12464 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12465 reg_names
[0], save_lr
, reg_names
[1]);
12466 /* Now, we need to get the address of the label. */
12467 fputs ("\tbl 1f\n\t.long ", file
);
12468 assemble_name (file
, buf
);
12469 fputs ("-.\n1:", file
);
12470 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12471 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12472 reg_names
[0], reg_names
[11]);
12473 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12474 reg_names
[0], reg_names
[0], reg_names
[11]);
12478 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12479 assemble_name (file
, buf
);
12480 fputs ("@ha\n", file
);
12481 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12482 reg_names
[0], save_lr
, reg_names
[1]);
12483 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12484 assemble_name (file
, buf
);
12485 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12488 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12490 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12491 reg_names
[STATIC_CHAIN_REGNUM
],
12493 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12494 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12495 reg_names
[STATIC_CHAIN_REGNUM
],
12499 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12500 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12505 if (!TARGET_PROFILE_KERNEL
)
12507 /* Don't do anything, done in output_profile_hook (). */
12514 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12515 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
12517 if (current_function_needs_context
)
12519 asm_fprintf (file
, "\tstd %s,24(%s)\n",
12520 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12521 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12522 asm_fprintf (file
, "\tld %s,24(%s)\n",
12523 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
12526 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12534 rs6000_use_dfa_pipeline_interface ()
12539 /* Power4 load update and store update instructions are cracked into a
12540 load or store and an integer insn which are executed in the same cycle.
12541 Branches have their own dispatch slot which does not count against the
12542 GCC issue rate, but it changes the program flow so there are no other
12543 instructions to issue in this cycle. */
12546 rs6000_variable_issue (stream
, verbose
, insn
, more
)
12547 FILE *stream ATTRIBUTE_UNUSED
;
12548 int verbose ATTRIBUTE_UNUSED
;
12552 if (GET_CODE (PATTERN (insn
)) == USE
12553 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
12556 if (rs6000_cpu
== PROCESSOR_POWER4
)
12558 enum attr_type type
= get_attr_type (insn
);
12559 if (type
== TYPE_LOAD_EXT_U
|| type
== TYPE_LOAD_EXT_UX
12560 || type
== TYPE_LOAD_UX
|| type
== TYPE_STORE_UX
)
12562 else if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
12563 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
12564 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
12565 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
12566 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
12567 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
12568 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
)
12569 return more
> 2 ? more
- 2 : 0;
12575 /* Adjust the cost of a scheduling dependency. Return the new cost of
12576 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12579 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12582 rtx dep_insn ATTRIBUTE_UNUSED
;
12585 if (! recog_memoized (insn
))
12588 if (REG_NOTE_KIND (link
) != 0)
12591 if (REG_NOTE_KIND (link
) == 0)
12593 /* Data dependency; DEP_INSN writes a register that INSN reads
12594 some cycles later. */
12595 switch (get_attr_type (insn
))
12598 /* Tell the first scheduling pass about the latency between
12599 a mtctr and bctr (and mtlr and br/blr). The first
12600 scheduling pass will not know about this latency since
12601 the mtctr instruction, which has the latency associated
12602 to it, will be generated by reload. */
12603 return TARGET_POWER
? 5 : 4;
12605 /* Leave some extra cycles between a compare and its
12606 dependent branch, to inhibit expensive mispredicts. */
12607 if ((rs6000_cpu_attr
== CPU_PPC603
12608 || rs6000_cpu_attr
== CPU_PPC604
12609 || rs6000_cpu_attr
== CPU_PPC604E
12610 || rs6000_cpu_attr
== CPU_PPC620
12611 || rs6000_cpu_attr
== CPU_PPC630
12612 || rs6000_cpu_attr
== CPU_PPC750
12613 || rs6000_cpu_attr
== CPU_PPC7400
12614 || rs6000_cpu_attr
== CPU_PPC7450
12615 || rs6000_cpu_attr
== CPU_POWER4
)
12616 && recog_memoized (dep_insn
)
12617 && (INSN_CODE (dep_insn
) >= 0)
12618 && (get_attr_type (dep_insn
) == TYPE_CMP
12619 || get_attr_type (dep_insn
) == TYPE_COMPARE
12620 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12621 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
12622 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
12623 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12624 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
12625 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
12630 /* Fall out to return default cost. */
12636 /* A C statement (sans semicolon) to update the integer scheduling
12637 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12638 INSN earlier, increase the priority to execute INSN later. Do not
12639 define this macro if you do not need to adjust the scheduling
12640 priorities of insns. */
12643 rs6000_adjust_priority (insn
, priority
)
12644 rtx insn ATTRIBUTE_UNUSED
;
12647 /* On machines (like the 750) which have asymmetric integer units,
12648 where one integer unit can do multiply and divides and the other
12649 can't, reduce the priority of multiply/divide so it is scheduled
12650 before other integer operations. */
12653 if (! INSN_P (insn
))
12656 if (GET_CODE (PATTERN (insn
)) == USE
)
12659 switch (rs6000_cpu_attr
) {
12661 switch (get_attr_type (insn
))
12668 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12669 priority
, priority
);
12670 if (priority
>= 0 && priority
< 0x01000000)
12680 /* Return how many instructions the machine can issue per cycle. */
12683 rs6000_issue_rate ()
12685 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12686 if (!reload_completed
)
12689 switch (rs6000_cpu_attr
) {
12690 case CPU_RIOS1
: /* ? */
12692 case CPU_PPC601
: /* ? */
12713 /* Return how many instructions to look ahead for better insn
12717 rs6000_use_sched_lookahead ()
12719 if (rs6000_cpu_attr
== CPU_PPC8540
)
12725 /* Length in units of the trampoline for entering a nested function. */
12728 rs6000_trampoline_size ()
12732 switch (DEFAULT_ABI
)
12738 ret
= (TARGET_32BIT
) ? 12 : 24;
12743 case ABI_AIX_NODESC
:
12744 ret
= (TARGET_32BIT
) ? 40 : 48;
12751 /* Emit RTL insns to initialize the variable parts of a trampoline.
12752 FNADDR is an RTX for the address of the function's pure code.
12753 CXT is an RTX for the static chain value for the function. */
12756 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12761 enum machine_mode pmode
= Pmode
;
12762 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12763 rtx ctx_reg
= force_reg (pmode
, cxt
);
12765 switch (DEFAULT_ABI
)
12770 /* Macros to shorten the code expansions below. */
12771 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12772 #define MEM_PLUS(addr,offset) \
12773 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12775 /* Under AIX, just build the 3 word function descriptor */
12778 rtx fn_reg
= gen_reg_rtx (pmode
);
12779 rtx toc_reg
= gen_reg_rtx (pmode
);
12780 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12781 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12782 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12783 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12784 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12788 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12791 case ABI_AIX_NODESC
:
12792 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12793 FALSE
, VOIDmode
, 4,
12795 GEN_INT (rs6000_trampoline_size ()), SImode
,
12805 /* Table of valid machine attributes. */
12807 const struct attribute_spec rs6000_attribute_table
[] =
12809 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12810 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12811 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12812 { NULL
, 0, 0, false, false, false, NULL
}
12815 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12816 struct attribute_spec.handler. */
12819 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12822 tree args ATTRIBUTE_UNUSED
;
12823 int flags ATTRIBUTE_UNUSED
;
12824 bool *no_add_attrs
;
12826 if (TREE_CODE (*node
) != FUNCTION_TYPE
12827 && TREE_CODE (*node
) != FIELD_DECL
12828 && TREE_CODE (*node
) != TYPE_DECL
)
12830 warning ("`%s' attribute only applies to functions",
12831 IDENTIFIER_POINTER (name
));
12832 *no_add_attrs
= true;
12838 /* Set longcall attributes on all functions declared when
12839 rs6000_default_long_calls is true. */
12841 rs6000_set_default_type_attributes (type
)
12844 if (rs6000_default_long_calls
12845 && (TREE_CODE (type
) == FUNCTION_TYPE
12846 || TREE_CODE (type
) == METHOD_TYPE
))
12847 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12849 TYPE_ATTRIBUTES (type
));
12852 /* Return a reference suitable for calling a function with the
12853 longcall attribute. */
12856 rs6000_longcall_ref (call_ref
)
12859 const char *call_name
;
12862 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12865 /* System V adds '.' to the internal name, so skip them. */
12866 call_name
= XSTR (call_ref
, 0);
12867 if (*call_name
== '.')
12869 while (*call_name
== '.')
12872 node
= get_identifier (call_name
);
12873 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12876 return force_reg (Pmode
, call_ref
);
12880 #ifdef USING_ELFOS_H
12882 /* A C statement or statements to switch to the appropriate section
12883 for output of RTX in mode MODE. You can assume that RTX is some
12884 kind of constant in RTL. The argument MODE is redundant except in
12885 the case of a `const_int' rtx. Select the section by calling
12886 `text_section' or one of the alternatives for other sections.
12888 Do not define this macro if you put all constants in the read-only
12892 rs6000_elf_select_rtx_section (mode
, x
, align
)
12893 enum machine_mode mode
;
12895 unsigned HOST_WIDE_INT align
;
12897 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12900 default_elf_select_rtx_section (mode
, x
, align
);
12903 /* A C statement or statements to switch to the appropriate
12904 section for output of DECL. DECL is either a `VAR_DECL' node
12905 or a constant of some sort. RELOC indicates whether forming
12906 the initial value of DECL requires link-time relocations. */
12909 rs6000_elf_select_section (decl
, reloc
, align
)
12912 unsigned HOST_WIDE_INT align
;
12914 /* Pretend that we're always building for a shared library when
12915 ABI_AIX, because otherwise we end up with dynamic relocations
12916 in read-only sections. This happens for function pointers,
12917 references to vtables in typeinfo, and probably other cases. */
12918 default_elf_select_section_1 (decl
, reloc
, align
,
12919 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12922 /* A C statement to build up a unique section name, expressed as a
12923 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12924 RELOC indicates whether the initial value of EXP requires
12925 link-time relocations. If you do not define this macro, GCC will use
12926 the symbol name prefixed by `.' as the section name. Note - this
12927 macro can now be called for uninitialized data items as well as
12928 initialized data and functions. */
12931 rs6000_elf_unique_section (decl
, reloc
)
12935 /* As above, pretend that we're always building for a shared library
12936 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12937 default_unique_section_1 (decl
, reloc
,
12938 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
12941 /* If we are referencing a function that is static or is known to be
12942 in this file, make the SYMBOL_REF special. We can use this to indicate
12943 that we can branch to this function without emitting a no-op after the
12944 call. For real AIX calling sequences, we also replace the
12945 function name with the real name (1 or 2 leading .'s), rather than
12946 the function descriptor name. This saves a lot of overriding code
12947 to read the prefixes. */
12950 rs6000_elf_encode_section_info (decl
, first
)
12957 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12959 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12960 if ((*targetm
.binds_local_p
) (decl
))
12961 SYMBOL_REF_FLAG (sym_ref
) = 1;
12963 if (!TARGET_AIX
&& DEFAULT_ABI
== ABI_AIX
)
12965 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12966 size_t len2
= strlen (XSTR (sym_ref
, 0));
12967 char *str
= alloca (len1
+ len2
+ 1);
12970 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12972 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12975 else if (rs6000_sdata
!= SDATA_NONE
12976 && DEFAULT_ABI
== ABI_V4
12977 && TREE_CODE (decl
) == VAR_DECL
)
12979 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12980 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12981 tree section_name
= DECL_SECTION_NAME (decl
);
12982 const char *name
= (char *)0;
12985 if ((*targetm
.binds_local_p
) (decl
))
12986 SYMBOL_REF_FLAG (sym_ref
) = 1;
12990 if (TREE_CODE (section_name
) == STRING_CST
)
12992 name
= TREE_STRING_POINTER (section_name
);
12993 len
= TREE_STRING_LENGTH (section_name
);
13000 ? ((len
== sizeof (".sdata") - 1
13001 && strcmp (name
, ".sdata") == 0)
13002 || (len
== sizeof (".sdata2") - 1
13003 && strcmp (name
, ".sdata2") == 0)
13004 || (len
== sizeof (".sbss") - 1
13005 && strcmp (name
, ".sbss") == 0)
13006 || (len
== sizeof (".sbss2") - 1
13007 && strcmp (name
, ".sbss2") == 0)
13008 || (len
== sizeof (".PPC.EMB.sdata0") - 1
13009 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
13010 || (len
== sizeof (".PPC.EMB.sbss0") - 1
13011 && strcmp (name
, ".PPC.EMB.sbss0") == 0))
13012 : (size
> 0 && size
<= g_switch_value
))
13014 size_t len
= strlen (XSTR (sym_ref
, 0));
13015 char *str
= alloca (len
+ 2);
13018 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
13019 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
13024 static const char *
13025 rs6000_elf_strip_name_encoding (str
)
13028 while (*str
== '*' || *str
== '@')
13034 rs6000_elf_in_small_data_p (decl
)
13037 if (rs6000_sdata
== SDATA_NONE
)
13040 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
13042 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
13043 if (strcmp (section
, ".sdata") == 0
13044 || strcmp (section
, ".sdata2") == 0
13045 || strcmp (section
, ".sbss") == 0)
13050 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
13053 && size
<= g_switch_value
13054 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
13061 #endif /* USING_ELFOS_H */
13064 /* Return a REG that occurs in ADDR with coefficient 1.
13065 ADDR can be effectively incremented by incrementing REG.
13067 r0 is special and we must not select it as an address
13068 register by this routine since our caller will try to
13069 increment the returned register via an "la" instruction. */
13072 find_addr_reg (addr
)
13075 while (GET_CODE (addr
) == PLUS
)
13077 if (GET_CODE (XEXP (addr
, 0)) == REG
13078 && REGNO (XEXP (addr
, 0)) != 0)
13079 addr
= XEXP (addr
, 0);
13080 else if (GET_CODE (XEXP (addr
, 1)) == REG
13081 && REGNO (XEXP (addr
, 1)) != 0)
13082 addr
= XEXP (addr
, 1);
13083 else if (CONSTANT_P (XEXP (addr
, 0)))
13084 addr
= XEXP (addr
, 1);
13085 else if (CONSTANT_P (XEXP (addr
, 1)))
13086 addr
= XEXP (addr
, 0);
13090 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
13096 rs6000_fatal_bad_address (op
)
13099 fatal_insn ("bad address", op
);
13105 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13106 reference and a constant. */
13109 symbolic_operand (op
)
13112 switch (GET_CODE (op
))
13119 return (GET_CODE (op
) == SYMBOL_REF
||
13120 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
13121 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
13122 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
13129 #ifdef RS6000_LONG_BRANCH
13131 static tree stub_list
= 0;
13133 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13134 procedure calls to the linked list. */
13137 add_compiler_stub (label_name
, function_name
, line_number
)
13139 tree function_name
;
13142 tree stub
= build_tree_list (function_name
, label_name
);
13143 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
13144 TREE_CHAIN (stub
) = stub_list
;
13148 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13149 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13150 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13152 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13153 handling procedure calls from the linked list and initializes the
13157 output_compiler_stub ()
13160 char label_buf
[256];
13164 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13166 fprintf (asm_out_file
,
13167 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
13169 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13170 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13171 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
13172 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13174 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
13176 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
13179 label_buf
[0] = '_';
13180 strcpy (label_buf
+1,
13181 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
13184 strcpy (tmp_buf
, "lis r12,hi16(");
13185 strcat (tmp_buf
, label_buf
);
13186 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
13187 strcat (tmp_buf
, label_buf
);
13188 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
13189 output_asm_insn (tmp_buf
, 0);
13191 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13192 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13193 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
13194 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13200 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13201 already there or not. */
13204 no_previous_def (function_name
)
13205 tree function_name
;
13208 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13209 if (function_name
== STUB_FUNCTION_NAME (stub
))
13214 /* GET_PREV_LABEL gets the label name from the previous definition of
13218 get_prev_label (function_name
)
13219 tree function_name
;
13222 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13223 if (function_name
== STUB_FUNCTION_NAME (stub
))
13224 return STUB_LABEL_NAME (stub
);
13228 /* INSN is either a function call or a millicode call. It may have an
13229 unconditional jump in its delay slot.
13231 CALL_DEST is the routine we are calling. */
13234 output_call (insn
, call_dest
, operand_number
)
13237 int operand_number
;
13239 static char buf
[256];
13240 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
13243 tree funname
= get_identifier (XSTR (call_dest
, 0));
13245 if (no_previous_def (funname
))
13247 int line_number
= 0;
13248 rtx label_rtx
= gen_label_rtx ();
13249 char *label_buf
, temp_buf
[256];
13250 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
13251 CODE_LABEL_NUMBER (label_rtx
));
13252 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
13253 labelname
= get_identifier (label_buf
);
13254 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
13256 line_number
= NOTE_LINE_NUMBER (insn
);
13257 add_compiler_stub (labelname
, funname
, line_number
);
13260 labelname
= get_prev_label (funname
);
13262 sprintf (buf
, "jbsr %%z%d,%.246s",
13263 operand_number
, IDENTIFIER_POINTER (labelname
));
13268 sprintf (buf
, "bl %%z%d", operand_number
);
13273 #endif /* RS6000_LONG_BRANCH */
13275 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13277 const char *const symbol_ = (SYMBOL); \
13278 char *buffer_ = (BUF); \
13279 if (symbol_[0] == '"') \
13281 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13283 else if (name_needs_quotes(symbol_)) \
13285 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13289 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13294 /* Generate PIC and indirect symbol stubs. */
13297 machopic_output_stub (file
, symb
, stub
)
13299 const char *symb
, *stub
;
13301 unsigned int length
;
13302 char *symbol_name
, *lazy_ptr_name
;
13303 char *local_label_0
;
13304 static int label
= 0;
13306 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13307 symb
= (*targetm
.strip_name_encoding
) (symb
);
13311 length
= strlen (symb
);
13312 symbol_name
= alloca (length
+ 32);
13313 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
13315 lazy_ptr_name
= alloca (length
+ 32);
13316 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
13318 local_label_0
= alloca (length
+ 32);
13319 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
13322 machopic_picsymbol_stub1_section ();
13324 machopic_symbol_stub1_section ();
13325 fprintf (file
, "\t.align 2\n");
13327 fprintf (file
, "%s:\n", stub
);
13328 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13332 fprintf (file
, "\tmflr r0\n");
13333 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13334 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13335 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13336 lazy_ptr_name
, local_label_0
);
13337 fprintf (file
, "\tmtlr r0\n");
13338 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13339 lazy_ptr_name
, local_label_0
);
13340 fprintf (file
, "\tmtctr r12\n");
13341 fprintf (file
, "\tbctr\n");
13345 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
13346 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
13347 fprintf (file
, "\tmtctr r12\n");
13348 fprintf (file
, "\tbctr\n");
13351 machopic_lazy_symbol_ptr_section ();
13352 fprintf (file
, "%s:\n", lazy_ptr_name
);
13353 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13354 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13357 /* Legitimize PIC addresses. If the address is already
13358 position-independent, we return ORIG. Newly generated
13359 position-independent addresses go into a reg. This is REG if non
13360 zero, otherwise we allocate register(s) as necessary. */
13362 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13365 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13367 enum machine_mode mode
;
13372 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13373 reg
= gen_reg_rtx (Pmode
);
13375 if (GET_CODE (orig
) == CONST
)
13377 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13378 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13381 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13384 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13387 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13393 if (GET_CODE (offset
) == CONST_INT
)
13395 if (SMALL_INT (offset
))
13396 return plus_constant (base
, INTVAL (offset
));
13397 else if (! reload_in_progress
&& ! reload_completed
)
13398 offset
= force_reg (Pmode
, offset
);
13401 rtx mem
= force_const_mem (Pmode
, orig
);
13402 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13405 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13408 /* Fall back on generic machopic code. */
13409 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13412 /* This is just a placeholder to make linking work without having to
13413 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13414 ever needed for Darwin (not too likely!) this would have to get a
13415 real definition. */
13422 #endif /* TARGET_MACHO */
13425 static unsigned int
13426 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13432 = default_section_type_flags_1 (decl
, name
, reloc
,
13433 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13435 if (TARGET_RELOCATABLE
)
13436 flags
|= SECTION_WRITE
;
13441 /* Record an element in the table of global constructors. SYMBOL is
13442 a SYMBOL_REF of the function to be called; PRIORITY is a number
13443 between 0 and MAX_INIT_PRIORITY.
13445 This differs from default_named_section_asm_out_constructor in
13446 that we have special handling for -mrelocatable. */
13449 rs6000_elf_asm_out_constructor (symbol
, priority
)
13453 const char *section
= ".ctors";
13456 if (priority
!= DEFAULT_INIT_PRIORITY
)
13458 sprintf (buf
, ".ctors.%.5u",
13459 /* Invert the numbering so the linker puts us in the proper
13460 order; constructors are run from right to left, and the
13461 linker sorts in increasing order. */
13462 MAX_INIT_PRIORITY
- priority
);
13466 named_section_flags (section
, SECTION_WRITE
);
13467 assemble_align (POINTER_SIZE
);
13469 if (TARGET_RELOCATABLE
)
13471 fputs ("\t.long (", asm_out_file
);
13472 output_addr_const (asm_out_file
, symbol
);
13473 fputs (")@fixup\n", asm_out_file
);
13476 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13480 rs6000_elf_asm_out_destructor (symbol
, priority
)
13484 const char *section
= ".dtors";
13487 if (priority
!= DEFAULT_INIT_PRIORITY
)
13489 sprintf (buf
, ".dtors.%.5u",
13490 /* Invert the numbering so the linker puts us in the proper
13491 order; constructors are run from right to left, and the
13492 linker sorts in increasing order. */
13493 MAX_INIT_PRIORITY
- priority
);
13497 named_section_flags (section
, SECTION_WRITE
);
13498 assemble_align (POINTER_SIZE
);
13500 if (TARGET_RELOCATABLE
)
13502 fputs ("\t.long (", asm_out_file
);
13503 output_addr_const (asm_out_file
, symbol
);
13504 fputs (")@fixup\n", asm_out_file
);
13507 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13513 rs6000_xcoff_asm_globalize_label (stream
, name
)
13517 fputs (GLOBAL_ASM_OP
, stream
);
13518 RS6000_OUTPUT_BASENAME (stream
, name
);
13519 putc ('\n', stream
);
13523 rs6000_xcoff_asm_named_section (name
, flags
)
13525 unsigned int flags
;
13528 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13530 if (flags
& SECTION_CODE
)
13532 else if (flags
& SECTION_WRITE
)
13537 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
13538 (flags
& SECTION_CODE
) ? "." : "",
13539 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
13543 rs6000_xcoff_select_section (decl
, reloc
, align
)
13546 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13548 if (decl_readonly_section_1 (decl
, reloc
, 1))
13550 if (TREE_PUBLIC (decl
))
13551 read_only_data_section ();
13553 read_only_private_data_section ();
13557 if (TREE_PUBLIC (decl
))
13560 private_data_section ();
13565 rs6000_xcoff_unique_section (decl
, reloc
)
13567 int reloc ATTRIBUTE_UNUSED
;
13571 /* Use select_section for private and uninitialized data. */
13572 if (!TREE_PUBLIC (decl
)
13573 || DECL_COMMON (decl
)
13574 || DECL_INITIAL (decl
) == NULL_TREE
13575 || DECL_INITIAL (decl
) == error_mark_node
13576 || (flag_zero_initialized_in_bss
13577 && initializer_zerop (DECL_INITIAL (decl
))))
13580 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13581 name
= (*targetm
.strip_name_encoding
) (name
);
13582 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
13585 /* Select section for constant in constant pool.
13587 On RS/6000, all constants are in the private read-only data area.
13588 However, if this is being placed in the TOC it must be output as a
13592 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13593 enum machine_mode mode
;
13595 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13597 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13600 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* NOTE(review): leading-'*' strip reconstructed from the standard
     strip_name_encoding idiom -- confirm against the original.  */
  if (*name == '*')
    name++;
  len = strlen (name);
  if (name[len - 1] == ']')
    /* Drop the 4-character "[XX]" mapping-class suffix.  */
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13619 /* Section attributes. AIX is always PIC. */
13621 static unsigned int
13622 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
13627 unsigned int align
;
13628 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
13630 /* Align to at least UNIT size. */
13631 if (flags
& SECTION_CODE
)
13632 align
= MIN_UNITS_PER_WORD
;
13634 /* Increase alignment of large objects if not already stricter. */
13635 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
13636 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
13637 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
13639 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
13643 rs6000_xcoff_encode_section_info (decl
, first
)
13645 int first ATTRIBUTE_UNUSED
;
13647 if (TREE_CODE (decl
) == FUNCTION_DECL
13648 && (*targetm
.binds_local_p
) (decl
))
13649 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
13651 #endif /* TARGET_XCOFF */
13654 /* Cross-module name binding. Darwin does not support overriding
13655 functions at dynamic-link time. */
13658 rs6000_binds_local_p (decl
)
13661 return default_binds_local_p_1 (decl
, 0);
13665 /* Compute a (partial) cost for rtx X. Return true if the complete
13666 cost has been computed, and false if subexpressions should be
13667 scanned. In either case, *TOTAL contains the cost result. */
13670 rs6000_rtx_costs (x
, code
, outer_code
, total
)
13672 int code
, outer_code ATTRIBUTE_UNUSED
;
13677 /* On the RS/6000, if it is valid in the insn, it is free.
13678 So this always returns 0. */
13689 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13690 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
13691 + 0x8000) >= 0x10000)
13692 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13693 ? COSTS_N_INSNS (2)
13694 : COSTS_N_INSNS (1));
13700 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
13701 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
13702 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
13703 ? COSTS_N_INSNS (2)
13704 : COSTS_N_INSNS (1));
13710 *total
= COSTS_N_INSNS (2);
13713 switch (rs6000_cpu
)
13715 case PROCESSOR_RIOS1
:
13716 case PROCESSOR_PPC405
:
13717 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13718 ? COSTS_N_INSNS (5)
13719 : (INTVAL (XEXP (x
, 1)) >= -256
13720 && INTVAL (XEXP (x
, 1)) <= 255)
13721 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13724 case PROCESSOR_RS64A
:
13725 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13726 ? GET_MODE (XEXP (x
, 1)) != DImode
13727 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13728 : (INTVAL (XEXP (x
, 1)) >= -256
13729 && INTVAL (XEXP (x
, 1)) <= 255)
13730 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13733 case PROCESSOR_RIOS2
:
13734 case PROCESSOR_MPCCORE
:
13735 case PROCESSOR_PPC604e
:
13736 *total
= COSTS_N_INSNS (2);
13739 case PROCESSOR_PPC601
:
13740 *total
= COSTS_N_INSNS (5);
13743 case PROCESSOR_PPC603
:
13744 case PROCESSOR_PPC7400
:
13745 case PROCESSOR_PPC750
:
13746 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13747 ? COSTS_N_INSNS (5)
13748 : (INTVAL (XEXP (x
, 1)) >= -256
13749 && INTVAL (XEXP (x
, 1)) <= 255)
13750 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13753 case PROCESSOR_PPC7450
:
13754 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13755 ? COSTS_N_INSNS (4)
13756 : COSTS_N_INSNS (3));
13759 case PROCESSOR_PPC403
:
13760 case PROCESSOR_PPC604
:
13761 case PROCESSOR_PPC8540
:
13762 *total
= COSTS_N_INSNS (4);
13765 case PROCESSOR_PPC620
:
13766 case PROCESSOR_PPC630
:
13767 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13768 ? GET_MODE (XEXP (x
, 1)) != DImode
13769 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13770 : (INTVAL (XEXP (x
, 1)) >= -256
13771 && INTVAL (XEXP (x
, 1)) <= 255)
13772 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13775 case PROCESSOR_POWER4
:
13776 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
13777 ? GET_MODE (XEXP (x
, 1)) != DImode
13778 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13779 : COSTS_N_INSNS (2));
13788 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
13789 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
13791 *total
= COSTS_N_INSNS (2);
13798 switch (rs6000_cpu
)
13800 case PROCESSOR_RIOS1
:
13801 *total
= COSTS_N_INSNS (19);
13804 case PROCESSOR_RIOS2
:
13805 *total
= COSTS_N_INSNS (13);
13808 case PROCESSOR_RS64A
:
13809 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13810 ? COSTS_N_INSNS (65)
13811 : COSTS_N_INSNS (67));
13814 case PROCESSOR_MPCCORE
:
13815 *total
= COSTS_N_INSNS (6);
13818 case PROCESSOR_PPC403
:
13819 *total
= COSTS_N_INSNS (33);
13822 case PROCESSOR_PPC405
:
13823 *total
= COSTS_N_INSNS (35);
13826 case PROCESSOR_PPC601
:
13827 *total
= COSTS_N_INSNS (36);
13830 case PROCESSOR_PPC603
:
13831 *total
= COSTS_N_INSNS (37);
13834 case PROCESSOR_PPC604
:
13835 case PROCESSOR_PPC604e
:
13836 *total
= COSTS_N_INSNS (20);
13839 case PROCESSOR_PPC620
:
13840 case PROCESSOR_PPC630
:
13841 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13842 ? COSTS_N_INSNS (21)
13843 : COSTS_N_INSNS (37));
13846 case PROCESSOR_PPC750
:
13847 case PROCESSOR_PPC8540
:
13848 case PROCESSOR_PPC7400
:
13849 *total
= COSTS_N_INSNS (19);
13852 case PROCESSOR_PPC7450
:
13853 *total
= COSTS_N_INSNS (23);
13856 case PROCESSOR_POWER4
:
13857 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
13858 ? COSTS_N_INSNS (18)
13859 : COSTS_N_INSNS (34));
13867 *total
= COSTS_N_INSNS (4);
13871 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13880 /* A C expression returning the cost of moving data from a register of class
13881 CLASS1 to one of CLASS2. */
13884 rs6000_register_move_cost (mode
, from
, to
)
13885 enum machine_mode mode
;
13886 enum reg_class from
, to
;
13888 /* Moves from/to GENERAL_REGS. */
13889 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
13890 || reg_classes_intersect_p (from
, GENERAL_REGS
))
13892 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
13895 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
13896 return (rs6000_memory_move_cost (mode
, from
, 0)
13897 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
13899 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13900 else if (from
== CR_REGS
)
13904 /* A move will cost one instruction per GPR moved. */
13905 return 2 * HARD_REGNO_NREGS (0, mode
);
13908 /* Moving between two similar registers is just one instruction. */
13909 else if (reg_classes_intersect_p (to
, from
))
13910 return mode
== TFmode
? 4 : 2;
13912 /* Everything else has to go through GENERAL_REGS. */
13914 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
13915 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
13918 /* A C expressions returning the cost of moving data of MODE from a register to
13922 rs6000_memory_move_cost (mode
, class, in
)
13923 enum machine_mode mode
;
13924 enum reg_class
class;
13925 int in ATTRIBUTE_UNUSED
;
13927 if (reg_classes_intersect_p (class, GENERAL_REGS
))
13928 return 4 * HARD_REGNO_NREGS (0, mode
);
13929 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
13930 return 4 * HARD_REGNO_NREGS (32, mode
);
13931 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
13932 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
13934 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
13937 /* Return true if TYPE is of type __ev64_opaque__. */
13940 is_ev64_opaque_type (type
)
13944 && TREE_CODE (type
) == VECTOR_TYPE
13945 && TYPE_NAME (type
)
13946 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
13947 && DECL_NAME (TYPE_NAME (type
))
13948 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
13949 "__ev64_opaque__") == 0);
13953 rs6000_dwarf_register_span (reg
)
13958 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
13961 regno
= REGNO (reg
);
13963 /* The duality of the SPE register size wreaks all kinds of havoc.
13964 This is a way of distinguishing r0 in 32-bits from r0 in
13967 gen_rtx_PARALLEL (VOIDmode
,
13970 gen_rtx_REG (SImode
, regno
+ 1200),
13971 gen_rtx_REG (SImode
, regno
))
13973 gen_rtx_REG (SImode
, regno
),
13974 gen_rtx_REG (SImode
, regno
+ 1200)));
13977 #include "gt-rs6000.h"